2020-06-11 00:54:34 +02:00
|
|
|
from collections import defaultdict
|
2021-02-16 01:19:02 +01:00
|
|
|
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Union
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2015-11-23 14:35:16 +01:00
|
|
|
from django.conf import settings
|
2020-08-21 17:12:05 +02:00
|
|
|
from django.contrib.auth.models import AnonymousUser
|
2020-06-21 02:36:20 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2015-11-23 14:35:16 +01:00
|
|
|
from django.db import transaction
|
2016-05-29 16:41:41 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2020-06-26 15:17:12 +02:00
|
|
|
from django.utils.translation import override as override_language
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2020-08-21 17:12:05 +02:00
|
|
|
from zerver.context_processors import get_valid_realm_from_request
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.decorator import (
|
2020-06-24 03:22:41 +02:00
|
|
|
authenticated_json_view,
|
2020-06-11 00:54:34 +02:00
|
|
|
require_non_guest_user,
|
2020-06-24 03:22:41 +02:00
|
|
|
require_post,
|
2020-06-11 00:54:34 +02:00
|
|
|
require_realm_admin,
|
|
|
|
)
|
|
|
|
from zerver.lib.actions import (
|
|
|
|
bulk_add_subscriptions,
|
|
|
|
bulk_remove_subscriptions,
|
|
|
|
do_add_default_stream,
|
|
|
|
do_add_streams_to_default_stream_group,
|
|
|
|
do_change_default_stream_group_description,
|
|
|
|
do_change_default_stream_group_name,
|
|
|
|
do_change_stream_description,
|
|
|
|
do_change_stream_invite_only,
|
2020-06-14 18:57:02 +02:00
|
|
|
do_change_stream_message_retention_days,
|
2020-06-11 00:54:34 +02:00
|
|
|
do_change_stream_post_policy,
|
|
|
|
do_change_subscription_property,
|
|
|
|
do_create_default_stream_group,
|
|
|
|
do_deactivate_stream,
|
|
|
|
do_delete_messages,
|
|
|
|
do_get_streams,
|
|
|
|
do_remove_default_stream,
|
|
|
|
do_remove_default_stream_group,
|
|
|
|
do_remove_streams_from_default_stream_group,
|
|
|
|
do_rename_stream,
|
|
|
|
do_send_messages,
|
|
|
|
gather_subscriptions,
|
2020-08-08 18:30:28 +02:00
|
|
|
get_default_streams_for_realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_subscriber_emails,
|
|
|
|
internal_prep_private_message,
|
|
|
|
internal_prep_stream_message,
|
|
|
|
)
|
2020-06-14 18:57:02 +02:00
|
|
|
from zerver.lib.exceptions import ErrorCode, JsonableError, OrganizationOwnerRequired
|
2020-06-21 11:14:35 +02:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.response import json_error, json_success
|
2020-06-21 11:14:35 +02:00
|
|
|
from zerver.lib.retention import parse_message_retention_days
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.streams import (
|
2020-09-29 18:06:50 +02:00
|
|
|
StreamDict,
|
2020-06-11 00:54:34 +02:00
|
|
|
access_default_stream_group_by_id,
|
|
|
|
access_stream_by_id,
|
|
|
|
access_stream_by_name,
|
|
|
|
access_stream_for_delete_or_update,
|
2020-08-21 17:12:05 +02:00
|
|
|
access_web_public_stream,
|
2020-06-11 00:54:34 +02:00
|
|
|
check_stream_name,
|
|
|
|
check_stream_name_available,
|
|
|
|
filter_stream_authorization,
|
|
|
|
list_to_streams,
|
|
|
|
)
|
2020-08-21 17:12:05 +02:00
|
|
|
from zerver.lib.topic import (
|
|
|
|
get_topic_history_for_public_stream,
|
|
|
|
get_topic_history_for_stream,
|
|
|
|
messages_for_topic,
|
|
|
|
)
|
2020-06-22 22:37:00 +02:00
|
|
|
from zerver.lib.types import Validator
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.validator import (
|
|
|
|
check_bool,
|
|
|
|
check_capped_string,
|
|
|
|
check_color,
|
|
|
|
check_dict,
|
|
|
|
check_dict_only,
|
|
|
|
check_int,
|
|
|
|
check_int_in,
|
|
|
|
check_list,
|
|
|
|
check_string,
|
2020-06-14 18:57:02 +02:00
|
|
|
check_string_or_int,
|
2020-06-20 10:37:43 +02:00
|
|
|
check_union,
|
2020-06-11 00:54:34 +02:00
|
|
|
to_non_negative_int,
|
|
|
|
)
|
|
|
|
from zerver.models import (
|
|
|
|
Realm,
|
|
|
|
Stream,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
get_active_user,
|
|
|
|
get_active_user_profile_by_id_in_realm,
|
|
|
|
get_system_bot,
|
|
|
|
)
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
|
|
|
|
class PrincipalError(JsonableError):
    """Raised when the acting user is not authorized to operate on behalf
    of the requested principal (a user email or user ID supplied in the
    request's "principals" parameter)."""

    code = ErrorCode.UNAUTHORIZED_PRINCIPAL
    # Included in the JSON error body so clients can tell which principal
    # was rejected.
    data_fields = ["principal"]
    http_status_code = 403

    def __init__(self, principal: Union[int, str]) -> None:
        # The offending principal: an email (str) or a user ID (int).
        self.principal: Union[int, str] = principal

    @staticmethod
    def msg_format() -> str:
        return _("User not authorized to execute queries on behalf of '{principal}'")
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-09 19:07:57 +02:00
|
|
|
def principal_to_user_profile(agent: UserProfile, principal: Union[str, int]) -> UserProfile:
    """Resolve a principal — an email (str) or a user ID (int) — to an
    active UserProfile in the agent's realm.

    Raises PrincipalError if no such active user exists in that realm.
    """
    if isinstance(principal, str):
        lookup = get_active_user
    else:
        lookup = get_active_user_profile_by_id_in_realm
    try:
        return lookup(principal, agent.realm)
    except UserProfile.DoesNotExist:
        # We have to make sure we don't leak information about which users
        # are registered for Zulip in a different realm. We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message
        raise PrincipalError(principal)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def check_if_removing_someone_else(
    user_profile: UserProfile, principals: Optional[Union[List[str], List[int]]]
) -> bool:
    """Return True if the principals list targets any user other than the
    acting user (which requires elevated permissions in the caller)."""
    if not principals:
        # None or empty list: the request only affects the acting user.
        return False

    if len(principals) > 1:
        return True

    (only_principal,) = principals
    # A single principal may be a user ID (int) or an email (str).
    if isinstance(only_principal, int):
        return only_principal != user_profile.id
    return only_principal != user_profile.email
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def deactivate_stream_backend(
    request: HttpRequest, user_profile: UserProfile, stream_id: int
) -> HttpResponse:
    """Deactivate (archive) a stream; access check raises on failure."""
    # access_stream_for_delete_or_update enforces the permission model for
    # destructive stream operations before we touch anything.
    (stream, sub) = access_stream_for_delete_or_update(user_profile, stream_id)
    do_deactivate_stream(stream, acting_user=user_profile)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def add_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Mark an existing stream as a default stream for new users."""
    (stream, sub) = access_stream_by_id(user_profile, stream_id)
    if stream.invite_only:
        # Default streams must be joinable by every new user, so private
        # (invite-only) streams are rejected.
        return json_error(_("Private streams cannot be made default."))
    do_add_default_stream(stream)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def create_default_stream_group(
    request: HttpRequest,
    user_profile: UserProfile,
    group_name: str = REQ(),
    description: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Create a default stream group containing the named streams.

    Every stream name must resolve to a stream the acting user can
    access; access_stream_by_name raises otherwise.
    """
    # Fix: the return annotation was `-> None`, but this view returns
    # json_success() (an HttpResponse), like its sibling views.
    streams = []
    for stream_name in stream_names:
        (stream, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)
    do_create_default_stream_group(user_profile.realm, group_name, description, streams)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_info(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    new_group_name: Optional[str] = REQ(json_validator=check_string, default=None),
    new_description: Optional[str] = REQ(json_validator=check_string, default=None),
) -> HttpResponse:
    """Rename a default stream group and/or replace its description.

    At least one of new_group_name/new_description must be supplied.
    """
    # Fix: the return annotation was `-> None`, but this view returns
    # HttpResponse objects (json_error/json_success) on every path.
    if not new_group_name and not new_description:
        return json_error(_('You must pass "new_description" or "new_group_name".'))

    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    if new_group_name is not None:
        do_change_default_stream_group_name(user_profile.realm, group, new_group_name)
    if new_description is not None:
        do_change_default_stream_group_description(user_profile.realm, group, new_description)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_streams(
    request: HttpRequest,
    user_profile: UserProfile,
    group_id: int,
    op: str = REQ(),
    stream_names: List[str] = REQ(json_validator=check_list(check_string)),
) -> HttpResponse:
    """Add or remove streams from a default stream group.

    `op` must be "add" or "remove"; any other value is rejected.
    """
    # Fix: the return annotation was `-> None`, but this view returns
    # HttpResponse objects (json_error/json_success) on every path.
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    streams = []
    for stream_name in stream_names:
        # Access check raises if the user cannot see this stream.
        (stream, sub) = access_stream_by_name(user_profile, stream_name)
        streams.append(stream)

    if op == "add":
        do_add_streams_to_default_stream_group(user_profile.realm, group, streams)
    elif op == "remove":
        do_remove_streams_from_default_stream_group(user_profile.realm, group, streams)
    else:
        return json_error(_('Invalid value for "op". Specify one of "add" or "remove".'))
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream_group(
    request: HttpRequest, user_profile: UserProfile, group_id: int
) -> HttpResponse:
    """Delete a default stream group (the streams themselves are untouched)."""
    # Fix: the return annotation was `-> None`, but this view returns
    # json_success() (an HttpResponse), like its sibling views.
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    do_remove_default_stream_group(user_profile.realm, group)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream(
    request: HttpRequest, user_profile: UserProfile, stream_id: int = REQ(json_validator=check_int)
) -> HttpResponse:
    """Remove a stream from the realm's set of default streams."""
    (stream, sub) = access_stream_by_id(
        user_profile,
        stream_id,
        # Realm admins may remove a default stream even if they are not
        # subscribed to it.
        allow_realm_admin=True,
    )
    do_remove_default_stream(stream)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_stream_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int,
    description: Optional[str] = REQ(
        json_validator=check_capped_string(Stream.MAX_DESCRIPTION_LENGTH), default=None
    ),
    is_private: Optional[bool] = REQ(json_validator=check_bool, default=None),
    is_announcement_only: Optional[bool] = REQ(json_validator=check_bool, default=None),
    stream_post_policy: Optional[int] = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES), default=None
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    new_name: Optional[str] = REQ(json_validator=check_string, default=None),
    message_retention_days: Optional[Union[int, str]] = REQ(
        json_validator=check_string_or_int, default=None
    ),
) -> HttpResponse:
    """Update any subset of a stream's settings.

    Each parameter that is not None is applied independently, in order;
    parameters left at None are untouched.
    """
    # We allow realm administrators to update the stream name and
    # description even for private streams.
    (stream, sub) = access_stream_for_delete_or_update(user_profile, stream_id)

    if message_retention_days is not None:
        # Changing message retention is restricted to organization owners
        # on a plan where the feature is available.
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        user_profile.realm.ensure_not_on_limited_plan()
        # Translates special string values (e.g. the realm default) into
        # the integer stored on the Stream model.
        message_retention_days_value = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )
        do_change_stream_message_retention_days(stream, message_retention_days_value)

    if description is not None:
        if "\n" in description:
            # We don't allow newline characters in stream descriptions.
            description = description.replace("\n", " ")
        do_change_stream_description(stream, description)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            return json_error(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name, user_profile)
    if is_announcement_only is not None:
        # is_announcement_only is a legacy way to specify
        # stream_post_policy. We can probably just delete this code,
        # since we're not aware of clients that used it, but we're
        # keeping it for backwards-compatibility for now.
        stream_post_policy = Stream.STREAM_POST_POLICY_EVERYONE
        if is_announcement_only:
            stream_post_policy = Stream.STREAM_POST_POLICY_ADMINS
    if stream_post_policy is not None:
        do_change_stream_post_policy(stream, stream_post_policy)

    # But we require even realm administrators to be actually
    # subscribed to make a private stream public.
    if is_private is not None:
        default_stream_ids = {s.id for s in get_default_streams_for_realm(stream.realm_id)}
        # NOTE(review): this re-runs the regular access check, presumably to
        # enforce the stricter subscription requirement noted above — confirm.
        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if is_private and stream.id in default_stream_ids:
            return json_error(_("Default streams cannot be made private."))
        do_change_stream_invite_only(stream, is_private, history_public_to_subscribers)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-08 21:58:38 +02:00
|
|
|
@has_request_variables
def list_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_subscribers: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Return the streams the user is subscribed to.

    gather_subscriptions returns a pair; the second element (streams the
    user is not subscribed to) is intentionally discarded here.
    """
    subscribed, _ = gather_subscriptions(
        user_profile,
        include_subscribers=include_subscribers,
    )
    return json_success({"subscriptions": subscribed})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-25 01:01:10 +02:00
|
|
|
# Request schema for add_subscriptions_backend: a list of dicts, each
# naming a stream and optionally supplying a color and/or description.
add_subscriptions_schema = check_list(
    check_dict_only(
        required_keys=[("name", check_string)],
        optional_keys=[
            ("color", check_color),
            ("description", check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
        ],
    ),
)

# Request schema for remove_subscriptions_backend: a list of stream names.
remove_subscriptions_schema = check_list(check_string)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    delete: Iterable[str] = REQ(json_validator=remove_subscriptions_schema, default=[]),
    add: Iterable[Mapping[str, str]] = REQ(json_validator=add_subscriptions_schema, default=[]),
) -> HttpResponse:
    """Batch endpoint: add and/or remove subscriptions in one request by
    composing the two single-purpose views into one response."""
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    # Late binding of `add`/`delete` inside the lambdas is fine here: the
    # thunks are invoked by compose_views before this function returns.
    thunks = [
        lambda: add_subscriptions_backend(request, user_profile, streams_raw=add),
        lambda: remove_subscriptions_backend(request, user_profile, streams_raw=delete),
    ]
    return compose_views(thunks)
|
2016-09-12 17:21:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-16 01:19:02 +01:00
|
|
|
def compose_views(thunks: List[Callable[[], HttpResponse]]) -> HttpResponse:
    """Call a series of view thunks in sequence inside one database
    transaction, merging their JSON payloads into a single success
    response.  (This helps clients avoid extra latency hops.)

    If any thunk returns a non-200 response, the transaction is rolled
    back and the response body is re-raised as a JsonableError.

    TODO: Move this a utils-like module if we end up using it more widely.
    """
    merged_payload: Dict[str, Any] = {}
    with transaction.atomic():
        for view_thunk in thunks:
            view_response = view_thunk()
            if view_response.status_code != 200:
                raise JsonableError(view_response.content)
            merged_payload.update(orjson.loads(view_response.content))
    return json_success(merged_payload)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-22 22:37:00 +02:00
|
|
|
# Validator for the "principals" request parameter: either a list of
# user emails (strings) or a list of user IDs (integers), but not mixed.
check_principals: Validator[Union[List[str], List[int]]] = check_union(
    [check_list(check_string), check_list(check_int)],
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def remove_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Iterable[str] = REQ("subscriptions", json_validator=remove_subscriptions_schema),
    principals: Optional[Union[List[str], List[int]]] = REQ(
        json_validator=check_principals, default=None
    ),
) -> HttpResponse:
    """Unsubscribe the acting user (or the given principals) from streams.

    Returns {"removed": [...], "not_removed": [...]} listing the stream
    names that were and were not actually unsubscribed.
    """
    # Unsubscribing anyone other than yourself requires admin access,
    # which list_to_streams enforces below.
    removing_someone_else = check_if_removing_someone_else(user_profile, principals)

    streams_as_dict: List[StreamDict] = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})

    streams, __ = list_to_streams(
        streams_as_dict, user_profile, admin_access_required=removing_someone_else
    )

    if principals:
        # Resolve each email/ID principal; raises PrincipalError if the
        # acting user may not operate on behalf of one of them.
        people_to_unsub = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
    else:
        people_to_unsub = {user_profile}

    result: Dict[str, List[str]] = dict(removed=[], not_removed=[])
    (removed, not_subscribed) = bulk_remove_subscriptions(
        people_to_unsub, streams, request.client, acting_user=user_profile
    )

    # Flatten the (user, stream) pairs into stream-name lists for the
    # response; the subscriber element is not needed here.
    for (subscriber, removed_stream) in removed:
        result["removed"].append(removed_stream.name)
    for (subscriber, not_subscribed_stream) in not_subscribed:
        result["not_removed"].append(not_subscribed_stream.name)

    return json_success(result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def you_were_just_subscribed_message(
    acting_user: UserProfile, recipient_user: UserProfile, stream_names: Set[str]
) -> str:
    """Build the notification message telling recipient_user that acting_user
    subscribed them to the given streams, localized in the recipient's
    default language.
    """
    sorted_names = sorted(stream_names)

    if len(sorted_names) == 1:
        # Single stream: one self-contained sentence.
        with override_language(recipient_user.default_language):
            return _("{user_full_name} subscribed you to the stream {stream_name}.").format(
                user_full_name=f"@**{acting_user.full_name}**",
                stream_name=f"#**{sorted_names[0]}**",
            )

    # Multiple streams: a localized header followed by a bulleted list.
    with override_language(recipient_user.default_language):
        header = _("{user_full_name} subscribed you to the following streams:").format(
            user_full_name=f"@**{acting_user.full_name}**",
        )
    bullets = "".join(f"* #**{name}**\n" for name in sorted_names)
    return header + "\n\n" + bullets
|
2017-05-16 17:10:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-22 22:37:00 +02:00
|
|
|
# Sentinel value for message_retention_days meaning "inherit the realm-level default".
RETENTION_DEFAULT: Union[str, int] = "realm_default"
|
|
|
|
# Default (empty) value for the `principals` request parameter; principals may
# be identified either by email (str) or by user id (int).
EMPTY_PRINCIPALS: Union[Sequence[str], Sequence[int]] = []
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-04 19:14:29 +02:00
|
|
|
@require_non_guest_user
@has_request_variables
def add_subscriptions_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    streams_raw: Iterable[Mapping[str, str]] = REQ(
        "subscriptions", json_validator=add_subscriptions_schema
    ),
    invite_only: bool = REQ(json_validator=check_bool, default=False),
    stream_post_policy: int = REQ(
        json_validator=check_int_in(Stream.STREAM_POST_POLICY_TYPES),
        default=Stream.STREAM_POST_POLICY_EVERYONE,
    ),
    history_public_to_subscribers: Optional[bool] = REQ(json_validator=check_bool, default=None),
    message_retention_days: Union[str, int] = REQ(
        json_validator=check_string_or_int, default=RETENTION_DEFAULT
    ),
    announce: bool = REQ(json_validator=check_bool, default=False),
    principals: Union[Sequence[str], Sequence[int]] = REQ(
        json_validator=check_principals,
        default=EMPTY_PRINCIPALS,
    ),
    authorization_errors_fatal: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    """Subscribe the requester (or the users named in `principals`) to the
    given streams, creating any streams that do not yet exist.

    Returns a JSON result mapping each affected user's email to the list
    of stream names they were newly subscribed (or already subscribed) to.
    Notification messages for the new subscriptions/streams are sent as a
    side effect via send_messages_for_new_subscribers.
    """
    realm = user_profile.realm
    stream_dicts = []
    color_map = {}
    for stream_dict in streams_raw:
        # 'color' field is optional
        # check for its presence in the streams_raw first
        if "color" in stream_dict:
            color_map[stream_dict["name"]] = stream_dict["color"]

        stream_dict_copy: StreamDict = {}
        stream_dict_copy["name"] = stream_dict["name"].strip()

        # We don't allow newline characters in stream descriptions.
        if "description" in stream_dict:
            stream_dict_copy["description"] = stream_dict["description"].replace("\n", " ")

        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["stream_post_policy"] = stream_post_policy
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dict_copy["message_retention_days"] = parse_message_retention_days(
            message_retention_days, Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP
        )

        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = filter_stream_authorization(
        user_profile, existing_streams
    )
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(
            _("Unable to access stream ({stream_name}).").format(
                stream_name=unauthorized_streams[0].name,
            )
        )
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            return json_error(
                _("You can only invite other Zephyr mirroring users to private streams.")
            )
        if not user_profile.can_subscribe_other_users():
            # Guest users case will not be handled here as it will
            # be handled by the decorator above.
            raise JsonableError(_("Insufficient permission"))
        subscribers = {
            principal_to_user_profile(user_profile, principal) for principal in principals
        }
    else:
        subscribers = {user_profile}

    (subscribed, already_subscribed) = bulk_add_subscriptions(
        realm, streams, subscribers, acting_user=user_profile, color_map=color_map
    )

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile: Dict[str, UserProfile] = {}

    result: Dict[str, Any] = dict(
        subscribed=defaultdict(list), already_subscribed=defaultdict(list)
    )
    for sub_info in subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for sub_info in already_subscribed:
        subscriber = sub_info.user
        stream = sub_info.stream
        result["already_subscribed"][subscriber.email].append(stream.name)

    # Convert the defaultdicts to plain dicts before they are serialized
    # (and before being handed to send_messages_for_new_subscribers), so
    # accidental lookups can't silently create empty entries.  This used
    # to be (redundantly) repeated after the call below; once is enough.
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])

    send_messages_for_new_subscribers(
        user_profile=user_profile,
        subscribers=subscribers,
        new_subscriptions=result["subscribed"],
        email_to_user_profile=email_to_user_profile,
        created_streams=created_streams,
        announce=announce,
    )

    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(result)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-13 22:43:05 +02:00
|
|
|
def send_messages_for_new_subscribers(
    user_profile: UserProfile,
    subscribers: Set[UserProfile],
    new_subscriptions: Dict[str, List[str]],
    email_to_user_profile: Dict[str, UserProfile],
    created_streams: List[Stream],
    announce: bool,
) -> None:
    """
    If you are subscribing lots of new users to new streams,
    this function can be pretty expensive in terms of generating
    lots of queries and sending lots of messages. We isolate
    the code partly to make it easier to test things like
    excessive query counts by mocking this function so that it
    doesn't drown out query counts from other code.
    """
    # Map each subscriber's email to whether they are a bot, so we can skip
    # sending notification PMs to bots below.
    bots = {subscriber.email: subscriber.is_bot for subscriber in subscribers}

    newly_created_stream_names = {s.name for s in created_streams}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if new_subscriptions:
        for email, subscribed_stream_names in new_subscriptions.items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            recipient_user = email_to_user_profile[email]

            # Message text is localized (inside the helper) to the
            # recipient's own default language.
            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                recipient_user=recipient_user,
                stream_names=notify_stream_names,
            )

            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_user=recipient_user,
                    content=msg,
                )
            )

    # Optionally announce newly created streams in the realm's
    # notifications stream.
    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            # Translate the template and topic in the notifications
            # stream's realm language; the .format() below happens outside
            # the override because mention/stream syntax is not localized.
            with override_language(notifications_stream.realm.default_language):
                if len(created_streams) > 1:
                    content = _("{user_name} created the following streams: {stream_str}.")
                else:
                    content = _("{user_name} created a new stream {stream_str}.")
                topic = _("new streams")

            content = content.format(
                user_name=f"@_**{user_profile.full_name}|{user_profile.id}**",
                stream_str=", ".join(f"#**{s.name}**" for s in created_streams),
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT)

            notifications.append(
                internal_prep_stream_message(
                    sender=sender,
                    stream=notifications_stream,
                    topic=topic,
                    content=content,
                ),
            )

    # Also post a "Stream created by ..." message inside each new stream
    # itself (skipped for Zephyr mirror realms).
    if not user_profile.realm.is_zephyr_mirror_realm and len(created_streams) > 0:
        sender = get_system_bot(settings.NOTIFICATION_BOT)
        for stream in created_streams:
            with override_language(stream.realm.default_language):
                notifications.append(
                    internal_prep_stream_message(
                        sender=sender,
                        stream=stream,
                        topic=Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
                        content=_("Stream created by {user_name}.").format(
                            user_name=f"@_**{user_profile.full_name}|{user_profile.id}**",
                        ),
                    ),
                )

    if len(notifications) > 0:
        # Messages the acting user triggered are marked as read for them.
        do_send_messages(notifications, mark_as_read=[user_profile.id])
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def get_subscribers_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ("stream", converter=to_non_negative_int),
) -> HttpResponse:
    """Return the emails of everyone subscribed to the given stream.

    Realm administrators are allowed to fetch subscribers via
    allow_realm_admin=True in the access check.
    """
    # Raises inside access_stream_by_id if the requester may not see the stream.
    stream, sub = access_stream_by_id(
        user_profile,
        stream_id,
        allow_realm_admin=True,
    )
    subscriber_emails = get_subscriber_emails(stream, user_profile)
    return json_success({"subscribers": subscriber_emails})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    include_public: bool = REQ(json_validator=check_bool, default=True),
    include_web_public: bool = REQ(json_validator=check_bool, default=False),
    include_subscribed: bool = REQ(json_validator=check_bool, default=True),
    include_all_active: bool = REQ(json_validator=check_bool, default=False),
    include_default: bool = REQ(json_validator=check_bool, default=False),
    include_owner_subscribed: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """List streams visible to the user, filtered by the include_* flags.

    All filtering logic lives in do_get_streams; this view only forwards
    the validated request parameters.
    """
    accessible_streams = do_get_streams(
        user_profile,
        include_public=include_public,
        include_web_public=include_web_public,
        include_subscribed=include_subscribed,
        include_all_active=include_all_active,
        include_default=include_default,
        include_owner_subscribed=include_owner_subscribed,
    )
    return json_success({"streams": accessible_streams})
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-10-27 15:54:49 +02:00
|
|
|
@has_request_variables
def get_topics_backend(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    stream_id: int = REQ(converter=to_non_negative_int, path_only=True),
) -> HttpResponse:
    """Return the topic history for a stream.

    Supports both authenticated users and (for web-public streams)
    unauthenticated visitors, hence the Union request-user type.
    """
    if not maybe_user_profile.is_authenticated:
        is_web_public_query = True
        user_profile: Optional[UserProfile] = None
    else:
        is_web_public_query = False
        # The isinstance assert both narrows the type for mypy and
        # guarantees user_profile is non-None below; a second
        # `assert user_profile is not None` here was redundant.
        assert isinstance(maybe_user_profile, UserProfile)
        user_profile = maybe_user_profile

    if is_web_public_query:
        # Anonymous access: only web-public streams are reachable.
        realm = get_valid_realm_from_request(request)
        stream = access_web_public_stream(stream_id, realm)
        result = get_topic_history_for_public_stream(recipient_id=stream.recipient_id)

    else:
        assert user_profile is not None

        (stream, sub) = access_stream_by_id(user_profile, stream_id)

        result = get_topic_history_for_stream(
            user_profile=user_profile,
            recipient_id=stream.recipient_id,
            public_history=stream.is_history_public_to_subscribers(),
        )

    return json_success(dict(topics=result))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-18 17:40:54 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def delete_in_topic(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(converter=to_non_negative_int),
    topic_name: str = REQ("topic_name"),
) -> HttpResponse:
    """Delete every message in the given stream/topic that the acting
    realm administrator has access to.
    """
    (stream, sub) = access_stream_by_id(user_profile, stream_id)

    messages = messages_for_topic(stream.recipient_id, topic_name)
    if not stream.is_history_public_to_subscribers():
        # Don't allow the user to delete messages that they don't have access to.
        # Materialize the id queryset into a set once, so the membership
        # test below is O(1) per message rather than re-iterating the
        # queryset for every message.
        deletable_message_ids = set(
            UserMessage.objects.filter(
                user_profile=user_profile, message_id__in=messages
            ).values_list("message_id", flat=True)
        )
        messages = [message for message in messages if message.id in deletable_message_ids]

    do_delete_messages(user_profile.realm, messages)

    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-24 03:22:41 +02:00
|
|
|
@require_post
@authenticated_json_view
@has_request_variables
def json_stream_exists(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_name: str = REQ("stream"),
    autosubscribe: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Report whether stream_name exists and whether the user is subscribed.

    Responds 404 if the stream is missing or inaccessible; when
    autosubscribe is set and the user is not yet subscribed, subscribes
    them as a side effect.
    """
    check_stream_name(stream_name)

    try:
        stream, sub = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        return json_error(e.msg, status=404)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    already_subscribed = sub is not None
    result = {"subscribed": already_subscribed}

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, we
    # should join.
    if autosubscribe and not already_subscribed:
        bulk_add_subscriptions(
            user_profile.realm, [stream], [user_profile], acting_user=user_profile
        )
        result["subscribed"] = True

    return json_success(result)  # results are ignored for HEAD requests
|
2016-12-30 11:42:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-01-03 18:31:43 +01:00
|
|
|
@has_request_variables
def json_get_stream_id(
    request: HttpRequest, user_profile: UserProfile, stream_name: str = REQ("stream")
) -> HttpResponse:
    """Resolve a stream name to its numeric id, enforcing access control."""
    stream, sub = access_stream_by_name(user_profile, stream_name)
    return json_success({"stream_id": stream.id})
|
2017-01-03 18:31:43 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-05-09 22:29:59 +02:00
|
|
|
@has_request_variables
def update_subscriptions_property(
    request: HttpRequest,
    user_profile: UserProfile,
    stream_id: int = REQ(json_validator=check_int),
    property: str = REQ(),
    value: str = REQ(),
) -> HttpResponse:
    """Single-stream convenience wrapper that delegates to the bulk
    update_subscription_properties_backend endpoint.
    """
    change = {"property": property, "stream_id": stream_id, "value": value}
    return update_subscription_properties_backend(
        request, user_profile, subscription_data=[change]
    )
|
|
|
|
|
2017-05-09 22:29:59 +02:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscription_properties_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    subscription_data: List[Dict[str, Any]] = REQ(
        json_validator=check_list(
            check_dict(
                [
                    ("stream_id", check_int),
                    ("property", check_string),
                    ("value", check_union([check_string, check_bool])),
                ]
            ),
        ),
    ),
) -> HttpResponse:
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "is_muted", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    # Whitelist of mutable per-subscription properties, each mapped to the
    # validator that checks/coerces its value.
    property_converters = {
        "color": check_color,
        "in_home_view": check_bool,
        "is_muted": check_bool,
        "desktop_notifications": check_bool,
        "audible_notifications": check_bool,
        "push_notifications": check_bool,
        "email_notifications": check_bool,
        "pin_to_top": check_bool,
        "wildcard_mentions_notify": check_bool,
    }
    response_data = []

    for change in subscription_data:
        stream_id = change["stream_id"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            # NOTE(review): returning here (and below) aborts the rest of the
            # list while keeping changes already applied in earlier iterations;
            # partial application is the existing contract of this endpoint.
            return json_error(_("Unknown subscription property: {}").format(property))

        # Raises inside access_stream_by_id if the stream is inaccessible.
        (stream, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            return json_error(_("Not subscribed to stream id {}").format(stream_id))

        try:
            # Validators may coerce the raw value (e.g. normalize a color).
            value = property_converters[property](property, value)
        except ValidationError as error:
            return json_error(error.message)

        do_change_subscription_property(
            user_profile, sub, stream, property, value, acting_user=user_profile
        )

        response_data.append({"stream_id": stream_id, "property": property, "value": value})

    return json_success({"subscription_data": response_data})
|