2018-04-24 03:47:28 +02:00
|
|
|
from typing import Any, Optional, Tuple, List, Set, Iterable, Mapping, Callable, Dict, \
|
2017-11-02 08:53:30 +01:00
|
|
|
Union
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2016-05-25 15:02:02 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2015-11-23 14:35:16 +01:00
|
|
|
from django.conf import settings
|
|
|
|
from django.db import transaction
|
2016-05-29 16:41:41 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2017-07-21 02:17:28 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError, ErrorCode
|
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2015-11-23 14:35:16 +01:00
|
|
|
from zerver.decorator import authenticated_json_post_view, \
|
2018-05-04 19:14:29 +02:00
|
|
|
require_realm_admin, to_non_negative_int, require_non_guest_user
|
2015-11-23 14:35:16 +01:00
|
|
|
from zerver.lib.actions import bulk_remove_subscriptions, \
|
2017-04-27 20:42:13 +02:00
|
|
|
do_change_subscription_property, internal_prep_private_message, \
|
2017-04-27 19:44:09 +02:00
|
|
|
internal_prep_stream_message, \
|
2019-02-02 23:53:22 +01:00
|
|
|
gather_subscriptions, \
|
2015-11-23 14:35:16 +01:00
|
|
|
bulk_add_subscriptions, do_send_messages, get_subscriber_emails, do_rename_stream, \
|
2017-01-30 03:52:55 +01:00
|
|
|
do_deactivate_stream, do_change_stream_invite_only, do_add_default_stream, \
|
|
|
|
do_change_stream_description, do_get_streams, \
|
2020-02-04 21:50:55 +01:00
|
|
|
do_remove_default_stream, do_change_stream_post_policy, do_delete_messages, \
|
2017-11-01 18:20:34 +01:00
|
|
|
do_create_default_stream_group, do_add_streams_to_default_stream_group, \
|
|
|
|
do_remove_streams_from_default_stream_group, do_remove_default_stream_group, \
|
2020-02-04 21:50:55 +01:00
|
|
|
do_change_default_stream_group_description, do_change_default_stream_group_name
|
2019-02-02 23:53:22 +01:00
|
|
|
from zerver.lib.response import json_success, json_error
|
2017-01-30 02:57:24 +01:00
|
|
|
from zerver.lib.streams import access_stream_by_id, access_stream_by_name, \
|
2017-01-30 07:01:19 +01:00
|
|
|
check_stream_name, check_stream_name_available, filter_stream_authorization, \
|
2018-02-12 16:02:19 +01:00
|
|
|
list_to_streams, access_stream_for_delete_or_update, access_default_stream_group_by_id
|
2019-01-18 17:40:54 +01:00
|
|
|
from zerver.lib.topic import get_topic_history_for_stream, messages_for_topic
|
2017-05-09 07:01:42 +02:00
|
|
|
from zerver.lib.validator import check_string, check_int, check_list, check_dict, \
|
2020-02-04 21:50:55 +01:00
|
|
|
check_bool, check_variable_type, check_capped_string, check_color, check_dict_only, \
|
|
|
|
check_int_in
|
2019-04-08 19:23:00 +02:00
|
|
|
from zerver.models import UserProfile, Stream, Realm, UserMessage, \
|
2018-05-21 03:54:42 +02:00
|
|
|
get_system_bot, get_active_user
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
from collections import defaultdict
|
|
|
|
import ujson
|
|
|
|
|
|
|
|
class PrincipalError(JsonableError):
    """Raised when the acting user may not act on behalf of ``principal``.

    Rendered as an HTTP 403 response; the offending principal is echoed
    back in the error payload via ``data_fields``.
    """
    code = ErrorCode.UNAUTHORIZED_PRINCIPAL
    data_fields = ['principal']
    http_status_code = 403

    def __init__(self, principal: str) -> None:
        self.principal = principal  # type: str

    @staticmethod
    def msg_format() -> str:
        return _("User not authorized to execute queries on behalf of '{principal}'")
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2018-04-24 03:47:28 +02:00
|
|
|
def principal_to_user_profile(agent: UserProfile, principal: str) -> UserProfile:
    """Resolve ``principal`` (an email address) to an active user in
    ``agent``'s realm.

    Raises PrincipalError (HTTP 403) when no such active user exists,
    deliberately without revealing whether the address belongs to a user
    registered in a different realm.
    """
    try:
        return get_active_user(principal, agent.realm)
    except UserProfile.DoesNotExist:
        # Keep the error vague on purpose: leaking which emails exist in
        # other Zulip realms would be an information disclosure.  We could
        # inspect the domain part of the principal to produce a friendlier
        # message, but haven't.
        raise PrincipalError(principal)
|
|
|
|
|
|
|
|
@require_realm_admin
def deactivate_stream_backend(request: HttpRequest,
                              user_profile: UserProfile,
                              stream_id: int) -> HttpResponse:
    """Deactivate a stream on behalf of a realm administrator."""
    # Admin-level access check; raises on failure.
    target_stream = access_stream_for_delete_or_update(user_profile, stream_id)
    do_deactivate_stream(target_stream)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
@require_realm_admin
@has_request_variables
def add_default_stream(request: HttpRequest,
                       user_profile: UserProfile,
                       stream_name: str=REQ()) -> HttpResponse:
    """Mark an existing stream as a default stream for the realm."""
    # access_stream_by_name also verifies the admin can see this stream.
    (target_stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    do_add_default_stream(target_stream)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def create_default_stream_group(request: HttpRequest, user_profile: UserProfile,
                                group_name: str=REQ(), description: str=REQ(),
                                stream_names: List[str]=REQ(validator=check_list(check_string))) -> HttpResponse:
    """Create a named group of default streams for the realm.

    Each stream name is access-checked via access_stream_by_name (which
    raises on failure) before the group is created.

    Fix: the return annotation was ``-> None`` but the view returns an
    HttpResponse from json_success(); also replaced the append loop with a
    comprehension.
    """
    streams = [access_stream_by_name(user_profile, stream_name)[0]
               for stream_name in stream_names]
    do_create_default_stream_group(user_profile.realm, group_name, description, streams)
    return json_success()
|
|
|
|
|
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_info(request: HttpRequest, user_profile: UserProfile, group_id: int,
                                     new_group_name: Optional[str]=REQ(validator=check_string, default=None),
                                     new_description: Optional[str]=REQ(validator=check_string,
                                                                        default=None)) -> HttpResponse:
    """Rename and/or change the description of a default stream group.

    At least one of new_group_name / new_description must be supplied;
    otherwise a json_error is returned.

    Fix: the return annotation was ``-> None`` but the view returns an
    HttpResponse via json_success()/json_error(); also dropped a stray
    trailing comma in the access_default_stream_group_by_id call.
    """
    if not new_group_name and not new_description:
        return json_error(_('You must pass "new_description" or "new_group_name".'))

    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    if new_group_name is not None:
        do_change_default_stream_group_name(user_profile.realm, group, new_group_name)
    if new_description is not None:
        do_change_default_stream_group_description(user_profile.realm, group, new_description)
    return json_success()
|
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_default_stream_group_streams(request: HttpRequest, user_profile: UserProfile,
                                        group_id: int, op: str=REQ(),
                                        stream_names: List[str]=REQ(
                                            validator=check_list(check_string))) -> HttpResponse:
    """Add streams to, or remove streams from, a default stream group.

    ``op`` must be "add" or "remove"; anything else yields a json_error.

    Fix: the return annotation was ``-> None`` but the view returns an
    HttpResponse; also replaced the append loop with a comprehension.
    """
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    # Access-check every named stream before mutating the group, so a
    # failure leaves the group untouched.
    streams = [access_stream_by_name(user_profile, stream_name)[0]
               for stream_name in stream_names]

    if op == 'add':
        do_add_streams_to_default_stream_group(user_profile.realm, group, streams)
    elif op == 'remove':
        do_remove_streams_from_default_stream_group(user_profile.realm, group, streams)
    else:
        return json_error(_('Invalid value for "op". Specify one of "add" or "remove".'))
    return json_success()
|
|
|
|
|
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream_group(request: HttpRequest, user_profile: UserProfile,
                                group_id: int) -> HttpResponse:
    """Delete a default stream group from the realm.

    Fix: the return annotation was ``-> None`` but the view returns an
    HttpResponse from json_success().
    """
    group = access_default_stream_group_by_id(user_profile.realm, group_id)
    do_remove_default_stream_group(user_profile.realm, group)
    return json_success()
|
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream(request: HttpRequest,
                          user_profile: UserProfile,
                          stream_id: int=REQ(validator=check_int)) -> HttpResponse:
    """Remove a stream from the realm's default-stream list."""
    # allow_realm_admin lets an admin manage defaults even for streams
    # they are not subscribed to.
    stream, recipient, sub = access_stream_by_id(
        user_profile, stream_id, allow_realm_admin=True)
    do_remove_default_stream(stream)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
@require_realm_admin
@has_request_variables
def update_stream_backend(
        request: HttpRequest, user_profile: UserProfile,
        stream_id: int,
        description: Optional[str]=REQ(validator=check_capped_string(
            Stream.MAX_DESCRIPTION_LENGTH), default=None),
        is_private: Optional[bool]=REQ(validator=check_bool, default=None),
        is_announcement_only: Optional[bool]=REQ(validator=check_bool, default=None),
        stream_post_policy: Optional[int]=REQ(validator=check_int_in(
            Stream.STREAM_POST_POLICY_TYPES), default=None),
        history_public_to_subscribers: Optional[bool]=REQ(validator=check_bool, default=None),
        new_name: Optional[str]=REQ(validator=check_string, default=None),
) -> HttpResponse:
    """Update a stream's name, description, privacy, and/or posting policy.

    Each parameter defaults to None, meaning "leave unchanged"; only the
    supplied fields are modified.
    """
    # We allow realm administrators to update the stream name and
    # description even for private streams.
    stream = access_stream_for_delete_or_update(user_profile, stream_id)
    if description is not None:
        if '\n' in description:
            # We don't allow newline characters in stream descriptions.
            description = description.replace("\n", " ")
        do_change_stream_description(stream, description)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            return json_error(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name, user_profile)
    if is_announcement_only is not None:
        # is_announcement_only is a legacy way to specify
        # stream_post_policy.  We can probably just delete this code,
        # since we're not aware of clients that used it, but we're
        # keeping it for backwards-compatibility for now.
        stream_post_policy = Stream.STREAM_POST_POLICY_EVERYONE
        if is_announcement_only:
            stream_post_policy = Stream.STREAM_POST_POLICY_ADMINS
    if stream_post_policy is not None:
        do_change_stream_post_policy(stream, stream_post_policy)

    # But we require even realm administrators to be actually
    # subscribed to make a private stream public; hence the re-check
    # without the delete/update escape hatch.
    if is_private is not None:
        (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
        do_change_stream_invite_only(stream, is_private, history_public_to_subscribers)
    return json_success()
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2019-08-08 21:58:38 +02:00
|
|
|
@has_request_variables
def list_subscriptions_backend(
        request: HttpRequest,
        user_profile: UserProfile,
        include_subscribers: bool=REQ(validator=check_bool, default=False),
) -> HttpResponse:
    """Return the streams the requesting user is subscribed to.

    Fix: the discarded second element of gather_subscriptions()'s return
    value was bound to ``_``, shadowing the module-level ugettext alias
    ``_``; renamed to ``_unused`` to avoid the shadowing hazard.
    """
    subscribed, _unused = gather_subscriptions(
        user_profile, include_subscribers=include_subscribers
    )
    return json_success({"subscriptions": subscribed})
|
2015-11-23 14:35:16 +01:00
|
|
|
|
2017-11-02 08:53:30 +01:00
|
|
|
# (view function, kwargs) pair consumed by compose_views() below.
FuncKwargPair = Tuple[Callable[..., HttpResponse], Dict[str, Union[int, Iterable[Any]]]]
|
2016-07-22 18:03:37 +02:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        delete: Iterable[str]=REQ(validator=check_list(check_string), default=[]),
        add: Iterable[Mapping[str, Any]]=REQ(
            validator=check_list(check_dict([('name', check_string)])), default=[]),
) -> HttpResponse:
    """Composite endpoint: add and/or remove stream subscriptions in a
    single request, delegating to add_subscriptions_backend and
    remove_subscriptions_backend via compose_views (which wraps both in
    one atomic transaction and merges the JSON results).
    """
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    method_kwarg_pairs = [
        (add_subscriptions_backend, dict(streams_raw=add)),
        (remove_subscriptions_backend, dict(streams_raw=delete))
    ]  # type: List[FuncKwargPair]
    return compose_views(request, user_profile, method_kwarg_pairs)
|
|
|
|
|
zerver/views: Revert to Python 2 typing syntax for now (storage, streams).
This reverts commit 620b2cd6e.
Contributors setting up a new development environment were getting
errors like this:
```
++ dirname tools/do-destroy-rebuild-database
[...]
+ ./manage.py purge_queue --all
Traceback (most recent call last):
[...]
File "/home/zulipdev/zulip/zproject/legacy_urls.py", line 3, in <module>
import zerver.views.streams
File "/home/zulipdev/zulip/zerver/views/streams.py", line 187, in <module>
method_kwarg_pairs: List[FuncKwargPair]) -> HttpResponse:
File "/usr/lib/python3.5/typing.py", line 1025, in __getitem__
tvars = _type_vars(params)
[...]
File "/usr/lib/python3.5/typing.py", line 277, in _get_type_vars
for t in types:
TypeError: 'ellipsis' object is not iterable
```
The issue appears to be that we're using the `typing` module from the
3.5 stdlib, rather than the `typing=3.6.2` in our requirements files,
and that doesn't understand the `Callable[..., HttpResponse]` that
appears in the definition of `FuncKwargPair`.
Revert for now to get provision working again; at least one person
reports that reverting this sufficed. We'll need to do more testing
before putting this change back in.
2017-12-13 19:20:11 +01:00
|
|
|
# NOTE: deliberately uses a Py2-style type comment rather than annotations;
# see the revert rationale recorded in version control.
def compose_views(request, user_profile, method_kwarg_pairs):
    # type: (HttpRequest, UserProfile, List[FuncKwargPair]) -> HttpResponse
    '''
    This takes a series of view methods from method_kwarg_pairs and calls
    them in sequence, and it smushes all the json results into a single
    response when everything goes right. (This helps clients avoid extra
    latency hops.) It rolls back the transaction when things go wrong in
    any one of the composed methods.

    TODO: Move this a utils-like module if we end up using it more widely.
    '''

    json_dict = {}  # type: Dict[str, Any]
    with transaction.atomic():
        for method, kwargs in method_kwarg_pairs:
            response = method(request, user_profile, **kwargs)
            if response.status_code != 200:
                # Raising inside the atomic block rolls back any changes
                # already made by earlier sub-views.
                raise JsonableError(response.content)
            json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
|
|
|
|
|
|
|
|
@has_request_variables
def remove_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        streams_raw: Iterable[str]=REQ("subscriptions", validator=check_list(check_string)),
        principals: Optional[Iterable[str]]=REQ(validator=check_list(check_string), default=None),
) -> HttpResponse:
    """Unsubscribe the requester -- or, for realm admins, other users
    named in ``principals`` -- from the given streams.

    Returns {"removed": [...], "not_removed": [...]} of stream names.
    """
    removing_someone_else = principals and \
        set(principals) != set((user_profile.email,))

    if removing_someone_else and not user_profile.is_realm_admin:
        # Unsubscribing other people requires realm-admin rights,
        # regardless of whether the stream is public or private.
        return json_error(_("This action requires administrative rights"))

    streams_as_dict = [{"name": raw_name.strip()} for raw_name in streams_raw]
    streams, __ = list_to_streams(streams_as_dict, user_profile)

    if principals:
        people_to_unsub = {principal_to_user_profile(user_profile, principal)
                           for principal in principals}
    else:
        people_to_unsub = {user_profile}

    result = dict(removed=[], not_removed=[])  # type: Dict[str, List[str]]
    (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams,
                                                          request.client,
                                                          acting_user=user_profile)

    for (subscriber, removed_stream) in removed:
        result["removed"].append(removed_stream.name)
    for (subscriber, not_subscribed_stream) in not_subscribed:
        result["not_removed"].append(not_subscribed_stream.name)

    return json_success(result)
|
|
|
|
|
2018-03-12 00:29:21 +01:00
|
|
|
def you_were_just_subscribed_message(acting_user: UserProfile,
                                     stream_names: Set[str]) -> str:
    """Build the private-message body telling a user that ``acting_user``
    subscribed them to ``stream_names``.

    Single stream: one-line sentence. Multiple streams: intro line plus a
    sorted bullet list.

    Fix: ``sorted(list(stream_names))`` made a redundant intermediate list;
    sorted() accepts any iterable directly.
    """
    subscriptions = sorted(stream_names)
    if len(subscriptions) == 1:
        return _("@**%(full_name)s** subscribed you to the stream #**%(stream_name)s**.") % \
            {"full_name": acting_user.full_name,
             "stream_name": subscriptions[0]}

    message = _("@**%(full_name)s** subscribed you to the following streams:") % \
        {"full_name": acting_user.full_name}
    message += "\n\n"
    for stream_name in subscriptions:
        message += "* #**%s**\n" % (stream_name,)
    return message
|
2017-05-16 17:10:42 +02:00
|
|
|
|
2018-05-04 19:14:29 +02:00
|
|
|
@require_non_guest_user
@has_request_variables
def add_subscriptions_backend(
        request: HttpRequest, user_profile: UserProfile,
        streams_raw: Iterable[Dict[str, str]]=REQ(
            "subscriptions", validator=check_list(check_dict_only(
                [('name', check_string)], optional_keys=[
                    ('color', check_color),
                    ('description', check_capped_string(Stream.MAX_DESCRIPTION_LENGTH)),
                ])
            )),
        invite_only: bool=REQ(validator=check_bool, default=False),
        stream_post_policy: int=REQ(validator=check_int_in(
            Stream.STREAM_POST_POLICY_TYPES), default=Stream.STREAM_POST_POLICY_EVERYONE),
        history_public_to_subscribers: Optional[bool]=REQ(validator=check_bool, default=None),
        announce: bool=REQ(validator=check_bool, default=False),
        principals: List[str]=REQ(validator=check_list(check_string), default=[]),
        authorization_errors_fatal: bool=REQ(validator=check_bool, default=True),
) -> HttpResponse:
    """Subscribe the requester (or the users in ``principals``) to streams,
    creating streams that don't exist yet.

    Phases: normalize the raw stream dicts; validate/create streams via
    list_to_streams; enforce who may subscribe others; perform the bulk
    subscription; then queue notification messages (PMs to newly
    subscribed users, an announcement in the notifications stream, and a
    "Stream created" message in each new stream) and send them in one
    batch.  Returns per-email "subscribed"/"already_subscribed" maps.
    """
    stream_dicts = []
    color_map = {}
    for stream_dict in streams_raw:
        # 'color' field is optional
        # check for its presence in the streams_raw first
        if 'color' in stream_dict:
            color_map[stream_dict['name']] = stream_dict['color']
        if 'description' in stream_dict:
            # We don't allow newline characters in stream descriptions.
            stream_dict['description'] = stream_dict['description'].replace("\n", " ")

        stream_dict_copy = {}  # type: Dict[str, Any]
        for field in stream_dict:
            stream_dict_copy[field] = stream_dict[field]
        # Strip the stream name here.
        stream_dict_copy['name'] = stream_dict_copy['name'].strip()
        stream_dict_copy["invite_only"] = invite_only
        stream_dict_copy["stream_post_policy"] = stream_post_policy
        stream_dict_copy["history_public_to_subscribers"] = history_public_to_subscribers
        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = \
        list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(_("Unable to access stream (%s).") % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            return json_error(_("You can only invite other Zephyr mirroring users to private streams."))
        if not user_profile.can_subscribe_other_users():
            if user_profile.realm.invite_to_stream_policy == Realm.INVITE_TO_STREAM_POLICY_ADMINS:
                return json_error(_("Only administrators can modify other users' subscriptions."))
            # Realm.INVITE_TO_STREAM_POLICY_MEMBERS only fails if the
            # user is a guest, which happens in the decorator above.
            assert user_profile.realm.invite_to_stream_policy == \
                Realm.INVITE_TO_STREAM_POLICY_WAITING_PERIOD
            return json_error(_("Your account is too new to modify other users' subscriptions."))
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers,
                                                              acting_user=user_profile, color_map=color_map)

    # We can assume unique emails here for now, but we should eventually
    # convert this function to be more id-centric.
    email_to_user_profile = dict()  # type: Dict[str, UserProfile]

    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))  # type: Dict[str, Any]
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
        email_to_user_profile[subscriber.email] = subscriber
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    newly_created_stream_names = {s.name for s in created_streams}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if len(principals) > 0 and result["subscribed"]:
        for email, subscribed_stream_names in result["subscribed"].items():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                stream_names=notify_stream_names,
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_user=email_to_user_profile[email],
                    content=msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.get_notifications_stream()
        if notifications_stream is not None:
            if len(created_streams) > 1:
                content = _("@_**%(user_name)s|%(user_id)d** created the following streams: %(stream_str)s.")
            else:
                content = _("@_**%(user_name)s|%(user_id)d** created a new stream %(stream_str)s.")
            content = content % {
                'user_name': user_profile.full_name,
                'user_id': user_profile.id,
                'stream_str': ", ".join('#**%s**' % (s.name,) for s in created_streams)}

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            topic = _('new streams')

            notifications.append(
                internal_prep_stream_message(
                    realm=user_profile.realm,
                    sender=sender,
                    stream=notifications_stream,
                    topic=topic,
                    content=content,
                )
            )

    if not user_profile.realm.is_zephyr_mirror_realm and len(created_streams) > 0:
        sender = get_system_bot(settings.NOTIFICATION_BOT)
        for stream in created_streams:
            notifications.append(
                internal_prep_stream_message(
                    realm=user_profile.realm,
                    sender=sender,
                    stream=stream,
                    topic=Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
                    content=_('Stream created by @_**%(user_name)s|%(user_id)d**.') % {
                        'user_name': user_profile.full_name,
                        'user_id': user_profile.id}
                )
            )

    if len(notifications) > 0:
        do_send_messages(notifications, mark_as_read=[user_profile.id])

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [s.name for s in unauthorized_streams]
    return json_success(result)
|
|
|
|
|
|
|
|
@has_request_variables
def get_subscribers_backend(request: HttpRequest, user_profile: UserProfile,
                            stream_id: int=REQ('stream', converter=to_non_negative_int)) -> HttpResponse:
    """Return the email addresses of a stream's subscribers."""
    # allow_realm_admin: admins may inspect subscribers of streams they
    # are not themselves subscribed to.
    stream, recipient, sub = access_stream_by_id(user_profile, stream_id,
                                                 allow_realm_admin=True)
    emails = get_subscriber_emails(stream, user_profile)
    return json_success({'subscribers': emails})
|
|
|
|
|
|
|
|
# By default, lists all streams that the user has access to --
|
|
|
|
# i.e. public streams plus invite-only streams that the user is on
|
|
|
|
@has_request_variables
def get_streams_backend(
        request: HttpRequest, user_profile: UserProfile,
        include_public: bool=REQ(validator=check_bool, default=True),
        include_subscribed: bool=REQ(validator=check_bool, default=True),
        include_all_active: bool=REQ(validator=check_bool, default=False),
        include_default: bool=REQ(validator=check_bool, default=False),
        include_owner_subscribed: bool=REQ(validator=check_bool, default=False)
) -> HttpResponse:
    """List streams visible to the user, filtered by the include_* flags.

    By default this is the streams the user can access: public streams
    plus invite-only streams the user is subscribed to.
    """
    visible_streams = do_get_streams(
        user_profile,
        include_public=include_public,
        include_subscribed=include_subscribed,
        include_all_active=include_all_active,
        include_default=include_default,
        include_owner_subscribed=include_owner_subscribed,
    )
    return json_success({"streams": visible_streams})
|
|
|
|
|
2016-10-27 15:54:49 +02:00
|
|
|
@has_request_variables
def get_topics_backend(request: HttpRequest, user_profile: UserProfile,
                       stream_id: int=REQ(converter=to_non_negative_int,
                                          path_only=True)) -> HttpResponse:
    """Return the topic history for a stream the user can access."""
    stream, recipient, sub = access_stream_by_id(user_profile, stream_id)

    # Whether full history is visible depends on the stream's
    # history-public-to-subscribers setting.
    topic_history = get_topic_history_for_stream(
        user_profile=user_profile,
        recipient=recipient,
        public_history=stream.is_history_public_to_subscribers(),
    )

    return json_success(dict(topics=topic_history))
|
|
|
|
|
2019-01-18 17:40:54 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def delete_in_topic(request: HttpRequest, user_profile: UserProfile,
                    stream_id: int=REQ(converter=to_non_negative_int),
                    topic_name: str=REQ("topic_name")) -> HttpResponse:
    """Delete all messages in a topic that the acting admin can access.

    For streams with protected (non-public) history, only messages the
    admin actually received (has a UserMessage row for) are deleted.
    """
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)

    messages = messages_for_topic(stream.recipient_id, topic_name)
    if not stream.is_history_public_to_subscribers():
        # Don't allow the user to delete messages that they don't have access to.
        # Materialize the accessible ids into a set: testing membership
        # against the raw QuerySet would be O(n) per message (and may
        # re-query the database), making the filter quadratic overall.
        deletable_message_ids = set(UserMessage.objects.filter(
            user_profile=user_profile, message_id__in=messages).values_list("message_id", flat=True))
        messages = [message for message in messages if message.id in
                    deletable_message_ids]

    do_delete_messages(user_profile.realm, messages)

    return json_success()
|
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request: HttpRequest, user_profile: UserProfile, stream_name: str=REQ("stream"),
                       autosubscribe: bool=REQ(validator=check_bool, default=False)) -> HttpResponse:
    """Report whether a stream exists and whether the user is subscribed,
    optionally subscribing the user if autosubscribe is set.
    """
    check_stream_name(stream_name)

    try:
        stream, recipient, sub = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        # Access failures are reported to the client as a 404.
        return json_error(e.msg, status=404)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    is_subscribed = sub is not None

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, we
    # should join.
    if autosubscribe and not is_subscribed:
        bulk_add_subscriptions([stream], [user_profile], acting_user=user_profile)
        is_subscribed = True

    return json_success({"subscribed": is_subscribed})  # results are ignored for HEAD requests
|
2016-12-30 11:42:59 +01:00
|
|
|
|
2017-01-03 18:31:43 +01:00
|
|
|
@has_request_variables
def json_get_stream_id(request: HttpRequest,
                       user_profile: UserProfile,
                       stream_name: str=REQ('stream')) -> HttpResponse:
    """Resolve a stream name to its numeric id, enforcing access checks."""
    stream, recipient, sub = access_stream_by_name(user_profile, stream_name)
    return json_success({'stream_id': stream.id})
|
2017-01-03 18:31:43 +01:00
|
|
|
|
2017-05-09 22:29:59 +02:00
|
|
|
@has_request_variables
def update_subscriptions_property(request: HttpRequest,
                                  user_profile: UserProfile,
                                  stream_id: int=REQ(validator=check_int),
                                  property: str=REQ(),
                                  value: str=REQ()) -> HttpResponse:
    """Single-stream convenience wrapper: forwards one property change to
    the bulk update_subscription_properties_backend endpoint.
    """
    change = {
        "property": property,
        "stream_id": stream_id,
        "value": value,
    }
    return update_subscription_properties_backend(
        request, user_profile, subscription_data=[change])
|
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
@has_request_variables
def update_subscription_properties_backend(
        request: HttpRequest, user_profile: UserProfile,
        subscription_data: List[Dict[str, Any]]=REQ(
            validator=check_list(
                check_dict([("stream_id", check_int),
                            ("property", check_string),
                            ("value", check_variable_type([check_string, check_bool]))])
            )
        ),
) -> HttpResponse:
    """
    Entry point for changing subscription properties.  This is a bulk
    endpoint: requestors always provide a subscription_data list
    containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": "1", "property": "is_muted", "value": False},
     {"stream_id": "1", "property": "color", "value": "#c2c2c2"}]
    """
    # Map each mutable property to the validator for its value type.
    property_converters = {
        "color": check_color,
        "in_home_view": check_bool,
        "is_muted": check_bool,
        "desktop_notifications": check_bool,
        "audible_notifications": check_bool,
        "push_notifications": check_bool,
        "email_notifications": check_bool,
        "pin_to_top": check_bool,
        "wildcard_mentions_notify": check_bool,
    }
    response_data = []

    for change in subscription_data:
        stream_id = change["stream_id"]
        prop = change["property"]
        new_value = change["value"]

        if prop not in property_converters:
            return json_error(_("Unknown subscription property: %s") % (prop,))

        stream, recipient, sub = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            return json_error(_("Not subscribed to stream id %d") % (stream_id,))

        # Validate the supplied value against this property's expected type;
        # the converter returns an error message string on failure.
        validation_error = property_converters[prop](prop, new_value)
        if validation_error:
            return json_error(validation_error)

        do_change_subscription_property(user_profile, sub, stream,
                                        prop, new_value)

        response_data.append({'stream_id': stream_id,
                              'property': prop,
                              'value': new_value})

    return json_success({"subscription_data": response_data})
|