from __future__ import absolute_import
from typing import Any, Optional, Tuple, List, Set, Iterable, Mapping, Callable, Dict, Text

from django.utils.translation import ugettext as _
from django.conf import settings
from django.db import transaction
from django.http import HttpRequest, HttpResponse

from zerver.lib.request import JsonableError, REQ, has_request_variables
from zerver.decorator import authenticated_json_post_view, \
    authenticated_json_view, require_realm_admin, to_non_negative_int
from zerver.lib.actions import bulk_remove_subscriptions, \
    do_change_subscription_property, internal_prep_private_message, \
    internal_prep_stream_message, \
    gather_subscriptions, subscribed_to_stream, \
    bulk_add_subscriptions, do_send_messages, get_subscriber_emails, do_rename_stream, \
    do_deactivate_stream, do_change_stream_invite_only, do_add_default_stream, \
    do_change_stream_description, do_get_streams, \
    do_remove_default_stream, get_topic_history_for_stream, \
    prep_stream_welcome_message
from zerver.lib.response import json_success, json_error, json_response
from zerver.lib.streams import access_stream_by_id, access_stream_by_name, \
    check_stream_name, check_stream_name_available, filter_stream_authorization, \
    list_to_streams
from zerver.lib.validator import check_string, check_int, check_list, check_dict, \
    check_bool, check_variable_type
from zerver.models import UserProfile, Stream, Realm, Subscription, \
    Recipient, get_recipient, get_stream, get_active_user_dicts_in_realm, \
    get_system_bot, get_user

from collections import defaultdict
import ujson
from six.moves import urllib

import six

class PrincipalError(JsonableError):
    def __init__(self, principal):
        # type: (Text) -> None
        self.principal = principal  # type: Text
        self.http_status_code = 403  # type: int

    def to_json_error_msg(self):
        # type: () -> Text
        return ("User not authorized to execute queries on behalf of '%s'"
                % (self.principal,))

def principal_to_user_profile(agent, principal):
    # type: (UserProfile, Text) -> UserProfile
    try:
        return get_user(principal, agent.realm)
    except UserProfile.DoesNotExist:
        # We have to make sure we don't leak information about which users
        # are registered for Zulip in a different realm. We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message
        raise PrincipalError(principal)

@require_realm_admin
def deactivate_stream_backend(request, user_profile, stream_id):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
    do_deactivate_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def add_default_stream(request, user_profile, stream_name=REQ()):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    do_add_default_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def remove_default_stream(request, user_profile, stream_name=REQ()):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    do_remove_default_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def update_stream_backend(request, user_profile, stream_id,
                          description=REQ(validator=check_string, default=None),
                          is_private=REQ(validator=check_bool, default=None),
                          new_name=REQ(validator=check_string, default=None)):
    # type: (HttpRequest, UserProfile, int, Optional[Text], Optional[bool], Optional[Text]) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)

    if description is not None:
        do_change_stream_description(stream, description)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            return json_error(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name)
    if is_private is not None:
        do_change_stream_invite_only(stream, is_private)
    return json_success()

def list_subscriptions_backend(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    return json_success({"subscriptions": gather_subscriptions(user_profile)[0]})

FuncKwargPair = Tuple[Callable[..., HttpResponse], Dict[str, Iterable[Any]]]
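# Each FuncKwargPair is a view function plus the keyword arguments to call it
# with; compose_views() below runs such pairs in sequence and merges their
# JSON responses into one payload.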

@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([('name', check_string)])), default=[])):
    # type: (HttpRequest, UserProfile, Iterable[Text], Iterable[Mapping[str, Any]]) -> HttpResponse
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    method_kwarg_pairs = [
        (add_subscriptions_backend, dict(streams_raw=add)),
        (remove_subscriptions_backend, dict(streams_raw=delete))
    ]  # type: List[FuncKwargPair]
    return compose_views(request, user_profile, method_kwarg_pairs)

def compose_views(request, user_profile, method_kwarg_pairs):
    # type: (HttpRequest, UserProfile, List[FuncKwargPair]) -> HttpResponse
    '''
    This takes a series of view methods from method_kwarg_pairs and calls
    them in sequence, and it smushes all the json results into a single
    response when everything goes right. (This helps clients avoid extra
    latency hops.) It rolls back the transaction when things go wrong in
    any one of the composed methods.

    TODO: Move this to a utils-like module if we end up using it more widely.
    '''
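    # For example, update_subscriptions_backend above composes
    # add_subscriptions_backend and remove_subscriptions_backend so that a
    # single request can both add and remove subscriptions and get back one
    # merged JSON payload.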

    json_dict = {}  # type: Dict[str, Any]
    with transaction.atomic():
        for method, kwargs in method_kwarg_pairs:
            response = method(request, user_profile, **kwargs)
            if response.status_code != 200:
                raise JsonableError(response.content)
            json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)

@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions", validator=check_list(check_string)),
                                 principals = REQ(validator=check_list(check_string), default=None)):
    # type: (HttpRequest, UserProfile, Iterable[Text], Optional[Iterable[Text]]) -> HttpResponse
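    # Note that streams_raw is a plain list of stream names parsed from the
    # "subscriptions" parameter, e.g. ["Denmark", "Verona"] (illustrative
    # names), unlike add_subscriptions_backend, which takes a list of dicts.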

    removing_someone_else = principals and \
        set(principals) != set((user_profile.email,))
    if removing_someone_else and not user_profile.is_realm_admin:
        # You can only unsubscribe other people from a stream if you are a realm
        # admin.
        return json_error(_("This action requires administrative rights"))

    streams_as_dict = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})

    streams, __ = list_to_streams(streams_as_dict, user_profile)
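    # list_to_streams() returns (existing_streams, created_streams); since we
    # do not pass autocreate=True here, the created list is empty and ignored.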

    for stream in streams:
        if removing_someone_else and stream.invite_only and \
                not subscribed_to_stream(user_profile, stream):
            # Even as an admin, you can't remove other people from an
            # invite-only stream you're not on.
            return json_error(_("Cannot administer invite-only streams this way"))

    if principals:
        people_to_unsub = set(principal_to_user_profile(
            user_profile, principal) for principal in principals)
    else:
        people_to_unsub = set([user_profile])

    result = dict(removed=[], not_subscribed=[])  # type: Dict[str, List[Text]]
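    # The response maps each outcome to the affected stream names, e.g.
    # {"removed": ["Denmark"], "not_subscribed": ["Verona"]} (illustrative).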
    (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams,
                                                          acting_user=user_profile)

    for (subscriber, stream) in removed:
        result["removed"].append(stream.name)
    for (subscriber, stream) in not_subscribed:
        result["not_subscribed"].append(stream.name)

    return json_success(result)

def you_were_just_subscribed_message(acting_user, stream_names, private_stream_names):
    # type: (UserProfile, Set[Text], Set[Text]) -> Text

    # stream_names is the list of streams for which we should send notifications.
    #
    # We only use private_stream_names to see which of those names
    # are private; it can possibly be a superset of stream_names due to the way the
    # calling code is structured.
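    # An illustrative result (hypothetical names) for a single private stream:
    #   "Hi there! We thought you'd like to know that Iago just subscribed
    #   you to the **invite-only** stream #**design**."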

    subscriptions = sorted(list(stream_names))

    msg = "Hi there! We thought you'd like to know that %s just subscribed you to " % (
        acting_user.full_name,)

    if len(subscriptions) == 1:
        invite_only = subscriptions[0] in private_stream_names
        msg += "the%s stream #**%s**." % (" **invite-only**" if invite_only else "",
                                          subscriptions[0])
    else:
        msg += "the following streams: \n\n"
        for stream_name in subscriptions:
            invite_only = stream_name in private_stream_names
            msg += "* #**%s**%s\n" % (stream_name,
                                      " (**invite-only**)" if invite_only else "")

    public_stream_names = stream_names - private_stream_names
    if public_stream_names:
        msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."

    return msg

@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = REQ("subscriptions",
                                                validator=check_list(check_dict([('name', check_string)]))),
                              invite_only = REQ(validator=check_bool, default=False),
                              announce = REQ(validator=check_bool, default=False),
                              principals = REQ(validator=check_list(check_string), default=[]),
                              authorization_errors_fatal = REQ(validator=check_bool, default=True)):
    # type: (HttpRequest, UserProfile, Iterable[Mapping[str, Text]], bool, bool, List[Text], bool) -> HttpResponse
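    # streams_raw is the parsed "subscriptions" parameter: a list of dicts,
    # each with at least a "name" key, e.g. [{"name": "Denmark"}]
    # (illustrative).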
    stream_dicts = []
    for stream_dict in streams_raw:
        stream_dict_copy = {}  # type: Dict[str, Any]
        for field in stream_dict:
            stream_dict_copy[field] = stream_dict[field]
        # Strip the stream name here.
        stream_dict_copy['name'] = stream_dict_copy['name'].strip()
        stream_dict_copy["invite_only"] = invite_only
        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of
    # can_create_streams policy and check_stream_name policy is inside
    # list_to_streams.
    existing_streams, created_streams = \
        list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(_("Unable to access stream (%s).") % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            return json_error(_("You can only invite other Zephyr mirroring users to invite-only streams."))
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers,
                                                              acting_user=user_profile)

    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))  # type: Dict[str, Any]
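    # e.g. result["subscribed"] might end up as
    # {"iago@zulip.com": ["Denmark", "Verona"]} (illustrative).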
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    newly_created_stream_names = {stream.name for stream in created_streams}
    private_stream_names = {stream.name for stream in streams if stream.invite_only}

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if len(principals) > 0 and result["subscribed"]:
        for email, subscribed_stream_names in six.iteritems(result["subscribed"]):
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            # For each user, we notify them about newly subscribed streams, except for
            # streams that were newly created.
            notify_stream_names = set(subscribed_stream_names) - newly_created_stream_names

            if not notify_stream_names:
                continue

            msg = you_were_just_subscribed_message(
                acting_user=user_profile,
                stream_names=notify_stream_names,
                private_stream_names=private_stream_names
            )

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_email=email,
                    content=msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.notifications_stream  # type: Optional[Stream]
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_msg = "the following streams: %s" % (", ".join('#**%s**' % s.name for s in created_streams))
            else:
                stream_msg = "a new stream #**%s**." % created_streams[0].name
            msg = ("%s just created %s" % (user_profile.full_name, stream_msg))

            sender = get_system_bot(settings.NOTIFICATION_BOT)
            stream_name = notifications_stream.name
            topic = 'Streams'

            notifications.append(
                internal_prep_stream_message(
                    realm=user_profile.realm,
                    sender=sender,
                    stream_name=stream_name,
                    topic=topic,
                    content=msg))

    if not user_profile.realm.is_zephyr_mirror_realm:
        for stream in created_streams:
            notifications.append(prep_stream_welcome_message(stream))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [stream.name for stream in unauthorized_streams]
    return json_success(result)

@has_request_variables
def get_subscribers_backend(request, user_profile,
                            stream_id=REQ('stream', converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
    subscribers = get_subscriber_emails(stream, user_profile)

    return json_success({'subscribers': subscribers})

# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(request, user_profile,
                        include_public=REQ(validator=check_bool, default=True),
                        include_subscribed=REQ(validator=check_bool, default=True),
                        include_all_active=REQ(validator=check_bool, default=False),
                        include_default=REQ(validator=check_bool, default=False)):
    # type: (HttpRequest, UserProfile, bool, bool, bool, bool) -> HttpResponse

    streams = do_get_streams(user_profile, include_public=include_public,
                             include_subscribed=include_subscribed,
                             include_all_active=include_all_active,
                             include_default=include_default)
    return json_success({"streams": streams})

@has_request_variables
def get_topics_backend(request, user_profile,
                       stream_id=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)

    result = get_topic_history_for_stream(
        user_profile=user_profile,
        recipient=recipient,
    )

    # Our data structure here is a list of tuples of
    # (topic name, unread count), and it's reverse chronological,
    # so the most recent topic is the first element of the list.
    return json_success(dict(topics=result))

@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream_name=REQ("stream"),
                       autosubscribe=REQ(validator=check_bool, default=False)):
    # type: (HttpRequest, UserProfile, Text, bool) -> HttpResponse
    check_stream_name(stream_name)

    try:
        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        result = {"exists": False}
        return json_error(e.msg, data=result, status=404)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    result = {"exists": True,
              "subscribed": sub is not None}

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, we
    # should join.
    if sub is None and autosubscribe:
        bulk_add_subscriptions([stream], [user_profile], acting_user=user_profile)
        result["subscribed"] = True

    return json_success(result)  # results are ignored for HEAD requests

@has_request_variables
def json_get_stream_id(request, user_profile, stream_name=REQ('stream')):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    return json_success({'stream_id': stream.id})

@has_request_variables
def update_subscriptions_property(request, user_profile, stream_id=REQ(), property=REQ(), value=REQ()):
    # type: (HttpRequest, UserProfile, int, str, str) -> HttpResponse
    subscription_data = [{"property": property,
                          "stream_id": stream_id,
                          "value": value}]
    return update_subscription_properties_backend(request, user_profile,
                                                  subscription_data=subscription_data)

@has_request_variables
def update_subscription_properties_backend(request, user_profile, subscription_data=REQ(
        validator=check_list(
            check_dict([("stream_id", check_int),
                        ("property", check_string),
                        ("value", check_variable_type(
                            [check_string, check_bool]))])))):
    # type: (HttpRequest, UserProfile, List[Dict[str, Any]]) -> HttpResponse
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream_id": 1, "property": "in_home_view", "value": False},
     {"stream_id": 1, "property": "color", "value": "#c2c2c2"}]
    """
    property_converters = {"color": check_string, "in_home_view": check_bool,
                           "desktop_notifications": check_bool,
                           "audible_notifications": check_bool,
                           "pin_to_top": check_bool}
    response_data = []
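    # Each successful change is echoed back to the client, e.g.
    # {"subscription_data": [{"stream_id": 1, "property": "color",
    #                         "value": "#c2c2c2"}]} (illustrative).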

    for change in subscription_data:
        stream_id = change["stream_id"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            return json_error(_("Unknown subscription property: %s") % (property,))

        (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
        if sub is None:
            return json_error(_("Not subscribed to stream id %d") % (stream_id,))

        property_conversion = property_converters[property](property, value)
        if property_conversion:
            return json_error(property_conversion)

        do_change_subscription_property(user_profile, sub, stream,
                                        property, value)

        response_data.append({'stream_id': stream_id,
                              'property': property,
                              'value': value})

    return json_success({"subscription_data": response_data})