from __future__ import absolute_import
from typing import Any, Optional, Tuple, List, Set, Iterable, Mapping, Callable, Dict

from django.utils.translation import ugettext as _
from django.conf import settings
from django.db import transaction
from django.http import HttpRequest, HttpResponse
from zerver.lib.request import JsonableError, REQ, has_request_variables
from zerver.decorator import authenticated_json_post_view, \
    authenticated_json_view, \
    get_user_profile_by_email, require_realm_admin, to_non_negative_int
from zerver.lib.actions import bulk_remove_subscriptions, \
    do_change_subscription_property, internal_prep_private_message, \
    internal_prep_stream_message, \
    gather_subscriptions, subscribed_to_stream, \
    bulk_add_subscriptions, do_send_messages, get_subscriber_emails, do_rename_stream, \
    do_deactivate_stream, do_change_stream_invite_only, do_add_default_stream, \
    do_change_stream_description, do_get_streams, \
    do_remove_default_stream, get_topic_history_for_stream, \
    prep_stream_welcome_message
from zerver.lib.response import json_success, json_error, json_response
from zerver.lib.streams import access_stream_by_id, access_stream_by_name, \
    check_stream_name, check_stream_name_available, filter_stream_authorization, \
    list_to_streams
from zerver.lib.validator import check_string, check_list, check_dict, \
    check_bool, check_variable_type
from zerver.models import UserProfile, Stream, Realm, Subscription, \
    Recipient, get_recipient, get_stream, get_active_user_dicts_in_realm

from collections import defaultdict
import ujson
from six.moves import urllib

import six
from typing import Text
|
2016-01-26 01:54:56 +01:00
|
|
|
|
2015-11-23 14:35:16 +01:00
|
|
|
class PrincipalError(JsonableError):
|
2016-04-28 01:23:45 +02:00
|
|
|
def __init__(self, principal, status_code=403):
|
2016-12-25 00:44:26 +01:00
|
|
|
# type: (Text, int) -> None
|
|
|
|
self.principal = principal # type: Text
|
2016-06-03 00:34:22 +02:00
|
|
|
self.status_code = status_code # type: int
|
2015-11-23 14:35:16 +01:00
|
|
|
|
|
|
|
def to_json_error_msg(self):
|
2016-12-25 00:44:26 +01:00
|
|
|
# type: () -> Text
|
2015-11-23 14:35:16 +01:00
|
|
|
return ("User not authorized to execute queries on behalf of '%s'"
|
|
|
|
% (self.principal,))

def principal_to_user_profile(agent, principal):
    # type: (UserProfile, Text) -> UserProfile
    principal_doesnt_exist = False
    try:
        principal_user_profile = get_user_profile_by_email(principal)
    except UserProfile.DoesNotExist:
        principal_doesnt_exist = True

    if (principal_doesnt_exist or
            agent.realm != principal_user_profile.realm):
        # We have to make sure we don't leak information about which users
        # are registered for Zulip in a different realm. We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message.
        raise PrincipalError(principal)

    return principal_user_profile

@require_realm_admin
def deactivate_stream_backend(request, user_profile, stream_id):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
    do_deactivate_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def add_default_stream(request, user_profile, stream_name=REQ()):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    do_add_default_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def remove_default_stream(request, user_profile, stream_name=REQ()):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    do_remove_default_stream(stream)
    return json_success()

@require_realm_admin
@has_request_variables
def update_stream_backend(request, user_profile, stream_id,
                          description=REQ(validator=check_string, default=None),
                          is_private=REQ(validator=check_bool, default=None),
                          new_name=REQ(validator=check_string, default=None)):
    # type: (HttpRequest, UserProfile, int, Optional[Text], Optional[bool], Optional[Text]) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)

    if description is not None:
        do_change_stream_description(stream, description)
    if new_name is not None:
        new_name = new_name.strip()
        if stream.name == new_name:
            return json_error(_("Stream already has that name!"))
        if stream.name.lower() != new_name.lower():
            # Check that the stream name is available (unless we are
            # only changing the casing of the stream name).
            check_stream_name_available(user_profile.realm, new_name)
        do_rename_stream(stream, new_name)
    if is_private is not None:
        do_change_stream_invite_only(stream, is_private)
    return json_success()

def list_subscriptions_backend(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    return json_success({"subscriptions": gather_subscriptions(user_profile)[0]})

FuncKwargPair = Tuple[Callable[..., HttpResponse], Dict[str, Iterable[Any]]]
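# Each FuncKwargPair couples a view function with the keyword arguments it
# should be called with, for example (illustrative only):
#   (add_subscriptions_backend, dict(streams_raw=[{'name': 'devel'}]))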

@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([('name', check_string)])), default=[])):
    # type: (HttpRequest, UserProfile, Iterable[Text], Iterable[Mapping[str, Any]]) -> HttpResponse
    if not add and not delete:
        return json_error(_('Nothing to do. Specify at least one of "add" or "delete".'))

    method_kwarg_pairs = [
        (add_subscriptions_backend, dict(streams_raw=add)),
        (remove_subscriptions_backend, dict(streams_raw=delete))
    ]  # type: List[FuncKwargPair]
    return compose_views(request, user_profile, method_kwarg_pairs)
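
# Illustrative request parameters for update_subscriptions_backend (the
# stream names here are made up): "add" is a list of dicts with a "name"
# key, while "delete" is a plain list of stream names, e.g.
#   add:    [{"name": "design"}, {"name": "devel"}]
#   delete: ["social"]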

def compose_views(request, user_profile, method_kwarg_pairs):
    # type: (HttpRequest, UserProfile, List[FuncKwargPair]) -> HttpResponse
    '''
    This takes a series of view methods from method_kwarg_pairs and calls
    them in sequence, and it smushes all the json results into a single
    response when everything goes right.  (This helps clients avoid extra
    latency hops.)  It rolls back the transaction when things go wrong in
    any one of the composed methods.

    TODO: Move this to a utils-like module if we end up using it more widely.
    '''

    json_dict = {}  # type: Dict[str, Any]
    with transaction.atomic():
        for method, kwargs in method_kwarg_pairs:
            response = method(request, user_profile, **kwargs)
            if response.status_code != 200:
                raise JsonableError(response.content)
            json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
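
# A sketch of what compose_views produces for update_subscriptions_backend
# (stream names and emails are made up): the keys returned by both composed
# views end up in one merged JSON body, e.g.
#   {"subscribed": {"iago@zulip.com": ["design"]}, "already_subscribed": {},
#    "removed": ["social"], "not_subscribed": []}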

@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions", validator=check_list(check_string)),
                                 principals = REQ(validator=check_list(check_string), default=None)):
    # type: (HttpRequest, UserProfile, Iterable[Text], Optional[Iterable[Text]]) -> HttpResponse

    removing_someone_else = principals and \
        set(principals) != set((user_profile.email,))
    if removing_someone_else and not user_profile.is_realm_admin:
        # You can only unsubscribe other people from a stream if you are a realm
        # admin.
        return json_error(_("This action requires administrative rights"))

    streams_as_dict = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})

    streams, __ = list_to_streams(streams_as_dict, user_profile)

    for stream in streams:
        if removing_someone_else and stream.invite_only and \
                not subscribed_to_stream(user_profile, stream):
            # Even as an admin, you can't remove other people from an
            # invite-only stream you're not on.
            return json_error(_("Cannot administer invite-only streams this way"))

    if principals:
        people_to_unsub = set(principal_to_user_profile(
            user_profile, principal) for principal in principals)
    else:
        people_to_unsub = set([user_profile])

    result = dict(removed=[], not_subscribed=[])  # type: Dict[str, List[Text]]
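    # Illustrative final shape of result (stream names are made up):
    #   {"removed": ["devel"], "not_subscribed": ["social"]}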
    (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams)

    for (subscriber, stream) in removed:
        result["removed"].append(stream.name)
    for (subscriber, stream) in not_subscribed:
        result["not_subscribed"].append(stream.name)

    return json_success(result)

@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = REQ("subscriptions",
                                                validator=check_list(check_dict([('name', check_string)]))),
                              invite_only = REQ(validator=check_bool, default=False),
                              announce = REQ(validator=check_bool, default=False),
                              principals = REQ(validator=check_list(check_string), default=[]),
                              authorization_errors_fatal = REQ(validator=check_bool, default=True)):
    # type: (HttpRequest, UserProfile, Iterable[Mapping[str, Text]], bool, bool, List[Text], bool) -> HttpResponse
    stream_dicts = []
    for stream_dict in streams_raw:
        stream_dict_copy = {}  # type: Dict[str, Any]
        for field in stream_dict:
            stream_dict_copy[field] = stream_dict[field]
        # Strip the stream name here.
        stream_dict_copy['name'] = stream_dict_copy['name'].strip()
        stream_dict_copy["invite_only"] = invite_only
        stream_dicts.append(stream_dict_copy)

    # Validation of the streams arguments, including enforcement of the
    # can_create_streams policy and the check_stream_name policy, happens
    # inside list_to_streams.
    existing_streams, created_streams = \
        list_to_streams(stream_dicts, user_profile, autocreate=True)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error(_("Unable to access stream (%s).") % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator.
    streams = authorized_streams + created_streams

    if len(principals) > 0:
        if user_profile.realm.is_zephyr_mirror_realm and not all(stream.invite_only for stream in streams):
            return json_error(_("You can only invite other Zephyr mirroring users to invite-only streams."))
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = set([user_profile])

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers)

    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))  # type: Dict[str, Any]
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    private_streams = dict((stream.name, stream.invite_only) for stream in streams)
    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if len(principals) > 0 and result["subscribed"]:
        for email, subscriptions in six.iteritems(result["subscribed"]):
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots.
                continue

            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream #**%s**."
                       % (user_profile.full_name,
                          " **invite-only**" if private_streams[subscriptions[0]] else "",
                          subscriptions[0],
                          ))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name,))
                for stream in subscriptions:
                    msg += "* #**%s**%s\n" % (
                        stream,
                        " (**invite-only**)" if private_streams[stream] else "")

            if len([s for s in subscriptions if not private_streams[s]]) > 0:
                msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."

            sender = get_user_profile_by_email(settings.NOTIFICATION_BOT)
            notifications.append(
                internal_prep_private_message(
                    realm=user_profile.realm,
                    sender=sender,
                    recipient_email=email,
                    content=msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.notifications_stream  # type: Optional[Stream]
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_msg = "the following streams: %s" % (", ".join('#**%s**' % s.name for s in created_streams))
            else:
                stream_msg = "a new stream #**%s**." % created_streams[0].name
            msg = ("%s just created %s" % (user_profile.full_name, stream_msg))

            sender = get_user_profile_by_email(settings.NOTIFICATION_BOT)
            stream_name = notifications_stream.name
            topic = 'Streams'

            notifications.append(
                internal_prep_stream_message(
                    realm=user_profile.realm,
                    sender=sender,
                    stream_name=stream_name,
                    topic=topic,
                    content=msg))

        else:
            msg = ("Hi there! %s just created a new stream #**%s**."
                   % (user_profile.full_name, created_streams[0].name))

            sender = get_user_profile_by_email(settings.NOTIFICATION_BOT)
            for realm_user_dict in get_active_user_dicts_in_realm(user_profile.realm):
                # Don't announce to yourself or to people you explicitly added
                # (who will get the notification above instead).
                if realm_user_dict['email'] in principals or realm_user_dict['email'] == user_profile.email:
                    continue

                recipient_email = realm_user_dict['email']

                notifications.append(
                    internal_prep_private_message(
                        realm=user_profile.realm,
                        sender=sender,
                        recipient_email=recipient_email,
                        content=msg))

    if not user_profile.realm.is_zephyr_mirror_realm:
        for stream in created_streams:
            notifications.append(prep_stream_welcome_message(stream))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [stream.name for stream in unauthorized_streams]
    return json_success(result)

@has_request_variables
def get_subscribers_backend(request, user_profile,
                            stream_id=REQ('stream', converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)
    subscribers = get_subscriber_emails(stream, user_profile)

    return json_success({'subscribers': subscribers})

# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on.
@has_request_variables
def get_streams_backend(request, user_profile,
                        include_public=REQ(validator=check_bool, default=True),
                        include_subscribed=REQ(validator=check_bool, default=True),
                        include_all_active=REQ(validator=check_bool, default=False),
                        include_default=REQ(validator=check_bool, default=False)):
    # type: (HttpRequest, UserProfile, bool, bool, bool, bool) -> HttpResponse

    streams = do_get_streams(user_profile, include_public=include_public,
                             include_subscribed=include_subscribed,
                             include_all_active=include_all_active,
                             include_default=include_default)
    return json_success({"streams": streams})

@has_request_variables
def get_topics_backend(request, user_profile,
                       stream_id=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id)

    result = get_topic_history_for_stream(
        user_profile=user_profile,
        recipient=recipient,
    )

    # Our data structure here is a list of tuples of
    # (topic name, unread count), and it's reverse chronological,
    # so the most recent topic is the first element of the list.
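    # For example (illustrative topic names and counts only):
    #   [("quarterly planning", 2), ("lunch", 0)]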
    return json_success(dict(topics=result))

@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream_name=REQ("stream"),
                       autosubscribe=REQ(validator=check_bool, default=False)):
    # type: (HttpRequest, UserProfile, Text, bool) -> HttpResponse
    check_stream_name(stream_name)

    try:
        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    except JsonableError as e:
        result = {"exists": False}
        return json_error(e.error, data=result, status=404)

    # access_stream functions return a subscription if and only if we
    # are already subscribed.
    result = {"exists": True,
              "subscribed": sub is not None}

    # If we got here, we're either subscribed or the stream is public.
    # So if we're not yet subscribed and autosubscribe is enabled, we
    # should join.
    if sub is None and autosubscribe:
        bulk_add_subscriptions([stream], [user_profile])
        result["subscribed"] = True

    return json_success(result)  # results are ignored for HEAD requests

@has_request_variables
def json_get_stream_id(request, user_profile, stream_name=REQ('stream')):
    # type: (HttpRequest, UserProfile, Text) -> HttpResponse
    (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
    return json_success({'stream_id': stream.id})

@authenticated_json_view
@has_request_variables
def json_subscription_property(request, user_profile, subscription_data=REQ(
        validator=check_list(
            check_dict([("stream", check_string),
                        ("property", check_string),
                        ("value", check_variable_type(
                            [check_string, check_bool]))])))):
    # type: (HttpRequest, UserProfile, List[Dict[str, Any]]) -> HttpResponse
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream": "devel", "property": "in_home_view", "value": False},
     {"stream": "devel", "property": "color", "value": "#c2c2c2"}]
    """
    if request.method != "POST":
        return json_error(_("Invalid verb"))

    property_converters = {"color": check_string, "in_home_view": check_bool,
                           "desktop_notifications": check_bool,
                           "audible_notifications": check_bool,
                           "pin_to_top": check_bool}
    response_data = []

    for change in subscription_data:
        stream_name = change["stream"]
        property = change["property"]
        value = change["value"]

        if property not in property_converters:
            return json_error(_("Unknown subscription property: %s") % (property,))

        (stream, recipient, sub) = access_stream_by_name(user_profile, stream_name)
        if sub is None:
            return json_error(_("Not subscribed to stream %s") % (stream_name,))

        property_conversion = property_converters[property](property, value)
        if property_conversion:
            return json_error(property_conversion)

        do_change_subscription_property(user_profile, sub, stream,
                                        property, value)

        response_data.append({'stream': stream_name,
                              'property': property,
                              'value': value})

    return json_success({"subscription_data": response_data})