2013-04-23 18:51:17 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2012-09-19 19:39:34 +02:00
|
|
|
from django.conf import settings
|
2013-11-21 01:30:20 +01:00
|
|
|
from django.contrib.auth import authenticate, login, get_backends
|
2012-08-28 18:44:51 +02:00
|
|
|
from django.contrib.auth.decorators import login_required
|
|
|
|
from django.core.urlresolvers import reverse
|
2013-11-08 17:53:41 +01:00
|
|
|
from django.http import HttpResponseRedirect, HttpResponseForbidden, HttpResponse
|
2013-06-17 18:01:22 +02:00
|
|
|
from django.shortcuts import render_to_response, redirect
|
2013-01-08 23:26:40 +01:00
|
|
|
from django.template import RequestContext, loader
|
2013-11-04 17:22:58 +01:00
|
|
|
from django.utils.timezone import now
|
2013-08-16 22:55:50 +02:00
|
|
|
from django.utils.cache import patch_cache_control
|
2012-09-29 00:49:34 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2012-12-11 23:42:32 +01:00
|
|
|
from django.core import validators
|
2013-03-05 23:45:02 +01:00
|
|
|
from django.contrib.auth.views import login as django_login_page, \
|
|
|
|
logout_then_login as django_logout_then_login
|
2013-03-06 20:53:54 +01:00
|
|
|
from django.db.models import Q, F
|
2015-08-18 20:32:56 +02:00
|
|
|
from django.forms.models import model_to_dict
|
2015-08-21 01:59:39 +02:00
|
|
|
from django.core.mail import send_mail
|
2015-01-29 08:59:41 +01:00
|
|
|
from django.middleware.csrf import get_token
|
2013-11-04 17:22:58 +01:00
|
|
|
from django.db import transaction
|
2015-08-18 20:32:56 +02:00
|
|
|
from zerver.models import Message, UserProfile, Stream, Subscription, Huddle, \
|
|
|
|
Recipient, Realm, UserMessage, DefaultStream, RealmEmoji, RealmAlias, \
|
|
|
|
RealmFilter, bulk_get_recipients, \
|
2013-12-09 23:17:16 +01:00
|
|
|
PreregistrationUser, get_client, MitUser, UserActivity, PushDeviceToken, \
|
2013-11-04 17:22:58 +01:00
|
|
|
get_stream, bulk_get_streams, UserPresence, \
|
2015-09-20 19:50:06 +02:00
|
|
|
get_recipient, valid_stream_name, \
|
2013-12-12 18:36:32 +01:00
|
|
|
split_email_to_domain, resolve_email_to_domain, email_to_username, get_realm, \
|
2015-08-20 02:38:32 +02:00
|
|
|
completely_open, get_unique_open_realm, get_active_user_dicts_in_realm, remote_user_to_email
|
2013-12-12 18:36:32 +01:00
|
|
|
from zerver.lib.actions import bulk_remove_subscriptions, do_change_password, \
|
2014-01-21 19:27:22 +01:00
|
|
|
do_change_full_name, do_change_enable_desktop_notifications, do_change_is_admin, \
|
2013-12-12 18:36:32 +01:00
|
|
|
do_change_enter_sends, do_change_enable_sounds, do_activate_user, do_create_user, \
|
2015-10-13 21:33:54 +02:00
|
|
|
do_change_subscription_property, internal_send_message, \
|
2014-01-27 20:13:26 +01:00
|
|
|
create_stream_if_needed, gather_subscriptions, subscribed_to_stream, \
|
2013-12-12 18:36:32 +01:00
|
|
|
update_user_presence, bulk_add_subscriptions, do_events_register, \
|
2013-05-08 15:28:27 +02:00
|
|
|
get_status_dict, do_change_enable_offline_email_notifications, \
|
2015-10-13 21:30:22 +02:00
|
|
|
do_change_enable_digest_emails, do_set_realm_name, do_set_realm_restricted_to_domain, \
|
|
|
|
do_set_realm_invite_required, do_set_realm_invite_by_admins_only, internal_prep_message, \
|
2013-11-16 17:11:15 +01:00
|
|
|
do_send_messages, get_default_subs, do_deactivate_user, do_reactivate_user, \
|
2013-09-03 22:41:17 +02:00
|
|
|
user_email_is_unique, do_invite_users, do_refer_friend, compute_mit_user_fullname, \
|
2013-09-13 19:30:05 +02:00
|
|
|
do_add_alert_words, do_remove_alert_words, do_set_alert_words, get_subscriber_emails, \
|
2013-11-26 00:08:17 +01:00
|
|
|
do_set_muted_topics, do_rename_stream, clear_followup_emails_queue, \
|
2015-08-21 01:56:10 +02:00
|
|
|
do_change_enable_offline_push_notifications, \
|
2014-01-02 23:20:33 +01:00
|
|
|
do_deactivate_stream, do_change_autoscroll_forever, do_make_stream_public, \
|
2014-02-26 20:02:43 +01:00
|
|
|
do_add_default_stream, do_change_default_all_public_streams, \
|
|
|
|
do_change_default_desktop_notifications, \
|
2014-02-13 19:39:54 +01:00
|
|
|
do_change_default_events_register_stream, do_change_default_sending_stream, \
|
2014-02-05 21:41:01 +01:00
|
|
|
do_change_enable_stream_desktop_notifications, do_change_enable_stream_sounds, \
|
2014-02-26 20:02:43 +01:00
|
|
|
do_change_stream_description, do_get_streams, do_make_stream_private, \
|
2014-02-26 20:54:59 +01:00
|
|
|
do_regenerate_api_key, do_remove_default_stream, do_update_pointer, \
|
2015-10-13 21:30:22 +02:00
|
|
|
do_change_avatar_source, do_change_twenty_four_hour_time, do_change_left_side_userlist, \
|
|
|
|
realm_user_count
|
2014-02-13 19:39:54 +01:00
|
|
|
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.create_user import random_api_key
|
2013-10-16 17:24:52 +02:00
|
|
|
from zerver.lib.push_notifications import num_push_devices_for_user
|
2013-11-13 18:04:31 +01:00
|
|
|
from zerver.forms import RegistrationForm, HomepageForm, ToSForm, \
|
2014-01-07 19:51:18 +01:00
|
|
|
CreateUserForm, is_inactive, OurAuthenticationForm
|
2013-12-12 23:15:19 +01:00
|
|
|
from django.views.decorators.csrf import csrf_exempt
|
2013-11-21 01:30:20 +01:00
|
|
|
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib import bugdown
|
2013-09-03 22:41:17 +02:00
|
|
|
from zerver.lib.alert_words import user_alert_words
|
2014-02-14 21:55:20 +01:00
|
|
|
from zerver.lib.validator import check_string, check_list, check_dict, \
|
|
|
|
check_int, check_bool, check_variable_type
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.decorator import require_post, \
|
2012-12-02 20:51:51 +01:00
|
|
|
authenticated_api_view, authenticated_json_post_view, \
|
2014-02-13 23:47:57 +01:00
|
|
|
has_request_variables, authenticated_json_view, to_non_negative_int, \
|
2014-01-22 20:20:10 +01:00
|
|
|
JsonableError, get_user_profile_by_email, REQ, require_realm_admin, \
|
|
|
|
RequestVariableConversionError
|
2013-11-12 22:26:17 +01:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_url
|
2013-10-23 16:46:18 +02:00
|
|
|
from zerver.lib.upload import upload_message_image_through_web_client, upload_avatar_image, \
|
2014-05-06 03:48:23 +02:00
|
|
|
get_signed_upload_url, get_realm_for_filename
|
2013-12-12 23:15:19 +01:00
|
|
|
from zerver.lib.response import json_success, json_error, json_response
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.unminify import SourceMap
|
|
|
|
from zerver.lib.queue import queue_json_publish
|
2013-11-06 17:23:19 +01:00
|
|
|
from zerver.lib.utils import statsd, generate_random_token, statsd_key
|
2015-08-19 02:58:20 +02:00
|
|
|
from zproject.backends import password_auth_enabled, dev_auth_enabled
|
2013-01-23 23:24:44 +01:00
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
from confirmation.models import Confirmation
|
|
|
|
|
2015-01-29 08:59:41 +01:00
|
|
|
import requests
|
|
|
|
|
2013-07-18 17:22:24 +02:00
|
|
|
import subprocess
|
2013-08-09 20:26:35 +02:00
|
|
|
import calendar
|
2012-08-28 18:44:51 +02:00
|
|
|
import datetime
|
2013-06-18 23:55:55 +02:00
|
|
|
import ujson
|
2012-08-28 18:44:51 +02:00
|
|
|
import simplejson
|
2012-09-07 19:20:04 +02:00
|
|
|
import re
|
2012-10-04 20:27:49 +02:00
|
|
|
import urllib
|
2012-10-26 22:02:51 +02:00
|
|
|
import base64
|
2013-05-06 17:14:59 +02:00
|
|
|
import time
|
2013-05-20 19:09:18 +02:00
|
|
|
import logging
|
2013-10-28 15:54:32 +01:00
|
|
|
import os
|
2014-03-28 00:49:20 +01:00
|
|
|
import jwt
|
2015-01-29 08:59:41 +01:00
|
|
|
import hashlib
|
|
|
|
import hmac
|
2013-01-31 21:06:59 +01:00
|
|
|
from collections import defaultdict
|
2012-10-16 21:15:01 +02:00
|
|
|
|
2013-10-17 19:21:18 +02:00
|
|
|
from zerver.lib.rest import rest_dispatch as _rest_dispatch
|
|
|
|
# CSRF-exempt wrapper around the generic REST dispatcher: passes this module's
# globals() so _rest_dispatch can resolve the view functions named in urls.py.
rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))
|
|
|
|
|
2013-01-30 22:40:00 +01:00
|
|
|
def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
    """Convert plaintext stream names into Stream objects, validating input.

    Every requested name must be shorter than Stream.MAX_NAME_LENGTH and
    pass valid_stream_name; otherwise a JsonableError is raised before any
    database writes happen.  In autocreate mode the operation is atomic:
    either the precheck raises, or every requested stream exists afterwards.

    @param streams_raw The list of stream names to process
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    @param invite_only Whether newly created streams should have the invite_only bit set
    """
    requested_names = set(name.strip() for name in streams_raw)

    # Precheck: reject any malformed name before touching the database.
    for name in requested_names:
        if len(name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError("Stream name (%s) too long." % (name,))
        if not valid_stream_name(name):
            raise JsonableError("Invalid stream name (%s)." % (name,))

    # One bulk lookup for everything that already exists (keyed by lowercased name).
    found = bulk_get_streams(user_profile.realm, requested_names)

    existing_streams = []
    created_streams = []
    missing = []
    for name in requested_names:
        match = found.get(name.lower())
        if match is None:
            missing.append(name)
        else:
            existing_streams.append(match)

    if autocreate:
        for name in missing:
            stream, was_created = create_stream_if_needed(user_profile.realm,
                                                         name,
                                                         invite_only=invite_only)
            if was_created:
                created_streams.append(stream)
            else:
                existing_streams.append(stream)
    elif missing:
        raise JsonableError("Stream(s) (%s) do not exist" % ", ".join(missing))

    return existing_streams, created_streams
|
2013-01-30 22:40:00 +01:00
|
|
|
|
2013-01-09 22:47:09 +01:00
|
|
|
class PrincipalError(JsonableError):
    # Raised when a user (the "agent") is not allowed to act on behalf of
    # the given principal (an email address) -- e.g. because the principal
    # does not exist or lives in a different realm.
    def __init__(self, principal):
        # The email address the request tried to act on behalf of.
        self.principal = principal

    def to_json_error_msg(self):
        # Message rendered into the JSON error response by the
        # JsonableError handling machinery.
        return ("User not authorized to execute queries on behalf of '%s'"
                % (self.principal,))
|
|
|
|
|
|
|
|
def principal_to_user_profile(agent, principal):
    """Resolve `principal` (an email) to a UserProfile on behalf of `agent`.

    Raises PrincipalError when the principal does not exist or is in a
    different realm than the agent.
    """
    try:
        target = get_user_profile_by_email(principal)
    except UserProfile.DoesNotExist:
        target = None

    # We have to make sure we don't leak information about which users
    # are registered for Zulip in a different realm. We could do
    # something a little more clever and check the domain part of the
    # principal to maybe give a better error message
    if target is None or target.realm != agent.realm:
        raise PrincipalError(principal)

    return target
|
|
|
|
|
2014-03-27 23:50:37 +01:00
|
|
|
def name_changes_disabled(realm):
    """Return a truthy value when users in `realm` may not change their name.

    The server-wide NAME_CHANGES_DISABLED setting takes precedence; when it
    is falsy we defer to the realm's own flag.
    """
    if settings.NAME_CHANGES_DISABLED:
        return settings.NAME_CHANGES_DISABLED
    return realm.name_changes_disabled
|
2014-03-27 23:50:37 +01:00
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
@require_post
def accounts_register(request):
    """Complete account registration from a confirmation-link POST.

    Flow: look up the Confirmation/PreregistrationUser by key, decide which
    realm/domain the user is joining, validate the email, prefill or validate
    the RegistrationForm, then either create/activate the user and log them
    in, or re-render the registration form with errors.
    """
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    # MIT beta signups go through a separate MitUser object.
    mit_beta_user = isinstance(confirmation.content_object, MitUser)
    try:
        existing_user_profile = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        existing_user_profile = None

    validators.validate_email(email)
    # If someone invited you, you are joining their realm regardless
    # of your e-mail address.
    #
    # MitUsers can't be referred and don't have a referred_by field.
    if not mit_beta_user and prereg_user.referred_by:
        realm = prereg_user.referred_by.realm
        domain = realm.domain
        # Closed realms only admit matching email domains.
        if realm.restricted_to_domain and domain != resolve_email_to_domain(email):
            return render_to_response("zerver/closed_realm.html", {"closed_domain_name": realm.name})
    elif not mit_beta_user and prereg_user.realm:
        # You have a realm set, even though nobody referred you. This
        # happens if you sign up through a special URL for an open
        # realm.
        domain = prereg_user.realm.domain
    else:
        domain = resolve_email_to_domain(email)

    realm = get_realm(domain)
    if realm and realm.deactivated:
        # The user is trying to register for a deactivated realm. Advise them to
        # contact support.
        return render_to_response("zerver/deactivated.html",
                                  {"deactivated_domain_name": realm.name,
                                   "zulip_administrator": settings.ZULIP_ADMINISTRATOR})

    try:
        if existing_user_profile is not None and existing_user_profile.is_mirror_dummy:
            # Mirror dummy users to be activated must be inactive
            is_inactive(email)
        else:
            # Other users should not already exist at all.
            user_email_is_unique(email)
    except ValidationError:
        # Already registered: bounce to the login page with the email prefilled.
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))

    name_validated = False
    full_name = None

    if request.POST.get('from_confirmation'):
        # First hit from the confirmation link: build a (possibly prefilled)
        # form rather than processing a submission.
        try:
            del request.session['authenticated_full_name']
        except KeyError:
            pass
        if domain == "mit.edu":
            # Prefill the name from Hesiod, unless the lookup failed (the
            # fallback value contains an "@").
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""})
            name_validated = True
        elif settings.POPULATE_PROFILE_VIA_LDAP:
            for backend in get_backends():
                if isinstance(backend, LDAPBackend):
                    ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs
                    try:
                        request.session['authenticated_full_name'] = ldap_attrs[settings.AUTH_LDAP_USER_ATTR_MAP['full_name']][0]
                        name_validated = True
                        # We don't use initial= here, because if the form is
                        # complete (that is, no additional fields need to be
                        # filled out by the user) we want the form to validate,
                        # so they can be directly registered without having to
                        # go through this interstitial.
                        form = RegistrationForm(
                            {'full_name': request.session['authenticated_full_name']})
                        # FIXME: This will result in the user getting
                        # validation errors if they have to enter a password.
                        # Not relevant for ONLY_SSO, though.
                        break
                    except TypeError:
                        # Let the user fill out a name and/or try another backend
                        form = RegistrationForm()
        elif 'full_name' in request.POST:
            form = RegistrationForm(
                initial={'full_name': request.POST.get('full_name')}
            )
        else:
            form = RegistrationForm()
    else:
        # Actual form submission.
        postdata = request.POST.copy()
        if name_changes_disabled(realm):
            # If we populate profile information via LDAP and we have a
            # verified name from you on file, use that. Otherwise, fall
            # back to the full name in the request.
            try:
                postdata.update({'full_name': request.session['authenticated_full_name']})
                name_validated = True
            except KeyError:
                pass
        form = RegistrationForm(postdata)
        if not password_auth_enabled(realm):
            # SSO-only realms never ask for a password.
            form['password'].field.required = False

    if form.is_valid():
        if password_auth_enabled(realm):
            password = form.cleaned_data['password']
        else:
            # SSO users don't need no passwords
            password = None

        full_name = form.cleaned_data['full_name']
        short_name = email_to_username(email)
        # The first (non-bot) user in a realm becomes its administrator below.
        first_in_realm = len(UserProfile.objects.filter(realm=realm, is_bot=False)) == 0

        # FIXME: sanitize email addresses and fullname
        if existing_user_profile is not None and existing_user_profile.is_mirror_dummy:
            # Activate the preexisting mirror-dummy account in place.
            try:
                user_profile = existing_user_profile
                do_activate_user(user_profile)
                do_change_password(user_profile, password)
                do_change_full_name(user_profile, full_name)
            except UserProfile.DoesNotExist:
                user_profile = do_create_user(email, password, realm, full_name, short_name,
                                              prereg_user=prereg_user,
                                              newsletter_data={"IP": request.META['REMOTE_ADDR']})
        else:
            user_profile = do_create_user(email, password, realm, full_name, short_name,
                                          prereg_user=prereg_user,
                                          newsletter_data={"IP": request.META['REMOTE_ADDR']})

        # This logs you in using the ZulipDummyBackend, since honestly nothing
        # more fancy than this is required.
        login(request, authenticate(username=user_profile.email, use_dummy_backend=True))

        if first_in_realm:
            do_change_is_admin(user_profile, True)
            return HttpResponseRedirect(reverse('zerver.views.initial_invite_page'))
        else:
            return HttpResponseRedirect(reverse('zerver.views.home'))

    # Invalid (or freshly built) form: render the registration page.
    return render_to_response('zerver/register.html',
            {'form': form,
             'company_name': domain,
             'email': email,
             'key': key,
             'full_name': request.session.get('authenticated_full_name', None),
             'lock_name': name_validated and name_changes_disabled(realm),
             # password_auth_enabled is normally set via our context processor,
             # but for the registration form, there is no logged in user yet, so
             # we have to set it here.
             'password_auth_enabled': password_auth_enabled(realm),
            },
        context_instance=RequestContext(request))
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-01-08 23:26:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def accounts_accept_terms(request):
    """Show and process the Terms-of-Service acceptance form.

    On a valid POST, records the acceptance by emailing an audit message,
    updates the user's full name, and redirects home.  Otherwise (GET or
    invalid POST) renders the form.
    """
    email = request.user.email
    domain = resolve_email_to_domain(email)
    if request.method == "POST":
        form = ToSForm(request.POST)
        if form.is_valid():
            full_name = form.cleaned_data['full_name']
            # Audit trail: notify the team that this user accepted the ToS,
            # including the request's IP and user agent.
            send_mail('Terms acceptance for ' + full_name,
                      loader.render_to_string('zerver/tos_accept_body.txt',
                                              {'name': full_name,
                                               'email': email,
                                               'ip': request.META['REMOTE_ADDR'],
                                               'browser': request.META['HTTP_USER_AGENT']}),
                      settings.EMAIL_HOST_USER,
                      ["all@zulip.com"])
            do_change_full_name(request.user, full_name)
            return redirect(home)

    else:
        form = ToSForm()
    # Reached on GET, and on POST with an invalid (bound) form.
    return render_to_response('zerver/accounts_accept_terms.html',
                              { 'form': form, 'company_name': domain, 'email': email },
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-08-23 20:49:06 +02:00
|
|
|
from zerver.lib.ccache import make_ccache
|
|
|
|
|
|
|
|
@authenticated_json_view
@has_request_variables
def webathena_kerberos_login(request, user_profile,
                             cred=REQ(default=None)):
    """Accept a Webathena Kerberos credential and set up zephyr mirroring.

    Only available on the mit.edu realm.  Validates that the credential's
    Kerberos principal matches the requesting user, converts it to a ccache,
    and ships it to the zephyr mirror host over ssh.
    """
    if cred is None:
        return json_error("Could not find Kerberos credential")
    if not user_profile.realm.domain == "mit.edu":
        return json_error("Webathena login only for mit.edu realm")

    try:
        parsed_cred = ujson.loads(cred)
        user = parsed_cred["cname"]["nameString"][0]
        if user == "golem":
            # Hack for an mit.edu user whose Kerberos username doesn't
            # match what he zephyrs as
            user = "ctl"
        # The credential must belong to the logged-in user (email localpart).
        assert(user == user_profile.email.split("@")[0])
        ccache = make_ccache(parsed_cred)
    except Exception:
        # Any parse/validation failure is reported uniformly to the client.
        return json_error("Invalid Kerberos cache")

    # TODO: Send these data via (say) rabbitmq
    try:
        # Hand the base64-encoded ccache to the mirroring host; the remote
        # script authenticates the user via their API key.
        subprocess.check_call(["ssh", "zulip@zmirror2.zulip.net", "--",
                               "/home/zulip/zulip/bots/process_ccache",
                               user,
                               user_profile.api_key,
                               base64.b64encode(ccache)])
    except Exception:
        logging.exception("Error updating the user's ccache")
        return json_error("We were unable to setup mirroring for you")

    return json_success()
|
|
|
|
|
2013-04-17 17:24:07 +02:00
|
|
|
def api_endpoint_docs(request):
    """Render the API endpoint documentation page.

    Loads the endpoint descriptions from templates/zerver/api_content.json,
    rewrites example URLs to point at this server's API root, renders each
    example response through bugdown, and collects the set of example
    languages for the template.
    """
    # Use a context manager so the file handle is closed promptly instead of
    # leaking until garbage collection (the original open(...).read() never
    # closed the file).
    with open('templates/zerver/api_content.json', 'r') as api_file:
        raw_calls = api_file.read()
    calls = ujson.loads(raw_calls)
    langs = set()
    for call in calls:
        call["endpoint"] = "%s/v1/%s" % (settings.EXTERNAL_API_URI, call["endpoint"])
        # The canned curl examples hard-code the hosted API root; point them
        # at this deployment instead.
        call["example_request"]["curl"] = call["example_request"]["curl"].replace("https://api.zulip.com", settings.EXTERNAL_API_URI)
        response = call['example_response']
        if not '\n' in response:
            # For 1-line responses, pretty-print them
            extended_response = response.replace(", ", ",\n ")
        else:
            extended_response = response
        call['rendered_response'] = bugdown.convert("~~~ .py\n" + extended_response + "\n~~~\n", "default")
        for example_type in ('request', 'response'):
            for lang in call.get('example_' + example_type, []):
                langs.add(lang)
    return render_to_response(
            'zerver/api_endpoints.html', {
                'content': calls,
                'langs': langs,
                },
        context_instance=RequestContext(request))
|
|
|
|
|
2012-12-11 23:42:32 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_invite_users(request, user_profile, invitee_emails=REQ):
    """Invite the given emails (comma/space/newline separated) to streams.

    Validates that at least one email and one existing stream were supplied,
    auto-adds the realm's public notifications stream, and delegates the
    actual invitations to do_invite_users.
    """
    if not invitee_emails:
        return json_error("You must specify at least one email address.")

    # Split on commas, spaces, and newlines; the set removes duplicates.
    invitee_emails = set(re.split(r'[, \n]', invitee_emails))

    stream_names = request.POST.getlist('stream')
    if not stream_names:
        return json_error("You must specify at least one stream for invitees to join.")

    # We unconditionally sub you to the notifications stream if it
    # exists and is public.
    notifications_stream = user_profile.realm.notifications_stream
    if notifications_stream and not notifications_stream.invite_only:
        stream_names.append(notifications_stream.name)

    streams = []
    for name in stream_names:
        matched = get_stream(name, user_profile.realm)
        if matched is None:
            return json_error("Stream does not exist: %s. No invites were sent." % name)
        streams.append(matched)

    ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams)

    if ret_error is None:
        return json_success()
    return json_error(data=error_data, msg=ret_error)
|
2012-12-11 23:42:32 +01:00
|
|
|
|
2013-08-07 17:59:45 +02:00
|
|
|
def create_homepage_form(request, user_info=None):
    """Build a HomepageForm, bound to `user_info` when one is provided.

    The session's "domain" (if any) seeds the form either way.
    """
    session_domain = request.session.get("domain")
    if not user_info:
        # An empty fields dict is not treated the same way as not
        # providing it, so only bind the form when we truly have data.
        return HomepageForm(domain=session_domain)
    return HomepageForm(user_info, domain=session_domain)
|
|
|
|
|
2013-11-04 23:08:39 +01:00
|
|
|
def maybe_send_to_registration(request, email, full_name=''):
    """Route an authenticated-but-unregistered email into the signup flow.

    If the email passes the homepage (signup) form validation, create (or, in
    ONLY_SSO mode, reuse) a PreregistrationUser and redirect to its
    confirmation URL with the full name prefilled; otherwise re-render the
    accounts home page with the form errors.
    """
    form = create_homepage_form(request, user_info={'email': email})
    request.verified_email = None
    if form.is_valid():
        # Construct a PreregistrationUser object and send the user over to
        # the confirmation view.
        prereg_user = None
        if settings.ONLY_SSO:
            # In SSO-only deployments, reuse the most recent invitation for
            # this email if one exists.
            try:
                prereg_user = PreregistrationUser.objects.filter(email__iexact=email).latest("invited_at")
            except PreregistrationUser.DoesNotExist:
                prereg_user = create_preregistration_user(email, request)
        else:
            prereg_user = create_preregistration_user(email, request)

        return redirect("".join((
            settings.EXTERNAL_URI_SCHEME,
            request.get_host(),
            "/",
            # Split this so we only get the part after the /
            Confirmation.objects.get_link_for_object(prereg_user).split("/", 3)[3],
            '?full_name=',
            # urllib does not handle Unicode, so coerece to encoded byte string
            # Explanation: http://stackoverflow.com/a/5605354/90777
            urllib.quote_plus(full_name.encode('utf8')))))
    else:
        return render_to_response('zerver/accounts_home.html', {'form': form},
                                  context_instance=RequestContext(request))
|
2013-11-04 23:08:39 +01:00
|
|
|
|
2014-03-28 00:47:36 +01:00
|
|
|
def login_or_register_remote_user(request, remote_username, user_profile, full_name=''):
    """Log in a remote-authenticated user, or route them into registration.

    A usable account (non-None, not a mirror dummy) is logged in and sent to
    the app root; otherwise the user is sent through the PreregistrationUser
    signup flow.
    """
    if user_profile is not None and not user_profile.is_mirror_dummy:
        login(request, user_profile)
        return HttpResponseRedirect("%s%s" % (settings.EXTERNAL_URI_SCHEME,
                                              request.get_host()))

    # Since execution has reached here, the client specified a remote user
    # but no associated user account exists. Send them over to the
    # PreregistrationUser flow.
    return maybe_send_to_registration(request, remote_user_to_email(remote_username), full_name)
|
2014-03-28 00:47:36 +01:00
|
|
|
|
2013-11-04 23:16:46 +01:00
|
|
|
def remote_user_sso(request):
    """Log in via the web server's REMOTE_USER (Apache-style SSO) header."""
    if "REMOTE_USER" not in request.META:
        raise JsonableError("No REMOTE_USER set.")
    remote_user = request.META["REMOTE_USER"]

    user_profile = authenticate(remote_user=remote_user)
    return login_or_register_remote_user(request, remote_user, user_profile)
|
2013-11-04 23:16:46 +01:00
|
|
|
|
2014-03-28 00:49:20 +01:00
|
|
|
@csrf_exempt
def remote_user_jwt(request):
    """Log in (or register) a user authenticated via a JSON Web Token.

    Expects a `json_web_token` POST field whose claims carry `user` and
    `realm`; the signature is verified against the per-domain key in
    settings.JWT_AUTH_KEYS.  NOTE: uses the legacy PyJWT 0.x API
    (jwt.load / jwt.verify_signature).
    """
    try:
        json_web_token = request.POST["json_web_token"]
        payload, signing_input, header, signature = jwt.load(json_web_token)
    except KeyError:
        raise JsonableError("No JSON web token passed in request")
    except jwt.DecodeError:
        raise JsonableError("Bad JSON web token")

    remote_user = payload.get("user", None)
    if remote_user is None:
        raise JsonableError("No user specified in JSON web token claims")
    domain = payload.get('realm', None)
    if domain is None:
        raise JsonableError("No domain specified in JSON web token claims")

    email = "%s@%s" % (remote_user, domain)

    try:
        # KeyError here means no key is configured for this domain.
        jwt.verify_signature(payload, signing_input, header, signature,
                             settings.JWT_AUTH_KEYS[domain])
        # We do all the authentication we need here (otherwise we'd have to
        # duplicate work), but we need to call authenticate with some backend so
        # that the request.backend attribute gets set.
        user_profile = authenticate(username=email, use_dummy_backend=True)
    except (jwt.DecodeError, jwt.ExpiredSignature):
        raise JsonableError("Bad JSON web token signature")
    except KeyError:
        raise JsonableError("Realm not authorized for JWT login")
    except UserProfile.DoesNotExist:
        # Valid token but no account yet: hand off to the registration flow.
        user_profile = None

    return login_or_register_remote_user(request, email, user_profile, remote_user)
|
2013-11-04 23:16:46 +01:00
|
|
|
|
2015-01-29 08:59:41 +01:00
|
|
|
def google_oauth2_csrf(request, value):
    """HMAC-SHA256 `value` keyed on the session's CSRF token (hex digest)."""
    key = get_token(request).encode('utf-8')
    return hmac.new(key, value, hashlib.sha256).hexdigest()
|
|
|
|
|
|
|
|
def start_google_oauth2(request):
    """Redirect the browser to Google's OAuth2 consent screen.

    The `state` parameter carries a timestamp plus an HMAC over it so the
    callback can reject forged requests.
    """
    cur_time = str(int(time.time()))
    csrf_state = '{}:{}'.format(cur_time, google_oauth2_csrf(request, cur_time))

    redirect_uri = ''.join((
        settings.EXTERNAL_URI_SCHEME,
        request.get_host(),
        reverse('zerver.views.finish_google_oauth2'),
    ))
    params = {
        'response_type': 'code',
        'client_id': settings.GOOGLE_OAUTH2_CLIENT_ID,
        'redirect_uri': redirect_uri,
        'scope': 'profile email',
        'state': csrf_state,
    }
    return redirect('https://accounts.google.com/o/oauth2/auth?' + urllib.urlencode(params))
|
|
|
|
|
2015-10-02 03:53:54 +02:00
|
|
|
# Workaround to support the Python-requests 1.0 transition of .json
# from a property to a function
requests_json_is_function = callable(requests.Response.json)

def extract_json_response(resp):
    """Return the decoded JSON body of a requests response, on any requests version."""
    if requests_json_is_function:
        return resp.json()
    return resp.json
|
|
|
|
|
2015-01-29 08:59:41 +01:00
|
|
|
def finish_google_oauth2(request):
    """Handle the callback leg of the Google OAuth2 login flow.

    Verifies the CSRF-protecting `state` parameter, exchanges the
    authorization code for an access token, fetches the user's name and
    email from the Google+ API, and then logs in (or starts registration
    for) the account's email address.
    """
    error = request.GET.get('error')
    if error == 'access_denied':
        # The user declined the consent screen; just send them home.
        return redirect('/')
    elif error is not None:
        logging.error('Error from google oauth2 login %r', request.GET)
        return HttpResponse(status=400)

    value, hmac_value = request.GET.get('state').split(':')
    # NOTE(review): != is not a constant-time comparison; consider
    # hmac.compare_digest here to avoid a (low-risk) timing side channel.
    if hmac_value != google_oauth2_csrf(request, value):
        raise Exception('Google oauth2 CSRF error')

    resp = requests.post(
        'https://www.googleapis.com/oauth2/v3/token',
        data={
            'code': request.GET.get('code'),
            'client_id': settings.GOOGLE_OAUTH2_CLIENT_ID,
            'client_secret': settings.GOOGLE_OAUTH2_CLIENT_SECRET,
            'redirect_uri': ''.join((
                settings.EXTERNAL_URI_SCHEME,
                request.get_host(),
                reverse('zerver.views.finish_google_oauth2'),
            )),
            'grant_type': 'authorization_code',
        },
    )
    if resp.status_code != 200:
        # Fixed typo in the error message: "pauth2" -> "oauth2".
        raise Exception('Could not convert google oauth2 code to access_token\r%r' % resp.text)
    access_token = extract_json_response(resp)['access_token']

    resp = requests.get(
        'https://www.googleapis.com/plus/v1/people/me',
        params={'access_token': access_token}
    )
    if resp.status_code != 200:
        raise Exception('Google login failed making API call\r%r' % resp.text)
    body = extract_json_response(resp)

    try:
        full_name = body['name']['formatted']
    except KeyError:
        # Only google+ users have a formatted name. I am ignoring i18n here.
        full_name = u'{} {}'.format(
            body['name']['givenName'], body['name']['familyName']
        )
    # Find the user's primary ("account") email; the for/else raises if
    # Google returned no such entry.
    for email in body['emails']:
        if email['type'] == 'account':
            break
    else:
        raise Exception('Google oauth2 account email not found %r' % body)
    email_address = email['value']
    # Google did the authentication; the dummy backend just sets
    # request.backend so Django's login machinery is satisfied.
    user_profile = authenticate(username=email_address, use_dummy_backend=True)
    return login_or_register_remote_user(request, email_address, user_profile, full_name)
|
|
|
|
|
2012-09-29 00:49:34 +02:00
|
|
|
def login_page(request, **kwargs):
    """Render the login page via Django's stock login view.

    When the development auth backend is enabled, the template also
    receives sorted lists of admin and non-admin emails for one-click
    direct login.
    """
    extra_context = kwargs.pop('extra_context', {})
    if dev_auth_enabled():
        users = UserProfile.objects.filter(is_bot=False, is_active=True)
        admin_emails = []
        regular_emails = []
        for user in users:
            if user.is_admin():
                admin_emails.append(user.email)
            else:
                regular_emails.append(user.email)
        extra_context['direct_admins'] = sorted(admin_emails)
        extra_context['direct_users'] = sorted(regular_emails)
    template_response = django_login_page(
        request, authentication_form=OurAuthenticationForm,
        extra_context=extra_context, **kwargs)
    # Pre-fill the email field if one was passed in the query string.
    if 'email' in request.GET:
        template_response.context_data['email'] = request.GET['email']

    return template_response
|
|
|
|
|
2015-08-19 02:58:20 +02:00
|
|
|
def dev_direct_login(request, **kwargs):
    """Log a user in without a password (development environments only).

    Requires DevAuthBackend in settings.AUTHENTICATION_BACKENDS; it
    must never be reachable in production.
    """
    if (not dev_auth_enabled()) or settings.PRODUCTION:
        # Defense in depth: authenticate() would already fail without an
        # enabled DevAuthBackend.
        raise Exception('Direct login not supported.')
    email = request.POST['direct_email']
    user_profile = authenticate(username=email)
    login(request, user_profile)
    realm_uri = "%s%s" % (settings.EXTERNAL_URI_SCHEME, request.get_host())
    return HttpResponseRedirect(realm_uri)
|
|
|
|
|
2013-07-11 21:40:52 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_bulk_invite_users(request, user_profile,
                           invitee_emails=REQ(validator=check_list(check_string))):
    """Invite a batch of email addresses, subscribing them to the
    realm's default streams, and announce the bulk invite on the
    internal signups stream."""
    invitee_emails = set(invitee_emails)
    streams = get_default_subs(user_profile)

    ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams)

    if ret_error is not None:
        return json_error(data=error_data, msg=ret_error)

    # Report bulk invites to internal Zulip.
    invited = PreregistrationUser.objects.filter(referred_by=user_profile)
    internal_message = "%s <`%s`> invited %d people to Zulip." % (
        user_profile.full_name, user_profile.email, invited.count())
    internal_send_message(settings.NEW_USER_BOT, "stream", "signups",
                          user_profile.realm.domain, internal_message)
    return json_success()
|
|
|
|
|
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def initial_invite_page(request):
    """Show the bulk-invite page to the first user in a realm; everyone
    else is redirected to the home view."""
    user = request.user
    # Only show the bulk-invite page for the first user in a realm.
    # Use .count() rather than len(queryset) so the database counts the
    # rows instead of us fetching every user object just to count them.
    domain_count = UserProfile.objects.filter(realm=user.realm).count()
    if domain_count > 1:
        return redirect('zerver.views.home')

    params = {'company_name': user.realm.domain}

    if user.realm.restricted_to_domain:
        # Closed realms can only invite addresses on their own domain,
        # so pre-fill the suffix in the invite form.
        params['invite_suffix'] = user.realm.domain

    return render_to_response('zerver/initial_invite_page.html', params,
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-03-05 23:45:02 +01:00
|
|
|
@require_post
def logout_then_login(request, **kwargs):
    """Log the user out, then redirect to the login page.

    Forwards any keyword arguments through to Django's
    logout_then_login view.
    """
    # Bug fix: kwargs was previously passed positionally, so the dict
    # landed in django_logout_then_login's login_url parameter (it only
    # worked by accident because an empty dict is falsy). Expand it as
    # keyword arguments instead.
    return django_logout_then_login(request, **kwargs)
|
|
|
|
|
2013-08-02 20:32:56 +02:00
|
|
|
def create_preregistration_user(email, request):
    """Create and return the pre-signup record for this email address.

    Returns a PreregistrationUser (bound to a specific realm when the
    signup came through a completely-open realm's URL), or a MitUser
    for mit.edu addresses.
    """
    # "domain" is stashed in the session by accounts_home_with_domain /
    # accounts_home for open-realm signups.
    domain = request.session.get("domain")
    if completely_open(domain):
        # Clear the "domain" from the session object; it's no longer needed
        request.session["domain"] = None

        # The user is trying to sign up for a completely open realm,
        # so create them a PreregistrationUser for that realm
        return PreregistrationUser.objects.create(email=email,
                                                  realm=get_realm(domain))

    # MIT users who are not explicitly signing up for an open realm
    # require special handling (They may already have an (inactive)
    # account, for example)
    if split_email_to_domain(email) == "mit.edu":
        return MitUser.objects.get_or_create(email=email)[0]
    return PreregistrationUser.objects.create(email=email)
|
2013-08-02 20:32:56 +02:00
|
|
|
|
|
|
|
def accounts_home_with_domain(request, domain):
    """Signup entry point for a completely-open realm's
    domain-specific registration URL."""
    if not completely_open(domain):
        return HttpResponseRedirect(reverse('zerver.views.accounts_home'))
    # You can sign up for a completely open realm through a
    # special registration path that contains the domain in the
    # URL. We store this information in the session rather than
    # elsewhere because we don't have control over URL or form
    # data for folks registering through OpenID.
    request.session["domain"] = domain
    return accounts_home(request)
|
2013-08-02 20:32:56 +02:00
|
|
|
|
2014-01-31 20:46:45 +01:00
|
|
|
def send_registration_completion_email(email, request):
    """
    Send an email with a confirmation link to the provided e-mail so the user
    can complete their registration.
    """
    prereg_user = create_preregistration_user(email, request)
    context = {
        'support_email': settings.ZULIP_ADMINISTRATOR,
        'voyager': settings.VOYAGER,
    }
    Confirmation.objects.send_confirmation(prereg_user, email,
                                           additional_context=context)
|
|
|
|
|
2012-08-28 18:44:51 +02:00
|
|
|
def accounts_home(request):
    """Render the signup page and handle signup form submissions."""
    # First we populate request.session with a domain if
    # there is a single realm, which is open.
    # This is then used in HomepageForm and in creating a PreregistrationUser
    unique_realm = get_unique_open_realm()
    if unique_realm:
        request.session['domain'] = unique_realm.domain

    if request.method == 'POST':
        form = create_homepage_form(request, user_info=request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            send_registration_completion_email(email, request)
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email}))
        try:
            email = request.POST['email']
            # Note: We don't check for uniqueness
            is_inactive(email)
        except ValidationError:
            # is_inactive() rejected the address — presumably it already
            # belongs to an active account (TODO confirm against
            # is_inactive) — so send the user to the login page with
            # the email pre-filled.
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))
        # Otherwise fall through and re-render the signup page with the
        # invalid form's errors displayed.
    else:
        form = create_homepage_form(request)
    return render_to_response('zerver/accounts_home.html',
                              {'form': form, 'current_url': request.get_full_path},
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-09-04 21:02:11 +02:00
|
|
|
def approximate_unread_count(user_profile):
    """Count the user's unread messages after their pointer.

    Excludes stream messages in not-in-home-view subscriptions and in
    muted topics, and anything already flagged read.
    """
    not_in_home_view_recipients = [sub.recipient.id for sub in \
                                       Subscription.objects.filter(
            user_profile=user_profile, in_home_view=False)]

    muted_topics = ujson.loads(user_profile.muted_topics)
    # If muted_topics is empty, it looks like []. If it is non-empty, it look
    # like [[u'devel', u'test']]. We should switch to a consistent envelope, but
    # until we do we still have both in the database.
    if muted_topics:
        muted_topics = muted_topics[0]

    return UserMessage.objects.filter(
        user_profile=user_profile, message_id__gt=user_profile.pointer).exclude(
        message__recipient__type=Recipient.STREAM,
        message__recipient__id__in=not_in_home_view_recipients).exclude(
        message__subject__in=muted_topics).exclude(
        flags=UserMessage.flags.read).count()
|
2013-08-09 20:26:35 +02:00
|
|
|
|
|
|
|
def sent_time_in_epoch_seconds(user_message):
    """Return the message's send time as seconds since the UTC epoch.

    user_message is a UserMessage object; returns None when no message
    is given.
    """
    if not user_message:
        return None
    # We have USE_TZ = True, so our datetime objects are timezone-aware;
    # utctimetuple() converts to UTC before we take epoch seconds.
    pub_date = user_message.message.pub_date
    return calendar.timegm(pub_date.utctimetuple())
|
|
|
|
|
2012-10-29 19:56:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def home(request):
    """Render the main Zulip web client.

    Registers an event queue for this client, assembles every
    page_params value the JavaScript app needs at startup, and renders
    zerver/index.html with HTTP caching disabled (the page embeds
    per-session state).
    """
    # We need to modify the session object every two weeks or it will expire.
    # This line makes reloading the page a sufficient action to keep the
    # session alive.
    request.session.modified = True

    user_profile = request.user
    request._email = request.user.email
    request.client = get_client("website")

    # Support embedded views narrowed to a single public stream (and
    # optionally a topic) via ?stream=...&topic=... query parameters.
    narrow = []
    narrow_stream = None
    narrow_topic = request.GET.get("topic")
    if request.GET.get("stream"):
        try:
            narrow_stream = get_stream(request.GET.get("stream"), user_profile.realm)
            assert(narrow_stream is not None)
            assert(narrow_stream.is_public())
            narrow = [["stream", narrow_stream.name]]
        except Exception:
            # Bad/private stream names just log and fall back to no narrow.
            logging.exception("Narrow parsing")
        if narrow_topic is not None:
            narrow.append(["topic", narrow_topic])

    register_ret = do_events_register(user_profile, request.client,
                                      apply_markdown=True, narrow=narrow)
    user_has_messages = (register_ret['max_message_id'] != -1)

    # Reset our don't-spam-users-with-email counter since the
    # user has since logged in
    if not user_profile.last_reminder is None:
        user_profile.last_reminder = None
        user_profile.save(update_fields=["last_reminder"])

    # Brand new users get the tutorial
    needs_tutorial = settings.TUTORIAL_ENABLED and \
        user_profile.tutorial_status != UserProfile.TUTORIAL_FINISHED

    first_in_realm = realm_user_count(user_profile.realm) == 1
    # If you are the only person in the realm and you didn't invite
    # anyone, we'll continue to encourage you to do so on the frontend.
    prompt_for_invites = first_in_realm and \
        not PreregistrationUser.objects.filter(referred_by=user_profile).count()

    if user_profile.pointer == -1 and user_has_messages:
        # Put the new user's pointer at the bottom
        #
        # This improves performance, because we limit backfilling of messages
        # before the pointer.  It's also likely that someone joining an
        # organization is interested in recent messages more than the very
        # first messages on the system.
        register_ret['pointer'] = register_ret['max_message_id']
        user_profile.last_pointer_updater = request.session.session_key

    # Look up the UserMessage at the pointer so we can tell the client
    # how far the user had read (furthest_read_time below).
    if user_profile.pointer == -1:
        latest_read = None
    else:
        try:
            latest_read = UserMessage.objects.get(user_profile=user_profile,
                                                  message__id=user_profile.pointer)
        except UserMessage.DoesNotExist:
            # Don't completely fail if your saved pointer ID is invalid
            logging.warning("%s has invalid pointer %s" % (user_profile.email, user_profile.pointer))
            latest_read = None

    desktop_notifications_enabled = user_profile.enable_desktop_notifications
    if narrow_stream is not None:
        # Embedded single-stream views never pop desktop notifications.
        desktop_notifications_enabled = False

    if user_profile.realm.notifications_stream:
        notifications_stream = user_profile.realm.notifications_stream.name
    else:
        notifications_stream = ""

    # Pass parameters to the client-side JavaScript code.
    # These end up in a global JavaScript Object named 'page_params'.
    page_params = dict(
        voyager = settings.VOYAGER,
        debug_mode = settings.DEBUG,
        test_suite = settings.TEST_SUITE,
        poll_timeout = settings.POLL_TIMEOUT,
        login_page = settings.HOME_NOT_LOGGED_IN,
        password_auth_enabled = password_auth_enabled(user_profile.realm),
        have_initial_messages = user_has_messages,
        subbed_info = register_ret['subscriptions'],
        unsubbed_info = register_ret['unsubscribed'],
        email_dict = register_ret['email_dict'],
        people_list = register_ret['realm_users'],
        bot_list = register_ret['realm_bots'],
        initial_pointer = register_ret['pointer'],
        initial_presences = register_ret['presences'],
        initial_servertime = time.time(), # Used for calculating relative presence age
        fullname = user_profile.full_name,
        email = user_profile.email,
        domain = user_profile.realm.domain,
        realm_name = register_ret['realm_name'],
        realm_invite_required = register_ret['realm_invite_required'],
        realm_invite_by_admins_only = register_ret['realm_invite_by_admins_only'],
        realm_restricted_to_domain = register_ret['realm_restricted_to_domain'],
        enter_sends = user_profile.enter_sends,
        left_side_userlist = register_ret['left_side_userlist'],
        referrals = register_ret['referrals'],
        realm_emoji = register_ret['realm_emoji'],
        needs_tutorial = needs_tutorial,
        first_in_realm = first_in_realm,
        prompt_for_invites = prompt_for_invites,
        notifications_stream = notifications_stream,

        # Stream message notification settings:
        stream_desktop_notifications_enabled =
            user_profile.enable_stream_desktop_notifications,
        stream_sounds_enabled = user_profile.enable_stream_sounds,

        # Private message and @-mention notification settings:
        desktop_notifications_enabled = desktop_notifications_enabled,
        sounds_enabled =
            user_profile.enable_sounds,
        enable_offline_email_notifications =
            user_profile.enable_offline_email_notifications,
        enable_offline_push_notifications =
            user_profile.enable_offline_push_notifications,
        twenty_four_hour_time = register_ret['twenty_four_hour_time'],

        enable_digest_emails = user_profile.enable_digest_emails,
        event_queue_id = register_ret['queue_id'],
        last_event_id = register_ret['last_event_id'],
        max_message_id = register_ret['max_message_id'],
        unread_count = approximate_unread_count(user_profile),
        furthest_read_time = sent_time_in_epoch_seconds(latest_read),
        staging = settings.ZULIP_COM_STAGING or settings.DEVELOPMENT,
        alert_words = register_ret['alert_words'],
        muted_topics = register_ret['muted_topics'],
        realm_filters = register_ret['realm_filters'],
        is_admin = user_profile.is_admin(),
        can_create_streams = user_profile.can_create_streams(),
        name_changes_disabled = name_changes_disabled(user_profile.realm),
        has_mobile_devices = num_push_devices_for_user(user_profile) > 0,
        autoscroll_forever = user_profile.autoscroll_forever,
        default_desktop_notifications = user_profile.default_desktop_notifications,
        avatar_url = avatar_url(user_profile),
        mandatory_topics = user_profile.realm.mandatory_topics,
        show_digest_email = user_profile.realm.show_digest_email,
    )

    if narrow_stream is not None:
        # In narrow_stream context, initial pointer is just latest message
        recipient = get_recipient(Recipient.STREAM, narrow_stream.id)
        try:
            initial_pointer = Message.objects.filter(recipient=recipient).order_by('id').reverse()[0].id
        except IndexError:
            # Stream has no messages yet.
            initial_pointer = -1
        page_params["narrow_stream"] = narrow_stream.name
        if narrow_topic is not None:
            page_params["narrow_topic"] = narrow_topic
        page_params["narrow"] = [dict(operator=term[0], operand=term[1]) for term in narrow]
        page_params["max_message_id"] = initial_pointer
        page_params["initial_pointer"] = initial_pointer
        page_params["have_initial_messages"] = (initial_pointer != -1)

    statsd.incr('views.home')
    show_invites = True

    # Some realms only allow admins to invite users
    if user_profile.realm.invite_by_admins_only and not user_profile.is_admin():
        show_invites = False

    product_name = "Zulip"
    page_params['product_name'] = product_name
    # Tag the request log line with the event queue ID we registered.
    request._log_data['extra'] = "[%s]" % (register_ret["queue_id"],)
    response = render_to_response('zerver/index.html',
                                  {'user_profile': user_profile,
                                   'page_params' : simplejson.encoder.JSONEncoderForHTML().encode(page_params),
                                   'nofontface': is_buggy_ua(request.META["HTTP_USER_AGENT"]),
                                   'avatar_url': avatar_url(user_profile),
                                   'show_debug':
                                       settings.DEBUG and ('show_debug' in request.GET),
                                   'show_invites': show_invites,
                                   'is_admin': user_profile.is_admin(),
                                   'show_webathena': user_profile.realm.domain == "mit.edu",
                                   'enable_feedback': settings.ENABLE_FEEDBACK,
                                   'embedded': narrow_stream is not None,
                                   'product_name': product_name
                                   },
                                  context_instance=RequestContext(request))
    # The page embeds session-specific state; never let it be cached.
    patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
    return response
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-11-12 21:01:55 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def desktop_home(request):
    """Redirect to the main web client view (used as the desktop app's
    entry URL, per the name -- TODO confirm against the URL map)."""
    return HttpResponseRedirect(reverse('zerver.views.home'))
|
|
|
|
|
2013-07-01 20:13:26 +02:00
|
|
|
def is_buggy_ua(agent):
    """Discriminate CSS served to clients based on User Agent

    Due to QTBUG-3467, @font-face is not supported in QtWebKit.
    This may get fixed in the future, but for right now we can
    just serve the more conservative CSS to all our desktop apps.
    """
    # Idiom fixes: "x not in y" instead of "not x in y", and a single
    # any() over the known desktop-client UA markers.
    desktop_markers = ("Humbug Desktop/", "Zulip Desktop/", "ZulipDesktop/")
    return any(marker in agent for marker in desktop_markers) and "Mac" not in agent
|
2013-07-01 20:13:26 +02:00
|
|
|
|
2013-03-21 20:16:57 +01:00
|
|
|
def get_pointer_backend(request, user_profile):
    """Return the user's current pointer (message ID) as JSON."""
    return json_success({'pointer': user_profile.pointer})
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_update_pointer(request, user_profile):
    """Legacy JSON wrapper around update_pointer_backend."""
    return update_pointer_backend(request, user_profile)
|
2012-10-21 19:33:14 +02:00
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=REQ(converter=to_non_negative_int)):
    """Advance the user's pointer to the given message ID.

    The pointer only moves forward: a pointer at or behind the current
    one succeeds as a no-op.  Raises JsonableError when the user has no
    message with that ID.
    """
    if pointer <= user_profile.pointer:
        return json_success()

    try:
        # Validate that the pointer references a message this user has.
        UserMessage.objects.get(
            user_profile=user_profile,
            message__id=pointer
        )
    except UserMessage.DoesNotExist:
        raise JsonableError("Invalid message ID")

    request._log_data["extra"] = "[%s]" % (pointer,)
    # Android clients get message flags updated along with the pointer
    # move -- presumably marking messages read; TODO confirm against
    # do_update_pointer.
    update_flags = (request.client.name.lower() in ['android', "zulipandroid"])
    do_update_pointer(user_profile, pointer, update_flags=update_flags)

    return json_success()
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2012-10-26 22:02:51 +02:00
|
|
|
def generate_client_id():
    """Return a fresh 32-character random token identifying a client."""
    return generate_random_token(32)
|
2012-10-26 22:02:51 +02:00
|
|
|
|
2013-02-20 03:21:27 +01:00
|
|
|
@authenticated_json_post_view
def json_get_profile(request, user_profile):
    """Legacy JSON wrapper around get_profile_backend."""
    return get_profile_backend(request, user_profile)
|
|
|
|
|
2015-08-18 20:32:56 +02:00
|
|
|
# The order of creation of the various dictionaries are important.
# We filter on {userprofile,stream,subscription_recipient}_ids.
@require_realm_admin
def export(request, user_profile):
    """Dump the realm's data (users, public streams, subscriptions,
    recipients, messages, and per-realm tables) as one JSON response.

    Realms above the size limits below are rejected; they would need a
    batched export instead.
    """
    if (Message.objects.filter(sender__realm=user_profile.realm).count() > 1000000 or
        UserMessage.objects.filter(user_profile__realm=user_profile.realm).count() > 3000000):
        return json_error("Realm has too much data for non-batched export.")

    response = {}

    response['zerver_realm'] = [model_to_dict(x)
                                for x in Realm.objects.select_related().filter(id=user_profile.realm.id)]

    # Exclude secrets (password hash, API key) from the export.
    response['zerver_userprofile'] = [model_to_dict(x, exclude=["password", "api_key"])
                                      for x in UserProfile.objects.select_related().filter(realm=user_profile.realm)]

    userprofile_ids = set(userprofile["id"] for userprofile in response['zerver_userprofile'])

    # Only public streams are exported; email_token is a secret.
    response['zerver_stream'] = [model_to_dict(x, exclude=["email_token"])
                                 for x in Stream.objects.select_related().filter(realm=user_profile.realm,invite_only=False)]

    stream_ids = set(x["id"] for x in response['zerver_stream'])

    # NOTE(review): this iterates every UserMessage row in the database
    # and filters in Python; a .filter(user_profile_id__in=...) query
    # would push the work to the database. Same pattern recurs below for
    # Recipient, Subscription and Message.
    response['zerver_usermessage'] = [model_to_dict(x) for x in UserMessage.objects.select_related()
                                      if x.user_profile_id in userprofile_ids]

    # Recipient.type: 1 = personal, 2 = stream, 3 = huddle -- presumed
    # from the filters below; TODO confirm against zerver.models.
    user_recipients = [model_to_dict(x)
                       for x in Recipient.objects.select_related().filter(type=1)
                       if x.type_id in userprofile_ids]

    stream_recipients = [model_to_dict(x)
                         for x in Recipient.objects.select_related().filter(type=2)
                         if x.type_id in stream_ids]

    stream_recipient_ids = set(x["id"] for x in stream_recipients)

    # only check for subscriptions to streams
    response['zerver_subscription'] = [model_to_dict(x) for x in Subscription.objects.select_related()
                                       if x.user_profile_id in userprofile_ids
                                       and x.recipient_id in stream_recipient_ids]

    subscription_recipient_ids = set(x["recipient"] for x in response['zerver_subscription'])

    huddle_recipients = [model_to_dict(r)
                         for r in Recipient.objects.select_related().filter(type=3)
                         if r.type_id in subscription_recipient_ids]

    huddle_ids = set(x["type_id"] for x in huddle_recipients)

    response["zerver_recipient"] = user_recipients + stream_recipients + huddle_recipients

    response['zerver_huddle'] = [model_to_dict(h)
                                 for h in Huddle.objects.select_related()
                                 if h.id in huddle_ids]

    recipient_ids = set(x["id"] for x in response['zerver_recipient'])
    response["zerver_message"] = [model_to_dict(m) for m in Message.objects.select_related()
                                  if m.recipient_id in recipient_ids
                                  and m.sender_id in userprofile_ids]

    # Small per-realm auxiliary tables are exported wholesale.
    for (table, model) in [("defaultstream", DefaultStream),
                           ("realmemoji", RealmEmoji),
                           ("realmalias", RealmAlias),
                           ("realmfilter", RealmFilter)]:
        response["zerver_"+table] = [model_to_dict(x) for x in
                                     model.objects.select_related().filter(realm_id=user_profile.realm.id)]

    return json_success(response)
|
|
|
|
|
2013-02-20 03:21:27 +01:00
|
|
|
def get_profile_backend(request, user_profile):
    """Return the caller's pointer, a fresh client_id, and the ID of
    their most recent message (-1 if they have none)."""
    latest = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
    max_message_id = latest[0].id if latest else -1

    result = dict(pointer = user_profile.pointer,
                  client_id = generate_client_id(),
                  max_message_id = max_message_id)

    return json_success(result)
|
2012-10-21 03:53:03 +02:00
|
|
|
|
2013-02-27 23:18:38 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_enter_sends(request, user_profile,
                            enter_sends=REQ('enter_sends', validator=check_bool)):
    """Update whether pressing Enter sends the message being composed."""
    do_change_enter_sends(user_profile, enter_sends)
    return json_success()
|
|
|
|
|
2014-01-15 00:06:20 +01:00
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_tutorial_send_message(request, user_profile, type=REQ,
                               recipient=REQ, topic=REQ, content=REQ):
    """
    This function, used by the onboarding tutorial, causes the Tutorial Bot to
    send you the message you pass in here. (That way, the Tutorial Bot's
    messages to you get rendered by the server and therefore look like any other
    message.)
    """
    # Hard-coded address of the tutorial bot.
    sender_name = "welcome-bot@zulip.com"
    if type == 'stream':
        internal_send_message(sender_name, "stream", recipient, topic, content,
                              realm=user_profile.realm)
        return json_success()
    # For now, there are no PM cases.
    return json_error('Bad data passed in to tutorial_send_message')
|
|
|
|
|
2013-04-04 22:30:28 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_tutorial_status(request, user_profile, status=REQ('status')):
    """Record the user's progress through the onboarding tutorial."""
    status_map = {
        'started': UserProfile.TUTORIAL_STARTED,
        'finished': UserProfile.TUTORIAL_FINISHED,
    }
    if status in status_map:
        user_profile.tutorial_status = status_map[status]
    # Unknown statuses still trigger a (no-op) save, matching the
    # original control flow.
    user_profile.save(update_fields=["tutorial_status"])

    return json_success()
|
|
|
|
|
2013-01-10 20:47:05 +01:00
|
|
|
@authenticated_json_post_view
def json_get_public_streams(request, user_profile):
    """Legacy JSON endpoint; delegates to get_public_streams_backend."""
    return get_public_streams_backend(request, user_profile)
|
|
|
|
|
2013-08-22 17:37:02 +02:00
|
|
|
@has_request_variables
def get_streams_backend(request, user_profile,
                        include_public=REQ(validator=check_bool, default=True),
                        include_subscribed=REQ(validator=check_bool, default=True),
                        include_all_active=REQ(validator=check_bool, default=False)):
    """List streams visible to the user.

    By default this is every stream the user has access to -- public
    streams plus invite-only streams the user is on.  The include_*
    flags narrow or widen that set.
    """
    streams = do_get_streams(user_profile, include_public,
                             include_subscribed, include_all_active)
    return json_success({"streams": streams})
|
|
|
|
|
2013-08-22 17:37:02 +02:00
|
|
|
def get_public_streams_backend(request, user_profile):
    """List only the public streams of the user's realm."""
    return get_streams_backend(request, user_profile, include_public=True,
                               include_subscribed=False,
                               include_all_active=False)
|
|
|
|
|
2014-01-28 23:22:50 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_realm(request, user_profile, name=REQ(validator=check_string, default=None),
                 restricted_to_domain=REQ(validator=check_bool, default=None),
                 invite_required=REQ(validator=check_bool,default=None),
                 invite_by_admins_only=REQ(validator=check_bool,default=None)):
    """Update realm-wide settings (realm admins only).

    Each setting is only touched when a non-None value was supplied and
    it differs from the current value; the response echoes what changed.
    """
    realm = user_profile.realm
    changed = {}
    if name is not None and name != realm.name:
        do_set_realm_name(realm, name)
        changed['name'] = 'updated'
    if restricted_to_domain is not None and restricted_to_domain != realm.restricted_to_domain:
        do_set_realm_restricted_to_domain(realm, restricted_to_domain)
        changed['restricted_to_domain'] = restricted_to_domain
    if invite_required is not None and invite_required != realm.invite_required:
        do_set_realm_invite_required(realm, invite_required)
        changed['invite_required'] = invite_required
    if invite_by_admins_only is not None and invite_by_admins_only != realm.invite_by_admins_only:
        do_set_realm_invite_by_admins_only(realm, invite_by_admins_only)
        changed['invite_by_admins_only'] = invite_by_admins_only
    return json_success(changed)
|
2014-01-28 23:22:50 +01:00
|
|
|
|
2014-01-27 20:19:48 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def add_default_stream(request, user_profile, stream_name=REQ):
    """Add a stream to the realm's default streams (realm admins only)."""
    return json_success(do_add_default_stream(user_profile.realm, stream_name))
|
|
|
|
|
2014-01-27 21:56:09 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def remove_default_stream(request, user_profile, stream_name=REQ):
    """Remove a stream from the realm's default streams (realm admins only)."""
    return json_success(do_remove_default_stream(user_profile.realm, stream_name))
|
|
|
|
|
2013-09-10 11:46:18 +02:00
|
|
|
@authenticated_json_post_view
@require_realm_admin
@has_request_variables
def json_rename_stream(request, user_profile, old_name=REQ, new_name=REQ):
    """Rename a stream in the admin's realm (realm admins only)."""
    return json_success(do_rename_stream(user_profile.realm, old_name, new_name))
|
|
|
|
|
2014-01-02 23:00:10 +01:00
|
|
|
@authenticated_json_post_view
@require_realm_admin
@has_request_variables
def json_make_stream_public(request, user_profile, stream_name=REQ):
    """Convert an invite-only stream to a public one (realm admins only)."""
    return json_success(do_make_stream_public(user_profile, user_profile.realm,
                                              stream_name))
|
|
|
|
|
2014-01-02 23:20:33 +01:00
|
|
|
@authenticated_json_post_view
@require_realm_admin
@has_request_variables
def json_make_stream_private(request, user_profile, stream_name=REQ):
    """Convert a public stream to invite-only (realm admins only)."""
    return json_success(do_make_stream_private(user_profile.realm, stream_name))
|
|
|
|
|
2014-01-22 20:20:10 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def update_stream_backend(request, user_profile, stream_name,
                          description=REQ(validator=check_string, default=None)):
    """Update mutable stream properties; currently only the description."""
    if description is not None:
        do_change_stream_description(user_profile.realm, stream_name,
                                     description)
    return json_success({})
|
|
|
|
|
2013-06-24 21:23:45 +02:00
|
|
|
def list_subscriptions_backend(request, user_profile):
    """Return the requesting user's active stream subscriptions."""
    subscriptions = gather_subscriptions(user_profile)[0]
    return json_success({"subscriptions": subscriptions})
|
2012-10-11 19:31:21 +02:00
|
|
|
|
2015-08-19 21:04:49 +02:00
|
|
|
@transaction.atomic
@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(validator=check_list(check_string), default=[]),
                                 add=REQ(validator=check_list(check_dict([['name', check_string]])), default=[])):
    """Add and/or remove stream subscriptions in one atomic request.

    `add` is a list of {"name": stream_name} dicts; `delete` is a list of
    stream names.  The JSON payloads of the two sub-operations are merged
    into a single success response.
    """
    if not add and not delete:
        return json_error('Nothing to do. Specify at least one of "add" or "delete".')

    json_dict = {}
    for method, items in ((add_subscriptions_backend, add), (remove_subscriptions_backend, delete)):
        response = method(request, user_profile, streams_raw=items)
        if response.status_code != 200:
            # Undo whatever the earlier sub-operation may have written
            # before handing the failing response back to the client.
            transaction.rollback()
            return response
        # Fold this sub-operation's JSON payload into the combined result.
        json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_remove_subscriptions(request, user_profile):
    """Legacy JSON endpoint; delegates to remove_subscriptions_backend."""
    return remove_subscriptions_backend(request, user_profile)
|
|
|
|
|
2012-11-15 17:36:20 +01:00
|
|
|
@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions", validator=check_list(check_string)),
                                 principals = REQ(validator=check_list(check_string), default=None)):
    """Unsubscribe users from the given streams.

    Without `principals`, the requesting user unsubscribes themselves;
    with it, a realm admin can unsubscribe other users.  The response
    reports which streams were removed and which the targets were not
    subscribed to in the first place.
    """
    unsubscribing_others = principals and \
        set(principals) != set((user_profile.email,))
    if unsubscribing_others and not user_profile.is_admin():
        # Only realm admins may unsubscribe other people from a stream.
        return json_error("This action requires administrative rights")

    streams, _ = list_to_streams(streams_raw, user_profile)

    for stream in streams:
        if unsubscribing_others and stream.invite_only and \
                not subscribed_to_stream(user_profile, stream):
            # Even as an admin, you can't remove other people from an
            # invite-only stream you're not on.
            return json_error("Cannot administer invite-only streams this way")

    if principals:
        targets = set(principal_to_user_profile(user_profile, principal)
                      for principal in principals)
    else:
        targets = [user_profile]

    (removed, not_subscribed) = bulk_remove_subscriptions(targets, streams)
    result = dict(
        removed=[stream.name for (subscriber, stream) in removed],
        not_subscribed=[stream.name for (subscriber, stream) in not_subscribed])
    return json_success(result)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_add_subscriptions(request, user_profile):
    """Legacy JSON endpoint; delegates to add_subscriptions_backend."""
    return add_subscriptions_backend(request, user_profile)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2013-08-15 22:29:58 +02:00
|
|
|
def filter_stream_authorization(user_profile, streams):
    """Split `streams` into (authorized, unauthorized) for `user_profile`.

    A stream is authorized if the user is actively subscribed to it, or
    if it is not invite-only.
    """
    recipients_map = bulk_get_recipients(Recipient.STREAM,
                                         [stream.id for stream in streams])
    active_subs = Subscription.objects.filter(user_profile=user_profile,
                                              recipient__in=recipients_map.values(),
                                              active=True)
    subscribed_stream_ids = set(sub.recipient.type_id for sub in active_subs)

    # Invite-only streams the user isn't already on are off limits; the
    # user is always authorized for their own streams.
    unauthorized_streams = [stream for stream in streams
                            if stream.id not in subscribed_stream_ids
                            and stream.invite_only]

    unauthorized_ids = set(stream.id for stream in unauthorized_streams)
    authorized_streams = [stream for stream in streams
                          if stream.id not in unauthorized_ids]
    return authorized_streams, unauthorized_streams
|
|
|
|
|
2013-11-25 16:18:09 +01:00
|
|
|
def stream_link(stream_name):
    """Escapes a stream name to make a #narrow/stream/stream_name link"""
    quoted_name = urllib.quote(stream_name.encode('utf-8'))
    return "#narrow/stream/%s" % (quoted_name,)
|
|
|
|
|
2014-01-17 20:00:04 +01:00
|
|
|
def stream_button(stream_name):
    """Render the markdown for a subscribe button to the given stream.

    Backslashes and closing parens in the name are escaped so they can't
    terminate the button syntax early.
    """
    escaped = stream_name.replace('\\', '\\\\').replace(')', '\\)')
    return '!_stream_subscribe_button(%s)' % (escaped,)
|
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = REQ("subscriptions",
                                                validator=check_list(check_dict([['name', check_string]]))),
                              invite_only = REQ(validator=check_bool, default=False),
                              announce = REQ(validator=check_bool, default=False),
                              principals = REQ(validator=check_list(check_string), default=None),
                              authorization_errors_fatal = REQ(validator=check_bool, default=True)):
    """Subscribe users to streams, creating streams that don't yet exist.

    Without `principals` the requesting user subscribes themselves; with
    it, the named users are subscribed instead.  Newly-created streams
    honor `invite_only`, and `announce` broadcasts the creation.  Sends
    notification messages to affected (non-bot) users.
    """
    if not user_profile.can_create_streams():
        return json_error('User cannot create streams.')

    # Validate every requested stream name before touching anything.
    stream_names = []
    for stream in streams_raw:
        stream_name = stream["name"].strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            return json_error("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name,))
        stream_names.append(stream_name)

    existing_streams, created_streams = \
        list_to_streams(stream_names, user_profile, autocreate=True, invite_only=invite_only)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error("Unable to access stream (%s)." % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if principals is not None:
        if user_profile.realm.domain == 'mit.edu' and not all(stream.invite_only for stream in streams):
            return json_error("You can only invite other mit.edu users to invite-only streams.")
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = [user_profile]

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers)

    # Group the per-(user, stream) outcomes by subscriber email.
    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    private_streams = dict((stream.name, stream.invite_only) for stream in streams)
    bots = dict((subscriber.email, subscriber.is_bot) for subscriber in subscribers)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if principals and result["subscribed"]:
        for email, subscriptions in result["subscribed"].iteritems():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue
            if bots[email]:
                # Don't send invitation Zulips to bots
                continue

            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream [%s](%s)."
                       % (user_profile.full_name,
                          " **invite-only**" if private_streams[subscriptions[0]] else "",
                          subscriptions[0],
                          stream_link(subscriptions[0]),
                          ))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name,))
                for stream in subscriptions:
                    msg += "* [%s](%s)%s\n" % (
                        stream,
                        stream_link(stream),
                        " (**invite-only**)" if private_streams[stream] else "")

            if len([s for s in subscriptions if not private_streams[s]]) > 0:
                msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."
            notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                       "private", email, "", msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.notifications_stream
        if notifications_stream is not None:
            # Announce on the realm's designated notifications stream.
            if len(created_streams) > 1:
                stream_msg = "the following streams: %s" % \
                    (", ".join('`%s`' % (s.name,) for s in created_streams),)
            else:
                stream_msg = "a new stream `%s`" % (created_streams[0].name)

            stream_buttons = ' '.join(stream_button(s.name) for s in created_streams)
            msg = ("%s just created %s. %s" % (user_profile.full_name,
                                               stream_msg, stream_buttons))
            notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                       "stream",
                                                       notifications_stream.name, "Streams", msg,
                                                       realm=notifications_stream.realm))
        else:
            # No notifications stream configured: announce by PM instead.
            msg = ("Hi there! %s just created a new stream '%s'. %s"
                   % (user_profile.full_name, created_streams[0].name, stream_button(created_streams[0].name)))
            for realm_user_dict in get_active_user_dicts_in_realm(user_profile.realm):
                # Don't announce to yourself or to people you explicitly added
                # (who will get the notification above instead).
                if realm_user_dict['email'] in principals or realm_user_dict['email'] == user_profile.email:
                    continue
                notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                           "private",
                                                           realm_user_dict['email'], "", msg))

    if len(notifications) > 0:
        do_send_messages(notifications)

    # Convert the defaultdicts to plain dicts for JSON serialization.
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [stream.name for stream in unauthorized_streams]
    return json_success(result)
|
2012-09-05 23:38:20 +02:00
|
|
|
|
2013-02-11 17:20:16 +01:00
|
|
|
def get_members_backend(request, user_profile):
    """List every user and bot in the requester's realm (including
    deactivated accounts) with their basic profile fields."""
    realm = user_profile.realm
    admins = set(realm.get_admin_users())
    members = []
    for profile in UserProfile.objects.select_related().filter(realm=realm):
        member = {
            "full_name": profile.full_name,
            "is_bot": profile.is_bot,
            "is_active": profile.is_active,
            "is_admin": (profile in admins),
            "email": profile.email,
            "avatar_url": get_avatar_url(profile.avatar_source, profile.email),
        }
        if profile.is_bot and profile.bot_owner is not None:
            member["bot_owner"] = profile.bot_owner.email
        members.append(member)
    return json_success({'members': members})
|
|
|
|
|
2013-01-03 00:41:46 +01:00
|
|
|
@authenticated_json_post_view
def json_get_subscribers(request, user_profile):
    """Legacy JSON endpoint; delegates to get_subscribers_backend."""
    return get_subscribers_backend(request, user_profile)
|
|
|
|
|
2013-03-14 22:12:25 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_upload_file(request, user_profile):
    """Accept exactly one uploaded file and return the URI it is stored at."""
    num_files = len(request.FILES)
    if num_files == 0:
        return json_error("You must specify a file to upload")
    if num_files != 1:
        return json_error("You may only upload one file at a time")

    user_file = request.FILES.values()[0]
    uri = upload_message_image_through_web_client(request, user_file,
                                                  user_profile)
    return json_success({'uri': uri})
|
2013-03-14 22:12:25 +01:00
|
|
|
|
2014-10-30 01:01:15 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
@has_request_variables
def get_uploaded_file(request, realm_id, filename,
                      redir=REQ(validator=check_bool, default=True)):
    """Serve a signed URL for an S3-backed upload, as a redirect or JSON.

    Only meaningful for S3 storage; with local uploads nginx serves the
    file directly and this view refuses the request.
    """
    if settings.LOCAL_UPLOADS_DIR is not None:
        return HttpResponseForbidden() # Should have been served by nginx

    user_profile = request.user
    url_path = "%s/%s" % (realm_id, filename)

    if realm_id == "unk":
        # Legacy links don't carry the realm; look it up from the path.
        realm_id = get_realm_for_filename(url_path)
        if realm_id is None:
            # File does not exist
            return json_error("That file does not exist.", status=404)

    # Internal users can access all uploads so we can receive attachments in cross-realm messages
    authorized = (user_profile.realm.id == int(realm_id) or
                  user_profile.realm.domain == 'zulip.com')
    if not authorized:
        return HttpResponseForbidden()

    uri = get_signed_upload_url(url_path)
    if redir:
        return redirect(uri)
    return json_success({'uri': uri})
|
|
|
|
|
2013-01-03 00:41:46 +01:00
|
|
|
@has_request_variables
def get_subscribers_backend(request, user_profile, stream_name=REQ('stream')):
    """Return the email addresses subscribed to the named stream."""
    stream = get_stream(stream_name, user_profile.realm)
    if stream is None:
        raise JsonableError("Stream does not exist: %s" % (stream_name,))

    subscribers = get_subscriber_emails(stream, user_profile)
    return json_success({'subscribers': subscribers})
|
2013-01-03 00:41:46 +01:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_settings(request, user_profile,
                         full_name=REQ,
                         old_password=REQ(default=""),
                         new_password=REQ(default=""),
                         confirm_password=REQ(default="")):
    """Change the user's password and/or full name.

    A password change is attempted whenever either new-password field is
    non-empty.  The name is updated when it differs, is non-blank, and
    the realm permits name changes.
    """
    if new_password != "" or confirm_password != "":
        if new_password != confirm_password:
            return json_error("New password must match confirmation password!")
        if not authenticate(username=user_profile.email, password=old_password):
            return json_error("Wrong password!")
        do_change_password(user_profile, new_password)

    result = {}
    name_change_requested = (user_profile.full_name != full_name
                             and full_name.strip() != "")
    # When the realm disables name changes we fail silently: the UI
    # doesn't offer the option, so anyone hitting this is trying to
    # break the rules.
    if name_change_requested and not name_changes_disabled(user_profile.realm):
        new_full_name = full_name.strip()
        if len(new_full_name) > UserProfile.MAX_NAME_LENGTH:
            return json_error("Name too long!")
        do_change_full_name(user_profile, new_full_name)
        result['full_name'] = new_full_name

    return json_success(result)
|
|
|
|
|
2015-08-19 22:35:46 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_time_setting(request, user_profile, twenty_four_hour_time=REQ(validator=check_bool,default=None)):
    """Update the user's 24-hour-time display preference."""
    result = {}
    changed = (twenty_four_hour_time is not None and
               user_profile.twenty_four_hour_time != twenty_four_hour_time)
    if changed:
        do_change_twenty_four_hour_time(user_profile, twenty_four_hour_time)
        result['twenty_four_hour_time'] = twenty_four_hour_time
    return json_success(result)
|
|
|
|
|
2015-08-20 23:59:44 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_left_side_userlist(request, user_profile, left_side_userlist=REQ(validator=check_bool,default=None)):
    """Update whether the user list is shown on the left side of the UI."""
    result = {}
    changed = (left_side_userlist is not None and
               user_profile.left_side_userlist != left_side_userlist)
    if changed:
        do_change_left_side_userlist(user_profile, left_side_userlist)
        result['left_side_userlist'] = left_side_userlist
    return json_success(result)
|
|
|
|
|
2013-10-25 22:41:35 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_notify_settings(request, user_profile,
                                enable_stream_desktop_notifications=REQ(validator=check_bool,
                                                                        default=None),
                                enable_stream_sounds=REQ(validator=check_bool,
                                                         default=None),
                                enable_desktop_notifications=REQ(validator=check_bool,
                                                                 default=None),
                                enable_sounds=REQ(validator=check_bool,
                                                  default=None),
                                enable_offline_email_notifications=REQ(validator=check_bool,
                                                                       default=None),
                                enable_offline_push_notifications=REQ(validator=check_bool,
                                                                      default=None),
                                enable_digest_emails=REQ(validator=check_bool,
                                                         default=None)):
    """Update any of the user's notification preferences that were supplied.

    A setting is only touched when a non-None value was passed and it
    differs from the current value; changed settings are echoed back.
    """
    # (attribute name, requested value, setter), stream-level settings
    # first, then PM and @-mention settings.
    settings_table = (
        ('enable_stream_desktop_notifications', enable_stream_desktop_notifications,
         do_change_enable_stream_desktop_notifications),
        ('enable_stream_sounds', enable_stream_sounds,
         do_change_enable_stream_sounds),
        ('enable_desktop_notifications', enable_desktop_notifications,
         do_change_enable_desktop_notifications),
        ('enable_sounds', enable_sounds,
         do_change_enable_sounds),
        ('enable_offline_email_notifications', enable_offline_email_notifications,
         do_change_enable_offline_email_notifications),
        ('enable_offline_push_notifications', enable_offline_push_notifications,
         do_change_enable_offline_push_notifications),
        ('enable_digest_emails', enable_digest_emails,
         do_change_enable_digest_emails),
    )

    result = {}
    for setting_name, requested, setter in settings_table:
        if requested is not None and getattr(user_profile, setting_name) != requested:
            setter(user_profile, requested)
            result[setting_name] = requested

    return json_success(result)
|
|
|
|
|
2013-12-09 22:26:10 +01:00
|
|
|
@require_realm_admin
@has_request_variables
def create_user_backend(request, user_profile, email=REQ, password=REQ,
                        full_name=REQ, short_name=REQ):
    """Create a new user account in the admin's realm (realm admins only)."""
    form = CreateUserForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        return json_error('Bad name or username')

    # Check that the new user's email address belongs to the admin's realm
    realm = user_profile.realm
    domain = resolve_email_to_domain(email)
    if domain != realm.domain:
        return json_error("Email '%s' does not belong to domain '%s'" % (email, realm.domain))

    # Reject the request if an account with this email already exists.
    try:
        get_user_profile_by_email(email)
        return json_error("Email '%s' already in use" % (email,))
    except UserProfile.DoesNotExist:
        pass

    do_create_user(email, password, realm, full_name, short_name)
    return json_success()
|
|
|
|
|
2013-12-03 21:01:37 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_ui_settings(request, user_profile,
                            autoscroll_forever=REQ(validator=check_bool,
                                                   default=None),
                            default_desktop_notifications=REQ(validator=check_bool,
                                                              default=None)):
    """Update UI preferences; echoes back each setting that changed."""
    result = {}

    if autoscroll_forever is not None and \
            autoscroll_forever != user_profile.autoscroll_forever:
        do_change_autoscroll_forever(user_profile, autoscroll_forever)
        result['autoscroll_forever'] = autoscroll_forever

    if default_desktop_notifications is not None and \
            default_desktop_notifications != user_profile.default_desktop_notifications:
        do_change_default_desktop_notifications(user_profile,
                                                default_desktop_notifications)
        result['default_desktop_notifications'] = default_desktop_notifications

    return json_success(result)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream=REQ,
                       autosubscribe=REQ(default=False)):
    """Legacy JSON endpoint; delegates to stream_exists_backend."""
    return stream_exists_backend(request, user_profile, stream, autosubscribe)
|
2013-03-21 20:16:27 +01:00
|
|
|
|
2013-12-13 20:20:28 +01:00
|
|
|
def stream_exists_backend(request, user_profile, stream_name, autosubscribe):
    """Report whether `stream_name` exists in the user's realm.

    Responds 404 with {"exists": false} when it does not.  When it does,
    optionally autosubscribes the user and reports whether they are now
    subscribed.
    """
    if not valid_stream_name(stream_name):
        return json_error("Invalid characters in stream name")
    stream = get_stream(stream_name, user_profile.realm)
    result = {"exists": bool(stream)}
    if stream is None:
        return json_response(data=result, status=404)

    recipient = get_recipient(Recipient.STREAM, stream.id)
    if autosubscribe:
        bulk_add_subscriptions([stream], [user_profile])
    result["subscribed"] = Subscription.objects.filter(user_profile=user_profile,
                                                       recipient=recipient,
                                                       active=True).exists()
    return json_success(result) # results are ignored for HEAD requests
|
2012-10-17 20:43:52 +02:00
|
|
|
|
2013-01-18 18:25:36 +01:00
|
|
|
def get_subscription_or_die(stream_name, user_profile):
    """Return the active Subscription queryset for stream_name.

    Raises JsonableError when the stream does not exist in the user's
    realm, or when the user has no active subscription to it.
    """
    stream = get_stream(stream_name, user_profile.realm)
    if not stream:
        # Bug fix: this previously formatted stream.name, but stream is
        # None/falsy on this path, so it raised AttributeError instead of
        # the intended JsonableError. Use the requested name instead.
        raise JsonableError("Invalid stream %s" % (stream_name,))
    recipient = get_recipient(Recipient.STREAM, stream.id)
    subscription = Subscription.objects.filter(user_profile=user_profile,
                                               recipient=recipient, active=True)

    if not subscription.exists():
        raise JsonableError("Not subscribed to stream %s" % (stream_name,))

    return subscription
|
|
|
|
|
2012-12-02 22:58:00 +01:00
|
|
|
@authenticated_json_view
@has_request_variables
def json_subscription_property(request, user_profile, subscription_data=REQ(
        validator=check_list(
            check_dict([["stream", check_string],
                        ["property", check_string],
                        ["value", check_variable_type(
                            [check_string, check_bool])]])))):
    """
    This is the entry point to changing subscription properties. This
    is a bulk endpoint: requestors always provide a subscription_data
    list containing dictionaries for each stream of interest.

    Requests are of the form:

    [{"stream": "devel", "property": "in_home_view", "value": False},
     {"stream": "devel", "property": "color", "value": "#c2c2c2"}]
    """
    if request.method != "POST":
        return json_error("Invalid verb")

    # Each settable property maps to the validator for its value.
    property_converters = {"color": check_string, "in_home_view": check_bool,
                           "desktop_notifications": check_bool,
                           "audible_notifications": check_bool}
    response_data = []

    for item in subscription_data:
        stream_name = item["stream"]
        prop = item["property"]
        value = item["value"]

        if prop not in property_converters:
            return json_error("Unknown subscription property: %s" % (prop,))

        sub = get_subscription_or_die(stream_name, user_profile)[0]

        # Validators return an error string on failure, None on success.
        error = property_converters[prop](prop, value)
        if error:
            return json_error(error)

        do_change_subscription_property(user_profile, sub, stream_name,
                                        prop, value)
        response_data.append({'stream': stream_name,
                              'property': prop,
                              'value': value})

    return json_success({"subscription_data": response_data})
|
|
|
|
|
2012-10-17 22:36:49 +02:00
|
|
|
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(request, username=REQ, password=REQ):
    """Exchange credentials (or a Google OAuth2 token) for an API key.

    All failure modes return 403 with a machine-readable "reason".
    """
    return_data = {}
    if username == "google-oauth2-token":
        # Mobile clients pass the OAuth2 token in the password field.
        user_profile = authenticate(google_oauth2_token=password, return_data=return_data)
    else:
        user_profile = authenticate(username=username, password=password)

    if user_profile is None:
        if return_data.get("valid_attestation") == True:
            # We can leak that the user is unregistered iff they present a valid authentication string for the user.
            return json_error("This user is not registered; do so from a browser.", data={"reason": "unregistered"}, status=403)
        return json_error("Your username or password is incorrect.", data={"reason": "incorrect_creds"}, status=403)
    if not user_profile.is_active:
        return json_error("Your account has been disabled.", data={"reason": "disabled"}, status=403)
    return json_success({"api_key": user_profile.api_key, "email": user_profile.email})
|
2012-10-17 22:26:59 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_fetch_api_key(request, user_profile, password=REQ(default='')):
    """Return the logged-in user's API key, re-verifying their password
    when the realm uses password authentication."""
    if password_auth_enabled(user_profile.realm) and not user_profile.check_password(password):
        return json_error("Your username or password is incorrect.")
    return json_success({"api_key": user_profile.api_key})
|
2012-11-09 00:09:58 +01:00
|
|
|
|
2015-10-02 13:23:26 +02:00
|
|
|
@csrf_exempt
def api_fetch_google_client_id(request):
    """Expose the server's Google OAuth2 client ID to mobile clients."""
    if not settings.GOOGLE_CLIENT_ID:
        return json_error("GOOGLE_CLIENT_ID is not configured", status=400)
    return json_success({"google_client_id": settings.GOOGLE_CLIENT_ID})
|
|
|
|
|
2013-02-11 21:47:45 +01:00
|
|
|
def get_status_list(requesting_user_profile):
    """Return realm presence data plus the server timestamp it was taken at."""
    return {'presences': get_status_dict(requesting_user_profile),
            'server_timestamp': time.time()}
|
2013-02-11 21:47:45 +01:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
@has_request_variables
def update_active_status_backend(request, user_profile, status=REQ,
                                 new_user_input=REQ(validator=check_bool, default=False)):
    """Record a presence update for this user/client and return the
    realm's current presence list.

    For mit.edu users, additionally reports whether their zephyr mirror
    has checked for events within the last 5 minutes.
    """
    status_val = UserPresence.status_from_string(status)
    if status_val is None:
        raise JsonableError("Invalid presence status: %s" % (status,))
    update_user_presence(user_profile, request.client, now(), status_val,
                         new_user_input)

    ret = get_status_list(user_profile)
    if user_profile.realm.domain == "mit.edu":
        try:
            activity = UserActivity.objects.get(user_profile = user_profile,
                                                query="get_events_backend",
                                                client__name="zephyr_mirror")
            cutoff = datetime.datetime.utcnow() - datetime.timedelta(minutes=5)
            ret['zephyr_mirror_active'] = \
                activity.last_visit.replace(tzinfo=None) > cutoff
        except UserActivity.DoesNotExist:
            ret['zephyr_mirror_active'] = False

    return json_success(ret)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2014-01-02 00:02:01 +01:00
|
|
|
@authenticated_json_post_view
def json_update_active_status(request, user_profile):
    """JSON wrapper around update_active_status_backend."""
    return update_active_status_backend(request, user_profile)
|
|
|
|
|
2013-02-11 21:47:45 +01:00
|
|
|
@authenticated_json_post_view
def json_get_active_statuses(request, user_profile):
    """Return current presence information for the user's realm."""
    return json_success(get_status_list(user_profile))
|
2013-03-11 20:54:27 +01:00
|
|
|
|
2013-03-28 18:53:44 +01:00
|
|
|
# Read the source map information for decoding JavaScript backtraces
# (only built for production deployments; stays None in DEBUG/test runs).
js_source_map = None
if not (settings.DEBUG or settings.TEST_SUITE):
    js_source_map = SourceMap(os.path.join(
        settings.DEPLOY_ROOT, 'prod-static/source-map'))
|
2013-03-28 18:53:44 +01:00
|
|
|
|
2013-11-04 23:58:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_send_time(request, user_profile,
                          time=REQ(converter=to_non_negative_int),
                          received=REQ(converter=to_non_negative_int, default="(unknown)"),
                          displayed=REQ(converter=to_non_negative_int, default="(unknown)"),
                          locally_echoed=REQ(validator=check_bool, default=False),
                          rendered_content_disparity=REQ(validator=check_bool, default=False)):
    """Record client-reported end-to-end message timings in statsd.

    'received' and 'displayed' default to the sentinel string "(unknown)"
    when the client could not measure them; those timings are skipped.
    """
    request._log_data["extra"] = "[%sms/%sms/%sms/echo:%s/diff:%s]" \
        % (time, received, displayed, locally_echoed, rendered_content_disparity)

    # Compute the statsd key suffix once instead of per metric.
    realm_key = statsd_key(user_profile.realm.domain, clean_periods=True)
    statsd.timing("endtoend.send_time.%s" % (realm_key,), time)
    if received != "(unknown)":
        statsd.timing("endtoend.receive_time.%s" % (realm_key,), received)
    if displayed != "(unknown)":
        statsd.timing("endtoend.displayed_time.%s" % (realm_key,), displayed)
    if locally_echoed:
        statsd.incr('locally_echoed')
    if rendered_content_disparity:
        statsd.incr('render_disparity')
    return json_success()
|
|
|
|
|
2013-12-06 00:03:08 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_narrow_time(request, user_profile,
                            initial_core=REQ(converter=to_non_negative_int),
                            initial_free=REQ(converter=to_non_negative_int),
                            network=REQ(converter=to_non_negative_int)):
    """Record client-reported narrowing performance timings in statsd."""
    request._log_data["extra"] = "[%sms/%sms/%sms]" % (initial_core, initial_free, network)
    realm_key = statsd_key(user_profile.realm.domain, clean_periods=True)
    statsd.timing("narrow.initial_core.%s" % (realm_key,), initial_core)
    statsd.timing("narrow.initial_free.%s" % (realm_key,), initial_free)
    statsd.timing("narrow.network.%s" % (realm_key,), network)
    return json_success()
|
|
|
|
|
2014-02-13 18:49:44 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_unnarrow_time(request, user_profile,
                              initial_core=REQ(converter=to_non_negative_int),
                              initial_free=REQ(converter=to_non_negative_int)):
    """Record client-reported un-narrowing performance timings in statsd."""
    request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free)
    realm_key = statsd_key(user_profile.realm.domain, clean_periods=True)
    statsd.timing("unnarrow.initial_core.%s" % (realm_key,), initial_core)
    statsd.timing("unnarrow.initial_free.%s" % (realm_key,), initial_free)
    return json_success()
|
|
|
|
|
2013-03-11 20:54:27 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_error(request, user_profile, message=REQ, stacktrace=REQ,
                      ui_message=REQ(validator=check_bool), user_agent=REQ,
                      href=REQ, log=REQ,
                      more_info=REQ(validator=check_dict([]), default=None)):
    """Queue a browser-side error report for asynchronous processing.

    No-ops (but still reports success) when ERROR_REPORTING is disabled.
    When a JavaScript source map is loaded, the reported stacktrace is
    annotated with original source positions before being queued.
    """
    if not settings.ERROR_REPORTING:
        return json_success()

    if js_source_map:
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    # Best effort: include the deployed git revision when available.
    try:
        version = subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"])
    except Exception:
        version = None

    report = dict(
        user_email = user_profile.email,
        user_full_name = user_profile.full_name,
        user_visible = ui_message,
        server_path = settings.DEPLOY_ROOT,
        version = version,
        user_agent = user_agent,
        href = href,
        message = message,
        stacktrace = stacktrace,
        log = log,
        more_info = more_info,
    )
    queue_json_publish('error_reports', dict(type = "browser", report = report),
                       lambda x: None)

    return json_success()
|
2013-03-14 23:21:53 +01:00
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_events_register(request, user_profile):
    """JSON wrapper around events_register_backend."""
    return events_register_backend(request, user_profile)
|
|
|
|
|
2013-04-02 22:42:51 +02:00
|
|
|
# Does not need to be authenticated because it's called from rest_dispatch
@has_request_variables
def api_events_register(request, user_profile,
                        apply_markdown=REQ(default=False, validator=check_bool),
                        all_public_streams=REQ(default=None, validator=check_bool)):
    """API entry point for registering an event queue."""
    return events_register_backend(request, user_profile,
                                   apply_markdown=apply_markdown,
                                   all_public_streams=all_public_streams)
|
2013-03-14 23:21:53 +01:00
|
|
|
|
2014-02-07 01:22:19 +01:00
|
|
|
def _default_all_public_streams(user_profile, all_public_streams):
|
|
|
|
if all_public_streams is not None:
|
|
|
|
return all_public_streams
|
|
|
|
else:
|
|
|
|
return user_profile.default_all_public_streams
|
|
|
|
|
|
|
|
def _default_narrow(user_profile, narrow):
|
|
|
|
default_stream = user_profile.default_events_register_stream
|
|
|
|
if not narrow and user_profile.default_events_register_stream is not None:
|
|
|
|
narrow = [('stream', default_stream.name)]
|
|
|
|
return narrow
|
|
|
|
|
2013-03-14 23:21:53 +01:00
|
|
|
@has_request_variables
def events_register_backend(request, user_profile, apply_markdown=True,
                            all_public_streams=None,
                            event_types=REQ(validator=check_list(check_string), default=None),
                            narrow=REQ(validator=check_list(check_list(check_string, length=2)), default=[]),
                            queue_lifespan_secs=REQ(converter=int, default=0)):
    """Register a new event queue, filling in the user's saved defaults
    for all_public_streams and narrow when the request omits them."""
    all_public_streams = _default_all_public_streams(user_profile, all_public_streams)
    narrow = _default_narrow(user_profile, narrow)

    ret = do_events_register(user_profile, request.client, apply_markdown,
                             event_types, queue_lifespan_secs, all_public_streams,
                             narrow=narrow)
    return json_success(ret)
|
2013-04-24 17:18:49 +02:00
|
|
|
|
2014-02-11 17:14:33 +01:00
|
|
|
|
2013-07-08 23:34:43 +02:00
|
|
|
def deactivate_user_backend(request, user_profile, email):
    """Deactivate a human user by email; bots are rejected here and must
    go through deactivate_bot_backend instead."""
    try:
        target = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')
    if target.is_bot:
        return json_error('No such user')
    return _deactivate_user_profile_backend(request, user_profile, target)
|
|
|
|
|
|
|
|
def deactivate_bot_backend(request, user_profile, email):
    """Deactivate a bot by email; human accounts are rejected here and
    must go through deactivate_user_backend instead."""
    try:
        target = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such bot')
    if not target.is_bot:
        return json_error('No such bot')
    return _deactivate_user_profile_backend(request, user_profile, target)
|
2013-07-03 21:54:10 +02:00
|
|
|
|
2014-02-11 17:14:33 +01:00
|
|
|
def _deactivate_user_profile_backend(request, user_profile, target):
    """Shared permission check + deactivation for both users and bots."""
    if not user_profile.can_admin_user(target):
        return json_error('Insufficient permission')
    do_deactivate_user(target)
    return json_success({})
|
|
|
|
|
2013-11-15 19:39:03 +01:00
|
|
|
def reactivate_user_backend(request, user_profile, email):
    """Reactivate a previously deactivated user (requires admin-of-user)."""
    try:
        target = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(target):
        return json_error('Insufficient permission')

    do_reactivate_user(target)
    return json_success({})
|
|
|
|
|
2014-01-14 16:19:26 +01:00
|
|
|
@has_request_variables
def update_user_backend(request, user_profile, email,
                        is_admin=REQ(default=None, validator=check_bool)):
    """Update another user's admin flag (requires admin-of-user)."""
    try:
        target = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(target):
        return json_error('Insufficient permission')

    if is_admin is not None:
        do_change_is_admin(target, is_admin)
    return json_success({})
|
|
|
|
|
2013-12-09 22:12:18 +01:00
|
|
|
@require_realm_admin
def deactivate_stream_backend(request, user_profile, stream_name):
    """Deactivate a stream. Invite-only streams may only be administered
    this way by subscribers."""
    target = get_stream(stream_name, user_profile.realm)
    if not target:
        return json_error('No such stream name')

    if target.invite_only and not subscribed_to_stream(user_profile, target):
        return json_error('Cannot administer invite-only streams this way')

    do_deactivate_stream(target)
    return json_success({})
|
|
|
|
|
2013-11-12 22:26:17 +01:00
|
|
|
def avatar(request, email):
    """Redirect to the avatar URL for the given email.

    Unknown emails fall back to a Gravatar ('G') avatar so this endpoint
    never errors. Query parameters on the request (e.g. size hints) are
    forwarded to the destination URL.
    """
    try:
        user_profile = get_user_profile_by_email(email)
        avatar_source = user_profile.avatar_source
    except UserProfile.DoesNotExist:
        avatar_source = 'G'
    url = get_avatar_url(avatar_source, email)

    # Bug fix: previously an empty QUERY_STRING still appended a dangling
    # '?' or '&' separator to the redirect URL; only forward when present.
    query_string = request.META['QUERY_STRING']
    if query_string:
        sep = '&' if '?' in url else '?'
        url += sep + query_string
    return redirect(url)
|
|
|
|
|
2014-02-11 18:43:30 +01:00
|
|
|
def get_stream_name(stream):
    """Return stream.name, or None for a falsy (unset) stream."""
    if not stream:
        return None
    return stream.name
|
|
|
|
|
|
|
|
def stream_or_none(stream_name, realm):
    """Look up stream_name in realm.

    The empty string means "no stream" and yields None; an unknown
    non-empty name raises JsonableError.
    """
    if stream_name == '':
        return None
    stream = get_stream(stream_name, realm)
    if not stream:
        raise JsonableError('No such stream \'%s\'' % (stream_name, ))
    return stream
|
|
|
|
|
2013-07-16 22:25:34 +02:00
|
|
|
@has_request_variables
def patch_bot_backend(request, user_profile, email,
                      full_name=REQ(default=None),
                      default_sending_stream=REQ(default=None),
                      default_events_register_stream=REQ(default=None),
                      default_all_public_streams=REQ(default=None, validator=check_bool)):
    """Update a bot's name, default streams, and/or avatar.

    Only fields present in the request are changed; default streams are
    resolved in the bot's realm via stream_or_none (empty string clears).
    At most one uploaded file (the new avatar) is accepted. Returns the
    bot's resulting settings.
    """
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        # Bug fix: this was a bare "except:", which also reported
        # unrelated failures (even KeyboardInterrupt) as "No such user".
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    if full_name is not None:
        do_change_full_name(bot, full_name)
    if default_sending_stream is not None:
        stream = stream_or_none(default_sending_stream, bot.realm)
        do_change_default_sending_stream(bot, stream)
    if default_events_register_stream is not None:
        stream = stream_or_none(default_events_register_stream, bot.realm)
        do_change_default_events_register_stream(bot, stream)
    if default_all_public_streams is not None:
        do_change_default_all_public_streams(bot, default_all_public_streams)

    if len(request.FILES) == 0:
        pass
    elif len(request.FILES) == 1:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, bot.email)
        avatar_source = UserProfile.AVATAR_FROM_USER
        do_change_avatar_source(bot, avatar_source)
    else:
        return json_error("You may only upload one file at a time")

    json_result = dict(
        full_name=bot.full_name,
        avatar_url=avatar_url(bot),
        default_sending_stream=get_stream_name(bot.default_sending_stream),
        default_events_register_stream=get_stream_name(bot.default_events_register_stream),
        default_all_public_streams=bot.default_all_public_streams,
    )
    return json_success(json_result)
|
|
|
|
|
2013-10-28 15:49:38 +01:00
|
|
|
@authenticated_json_post_view
def json_set_avatar(request, user_profile):
    """Set the requester's avatar from exactly one uploaded file."""
    if len(request.FILES) != 1:
        return json_error("You must upload exactly one avatar.")

    user_file = request.FILES.values()[0]
    upload_avatar_image(user_file, user_profile, user_profile.email)
    do_change_avatar_source(user_profile, UserProfile.AVATAR_FROM_USER)

    return json_success(dict(avatar_url = avatar_url(user_profile)))
|
|
|
|
|
2013-08-08 16:49:32 +02:00
|
|
|
@has_request_variables
def regenerate_api_key(request, user_profile):
    """Rotate the requester's API key and return the new one."""
    do_regenerate_api_key(user_profile)
    return json_success(dict(api_key = user_profile.api_key))
|
|
|
|
|
2013-07-19 17:45:06 +02:00
|
|
|
@has_request_variables
def regenerate_bot_api_key(request, user_profile, email):
    """Rotate a bot's API key (requires admin-of-user) and return it."""
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        # Bug fix: this was a bare "except:", which also reported
        # unrelated failures (even KeyboardInterrupt) as "No such user".
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    do_regenerate_api_key(bot)
    return json_success(dict(api_key = bot.api_key))
|
|
|
|
|
2013-05-03 00:26:53 +02:00
|
|
|
@has_request_variables
def add_bot_backend(request, user_profile, full_name=REQ, short_name=REQ,
                    default_sending_stream=REQ(default=None),
                    default_events_register_stream=REQ(default=None),
                    default_all_public_streams=REQ(validator=check_bool, default=None)):
    """Create a new bot owned by the requester.

    The bot's email is "<short_name>-bot@<realm domain>". An optional
    single uploaded file becomes its avatar; optional default streams
    must be public or streams the owner is subscribed to.
    """
    short_name += "-bot"
    email = short_name + "@" + user_profile.realm.domain
    form = CreateUserForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        # We validate client-side as well
        return json_error('Bad name or username')

    try:
        get_user_profile_by_email(email)
        return json_error("Username already in use")
    except UserProfile.DoesNotExist:
        pass

    if len(request.FILES) == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif len(request.FILES) != 1:
        return json_error("You may only upload one file at a time")
    else:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, email)
        avatar_source = UserProfile.AVATAR_FROM_USER

    if default_sending_stream is not None:
        default_sending_stream = stream_or_none(default_sending_stream, user_profile.realm)
    if default_sending_stream and not default_sending_stream.is_public() and not \
        subscribed_to_stream(user_profile, default_sending_stream):
        return json_error('Insufficient permission')

    if default_events_register_stream is not None:
        default_events_register_stream = stream_or_none(default_events_register_stream,
                                                        user_profile.realm)
    if default_events_register_stream and not default_events_register_stream.is_public() and not \
        subscribed_to_stream(user_profile, default_events_register_stream):
        return json_error('Insufficient permission')

    bot_profile = do_create_user(email=email, password='',
                                 realm=user_profile.realm, full_name=full_name,
                                 short_name=short_name, active=True, bot=True,
                                 bot_owner=user_profile,
                                 avatar_source=avatar_source,
                                 default_sending_stream=default_sending_stream,
                                 default_events_register_stream=default_events_register_stream,
                                 default_all_public_streams=default_all_public_streams)
    json_result = dict(
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile),
        default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
        default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
        default_all_public_streams=bot_profile.default_all_public_streams,
    )
    return json_success(json_result)
|
2013-05-03 00:26:53 +02:00
|
|
|
|
2014-02-11 16:31:22 +01:00
|
|
|
def get_bots_backend(request, user_profile):
    """List the requester's active bots, oldest first."""
    bot_profiles = (UserProfile.objects
                    .filter(is_bot=True, is_active=True, bot_owner=user_profile)
                    .select_related('default_sending_stream', 'default_events_register_stream')
                    .order_by('date_joined'))

    def bot_info(bot_profile):
        # One JSON-serializable dict per bot.
        return dict(
            username=bot_profile.email,
            full_name=bot_profile.full_name,
            api_key=bot_profile.api_key,
            avatar_url=avatar_url(bot_profile),
            default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
            default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
            default_all_public_streams=bot_profile.default_all_public_streams,
        )

    return json_success({'bots': map(bot_info, bot_profiles)})
|
2013-07-26 16:51:02 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_refer_friend(request, user_profile, email=REQ):
    """Spend one of the user's remaining invites to refer a friend."""
    if not email:
        return json_error("No email address specified")
    if user_profile.invites_granted - user_profile.invites_used <= 0:
        return json_error("Insufficient invites")

    do_refer_friend(user_profile, email)
    return json_success()
|
2013-09-03 22:41:17 +02:00
|
|
|
|
|
|
|
def list_alert_words(request, user_profile):
    """Return the user's configured alert words."""
    return json_success({'alert_words': user_alert_words(user_profile)})
|
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_set_alert_words(request, user_profile,
                         alert_words=REQ(validator=check_list(check_string), default=[])):
    """Replace the user's alert words with the given list (JSON endpoint)."""
    do_set_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def set_alert_words(request, user_profile,
                    alert_words=REQ(validator=check_list(check_string), default=[])):
    """Replace the user's alert words with the given list (API endpoint)."""
    do_set_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def add_alert_words(request, user_profile,
                    alert_words=REQ(validator=check_list(check_string), default=[])):
    """Add the given words to the user's alert words."""
    do_add_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def remove_alert_words(request, user_profile,
                       alert_words=REQ(validator=check_list(check_string), default=[])):
    """Remove the given words from the user's alert words."""
    do_remove_alert_words(user_profile, alert_words)
    return json_success()
|
2013-09-10 00:06:24 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_set_muted_topics(request, user_profile,
                          muted_topics=REQ(validator=check_list(check_list(check_string, length=2)), default=[])):
    """Replace the user's muted (stream, topic) pairs."""
    do_set_muted_topics(user_profile, muted_topics)
    return json_success()
|
2013-10-15 20:19:41 +02:00
|
|
|
|
2015-02-10 08:08:47 +01:00
|
|
|
def add_push_device_token(request, user_profile, token, kind, ios_app_id=None):
    """Register a mobile push token (APNS or GCM) for user_profile."""
    if token == '' or len(token) > 4096:
        return json_error('Empty or invalid length token')

    # If another user was previously logged in on the same device and didn't
    # properly log out, the token will still be registered to the wrong account
    PushDeviceToken.objects.filter(token=token).delete()

    # Overwrite with the latest value
    device, created = PushDeviceToken.objects.get_or_create(user=user_profile,
                                                            token=token,
                                                            kind=kind,
                                                            ios_app_id=ios_app_id)
    if not created:
        device.last_updated = now()
        device.save(update_fields=['last_updated'])

    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def add_apns_device_token(request, user_profile, token=REQ, appid=REQ(default=settings.ZULIP_IOS_APP_ID)):
    """Register an APNS push token for the iOS app."""
    return add_push_device_token(request, user_profile, token, PushDeviceToken.APNS, ios_app_id=appid)
|
2013-12-09 23:17:16 +01:00
|
|
|
|
2013-12-09 23:19:59 +01:00
|
|
|
@has_request_variables
def add_android_reg_id(request, user_profile, token=REQ):
    """Register a GCM registration ID for the Android app."""
    return add_push_device_token(request, user_profile, token, PushDeviceToken.GCM)
|
|
|
|
|
2013-12-09 23:17:16 +01:00
|
|
|
def remove_push_device_token(request, user_profile, token, kind):
    """Unregister a previously registered push token of the given kind."""
    if token == '' or len(token) > 4096:
        return json_error('Empty or invalid length token')

    try:
        device = PushDeviceToken.objects.get(token=token, kind=kind)
    except PushDeviceToken.DoesNotExist:
        return json_error("Token does not exist")
    device.delete()

    return json_success()
|
2013-11-08 17:53:41 +01:00
|
|
|
|
2013-12-09 23:17:16 +01:00
|
|
|
@has_request_variables
def remove_apns_device_token(request, user_profile, token=REQ):
    """Unregister an APNS push token."""
    return remove_push_device_token(request, user_profile, token, PushDeviceToken.APNS)
|
|
|
|
|
2013-12-09 23:19:59 +01:00
|
|
|
@has_request_variables
def remove_android_reg_id(request, user_profile, token=REQ):
    """Unregister a GCM registration ID."""
    return remove_push_device_token(request, user_profile, token, PushDeviceToken.GCM)
|
|
|
|
|
|
|
|
|
2013-11-08 17:53:41 +01:00
|
|
|
def generate_204(request):
    """Return an empty HTTP 204 (No Content) response."""
    response = HttpResponse(content=None, status=204)
    return response
|
2013-11-25 16:47:19 +01:00
|
|
|
|
2013-11-26 00:08:17 +01:00
|
|
|
def process_unsubscribe(token, type, unsubscribe_function):
    """Resolve an unsubscribe confirmation token and apply the unsubscribe.

    Looks up the Confirmation for `token`; if found, calls
    unsubscribe_function on the associated user profile and renders the
    success page labeled with `type`. Unknown tokens render the error page.
    """
    try:
        confirmation = Confirmation.objects.get(confirmation_key=token)
    except Confirmation.DoesNotExist:
        return render_to_response('zerver/unsubscribe_link_error.html')

    user_profile = confirmation.content_object
    unsubscribe_function(user_profile)

    context = {"subscription_type": type,
               "external_host": settings.EXTERNAL_HOST}
    return render_to_response('zerver/unsubscribe_success.html', context)
|
|
|
|
|
2013-11-26 00:08:17 +01:00
|
|
|
# Email unsubscribe functions. All have the function signature
|
|
|
|
# processor(user_profile).
|
|
|
|
|
|
|
|
def do_missedmessage_unsubscribe(user_profile):
    """Turn off offline (missed-message) email notifications for the user."""
    do_change_enable_offline_email_notifications(user_profile, False)
|
|
|
|
|
|
|
|
def do_welcome_unsubscribe(user_profile):
    """Clear any queued followup (welcome) emails for this user's address."""
    clear_followup_emails_queue(user_profile.email)
|
|
|
|
|
2013-12-02 01:39:10 +01:00
|
|
|
def do_digest_unsubscribe(user_profile):
    """Turn off digest emails for the user."""
    do_change_enable_digest_emails(user_profile, False)
|
|
|
|
|
2013-11-26 23:21:03 +01:00
|
|
|
# The keys are part of the URL for the unsubscribe link and must be valid
# without encoding.
# The values are a tuple of (display name, unsubscribe function), where the
# display name is what we call this class of email in user-visible text.
email_unsubscribers = {
    "missed_messages": ("missed messages", do_missedmessage_unsubscribe),
    "welcome": ("welcome", do_welcome_unsubscribe),
    "digest": ("digest", do_digest_unsubscribe),
}
|
2013-11-26 00:08:17 +01:00
|
|
|
|
2013-11-25 16:47:19 +01:00
|
|
|
# Login NOT required. These are for one-click unsubscribes.
def email_unsubscribe(request, type, token):
    """Handle a one-click email unsubscribe link.

    `type` selects an entry in email_unsubscribers; unknown types render
    the unsubscribe error page.
    """
    entry = email_unsubscribers.get(type)
    if entry is None:
        return render_to_response('zerver/unsubscribe_link_error.html', {},
                                  context_instance=RequestContext(request))

    display_name, unsubscribe_function = entry
    return process_unsubscribe(token, display_name, unsubscribe_function)
|