2012-09-19 19:39:34 +02:00
|
|
|
from django.conf import settings
|
2012-08-28 18:44:51 +02:00
|
|
|
from django.contrib.auth import authenticate, login
|
|
|
|
from django.contrib.auth.decorators import login_required
|
|
|
|
from django.core.urlresolvers import reverse
|
2013-02-05 22:21:07 +01:00
|
|
|
from django.http import HttpResponseRedirect
|
2013-01-08 23:26:40 +01:00
|
|
|
from django.shortcuts import render_to_response, redirect
|
|
|
|
from django.template import RequestContext, loader
|
2013-02-07 20:54:43 +01:00
|
|
|
from django.utils.timezone import now
|
2012-09-29 00:49:34 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2012-12-11 23:42:32 +01:00
|
|
|
from django.core import validators
|
2013-03-05 23:45:02 +01:00
|
|
|
from django.contrib.auth.views import login as django_login_page, \
|
|
|
|
logout_then_login as django_logout_then_login
|
2013-03-06 20:53:54 +01:00
|
|
|
from django.db.models import Q, F
|
2013-03-11 20:54:27 +01:00
|
|
|
from django.core.mail import send_mail, mail_admins
|
2013-03-21 20:18:44 +01:00
|
|
|
from django.db import transaction
|
2012-10-10 22:53:24 +02:00
|
|
|
from zephyr.models import Message, UserProfile, Stream, Subscription, \
|
2013-02-07 20:54:43 +01:00
|
|
|
Recipient, get_huddle, Realm, UserMessage, \
|
2013-03-11 16:19:42 +01:00
|
|
|
PreregistrationUser, get_client, MitUser, User, UserActivity, \
|
2013-03-18 16:54:58 +01:00
|
|
|
MAX_SUBJECT_LENGTH, MAX_MESSAGE_LENGTH, get_stream, UserPresence, \
|
2013-03-18 18:57:34 +01:00
|
|
|
get_recipient, valid_stream_name
|
2013-01-10 22:01:33 +01:00
|
|
|
from zephyr.lib.actions import do_add_subscription, do_remove_subscription, \
|
2013-02-27 23:18:38 +01:00
|
|
|
do_change_password, create_mit_user_if_needed, do_change_full_name, \
|
|
|
|
do_change_enable_desktop_notifications, do_change_enter_sends, \
|
2013-03-18 18:57:34 +01:00
|
|
|
do_activate_user, add_default_subs, do_create_user, check_send_message, \
|
2012-12-07 01:05:14 +01:00
|
|
|
log_subscription_property_change, internal_send_message, \
|
2013-02-08 23:44:15 +01:00
|
|
|
create_stream_if_needed, gather_subscriptions, subscribed_to_stream, \
|
2013-03-18 18:57:34 +01:00
|
|
|
update_user_presence, set_stream_color, get_stream_colors, update_message_flags, \
|
2013-03-28 18:07:03 +01:00
|
|
|
recipient_for_emails, extract_recipients, do_events_register
|
2013-01-08 23:26:40 +01:00
|
|
|
from zephyr.forms import RegistrationForm, HomepageForm, ToSForm, is_unique, \
|
2013-02-12 20:52:42 +01:00
|
|
|
is_inactive, isnt_mit
|
2013-02-05 22:21:07 +01:00
|
|
|
from django.views.decorators.csrf import csrf_exempt
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-01-08 17:44:22 +01:00
|
|
|
from zephyr.decorator import require_post, \
|
2012-12-02 20:51:51 +01:00
|
|
|
authenticated_api_view, authenticated_json_post_view, \
|
2013-01-08 17:44:22 +01:00
|
|
|
has_request_variables, POST, authenticated_json_view, \
|
2013-01-31 21:12:53 +01:00
|
|
|
to_non_negative_int, json_to_dict, json_to_list, json_to_bool, \
|
2013-03-18 17:03:29 +01:00
|
|
|
JsonableError, RequestVariableMissingError, get_user_profile_by_email, \
|
2013-03-21 20:18:44 +01:00
|
|
|
get_user_profile_by_user_id, authenticated_rest_api_view, \
|
2013-03-22 15:58:52 +01:00
|
|
|
process_patch_as_post, REQ
|
2012-09-29 01:38:03 +02:00
|
|
|
from zephyr.lib.query import last_n
|
2012-10-17 04:07:35 +02:00
|
|
|
from zephyr.lib.avatar import gravatar_hash
|
2013-03-21 20:15:27 +01:00
|
|
|
from zephyr.lib.response import json_success, json_error, json_response, json_method_not_allowed
|
2013-02-08 23:44:15 +01:00
|
|
|
from zephyr.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
|
2013-03-15 19:51:19 +01:00
|
|
|
from zephyr.lib.cache import cache_with_key
|
2013-03-28 18:53:44 +01:00
|
|
|
from zephyr.lib.unminify import SourceMap
|
2013-01-23 23:24:44 +01:00
|
|
|
from zephyr import tornado_callbacks
|
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
from confirmation.models import Confirmation
|
|
|
|
|
2012-08-28 18:44:51 +02:00
|
|
|
import datetime
|
|
|
|
import simplejson
|
2012-09-07 19:20:04 +02:00
|
|
|
import re
|
2012-10-04 20:27:49 +02:00
|
|
|
import urllib
|
2012-10-16 21:15:01 +02:00
|
|
|
import time
|
2012-10-17 23:10:34 +02:00
|
|
|
import requests
|
2012-10-26 22:02:51 +02:00
|
|
|
import os
|
|
|
|
import base64
|
2013-03-28 18:53:44 +01:00
|
|
|
from os import path
|
2013-01-31 21:06:59 +01:00
|
|
|
from collections import defaultdict
|
2013-03-08 20:40:39 +01:00
|
|
|
from zephyr.lib import bugdown
|
2012-10-16 21:15:01 +02:00
|
|
|
|
2013-01-30 22:40:00 +01:00
|
|
|
def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
    """Converts plaintext stream names to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our requirements for a proper
    stream name: that is, that it is no longer than Stream.MAX_NAME_LENGTH characters and
    passes valid_stream_name.

    We also ensure the stream is visible to the user_profile who made the request; a call
    to list_to_streams will fail if one of the streams is invite_only and user_profile
    is not already on the stream.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream names to process
    @param user_profile The user for whom we are retreiving the streams
    @param autocreate Whether we should create streams if they don't already exist
    @param invite_only Whether newly created streams should have the invite_only bit set
    """
    streams = []
    # Validate all streams, getting extant ones, then get-or-creating the rest.
    stream_set = set(stream_name.strip() for stream_name in streams_raw)
    # Names that don't correspond to an existing Stream; created later if autocreate.
    rejects = []
    for stream_name in stream_set:
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            raise JsonableError("Invalid stream name (%s)." % (stream_name,))
        stream = get_stream(stream_name, user_profile.realm)

        if stream is None:
            rejects.append(stream_name)
        else:
            streams.append(stream)
            # Verify we can access the stream
            if stream.invite_only and not subscribed_to_stream(user_profile, stream):
                raise JsonableError("Unable to access invite-only stream (%s)." % stream.name)
    if autocreate:
        # All prechecks passed above, so creating every missing stream here
        # keeps the "all-or-nothing" promise from the docstring.
        for stream_name in rejects:
            stream, created = create_stream_if_needed(user_profile.realm,
                                                      stream_name,
                                                      invite_only=invite_only)
            streams.append(stream)
    elif rejects:
        raise JsonableError("Stream(s) (%s) do not exist" % ", ".join(rejects))

    return streams
|
|
|
|
|
2013-01-17 20:24:07 +01:00
|
|
|
def send_signup_message(sender, signups_stream, user_profile, internal=False):
    """Announce a new signup on the given signups stream.

    `internal` marks signups performed via manage.py rather than
    through the web interface.
    """
    internal_blurb = " **INTERNAL SIGNUP** " if internal else " "

    # Count of active users in the new user's realm, for the announcement.
    active_count = UserProfile.objects.filter(realm=user_profile.realm,
                                              user__is_active=True).count()
    announcement = "%s <`%s`> just signed up for Humbug!%s(total: **%i**)" % (
        user_profile.full_name,
        user_profile.user.email,
        internal_blurb,
        active_count,
    )

    internal_send_message(sender,
                          "stream", signups_stream, user_profile.realm.domain,
                          announcement)
|
|
|
|
|
2013-01-17 20:24:07 +01:00
|
|
|
def notify_new_user(user_profile, internal=False):
    """Post a signup announcement for user_profile to the "signups" stream."""
    send_signup_message("humbug+signups@humbughq.com", "signups",
                        user_profile, internal=internal)
|
|
|
|
|
2013-01-09 22:47:09 +01:00
|
|
|
class PrincipalError(JsonableError):
    """Raised when a user may not act on behalf of the given principal."""

    def __init__(self, principal):
        self.principal = principal

    def to_json_error_msg(self):
        msg = ("User not authorized to execute queries on behalf of '%s'"
               % (self.principal,))
        return msg
|
|
|
|
|
|
|
|
def principal_to_user_profile(agent, principal):
    """Resolve `principal` (an email address) to a UserProfile that `agent`
    is allowed to act on behalf of.

    Raises PrincipalError when the account doesn't exist, when the agent's
    realm is mit.edu, or when the two accounts are in different realms.
    """
    try:
        principal_user_profile = get_user_profile_by_email(principal)
    except UserProfile.DoesNotExist:
        # We have to make sure we don't leak information about which users
        # are registered for Humbug in a different realm.  We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message.
        raise PrincipalError(principal)

    if (agent.realm.domain == 'mit.edu'
            or agent.realm != principal_user_profile.realm):
        # Same opaque error as above, for the same information-leak reason.
        raise PrincipalError(principal)

    return principal_user_profile
|
|
|
|
|
2013-03-21 20:15:27 +01:00
|
|
|
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')

@authenticated_rest_api_view
def rest_dispatch(request, user_profile, **kwargs):
    """Dispatch a REST request to the view function named for its HTTP method.

    The urlconf supplies handler names keyed by HTTP method (e.g. GET='...');
    those entries are stripped from kwargs before invoking the handler so the
    handler only receives its own URL parameters.
    """
    handlers = {}
    for method in METHODS:
        if method in kwargs:
            handlers[method] = kwargs.pop(method)

    handler_name = handlers.get(request.method)
    if handler_name is not None:
        return globals()[handler_name](request, user_profile, **kwargs)
    return json_method_not_allowed(handlers.keys())
|
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
@require_post
def accounts_register(request):
    """Complete account registration from a confirmation-link visit.

    Handles two flows: ordinary invitees (PreregistrationUser), who get a
    brand-new account, and MIT beta users (MitUser), whose User rows already
    exist but are inactive.  On success, logs the user in and redirects home;
    otherwise re-renders the registration form.
    """
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    mit_beta_user = isinstance(confirmation.content_object, MitUser)

    # If someone invited you, you are joining their realm regardless
    # of your e-mail address.
    #
    # MitUsers can't be referred and don't have a referred_by field.
    if not mit_beta_user and prereg_user.referred_by:
        domain = prereg_user.referred_by.realm.domain
    else:
        domain = email.split('@')[-1]

    try:
        if mit_beta_user:
            # MIT users already exist, but are supposed to be inactive.
            is_inactive(email)
        else:
            # Other users should not already exist at all.
            is_unique(email)
    except ValidationError:
        # Account already usable: send them to the login page with the
        # address pre-filled instead of letting them re-register.
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))

    if request.POST.get('from_confirmation'):
        # First visit from the confirmation e-mail: show an empty form.
        form = RegistrationForm()
    else:
        form = RegistrationForm(request.POST)
        if form.is_valid():
            password = form.cleaned_data['password']
            full_name = form.cleaned_data['full_name']
            short_name = email.split('@')[0]
            (realm, _) = Realm.objects.get_or_create(domain=domain)

            # FIXME: sanitize email addresses and fullname
            if mit_beta_user:
                # Reactivate the pre-existing MIT account in place.
                user = User.objects.get(email=email)
                do_activate_user(user)
                do_change_password(user, password)
                user_profile = user.userprofile
                do_change_full_name(user_profile, full_name)
            else:
                user_profile = do_create_user(email, password, realm, full_name, short_name)
                # We want to add the default subs list iff there were no subs
                # specified when the user was invited.
                streams = prereg_user.streams.all()
                if len(streams) == 0:
                    add_default_subs(user_profile)
                else:
                    for stream in streams:
                        do_add_subscription(user_profile, stream)
                if prereg_user.referred_by is not None:
                    # This is a cross-realm private message.
                    internal_send_message("humbug+signups@humbughq.com",
                            "private", prereg_user.referred_by.user.email, user_profile.realm.domain,
                            "%s <`%s`> accepted your invitation to join Humbug!" % (
                                user_profile.full_name,
                                user_profile.user.email,
                                )
                            )

            notify_new_user(user_profile)

            login(request, authenticate(username=email, password=password))
            return HttpResponseRedirect(reverse('zephyr.views.home'))

    # Either the first visit or an invalid submission: (re-)render the form.
    return render_to_response('zephyr/register.html',
            { 'form': form, 'company_name': domain, 'email': email, 'key': key },
            context_instance=RequestContext(request))
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-01-08 23:26:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def accounts_accept_terms(request):
    """Show and process the Terms-of-Service acceptance form.

    On a valid POST, e-mails an acceptance record (name, address, IP,
    browser) to all@humbughq.com, updates the user's full name, and
    redirects home.  Otherwise renders the ToS form.
    """
    email = request.user.email
    company_name = email.split('@')[-1]
    if request.method == "POST":
        form = ToSForm(request.POST)
        if form.is_valid():
            full_name = form.cleaned_data['full_name']
            # Record the acceptance by e-mail for auditing purposes.
            send_mail('Terms acceptance for ' + full_name,
                    loader.render_to_string('zephyr/tos_accept_body.txt',
                        {'name': full_name,
                         'email': email,
                         'ip': request.META['REMOTE_ADDR'],
                         'browser': request.META['HTTP_USER_AGENT']}),
                    "humbug@humbughq.com",
                    ["all@humbughq.com"])
            do_change_full_name(request.user.userprofile, full_name)
            return redirect(home)

    else:
        form = ToSForm()
    # GET, or POST with an invalid form: render (or re-render) the form.
    return render_to_response('zephyr/accounts_accept_terms.html',
        { 'form': form, 'company_name': company_name, 'email': email },
           context_instance=RequestContext(request))
|
|
|
|
|
2012-12-11 23:42:32 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_invite_users(request, user_profile, invitee_emails=POST):
    """Invite a batch of e-mail addresses to the inviter's realm.

    Validates every address and the requested streams up front; only if no
    address hard-fails does it create PreregistrationUsers and send
    confirmation e-mails.  Addresses that already have accounts are skipped
    (reported, but don't block the rest).
    """
    # Validation
    if settings.ALLOW_REGISTER == False:
        try:
            isnt_mit(user_profile.user.email)
        except ValidationError:
            return json_error("Invitations are not enabled for MIT at this time.")

    if not invitee_emails:
        return json_error("You must specify at least one email address.")

    # Accept comma-, space-, or newline-separated address lists.
    invitee_emails = set(re.split(r'[, \n]', invitee_emails))

    stream_names = request.POST.getlist('stream')
    if not stream_names:
        return json_error("You must specify at least one stream for invitees to join.")

    # Resolve all streams before creating anything, so a bad stream name
    # aborts the whole request with no side effects.
    streams = []
    for stream_name in stream_names:
        stream = get_stream(stream_name, user_profile.realm)
        if stream is None:
            return json_error("Stream does not exist: %s. No invites were sent." % stream_name)
        streams.append(stream)

    new_prereg_users = []
    errors = []    # hard failures: abort sending entirely
    skipped = []   # soft failures: address already has an account
    for email in invitee_emails:
        if email == '':
            continue

        if not validators.email_re.match(email):
            errors.append((email, "Invalid address."))
            continue

        if user_profile.realm.restricted_to_domain and \
                email.split('@', 1)[-1] != user_profile.realm.domain:
            errors.append((email, "Outside your domain."))
            continue

        # Redundant check in case earlier validation preventing MIT users from
        # inviting people fails.
        if settings.ALLOW_REGISTER == False:
            try:
                isnt_mit(email)
            except ValidationError:
                errors.append((email, "Invitations are not enabled for MIT at this time."))
                continue

        try:
            is_unique(email)
        except ValidationError:
            skipped.append((email, "Already has an account."))
            continue

        # The logged in user is the referrer.
        user = PreregistrationUser(email=email, referred_by=user_profile)

        # We save twice because you cannot associate a ManyToMany field
        # on an unsaved object.
        user.save()
        user.streams = streams
        user.save()

        new_prereg_users.append(user)

    if errors:
        return json_error(data={'errors': errors},
                          msg="Some emails did not validate, so we didn't send any invitations.")

    if skipped and len(skipped) == len(invitee_emails):
        # All e-mails were skipped, so we didn't actually invite anyone.
        return json_error(data={'errors': skipped},
                          msg="We weren't able to invite anyone.")

    # If we encounter an exception at any point before now, there are no unwanted side-effects,
    # since it is totally fine to have duplicate PreregistrationUsers
    for user in new_prereg_users:
        Confirmation.objects.send_confirmation(user, user.email,
                additional_context={'referrer': user_profile},
                subject_template_path='confirmation/invite_email_subject.txt',
                body_template_path='confirmation/invite_email_body.txt')

    if skipped:
        return json_error(data={'errors': skipped},
                          msg="Some of those addresses are already using Humbug, \
so we didn't send them an invitation. We did send invitations to everyone else!")
    else:
        return json_success()
|
2012-12-11 23:42:32 +01:00
|
|
|
|
2012-09-29 00:49:34 +02:00
|
|
|
def login_page(request, **kwargs):
    """Wrap Django's login view, pre-filling the email field from ?email=.

    Other views redirect here with the address in the query string (e.g.
    when an account already exists); surface it in the form if present.
    """
    template_response = django_login_page(request, **kwargs)
    if 'email' in request.GET:
        template_response.context_data['email'] = request.GET['email']
    return template_response
|
|
|
|
|
2013-03-05 23:45:02 +01:00
|
|
|
@require_post
def logout_then_login(request, **kwargs):
    """Log the user out, then redirect to the login page.

    Bug fix: the keyword arguments were previously forwarded as a single
    positional dict (`django_logout_then_login(request, kwargs)`), which
    Django would bind to its `login_url` parameter.  Forward them as real
    keyword arguments instead; behavior is unchanged when no kwargs are
    given (the empty dict was falsy, so Django fell back to its default).
    """
    return django_logout_then_login(request, **kwargs)
|
|
|
|
|
2012-08-28 18:44:51 +02:00
|
|
|
def accounts_home(request):
    """Signup landing page: collect an e-mail address and send a confirmation.

    On a valid POST, creates a PreregistrationUser, mails a confirmation
    link, and redirects to the "check your e-mail" page.  On an invalid
    POST, falls through to check whether the address already has an active
    account and, if so, bounces to the login page instead.
    """
    if request.method == 'POST':
        form = HomepageForm(request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            user = PreregistrationUser()
            user.email = email
            user.save()
            Confirmation.objects.send_confirmation(user, user.email)
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email':user.email}))
        # Form was invalid: the most common reason is an address that is
        # already registered, so probe for that case here.
        try:
            email = request.POST['email']
            # Note: We don't check for uniqueness
            is_inactive(email)
        except ValidationError:
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))
    else:
        form = HomepageForm()
    return render_to_response('zephyr/accounts_home.html', {'form': form},
                              context_instance=RequestContext(request))
|
|
|
|
|
2012-10-29 19:56:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def home(request):
    """Render the main app page, registering an event queue and assembling
    the `page_params` blob consumed by the client-side JavaScript."""
    # We need to modify the session object every two weeks or it will expire.
    # This line makes reloading the page a sufficient action to keep the
    # session alive.
    request.session.modified = True

    user_profile = get_user_profile_by_user_id(request.user.id)

    # Register an event queue for this client up front; its return value
    # also tells us the current pointer and newest message id.
    register_ret = do_events_register(user_profile, apply_markdown=True)
    user_has_messages = (register_ret['max_message_id'] != -1)

    # Brand new users get the tutorial.
    # Compute this here, before we set user_profile.pointer below.
    needs_tutorial = settings.TUTORIAL_ENABLED and user_profile.pointer == -1

    if user_profile.pointer == -1 and user_has_messages:
        # Put the new user's pointer at the bottom
        #
        # This improves performance, because we limit backfilling of messages
        # before the pointer.  It's also likely that someone joining an
        # organization is interested in recent messages more than the very
        # first messages on the system.

        user_profile.pointer = register_ret['max_message_id']
        user_profile.last_pointer_updater = request.session.session_key

    # Populate personals autocomplete list based on everyone in your
    # realm.  Later we might want a 2-layer autocomplete, where we
    # consider specially some sort of "buddy list" who e.g. you've
    # talked to before, but for small organizations, the right list is
    # everyone in your realm.
    people = [{'email'     : profile.user.email,
               'full_name' : profile.full_name}
              for profile in
              UserProfile.objects.select_related().filter(realm=user_profile.realm)]

    # Pass parameters to the client-side JavaScript code.
    # These end up in a global JavaScript Object named 'page_params'.
    page_params = simplejson.encoder.JSONEncoderForHTML().encode(dict(
        debug_mode            = settings.DEBUG,
        poll_timeout          = settings.POLL_TIMEOUT,
        have_initial_messages = user_has_messages,
        stream_list           = gather_subscriptions(user_profile),
        people_list           = people,
        initial_pointer       = register_ret['pointer'],
        fullname              = user_profile.full_name,
        email                 = user_profile.user.email,
        domain                = user_profile.realm.domain,
        enter_sends           = user_profile.enter_sends,
        needs_tutorial        = needs_tutorial,
        desktop_notifications_enabled =
            user_profile.enable_desktop_notifications,
        event_queue_id        = register_ret['queue_id'],
        last_event_id         = register_ret['last_event_id'],
        max_message_id        = register_ret['max_message_id']
    ))

    # MIT users only see the invite UI when registration is open.
    try:
        isnt_mit(user_profile.user.email)
        show_invites = True
    except ValidationError:
        show_invites = settings.ALLOW_REGISTER

    return render_to_response('zephyr/index.html',
                              {'user_profile': user_profile,
                               'page_params' : page_params,
                               'email_hash'  : gravatar_hash(user_profile.user.email),
                               'show_debug':
                                   settings.DEBUG and ('show_debug' in request.GET),
                               'show_invites': show_invites
                               },
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-03-21 20:16:57 +01:00
|
|
|
def get_pointer_backend(request, user_profile):
    """Shared backend: return the requesting user's current pointer."""
    pointer = user_profile.pointer
    return json_success({'pointer': pointer})
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_update_pointer(request, user_profile):
    # API entry point; delegates to the shared pointer-update backend.
    return update_pointer_backend(request, user_profile)
|
2012-10-21 19:33:14 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_update_pointer(request, user_profile):
    # Website entry point; delegates to the shared pointer-update backend.
    return update_pointer_backend(request, user_profile)
|
2012-10-21 19:33:14 +02:00
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=POST(converter=to_non_negative_int)):
    """Advance the user's pointer to `pointer` (never moves it backwards),
    mark messages read for mobile clients, and notify Tornado."""
    # Pointer moves are monotonic: a stale or duplicate update is a no-op.
    if pointer <= user_profile.pointer:
        return json_success()

    user_profile.pointer = pointer
    user_profile.save(update_fields=["pointer"])

    if request.client.name.lower() in ['android', 'iphone']:
        # TODO (leo)
        # Until we handle the new read counts in the mobile apps natively,
        # this is a shim that will mark as read any messages up until the
        # pointer move
        UserMessage.objects.filter(user_profile=user_profile,
                                   message__id__lte=pointer,
                                   flags=~UserMessage.flags.read) \
                           .update(flags=F('flags').bitor(UserMessage.flags.read))

    # Let other open clients of this user know the pointer moved.
    if settings.TORNADO_SERVER:
        tornado_callbacks.send_notification(dict(
            type  = 'pointer_update',
            user  = user_profile.id,
            new_pointer = pointer))

    return json_success()
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_get_old_messages(request, user_profile):
    # Website client always renders messages, so markdown is always applied.
    return get_old_messages_backend(request, user_profile,
                                    apply_markdown=True)
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
@has_request_variables
def api_get_old_messages(request, user_profile,
                         apply_markdown=POST(default=False,
                                             converter=simplejson.loads)):
    # API clients choose whether to receive rendered (markdown) or raw content.
    return get_old_messages_backend(request, user_profile,
                                    apply_markdown=apply_markdown)
|
2012-10-26 16:42:03 +02:00
|
|
|
|
2012-12-19 23:58:02 +01:00
|
|
|
class BadNarrowOperator(Exception):
    """Raised when a narrow specification contains an operator or operand
    we don't understand; rendered to the client as a JSON error."""

    def __init__(self, desc):
        self.desc = desc

    def to_json_error_msg(self):
        message = 'Invalid narrow operator: ' + self.desc
        return message
|
|
|
|
|
|
|
|
class NarrowBuilder(object):
|
|
|
|
def __init__(self, user_profile):
|
|
|
|
self.user_profile = user_profile
|
|
|
|
|
2013-01-30 19:05:03 +01:00
|
|
|
def __call__(self, query, operator, operand):
|
2012-12-19 23:58:02 +01:00
|
|
|
# We have to be careful here because we're letting users call a method
|
|
|
|
# by name! The prefix 'by_' prevents it from colliding with builtin
|
|
|
|
# Python __magic__ stuff.
|
|
|
|
method_name = 'by_' + operator.replace('-', '_')
|
2013-01-30 20:59:56 +01:00
|
|
|
if method_name == 'by_search':
|
|
|
|
return self.do_search(query, operand)
|
2012-12-19 23:58:02 +01:00
|
|
|
method = getattr(self, method_name, None)
|
|
|
|
if method is None:
|
|
|
|
raise BadNarrowOperator('unknown operator ' + operator)
|
2013-01-30 19:05:03 +01:00
|
|
|
return query.filter(method(operand))
|
2012-12-19 23:58:02 +01:00
|
|
|
|
|
|
|
def by_is(self, operand):
|
|
|
|
if operand == 'private-message':
|
2013-03-11 15:47:29 +01:00
|
|
|
return (Q(message__recipient__type=Recipient.PERSONAL) |
|
|
|
|
Q(message__recipient__type=Recipient.HUDDLE))
|
2012-12-19 23:58:02 +01:00
|
|
|
raise BadNarrowOperator("unknown 'is' operand " + operand)
|
|
|
|
|
|
|
|
def by_stream(self, operand):
|
2013-01-14 21:47:17 +01:00
|
|
|
stream = get_stream(operand, self.user_profile.realm)
|
|
|
|
if stream is None:
|
2012-12-19 23:58:02 +01:00
|
|
|
raise BadNarrowOperator('unknown stream ' + operand)
|
2013-03-18 16:54:58 +01:00
|
|
|
recipient = get_recipient(Recipient.STREAM, type_id=stream.id)
|
2013-03-11 15:47:29 +01:00
|
|
|
return Q(message__recipient=recipient)
|
2012-12-19 23:58:02 +01:00
|
|
|
|
|
|
|
def by_subject(self, operand):
|
2013-03-11 15:47:29 +01:00
|
|
|
return Q(message__subject__iexact=operand)
|
2012-12-19 23:58:02 +01:00
|
|
|
|
2013-02-28 22:10:22 +01:00
|
|
|
def by_sender(self, operand):
|
2013-03-11 15:47:29 +01:00
|
|
|
return Q(message__sender__user__email__iexact=operand)
|
2013-02-28 22:10:22 +01:00
|
|
|
|
2012-12-19 23:58:02 +01:00
|
|
|
def by_pm_with(self, operand):
|
|
|
|
if ',' in operand:
|
|
|
|
# Huddle
|
|
|
|
try:
|
|
|
|
emails = [e.strip() for e in operand.split(',')]
|
|
|
|
recipient = recipient_for_emails(emails, False,
|
|
|
|
self.user_profile, self.user_profile)
|
|
|
|
except ValidationError:
|
|
|
|
raise BadNarrowOperator('unknown recipient ' + operand)
|
2013-03-11 15:47:29 +01:00
|
|
|
return Q(message__recipient=recipient)
|
2012-12-19 23:58:02 +01:00
|
|
|
else:
|
|
|
|
# Personal message
|
2013-03-18 16:54:58 +01:00
|
|
|
self_recipient = get_recipient(Recipient.PERSONAL, type_id=self.user_profile.id)
|
2013-01-03 19:31:58 +01:00
|
|
|
if operand == self.user_profile.user.email:
|
|
|
|
# Personals with self
|
2013-03-11 15:47:29 +01:00
|
|
|
return Q(message__recipient__type=Recipient.PERSONAL,
|
|
|
|
message__sender=self.user_profile, message__recipient=self_recipient)
|
2013-01-03 19:31:58 +01:00
|
|
|
|
|
|
|
# Personals with other user; include both directions.
|
2012-12-19 23:58:02 +01:00
|
|
|
try:
|
2013-03-18 17:03:29 +01:00
|
|
|
narrow_profile = get_user_profile_by_email(operand)
|
2012-12-19 23:58:02 +01:00
|
|
|
except UserProfile.DoesNotExist:
|
|
|
|
raise BadNarrowOperator('unknown user ' + operand)
|
|
|
|
|
2013-03-18 16:54:58 +01:00
|
|
|
narrow_recipient = get_recipient(Recipient.PERSONAL, narrow_profile.id)
|
2013-03-11 15:47:29 +01:00
|
|
|
return ((Q(message__sender=narrow_profile) & Q(message__recipient=self_recipient)) |
|
|
|
|
(Q(message__sender=self.user_profile) & Q(message__recipient=narrow_recipient)))
|
2012-12-19 23:58:02 +01:00
|
|
|
|
2013-01-30 20:59:56 +01:00
|
|
|
def do_search(self, query, operand):
    """Full-text narrow: use Postgres tsearch when available, otherwise
    fall back to per-word icontains matching on content and subject."""
    if "postgres" in settings.DATABASES["default"]["ENGINE"]:
        tsquery = "search_tsvector @@ plainto_tsquery('pg_catalog.english', %s)"
        return query.extra(where=[tsquery], params=[operand])

    # Non-Postgres fallback: every word must appear somewhere in the
    # message content or subject.
    for term in operand.split():
        query = query.filter(Q(message__content__icontains=term) |
                             Q(message__subject__icontains=term))
    return query
|
2013-01-30 20:59:56 +01:00
|
|
|
|
2012-12-19 23:58:02 +01:00
|
|
|
|
2013-01-02 21:43:49 +01:00
|
|
|
def narrow_parameter(json):
    """Parse the 'narrow' request parameter into a list of operator/operand
    pairs, raising ValueError on malformed input."""
    # FIXME: A hack to support old mobile clients
    if json == '{}':
        return None

    data = json_to_list(json)
    for elem in data:
        if not isinstance(elem, list):
            raise ValueError("element is not a list")
        # Each element must be exactly two strings (operator, operand).
        if len(elem) != 2 or any(not isinstance(x, (str, unicode)) for x in elem):
            raise ValueError("element is not a string pair")
    return data
|
|
|
|
|
2013-01-16 00:52:57 +01:00
|
|
|
def get_public_stream(request, stream, realm):
    """Resolve a stream name to a Stream, insisting that it exists and is
    public; raises JsonableError otherwise."""
    if not valid_stream_name(stream):
        raise JsonableError("Invalid stream name")
    found = get_stream(stream, realm)
    if found is None:
        raise JsonableError("Stream does not exist")
    if not found.is_public():
        raise JsonableError("Stream is not public")
    return found
|
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             stream = REQ(default=None),
                             apply_markdown=True):
    # Fetch a window of messages around `anchor`: up to num_before older
    # and num_after newer messages, optionally restricted by a narrow
    # and/or to a named public stream.
    if stream is not None:
        # Restrict to a single public stream (raises if private/missing).
        stream = get_public_stream(request, stream, user_profile.realm)
        recipient = get_recipient(Recipient.STREAM, stream.id)
        query = UserMessage.objects.select_related('message').filter(message__recipient=recipient,
                                                                     user_profile=user_profile) \
                                   .order_by('id')
    else:
        query = UserMessage.objects.select_related().filter(user_profile=user_profile) \
                                   .order_by('id')

    if narrow is not None:
        # Apply each narrow operator in turn to the queryset.
        build = NarrowBuilder(user_profile)
        for operator, operand in narrow:
            query = build(query, operator, operand)

    # We add 1 to the number of messages requested to ensure that the
    # resulting list always contains the anchor message
    if num_before != 0 and num_after == 0:
        num_before += 1
        messages = last_n(num_before, query.filter(message__id__lte=anchor))
    elif num_before == 0 and num_after != 0:
        num_after += 1
        messages = query.filter(message__id__gte=anchor)[:num_after]
    else:
        # Both directions: anchor is included in the "after" slice only,
        # so the "before" slice uses a strict < comparison.
        num_after += 1
        messages = (last_n(num_before, query.filter(message__id__lt=anchor))
                    + list(query.filter(message__id__gte=anchor)[:num_after]))

    # Merge each message's rendered dict with the per-user flags.
    message_list = [dict(umessage.message.to_dict(apply_markdown),
                         **umessage.flags_dict())
                    for umessage in messages]
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
|
2012-10-24 21:07:43 +02:00
|
|
|
|
2012-10-26 22:02:51 +02:00
|
|
|
def generate_client_id():
    """Return a fresh random client identifier: 32 lowercase hex characters."""
    raw = os.urandom(16)
    return base64.b16encode(raw).lower()
|
|
|
|
|
2013-02-20 03:21:27 +01:00
|
|
|
@authenticated_json_post_view
def json_get_profile(request, user_profile):
    """Session-authenticated entry point for fetching profile state."""
    return get_profile_backend(request, user_profile)
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_get_profile(request, user_profile):
    """API-key-authenticated entry point for fetching profile state."""
    return get_profile_backend(request, user_profile)
|
|
|
|
|
|
|
|
def get_profile_backend(request, user_profile):
    """Return the user's pointer, a fresh client_id, and the id of their
    newest message (-1 if they have none)."""
    result = {'pointer': user_profile.pointer,
              'client_id': generate_client_id(),
              'max_message_id': -1}

    newest = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
    if newest:
        result['max_message_id'] = newest[0].id

    return json_success(result)
|
2012-10-21 03:53:03 +02:00
|
|
|
|
2013-03-06 21:04:53 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_update_flags(request, user_profile, messages=POST('messages', converter=json_to_list),
                      operation=POST('op'),
                      flag=POST('flag'),
                      all=POST('all', converter=json_to_bool, default=False)):
    """Bulk-apply a flag operation to the given message ids (or all of them)."""
    update_message_flags(user_profile, operation, flag, messages, all)
    response = {'result': 'success', 'msg': ''}
    return json_success(response)
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_send_message(request, user_profile):
    """API-key-authenticated message-send entry point."""
    return send_message_backend(request, user_profile)
|
2012-10-01 21:36:44 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_send_message(request, user_profile):
    """Session-authenticated message-send entry point."""
    return send_message_backend(request, user_profile)
|
2012-09-06 21:52:03 +02:00
|
|
|
|
2013-02-27 23:18:38 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_enter_sends(request, user_profile, enter_sends=POST('enter_sends', json_to_bool)):
    """Toggle the user's 'pressing enter sends the message' preference."""
    do_change_enter_sends(user_profile, enter_sends)
    return json_success()
|
|
|
|
|
2012-11-08 19:33:47 +01:00
|
|
|
# Currently tabbott/extra@mit.edu is our only superuser. TODO: Make
# this a real superuser security check.
def is_super_user_api(request):
    """Return True iff the request carries the hard-coded superuser API key."""
    return request.POST.get("api-key") in ["xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"]
|
2012-09-21 16:40:46 +02:00
|
|
|
|
2013-02-05 22:56:04 +01:00
|
|
|
def mit_to_mit(user_profile, email):
    # Are the sender and recipient both @mit.edu addresses?
    # We have to handle this specially, inferring the domain from the
    # e-mail address, because the recipient may not existing in Humbug
    # and we may need to make a stub MIT user on the fly.
    if not validators.email_re.match(email):
        return False

    if user_profile.realm.domain != "mit.edu":
        return False

    recipient_domain = email.split("@", 1)[1]
    return recipient_domain == user_profile.realm.domain
|
|
|
|
|
2012-11-15 16:44:08 +01:00
|
|
|
def create_mirrored_message_users(request, user_profile, recipients):
    # Validate a zephyr-mirrored message's sender and recipients, creating
    # stub MIT users as needed.  Returns (valid, sender_profile); on any
    # validation failure returns (False, None).
    if "sender" not in request.POST:
        return (False, None)

    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        for email in recipients:
            referenced_users.add(email.lower())

    # Check that all referenced users are in our realm:
    for email in referenced_users:
        if not mit_to_mit(user_profile, email):
            return (False, None)

    # Create users for the referenced users, if needed.
    for email in referenced_users:
        create_mit_user_if_needed(user_profile.realm, email)

    sender = get_user_profile_by_email(sender_email)
    return (True, sender)
|
2012-09-06 22:00:39 +02:00
|
|
|
|
2013-03-05 19:57:38 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_tutorial_send_message(request, user_profile,
                               message_type_name = POST('type'),
                               subject_name = POST('subject', lambda x: x.strip(), None),
                               message_content=POST('content')):
    """
    This function, used by the onboarding tutorial, causes the
    Tutorial Bot to send you the message you pass in here.
    (That way, the Tutorial Bot's messages to you get rendered
    by the server and therefore look like any other message.)
    """
    sender_name = "humbug+tutorial@humbughq.com"

    if message_type_name == 'private':
        # For now, we discard the recipient on PMs; the tutorial bot
        # can only send to you.
        internal_send_message(sender_name,
                              "private",
                              user_profile.user.email,
                              "",
                              message_content,
                              realm=user_profile.realm)
        return json_success()

    if message_type_name == 'stream':
        stream_name = 'tutorial-%s' % user_profile.user.email.split('@')[0]
        stream_name = stream_name[:Stream.MAX_NAME_LENGTH]
        ## TODO: For open realms, we need to use the full name here,
        ## so that me@gmail.com and me@hotmail.com don't get the same stream.
        internal_send_message(sender_name,
                              "stream",
                              stream_name,
                              subject_name,
                              message_content,
                              realm=user_profile.realm)
        return json_success()

    return json_error('Bad data passed in to tutorial_send_message')
|
|
|
|
|
2012-10-03 21:31:44 +02:00
|
|
|
# We do not @require_login for send_message_backend, since it is used
# both from the API and the web service. Code calling
# send_message_backend should either check the API key or check that
# the user is logged in.
@has_request_variables
def send_message_backend(request, user_profile,
                         message_type_name = POST('type'),
                         message_to = POST('to', converter=extract_recipients),
                         forged = POST(default=False),
                         subject_name = POST('subject', lambda x: x.strip(), None),
                         message_content = POST('content')):
    # Shared send path for the API and website: validates authorization
    # (including the zephyr-mirroring special case) and then delegates the
    # actual checks and delivery to check_send_message.
    client = request.client
    is_super_user = is_super_user_api(request)
    # Only the superuser API key may forge sender/timestamp.
    if forged and not is_super_user:
        return json_error("User not authorized for this query")

    if client.name == "zephyr_mirror":
        # Here's how security works for non-superuser mirroring:
        #
        # The message must be (1) a private message (2) that
        # is both sent and received exclusively by other users in your
        # realm which (3) must be the MIT realm and (4) you must have
        # received the message.
        #
        # If that's the case, we let it through, but we still have the
        # security flaw that we're trusting your Hesiod data for users
        # you report having sent you a message.
        if "sender" not in request.POST:
            return json_error("Missing sender")
        if message_type_name != "private" and not is_super_user:
            return json_error("User not authorized for this query")
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error("Invalid mirrored message")
        if user_profile.realm.domain != "mit.edu":
            return json_error("Invalid mirrored realm")
        # The message is attributed to the mirrored sender, not the
        # forwarding bot's own profile.
        sender = mirror_sender
    else:
        sender = user_profile

    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile)
    if ret is not None:
        return json_error(ret)
    return json_success()
|
2013-03-18 18:21:22 +01:00
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_get_public_streams(request, user_profile):
    """API-key-authenticated listing of the realm's public streams."""
    return get_public_streams_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_public_streams(request, user_profile):
    """Session-authenticated listing of the realm's public streams."""
    return get_public_streams_backend(request, user_profile)
|
|
|
|
|
|
|
|
def get_public_streams_backend(request, user_profile):
    """Return the sorted names of the realm's non-invite-only streams that
    have at least one active subscriber."""
    if user_profile.realm.domain == "mit.edu" and not is_super_user_api(request):
        return json_error("User not authorized for this query")

    # Only get streams someone is currently subscribed to
    active_recipients = Subscription.objects.filter(active=True).values('recipient_id')
    subscribed_stream_ids = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=active_recipients).values('type_id')

    public_streams = Stream.objects.filter(id__in=subscribed_stream_ids,
                                           realm=user_profile.realm,
                                           invite_only=False)
    names = sorted(s.name for s in public_streams)
    return json_success({"streams": names})
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_list_subscriptions(request, user_profile):
    """API-key-authenticated listing of the user's subscriptions."""
    return list_subscriptions_backend(request, user_profile)
|
2012-10-11 19:31:21 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_list_subscriptions(request, user_profile):
    """Session-authenticated listing of the user's subscriptions."""
    return list_subscriptions_backend(request, user_profile)
|
|
|
|
|
|
|
|
def list_subscriptions_backend(request, user_profile):
    """Return the user's gathered subscription data as JSON."""
    subscriptions = gather_subscriptions(user_profile)
    return json_success({"subscriptions": subscriptions})
|
2012-09-18 16:30:25 +02:00
|
|
|
|
2013-03-21 20:18:44 +01:00
|
|
|
@process_patch_as_post
@transaction.commit_on_success
@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=POST(converter=json_to_list, default=[]),
                                 add=POST(converter=json_to_list, default=[])):
    """Atomically add and/or remove stream subscriptions; on any failure
    the whole transaction is rolled back and the failing response returned."""
    if not add and not delete:
        return json_error('Nothing to do. Specify at least one of "add" or "delete".')

    combined = {}
    operations = ((add_subscriptions_backend, add),
                  (remove_subscriptions_backend, delete))
    for backend, stream_list in operations:
        response = backend(request, user_profile, streams_raw=stream_list)
        if response.status_code != 200:
            # Undo any partial work from the earlier operation.
            transaction.rollback()
            return response
        combined.update(simplejson.loads(response.content))

    return json_success(combined)
|
|
|
|
|
2012-11-16 20:15:03 +01:00
|
|
|
@authenticated_api_view
def api_remove_subscriptions(request, user_profile):
    """API-key-authenticated unsubscribe entry point."""
    return remove_subscriptions_backend(request, user_profile)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_remove_subscriptions(request, user_profile):
    """Session-authenticated unsubscribe entry point."""
    return remove_subscriptions_backend(request, user_profile)
|
|
|
|
|
2012-11-15 17:36:20 +01:00
|
|
|
@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = POST("subscriptions", json_to_list)):
    """Unsubscribe the user from each named stream, reporting which were
    actually removed and which they were not subscribed to."""
    streams = list_to_streams(streams_raw, user_profile)

    result = dict(removed=[], not_subscribed=[])
    for stream in streams:
        if do_remove_subscription(user_profile, stream):
            result["removed"].append(stream.name)
        else:
            result["not_subscribed"].append(stream.name)

    return json_success(result)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_add_subscriptions(request, user_profile):
    """API-key-authenticated subscribe entry point."""
    return add_subscriptions_backend(request, user_profile)
|
2012-10-11 21:34:17 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_add_subscriptions(request, user_profile):
    """Session-authenticated subscribe entry point."""
    return add_subscriptions_backend(request, user_profile)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = POST('subscriptions', json_to_list),
                              invite_only = POST('invite_only', json_to_bool, default=False),
                              principals = POST('principals', json_to_list, default=None),):
    """Subscribe the user (or the given principals) to the named streams,
    auto-creating them as needed, and notify any other users who were
    subscribed on someone else's behalf."""
    # Validate and normalize (strip) every requested stream name up front.
    stream_names = []
    for stream_name in streams_raw:
        stream_name = stream_name.strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            return json_error("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name,))
        stream_names.append(stream_name)

    if principals is not None:
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = [user_profile]

    # BUG FIX: use the validated, whitespace-stripped names; previously the
    # raw client input was passed here and the validation loop's output
    # (stream_names) was ignored.
    streams = list_to_streams(stream_names, user_profile, autocreate=True, invite_only=invite_only)
    private_streams = {}
    # BUG FIX: removed a dead `result = dict(subscribed=[], ...)` assignment
    # that was immediately overwritten by this defaultdict version.
    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))
    for stream in streams:
        for subscriber in subscribers:
            did_subscribe = do_add_subscription(subscriber, stream)
            if did_subscribe:
                result["subscribed"][subscriber.user.email].append(stream.name)
            else:
                result["already_subscribed"][subscriber.user.email].append(stream.name)
        private_streams[stream.name] = stream.invite_only

    # Inform the user if someone else subscribed them to stuff
    if principals and result["subscribed"]:
        for email, subscriptions in result["subscribed"].iteritems():
            if email == user_profile.user.email:
                # Don't send a Humbug if you invited yourself.
                continue

            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream '%s'"
                       % (user_profile.full_name,
                          " **invite-only**" if private_streams[subscriptions[0]] else "",
                          subscriptions[0]))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name,))
                for stream in subscriptions:
                    msg += "* %s%s\n" % (
                        stream,
                        " (**invite-only**)" if private_streams[stream] else "")
            internal_send_message("humbug+notifications@humbughq.com",
                                  "private", email, "", msg)

    # Convert the defaultdicts to plain dicts for JSON serialization.
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    return json_success(result)
|
2012-09-05 23:38:20 +02:00
|
|
|
|
2013-02-11 17:20:16 +01:00
|
|
|
@authenticated_api_view
def api_get_members(request, user_profile):
    """API-key-authenticated realm member listing."""
    return get_members_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_members(request, user_profile):
    """Session-authenticated realm member listing."""
    return get_members_backend(request, user_profile)
|
|
|
|
|
|
|
|
def get_members_backend(request, user_profile):
    """Return (full_name, email) pairs for every member of the user's realm."""
    realm_profiles = UserProfile.objects.select_related().filter(realm=user_profile.realm)
    members = [(profile.full_name, profile.user.email) for profile in realm_profiles]
    return json_success({'members': members})
|
|
|
|
|
2013-01-03 00:41:46 +01:00
|
|
|
@authenticated_api_view
def api_get_subscribers(request, user_profile):
    """API-key-authenticated stream subscriber listing."""
    return get_subscribers_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_subscribers(request, user_profile):
    """Session-authenticated stream subscriber listing."""
    return get_subscribers_backend(request, user_profile)
|
|
|
|
|
|
|
|
@has_request_variables
def get_subscribers_backend(request, user_profile, stream_name=POST('stream')):
    """List the emails of everyone actively subscribed to a stream.

    Refused for the MIT realm, for nonexistent streams, and for
    invite-only streams the requester is not subscribed to.
    """
    if user_profile.realm.domain == "mit.edu":
        return json_error("You cannot get subscribers in this realm")

    stream = get_stream(stream_name, user_profile.realm)
    if stream is None:
        return json_error("Stream does not exist: %s" % stream_name)

    if stream.invite_only and not subscribed_to_stream(user_profile, stream):
        return json_error("Unable to retrieve subscribers for invite-only stream")

    active_subs = Subscription.objects.filter(recipient__type=Recipient.STREAM,
                                              recipient__type_id=stream.id,
                                              active=True).select_related()
    emails = [sub.user_profile.user.email for sub in active_subs]
    return json_success({'subscribers': emails})
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_settings(request, user_profile, full_name=POST,
                         old_password=POST, new_password=POST,
                         confirm_password=POST,
                         # enable_desktop_notification needs to default to False
                         # because browsers POST nothing for an unchecked checkbox
                         enable_desktop_notifications=POST(converter=lambda x: x == "on",
                                                           default=False)):
    # Update the user's password, full name, and/or desktop-notification
    # preference.  Only fields that actually changed are written, and the
    # changed values are echoed back to the client.
    if new_password != "" or confirm_password != "":
        # Password change requested: require matching confirmation and a
        # correct current password before applying it.
        if new_password != confirm_password:
            return json_error("New password must match confirmation password!")
        if not authenticate(username=user_profile.user.email, password=old_password):
            return json_error("Wrong password!")
        do_change_password(user_profile.user, new_password)

    result = {}
    # An empty name field means "leave my name alone".
    if user_profile.full_name != full_name and full_name.strip() != "":
        do_change_full_name(user_profile, full_name.strip())
        result['full_name'] = full_name

    if user_profile.enable_desktop_notifications != enable_desktop_notifications:
        do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications)
        result['enable_desktop_notifications'] = enable_desktop_notifications

    return json_success(result)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream=POST):
    """Session-authenticated check for whether a stream exists."""
    return stream_exists_backend(request, user_profile, stream)
|
|
|
|
|
|
|
|
def stream_exists_backend(request, user_profile, stream_name):
    """Report whether `stream_name` exists in the user's realm, and if so
    whether the user is subscribed; 404s when the stream is missing."""
    if not valid_stream_name(stream_name):
        return json_error("Invalid characters in stream name")

    stream = get_stream(stream_name, user_profile.realm)
    result = {"exists": bool(stream)}
    if stream is not None:
        recipient = get_recipient(Recipient.STREAM, stream.id)
        is_subscribed = Subscription.objects.filter(user_profile=user_profile,
                                                    recipient=recipient,
                                                    active=True).exists()
        result["subscribed"] = is_subscribed
        return json_success(result) # results are ignored for HEAD requests
    return json_response(data=result, status=404)
|
2012-10-17 20:43:52 +02:00
|
|
|
|
2013-01-18 18:25:36 +01:00
|
|
|
def get_subscription_or_die(stream_name, user_profile):
    """Return the user's active Subscription queryset for `stream_name`.

    Raises JsonableError if the stream does not exist or the user is not
    actively subscribed to it.
    """
    stream = get_stream(stream_name, user_profile.realm)
    if not stream:
        # BUG FIX: this previously formatted `stream.name`, but `stream` is
        # None on this path, so it raised AttributeError instead of the
        # intended JsonableError.
        raise JsonableError("Invalid stream %s" % (stream_name,))
    recipient = get_recipient(Recipient.STREAM, stream.id)
    subscription = Subscription.objects.filter(user_profile=user_profile,
                                               recipient=recipient, active=True)

    if not subscription.exists():
        raise JsonableError("Not subscribed to stream %s" % (stream_name,))

    return subscription
|
|
|
|
|
2013-01-18 22:08:40 +01:00
|
|
|
def set_in_home_view(user_profile, stream_name, value):
    """Set whether the user's subscription to `stream_name` appears in the
    home view; dies if they are not subscribed."""
    sub = get_subscription_or_die(stream_name, user_profile)[0]
    sub.in_home_view = value
    sub.save(update_fields=["in_home_view"])
|
2013-01-18 22:08:40 +01:00
|
|
|
|
2012-12-02 22:58:00 +01:00
|
|
|
class SubscriptionProperties(object):
    """
    A class for managing GET and POST requests for subscription properties. The
    name for a request handler is <request type>_<property name>.

    Requests must have already been authenticated before being processed here.

    Requests that set or change subscription properties should typically log the
    change through log_event.
    """
    def __call__(self, request, user_profile, property):
        # Dispatch to e.g. get_stream_colors / post_in_home_view by name.
        handler_name = "%s_%s" % (request.method.lower(), property)
        handler = getattr(self, handler_name, None)
        if handler is None:
            return json_error("Unknown property or invalid verb for %s" % (property,))
        return handler(request, user_profile)

    def request_property(self, request_dict, property):
        # Missing properties surface as the standard missing-variable error.
        try:
            return request_dict[property].strip()
        except KeyError:
            raise RequestVariableMissingError(property)

    def get_stream_colors(self, request, user_profile):
        return json_success({"stream_colors": get_stream_colors(user_profile)})

    def post_stream_colors(self, request, user_profile):
        stream_name = self.request_property(request.POST, "stream_name")
        color = self.request_property(request.POST, "color")

        set_stream_color(user_profile, stream_name, color)
        log_subscription_property_change(user_profile.user.email, "stream_color",
                                         {"stream_name": stream_name, "color": color})
        return json_success()

    def post_in_home_view(self, request, user_profile):
        stream_name = self.request_property(request.POST, "stream_name")
        raw_value = self.request_property(request.POST, "in_home_view").lower()

        # Only the literal strings "true"/"false" are accepted.
        if raw_value not in ("true", "false"):
            raise JsonableError("Invalid value for `in_home_view`.")

        set_in_home_view(user_profile, stream_name, raw_value == "true")

        return json_success()
|
|
|
|
|
2012-12-02 22:58:00 +01:00
|
|
|
# Module-level singleton that dispatches subscription-property requests.
subscription_properties = SubscriptionProperties()
|
|
|
|
|
|
|
|
def make_property_call(request, query_dict, user_profile):
    """Extract the 'property' key from `query_dict` and dispatch it to the
    subscription-properties handler."""
    if "property" not in query_dict:
        return json_error("Missing property")
    property = query_dict["property"].strip()
    return subscription_properties(request, user_profile, property.lower())
|
|
|
|
|
|
|
|
def make_get_property_call(request, user_profile):
    """Dispatch a GET subscription-property request."""
    return make_property_call(request, request.GET, user_profile)
|
|
|
|
|
|
|
|
def make_post_property_call(request, user_profile):
    """Dispatch a POST subscription-property request."""
    return make_property_call(request, request.POST, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_view
def json_subscription_property(request, user_profile):
    """
    This is the entry point to accessing or changing subscription
    properties. Authentication happens here.

    Add a handler for a new subscription property in SubscriptionProperties.
    """
    dispatch = {"GET": make_get_property_call,
                "POST": make_post_property_call}
    handler = dispatch.get(request.method)
    if handler is None:
        return json_error("Invalid verb")
    return handler(request, user_profile)
|
2012-12-01 04:37:18 +01:00
|
|
|
|
2012-10-17 22:36:49 +02:00
|
|
|
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(request, username=POST, password=POST):
    """Exchange a username/password pair for the account's API key."""
    account = authenticate(username=username, password=password)
    if account is None:
        return json_error("Your username or password is incorrect.", status=403)
    if not account.is_active:
        return json_error("Your account has been disabled.", status=403)
    return json_success({"api_key": account.userprofile.api_key})
|
2012-10-17 22:26:59 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_fetch_api_key(request, user_profile, password=POST):
    """Return the logged-in user's API key after re-verifying their password."""
    if request.user.check_password(password):
        return json_success({"api_key": user_profile.api_key})
    return json_error("Your username or password is incorrect.")
|
2012-11-09 00:09:58 +01:00
|
|
|
|
2012-12-04 22:14:44 +01:00
|
|
|
class ActivityTable(object):
    """Aggregates per-user activity counts for one client into table rows
    for the internal activity dashboard."""

    def __init__(self, client_name, queries, default_tab=False):
        self.default_tab = default_tab
        self.has_pointer = False
        self.rows = {}
        for url, query_name in queries:
            if 'pointer' in query_name:
                self.has_pointer = True
            records = UserActivity.objects.filter(
                query=url,
                client__name__startswith=client_name).select_related()
            for record in records:
                profile = record.user_profile
                # One row per user email, accumulated across queries.
                row = self.rows.setdefault(profile.user.email, {})
                row['realm'] = profile.realm.domain
                row['full_name'] = profile.full_name
                row['email'] = profile.user.email
                row[query_name + '_count'] = record.count
                row[query_name + '_last'] = record.last_visit

        for row in self.rows.values():
            # kind of a hack: the datetime values in the row are exactly
            # the *_last visit timestamps, so the newest one is the time
            # of the user's last action.
            timestamps = [v for v in row.values()
                          if isinstance(v, datetime.datetime)]
            age = now() - max(timestamps)
            if age < datetime.timedelta(minutes=10):
                row['class'] = 'recently_active'
            elif age >= datetime.timedelta(days=1):
                row['class'] = 'long_inactive'
            row['age'] = age

    def sorted_rows(self):
        # Ascending age: most recently active users first.
        return sorted(self.rows.iteritems(), key=lambda item: item[1]['age'])
|
2012-12-04 23:54:14 +01:00
|
|
|
|
2012-12-05 19:32:09 +01:00
|
|
|
def can_view_activity(request):
    """Only users in the humbughq.com realm may view the activity page."""
    domain = request.user.userprofile.realm.domain
    return domain == 'humbughq.com'
|
|
|
|
|
2012-11-09 00:09:58 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def get_activity(request):
    """Render the internal per-client activity dashboard.

    Redirects non-staff (non-humbughq.com) users to the login page.
    """
    if not can_view_activity(request):
        return HttpResponseRedirect(reverse('zephyr.views.login_page'))

    website_queries = (
        ("/json/get_updates", "get_updates"),
        ("/json/send_message", "send_message"),
        ("/json/update_pointer", "update_pointer"),
    )
    api_queries = (
        ("/api/v1/get_messages", "get_updates"),
        ("/api/v1/send_message", "send_message"),
    )

    # One table (tab) per client type; the website tab is shown by default.
    tables = {
        'Website': ActivityTable('website', website_queries, default_tab=True),
        'Mirror': ActivityTable('zephyr_mirror', api_queries),
        'API': ActivityTable('API', api_queries),
        'Android': ActivityTable('Android', api_queries),
        'iPhone': ActivityTable('iPhone', api_queries),
    }
    return render_to_response('zephyr/activity.html', {'data': tables},
                              context_instance=RequestContext(request))
|
2012-11-19 23:52:36 +01:00
|
|
|
|
|
|
|
@authenticated_api_view
@has_request_variables
def api_github_landing(request, user_profile, event=POST,
                       payload=POST(converter=json_to_dict)):
    """GitHub webhook endpoint: turn pull_request and push events into
    messages on the "commits" stream.

    `event` is GitHub's event name; `payload` is GitHub's JSON body,
    decoded into a dict.  Unhandled event types are acknowledged and
    dropped.
    """
    # TODO: this should all be moved to an external bot
    repository = payload['repository']

    # CUSTOMER18 has requested not to get pull request notifications
    if event == 'pull_request' and user_profile.realm.domain not in ['customer18.invalid', 'humbughq.com']:
        pull_req = payload['pull_request']

        subject = "%s: pull request %d" % (repository['name'],
                                           pull_req['number'])
        content = ("Pull request from %s [%s](%s):\n\n %s\n\n> %s"
                   % (pull_req['user']['login'],
                      payload['action'],
                      pull_req['html_url'],
                      pull_req['title'],
                      pull_req['body']))
    elif event == 'push':
        # Strip the "refs/heads/" prefix to get the branch name.
        short_ref = re.sub(r'^refs/heads/', '', payload['ref'])
        # This is a bit hackish, but is basically so that CUSTOMER18 doesn't
        # get spammed when people commit to non-master all over the place.
        # Long-term, this will be replaced by some GitHub configuration
        # option of which branches to notify on.
        if short_ref != 'master' and user_profile.realm.domain in ['customer18.invalid', 'humbughq.com']:
            return json_success()

        subject = repository['name']
        if re.match(r'^0+$', payload['after']):
            # An all-zero "after" sha means the branch was deleted.
            content = "%s deleted branch %s" % (payload['pusher']['name'],
                                                short_ref)
        elif len(payload['commits']) == 0:
            # A push with no commits is a force push (history rewrite).
            content = ("%s [force pushed](%s) to branch %s. Head is now %s"
                       % (payload['pusher']['name'],
                          payload['compare'],
                          short_ref,
                          payload['after'][:7]))
        else:
            content = ("%s [pushed](%s) to branch %s\n\n"
                       % (payload['pusher']['name'],
                          payload['compare'],
                          short_ref))
            # List at most 10 commits, each as "<short sha>: <first line>".
            num_commits = len(payload['commits'])
            max_commits = 10
            truncated_commits = payload['commits'][:max_commits]
            for commit in truncated_commits:
                short_id = commit['id'][:7]
                (short_commit_msg, _, _) = commit['message'].partition("\n")
                content += "* [%s](%s): %s\n" % (short_id, commit['url'],
                                                 short_commit_msg)
            if (num_commits > max_commits):
                content += ("\n[and %d more commits]"
                            % (num_commits - max_commits,))
    else:
        # We don't handle other events even though we get notified
        # about them
        return json_success()

    if len(subject) > MAX_SUBJECT_LENGTH:
        # NOTE(review): truncates to 57 chars + "...", presumably assuming
        # MAX_SUBJECT_LENGTH == 60 — confirm these stay in sync.
        subject = subject[:57].rstrip() + '...'

    # Attribute the resulting message to the github bot client.
    request.client = get_client("github_bot")
    return send_message_backend(request, user_profile,
                                message_type_name="stream",
                                message_to=["commits"],
                                forged=False, subject_name=subject,
                                message_content=content)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-03-15 19:51:19 +01:00
|
|
|
@cache_with_key(lambda user_profile: user_profile.realm_id, timeout=60)
def get_status_list(requesting_user_profile):
    """Return {'presences': {email: {client_name: status_dict}}} for every
    user in the requester's realm.  Cached per-realm for 60 seconds."""
    def presence_to_dict(presence):
        # Map the stored status constant onto its wire-format name.
        status_names = {UserPresence.ACTIVE: 'active',
                        UserPresence.IDLE: 'idle'}
        try:
            presence_val = status_names[presence.status]
        except KeyError:
            raise JsonableError("Invalid presence value in db: %s" % (presence,))

        return {'status' : presence_val,
                'timestamp': datetime_to_timestamp(presence.timestamp)}

    user_statuses = defaultdict(dict)

    # Return no status info for MIT
    if requesting_user_profile.realm.domain == 'mit.edu':
        return {'presences': user_statuses}

    presences = UserPresence.objects.filter(
        user_profile__realm=requesting_user_profile.realm).select_related(
        'user_profile', 'user_profile__user', 'client')
    for presence in presences:
        email = presence.user_profile.user.email
        user_statuses[email][presence.client.name] = presence_to_dict(presence)

    return {'presences': user_statuses}
|
2013-02-11 21:47:45 +01:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_update_active_status(request, user_profile,
                              status=POST):
    """Record the user's presence ('active' or 'idle') for this client and
    return the realm's current status list."""
    status_values = {'active': UserPresence.ACTIVE,
                     'idle': UserPresence.IDLE}
    if status not in status_values:
        raise JsonableError("Invalid presence status: %s" % (status,))

    update_user_presence(user_profile, request.client, now(),
                         status_values[status])

    ret = get_status_list(user_profile)
    if user_profile.realm.domain == "mit.edu":
        # For MIT users, also report whether their zephyr mirror has polled
        # for messages within the last five minutes.
        try:
            activity = UserActivity.objects.get(user_profile = user_profile,
                                                query="/api/v1/get_messages",
                                                client__name="zephyr_mirror")
        except UserActivity.DoesNotExist:
            ret['zephyr_mirror_active'] = False
        else:
            cutoff = datetime.datetime.utcnow() - datetime.timedelta(minutes=5)
            ret['zephyr_mirror_active'] = \
                (activity.last_visit.replace(tzinfo=None) > cutoff)

    return json_success(ret)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-02-11 21:47:45 +01:00
|
|
|
@authenticated_json_post_view
def json_get_active_statuses(request, user_profile):
    # Thin JSON wrapper around get_status_list (cached per-realm).
    return json_success(get_status_list(user_profile))
|
2013-03-11 20:54:27 +01:00
|
|
|
|
2013-03-28 18:53:44 +01:00
|
|
|
# Read the source map information for decoding JavaScript backtraces
|
2013-03-28 20:49:08 +01:00
|
|
|
js_source_map = None
if not (settings.DEBUG or settings.TEST_SUITE):
    # Production serves minified JS; load its source map so reported
    # stack traces can be annotated with original source locations.
    js_source_map = SourceMap(path.join(
        settings.SITE_ROOT, '../prod-static/source-map/app.js.map'))
|
2013-03-28 18:53:44 +01:00
|
|
|
|
2013-03-11 20:54:27 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_error(request, user_profile, message=POST, stacktrace=POST,
                      ui_message=POST(converter=json_to_bool), user_agent=POST):
    """E-mail a client-side JavaScript error report to the site admins.

    ui_message says whether the user actually saw the error in the UI.
    """
    prefix = "User-visible browser " if ui_message else "Browser "
    subject = prefix + "error for %s" % (user_profile.user.email,)

    if js_source_map:
        # Translate minified JS frames back to original source locations.
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    body = ("Message:\n%s\n\nStacktrace:\n%s\n\nUser agent:\n%s\n\n"
            "User saw error in UI: %s"
            % (message, stacktrace, user_agent, ui_message))
    mail_admins(subject, body)
    return json_success()
|
2013-03-14 23:21:53 +01:00
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_events_register(request, user_profile):
    # Browser clients use the backend's defaults (apply_markdown=True),
    # so no extra parameters are parsed here.
    return events_register_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_api_view
@has_request_variables
def api_events_register(request, user_profile,
                        apply_markdown=POST(default=False, converter=json_to_bool)):
    # API clients receive raw message content unless they explicitly
    # request markdown rendering via apply_markdown.
    return events_register_backend(request, user_profile,
                                   apply_markdown=apply_markdown)
|
|
|
|
|
|
|
|
@has_request_variables
def events_register_backend(request, user_profile, apply_markdown=True,
                            event_types=POST(converter=json_to_list, default=None)):
    # event_types is an optional JSON list of event type names.
    # NOTE(review): a value of None presumably means "all event types" —
    # confirm against do_events_register in zephyr.lib.
    ret = do_events_register(user_profile, apply_markdown, event_types)
    return json_success(ret)
|