from django.conf import settings
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.utils.timezone import utc, now
from django.core.exceptions import ValidationError
from django.contrib.auth.views import login as django_login_page
from django.db.models import Q
from zephyr.models import Message, UserProfile, Stream, Subscription, \
    Recipient, get_display_recipient, get_huddle, Realm, UserMessage, \
    do_add_subscription, do_remove_subscription, do_change_password, \
    do_change_full_name, do_change_enable_desktop_notifications, \
    do_activate_user, add_default_subs, do_create_user, do_send_message, \
    create_mit_user_if_needed, create_stream_if_needed, StreamColor, \
    PreregistrationUser, get_client, MitUser, User, UserActivity, \
    log_subscription_property_change, internal_send_message, \
    MAX_SUBJECT_LENGTH
from zephyr.forms import RegistrationForm, HomepageForm, is_unique, \
    is_active
from django.views.decorators.csrf import csrf_exempt

from zephyr.decorator import asynchronous, require_post, \
    authenticated_api_view, authenticated_json_post_view, \
    internal_notify_view, RespondAsynchronously, \
    has_request_variables, POST, authenticated_json_view
from zephyr.lib.query import last_n
from zephyr.lib.avatar import gravatar_hash
from zephyr.lib.response import json_success, json_error
from zephyr.lib.time import timestamp_to_datetime

from confirmation.models import Confirmation

import datetime
import simplejson
import socket
import re
import urllib
import time
import requests
import os
import base64

SERVER_GENERATION = int(time.time())

def to_non_negative_int(x):
    x = int(x)
    if x < 0:
        raise ValueError("argument is negative")
    return x

def get_stream(stream_name, realm):
    try:
        return Stream.objects.get(name__iexact=stream_name, realm=realm)
    except Stream.DoesNotExist:
        return None

def notify_new_user(user, internal=False):
    if internal:
        # When this is done using manage.py vs. the web interface
        internal_blurb = " **INTERNAL SIGNUP** "
    else:
        internal_blurb = " "

    internal_send_message("humbug+signups@humbughq.com",
            Recipient.STREAM, "signups", user.realm.domain,
            "%s <`%s`> just signed up for Humbug!%s(total: **%i**)" % (
                user.full_name,
                user.user.email,
                internal_blurb,
                UserProfile.objects.filter(realm=user.realm, user__is_active=True).count(),
                )
            )

@require_post
def accounts_register(request):
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    email = confirmation.content_object.email
    mit_beta_user = isinstance(confirmation.content_object, MitUser)

    company_name = email.split('@')[-1]

    try:
        if mit_beta_user:
            # MIT users already exist, but are supposed to be inactive.
            is_active(email)
        else:
            # Other users should not already exist at all.
            is_unique(email)
    except ValidationError:
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))

    if request.POST.get('from_confirmation'):
        form = RegistrationForm()
    else:
        form = RegistrationForm(request.POST)
        if form.is_valid():
            password = form.cleaned_data['password']
            full_name = form.cleaned_data['full_name']
            short_name = email.split('@')[0]
            domain = email.split('@')[-1]
            (realm, _) = Realm.objects.get_or_create(domain=domain)

            # FIXME: sanitize email addresses and fullname
            if mit_beta_user:
                user = User.objects.get(email=email)
                do_activate_user(user)
                do_change_password(user, password)
                do_change_full_name(user.userprofile, full_name)
            else:
                user = do_create_user(email, password, realm, full_name, short_name)
                add_default_subs(user)

            notify_new_user(user)

            login(request, authenticate(username=email, password=password))
            return HttpResponseRedirect(reverse('zephyr.views.home'))

    return render_to_response('zephyr/register.html',
            { 'form': form, 'company_name': company_name, 'email': email, 'key': key },
            context_instance=RequestContext(request))

def login_page(request, **kwargs):
    template_response = django_login_page(request, **kwargs)
    try:
        template_response.context_data['email'] = request.GET['email']
    except KeyError:
        pass
    return template_response

def accounts_home(request):
    if request.method == 'POST':
        form = HomepageForm(request.POST)
        if form.is_valid():
            try:
                email = form.cleaned_data['email']
                user = PreregistrationUser.objects.get(email=email)
            except PreregistrationUser.DoesNotExist:
                user = PreregistrationUser()
            user.email = email
            user.save()
            Confirmation.objects.send_confirmation(user, user.email)
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': user.email}))
        try:
            email = request.POST['email']
            is_unique(email)
        except ValidationError:
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))
    else:
        form = HomepageForm()
    return render_to_response('zephyr/accounts_home.html', {'form': form},
                              context_instance=RequestContext(request))

@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def home(request):
    # We need to modify the session object every two weeks or it will expire.
    # This line makes reloading the page a sufficient action to keep the
    # session alive.
    request.session.modified = True

    user_profile = UserProfile.objects.get(user=request.user)

    num_messages = UserMessage.objects.filter(user_profile=user_profile).count()

    if user_profile.pointer == -1 and num_messages > 0:
        # Put the new user's pointer at the bottom
        #
        # This improves performance, because we limit backfilling of messages
        # before the pointer.  It's also likely that someone joining an
        # organization is interested in recent messages more than the very
        # first messages on the system.

        max_id = (UserMessage.objects.filter(user_profile=user_profile)
                                     .order_by('message')
                                     .reverse()[0]).message_id
        user_profile.pointer = max_id
        user_profile.last_pointer_updater = request.session.session_key

    # Populate personals autocomplete list based on everyone in your
    # realm.  Later we might want a 2-layer autocomplete, where we
    # consider specially some sort of "buddy list" who e.g. you've
    # talked to before, but for small organizations, the right list is
    # everyone in your realm.
    people = [{'email' : profile.user.email,
               'full_name' : profile.full_name}
              for profile in
              UserProfile.objects.select_related().filter(realm=user_profile.realm) if
              profile != user_profile]

    subscriptions = Subscription.objects.select_related().filter(user_profile_id=user_profile, active=True)
    streams = [get_display_recipient(sub.recipient) for sub in subscriptions
               if sub.recipient.type == Recipient.STREAM]

    return render_to_response('zephyr/index.html',
                              {'user_profile': user_profile,
                               'email_hash' : gravatar_hash(user_profile.user.email),
                               'people' : people,
                               'streams' : streams,
                               'have_initial_messages':
                                   'true' if num_messages > 0 else 'false',
                               'show_debug':
                                   settings.DEBUG and ('show_debug' in request.GET),
                               'show_activity': can_view_activity(request) },
                              context_instance=RequestContext(request))

@authenticated_api_view
@has_request_variables
def api_update_pointer(request, user_profile, updater=POST('client_id')):
    return update_pointer_backend(request, user_profile, updater)

@authenticated_json_post_view
def json_update_pointer(request, user_profile):
    return update_pointer_backend(request, user_profile,
                                  request.session.session_key)

@has_request_variables
def update_pointer_backend(request, user_profile, updater, pointer=POST(converter=int)):
    if pointer < 0:
        return json_error("Invalid pointer value")

    if pointer <= user_profile.pointer:
        return json_success()

    user_profile.pointer = pointer
    user_profile.last_pointer_updater = updater
    user_profile.save()

    if settings.TORNADO_SERVER:
        requests.post(settings.TORNADO_SERVER + '/notify_pointer_update', data=dict(
            secret          = settings.SHARED_SECRET,
            user            = user_profile.user.id,
            new_pointer     = pointer,
            pointer_updater = updater))

    return json_success()
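
# Illustrative note (endpoint and values inferred from this file, not
# normative): the web client advances its pointer by POSTing e.g. pointer=1234
# to /json/update_pointer, which lands in update_pointer_backend above;
# Tornado is then notified so long-polling clients can pick up the new pointer.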

@authenticated_json_post_view
def json_get_old_messages(request, user_profile):
    return get_old_messages_backend(request, user_profile=user_profile,
                                    apply_markdown=True)

@authenticated_api_view
@has_request_variables
def api_get_old_messages(request, user_profile,
                         apply_markdown=POST(default=False,
                                             converter=simplejson.loads)):
    return get_old_messages_backend(request, user_profile=user_profile,
                                    apply_markdown=apply_markdown)

@has_request_variables
def get_old_messages_backend(request, anchor = POST(converter=to_non_negative_int),
                             num_before = POST(converter=to_non_negative_int),
                             num_after = POST(converter=to_non_negative_int),
                             narrow = POST('narrow', converter=simplejson.loads),
                             user_profile=None, apply_markdown=True):
    query = Message.objects.select_related().filter(usermessage__user_profile = user_profile).order_by('id')

    if 'recipient_id' in narrow:
        query = query.filter(recipient_id = narrow['recipient_id'])
    if 'stream' in narrow:
        stream = Stream.objects.get(realm=user_profile.realm, name__iexact=narrow['stream'])
        recipient = Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id)
        query = query.filter(recipient_id = recipient.id)

    if 'one_on_one_email' in narrow:
        query = query.filter(recipient__type=Recipient.PERSONAL)
        recipient_user = UserProfile.objects.get(user__email = narrow['one_on_one_email'])
        recipient = Recipient.objects.get(type=Recipient.PERSONAL, type_id=recipient_user.id)
        # If we are narrowed to personals with ourself, we want to search for
        # personals where the user with address "one_on_one_email" is the
        # sender *and* the recipient, not personals where the user with
        # address "one_on_one_email" is the sender *or* the recipient.
        if narrow['one_on_one_email'] == user_profile.user.email:
            query = query.filter(Q(sender__user__email=narrow['one_on_one_email']) & Q(recipient=recipient))
        else:
            query = query.filter(Q(sender__user__email=narrow['one_on_one_email']) | Q(recipient=recipient))
    elif 'type' in narrow and (narrow['type'] == "private" or narrow['type'] == "all_private_messages"):
        query = query.filter(Q(recipient__type=Recipient.PERSONAL) | Q(recipient__type=Recipient.HUDDLE))

    if 'subject' in narrow:
        query = query.filter(subject = narrow['subject'])

    if 'searchterm' in narrow:
        query = query.filter(Q(content__icontains=narrow['searchterm']) |
                             Q(subject__icontains=narrow['searchterm']))

    # We add 1 to the number of messages requested to ensure that the
    # resulting list always contains the anchor message
    if num_before != 0 and num_after == 0:
        num_before += 1
        messages = last_n(num_before, query.filter(id__lte=anchor))
    elif num_before == 0 and num_after != 0:
        num_after += 1
        messages = query.filter(id__gte=anchor)[:num_after]
    else:
        num_after += 1
        messages = (last_n(num_before, query.filter(id__lt=anchor))
                    + list(query.filter(id__gte=anchor)[:num_after]))

    ret = {'messages': [message.to_dict(apply_markdown) for message in messages],
           "result": "success",
           "msg": ""}
    return json_success(ret)
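
# Illustrative 'narrow' values (made up for illustration): a client narrowing
# to a stream and subject might POST narrow={"stream": "design", "subject":
# "colors"}, while a search would use {"searchterm": "deploy"}; each
# recognized key simply adds another filter to the query built above.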

@asynchronous
@authenticated_json_post_view
def json_get_updates(request, user_profile, handler):
    client_id = request.session.session_key
    return get_updates_backend(request, user_profile, handler, client_id,
                               client=request._client, apply_markdown=True)

@asynchronous
@authenticated_api_view
@has_request_variables
def api_get_messages(request, user_profile, handler, client_id=POST(default=None),
                     apply_markdown=POST(default=False, converter=simplejson.loads)):
    return get_updates_backend(request, user_profile, handler, client_id,
                               apply_markdown=apply_markdown,
                               client=request._client)

def format_updates_response(messages=[], apply_markdown=True,
                            user_profile=None, new_pointer=None,
                            client=None, update_types=[],
                            client_server_generation=None):
    if client is not None and client.name.endswith("_mirror"):
        messages = [m for m in messages if m.sending_client.name != client.name]
    ret = {'messages': [message.to_dict(apply_markdown) for message in messages],
           "result": "success",
           "msg": "",
           'update_types': update_types}
    if client_server_generation is not None:
        ret['server_generation'] = SERVER_GENERATION
    if new_pointer is not None:
        ret['new_pointer'] = new_pointer
    if user_profile.realm.domain == "mit.edu":
        try:
            activity = UserActivity.objects.get(user_profile = user_profile,
                                                query="/api/v1/get_messages",
                                                client__name="zephyr_mirror")
            ret['zephyr_mirror_active'] = \
                (activity.last_visit.replace(tzinfo=None) >
                 datetime.datetime.utcnow() - datetime.timedelta(minutes=5))
        except UserActivity.DoesNotExist:
            ret['zephyr_mirror_active'] = False

    return ret

def return_messages_immediately(user_profile, client_id, last,
                                client_server_generation,
                                client_pointer, dont_block, **kwargs):
    if last is None:
        # When an API user is first querying the server to subscribe,
        # there's no reason to reply immediately.
        # TODO: Make this work with server_generation
        return None

    if UserMessage.objects.filter(user_profile=user_profile).count() == 0:
        # The client has no messages, so we should immediately start long-polling
        return None

    if last < 0:
        return {"msg": "Invalid 'last' argument", "result": "error"}

    new_pointer = None
    query = Message.objects.select_related().filter(usermessage__user_profile = user_profile).order_by('id')

    messages = query.filter(id__gt=last)[:400]

    # Filter for mirroring before checking whether there are any
    # messages to pass on.  If we don't do this, when the only message
    # to forward is one that was sent via the mirroring, the API
    # client will end up in an endless loop requesting more data from
    # us.
    if "client" in kwargs and kwargs["client"].name.endswith("_mirror"):
        messages = [m for m in messages if
                    m.sending_client.name != kwargs["client"].name]

    update_types = []
    if messages:
        update_types.append("new_messages")

    if dont_block:
        update_types.append("nonblocking_request")

    if (client_server_generation is not None and
        client_server_generation != SERVER_GENERATION):
        update_types.append("client_reload")

    ptr = user_profile.pointer
    if (client_pointer is not None and ptr > client_pointer):
        new_pointer = ptr
        update_types.append("pointer_update")

    if update_types:
        return format_updates_response(messages=messages,
                                       user_profile=user_profile,
                                       new_pointer=new_pointer,
                                       client_server_generation=client_server_generation,
                                       update_types=update_types,
                                       **kwargs)

    return None
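
# The possible update_types produced above are "new_messages",
# "nonblocking_request", "client_reload", and "pointer_update"; if none of
# them apply, we return None and get_updates_backend falls through to
# long-polling.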

def send_with_safety_check(response, handler, apply_markdown=True, **kwargs):
    # Make sure that Markdown rendering really happened, if requested.
    # This is a security issue because it's where we escape HTML.
    # c.f. ticket #64
    #
    # apply_markdown=True is the fail-safe default.
    if response['result'] == 'success' and apply_markdown:
        for msg in response['messages']:
            if msg['content_type'] != 'text/html':
                handler.set_status(500)
                handler.finish('Internal error: bad message format')
                return
    if response['result'] == 'error':
        handler.set_status(400)
    handler.finish(response)

@has_request_variables
def get_updates_backend(request, user_profile, handler, client_id,
                        last = POST(converter=int, default=None),
                        client_server_generation = POST(whence='server_generation', default=None,
                                                        converter=int),
                        client_pointer = POST(whence='pointer', converter=int, default=None),
                        dont_block = POST(converter=simplejson.loads, default=False),
                        **kwargs):
    resp = return_messages_immediately(user_profile, client_id, last,
                                       client_server_generation,
                                       client_pointer,
                                       dont_block, **kwargs)
    if resp is not None:
        send_with_safety_check(resp, handler, **kwargs)

        # We have already invoked handler.finish(), so we bypass the usual view
        # response path.  We are "responding asynchronously" except that it
        # already happened.  This is slightly weird, but lets us share
        # send_with_safety_check with the code below.
        return RespondAsynchronously

    # Enter long-polling mode.
    #
    # Instead of responding to the client right away, leave our connection open
    # and return to the Tornado main loop.  One of the notify_* views will
    # eventually invoke one of these callbacks, which will send the delayed
    # response.

    def cb(**cb_kwargs):
        if handler.request.connection.stream.closed():
            return
        try:
            # It would be nice to be able to do these checks in
            # UserProfile.receive, but it doesn't know what the value
            # of "last" was for each callback.
            if last is not None and "messages" in cb_kwargs:
                messages = cb_kwargs["messages"]

                # Make sure the client doesn't get a message twice
                # when messages are processed out of order.
                if messages[0].id <= last:
                    # We must return a response because we don't have
                    # a way to re-queue a callback and so the client
                    # must do it by making a new request
                    handler.finish({"result": "success",
                                    "msg": "",
                                    'update_types': []})
                    return

                # We need to check whether there are any new messages
                # between the client's get_updates call and the
                # message we're about to return to the client and
                # return them as well or the client will miss them.
                # See #174.
                extra_messages = (Message.objects.select_related()
                                  .filter(usermessage__user_profile = user_profile,
                                          id__gt = last,
                                          id__lt = messages[0].id)
                                  .order_by('id'))
                if extra_messages:
                    new_messages = list(extra_messages)
                    new_messages.append(messages[0])
                    cb_kwargs["messages"] = new_messages

            kwargs.update(cb_kwargs)
            res = format_updates_response(user_profile=user_profile,
                                          client_server_generation=client_server_generation,
                                          **kwargs)
            send_with_safety_check(res, handler, **kwargs)
        except socket.error:
            pass

    user_profile.add_receive_callback(handler.async_callback(cb))
    if client_pointer is not None:
        user_profile.add_pointer_update_callback(handler.async_callback(cb))

    # runtornado recognizes this special return value.
    return RespondAsynchronously

def generate_client_id():
    return base64.b16encode(os.urandom(16)).lower()
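
# Note: this yields a 32-character lowercase hex string (16 random bytes,
# base16-encoded); api_get_profile hands it to API clients as their client_id.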

@authenticated_api_view
def api_get_profile(request, user_profile):
    result = dict(pointer        = user_profile.pointer,
                  client_id      = generate_client_id(),
                  max_message_id = -1)

    messages = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
    if messages:
        result['max_message_id'] = messages[0].id

    return json_success(result)

@authenticated_api_view
def api_send_message(request, user_profile):
    return send_message_backend(request, user_profile, request._client)

@authenticated_json_post_view
def json_send_message(request, user_profile):
    return send_message_backend(request, user_profile, request._client)

# Currently tabbott/extra@mit.edu is our only superuser.  TODO: Make
# this a real superuser security check.
def is_super_user_api(request):
    return request.POST.get("api-key") in ["xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"]

def already_sent_mirrored_message(message):
    if message.recipient.type == Recipient.HUDDLE:
        # For huddle messages, we use a 10-second window because the
        # timestamps aren't guaranteed to actually match between two
        # copies of the same message.
        time_window = datetime.timedelta(seconds=10)
    else:
        time_window = datetime.timedelta(seconds=0)

    # Since our database doesn't store timestamps with
    # better-than-second resolution, we should do our comparisons
    # using objects at second resolution
    pub_date_lowres = message.pub_date.replace(microsecond=0)
    return Message.objects.filter(
        sender=message.sender,
        recipient=message.recipient,
        content=message.content,
        subject=message.subject,
        sending_client=message.sending_client,
        pub_date__gte=pub_date_lowres - time_window,
        pub_date__lte=pub_date_lowres + time_window).exists()

# Validate that the passed-in object is an email address from the user's realm
# TODO: Check that it's a real email address here.
def same_realm_email(user_profile, email):
    try:
        domain = email.split("@", 1)[1]
        return user_profile.realm.domain == domain
    except:
        return False

def extract_recipients(raw_recipients):
    try:
        recipients = simplejson.loads(raw_recipients)
    except simplejson.decoder.JSONDecodeError:
        recipients = [raw_recipients]

    # Strip recipients, and then remove any duplicates and any that
    # are the empty string after being stripped.
    recipients = [recipient.strip() for recipient in recipients]
    return list(set(recipient for recipient in recipients if recipient))
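
# Example behavior (addresses are illustrative): '["a@mit.edu", " b@mit.edu "]'
# yields ['a@mit.edu', 'b@mit.edu'] (order not guaranteed), and a bare,
# non-JSON string like 'a@mit.edu' falls back to the single-element list
# ['a@mit.edu'].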

def create_mirrored_message_users(request, user_profile, recipients):
    if "sender" not in request.POST:
        return (False, None)

    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        for email in recipients:
            referenced_users.add(email.lower())

    # Check that all referenced users are in our realm:
    for email in referenced_users:
        if not same_realm_email(user_profile, email):
            return (False, None)

    # Create users for the referenced users, if needed.
    for email in referenced_users:
        create_mit_user_if_needed(user_profile.realm, email)

    sender = UserProfile.objects.get(user__email=sender_email)
    return (True, sender)

# We do not @require_login for send_message_backend, since it is used
# both from the API and the web service.  Code calling
# send_message_backend should either check the API key or check that
# the user is logged in.
@has_request_variables
def send_message_backend(request, user_profile, client,
                         message_type_name = POST('type'),
                         message_to = POST('to', converter=extract_recipients),
                         forged = POST(default=False),
                         subject_name = POST('subject', lambda x: x.strip(), None),
                         message_content = POST('content')):
    is_super_user = is_super_user_api(request)
    if forged and not is_super_user:
        return json_error("User not authorized for this query")

    if len(message_to) == 0:
        return json_error("Message must have recipients.")

    if client.name == "zephyr_mirror":
        # Here's how security works for non-superuser mirroring:
        #
        # The message must be (1) a private message (2) that
        # is both sent and received exclusively by other users in your
        # realm which (3) must be the MIT realm and (4) you must have
        # received the message.
        #
        # If that's the case, we let it through, but we still have the
        # security flaw that we're trusting your Hesiod data for users
        # you report having sent you a message.
        if "sender" not in request.POST:
            return json_error("Missing sender")
        if message_type_name != "private" and not is_super_user:
            return json_error("User not authorized for this query")
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error("Invalid mirrored message")
        if user_profile.realm.domain != "mit.edu":
            return json_error("Invalid mirrored realm")
        sender = mirror_sender
    else:
        sender = user_profile

    if message_type_name == 'stream':
        if subject_name is None:
            return json_error("Missing subject")
        if len(message_to) > 1:
            return json_error("Cannot send to multiple streams")
        stream_name = message_to[0].strip()
        if stream_name == "":
            return json_error("Stream can't be empty")
        if subject_name == "":
            return json_error("Subject can't be empty")
        if len(stream_name) > 30:
            return json_error("Stream name too long")
        if len(subject_name) > MAX_SUBJECT_LENGTH:
            return json_error("Subject too long")

        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name")
        ## FIXME: Commented out temporarily while we figure out what we want
        # if not valid_stream_name(subject_name):
        #     return json_error("Invalid subject name")

        try:
            stream = Stream.objects.get(realm=user_profile.realm, name__iexact=stream_name)
        except Stream.DoesNotExist:
            return json_error("Stream does not exist")
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    elif message_type_name == 'private':
        recipient_profile_ids = set()
        for email in message_to:
            try:
                recipient_profile_ids.add(UserProfile.objects.get(user__email__iexact=email).id)
            except UserProfile.DoesNotExist:
                return json_error("Invalid email '%s'" % (email,))

        if client.name == "zephyr_mirror":
            if user_profile.id not in recipient_profile_ids and not forged:
                return json_error("User not authorized for this query")

        # If the private message is just between the sender and
        # another person, force it to be a personal internally
        if (len(recipient_profile_ids) == 2
            and sender.id in recipient_profile_ids):
            recipient_profile_ids.remove(sender.id)

        if len(recipient_profile_ids) > 1:
            # Make sure the sender is included in huddle messages
            recipient_profile_ids.add(sender.id)
            huddle = get_huddle(list(recipient_profile_ids))
            recipient = Recipient.objects.get(type_id=huddle.id, type=Recipient.HUDDLE)
        else:
            recipient = Recipient.objects.get(type_id=list(recipient_profile_ids)[0],
                                              type=Recipient.PERSONAL)
    else:
        return json_error("Invalid message type")

    message = Message()
    message.sender = sender
    message.content = message_content
    message.recipient = recipient
    if message_type_name == 'stream':
        message.subject = subject_name
    if forged:
        # Forged messages come with a timestamp
        message.pub_date = timestamp_to_datetime(request.POST['time'])
    else:
        message.pub_date = now()
    message.sending_client = client

    if client.name == "zephyr_mirror" and already_sent_mirrored_message(message):
        return json_success()

    do_send_message(message)

    return json_success()
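
# Illustrative request shape (field values made up): a stream message arrives
# as a POST with type="stream", to='["commits"]', subject="deploys" and the
# body in content; private messages instead use type="private" with a JSON
# list of recipient emails in "to".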

@internal_notify_view
def notify_new_message(request):
    # If a message for some reason has no recipients (e.g. it is sent
    # by a bot to a stream that nobody is subscribed to), just skip
    # the message gracefully
    if request.POST["users"] == "":
        return json_success()

    # FIXME: better query
    users = [UserProfile.objects.get(id=user)
             for user in simplejson.loads(request.POST['users'])]
    message = Message.objects.get(id=request.POST['message'])

    # Cause message.to_dict() to return the dicts already rendered in the other process.
    #
    # We decode this JSON only to eventually re-encode it as JSON.
    # This isn't trivial to fix, because we do access some fields in the meantime
    # (see send_with_safety_check).  It's probably not a big deal.
    message.precomputed_dicts = simplejson.loads(request.POST['rendered'])

    for user in users:
        user.receive(message)

    return json_success()

@internal_notify_view
def notify_pointer_update(request):
    # FIXME: better query
    user_profile = UserProfile.objects.get(id=request.POST['user'])
    new_pointer = int(request.POST['new_pointer'])
    pointer_updater = request.POST['pointer_updater']

    user_profile.update_pointer(new_pointer, pointer_updater)

    return json_success()

@authenticated_api_view
def api_get_public_streams(request, user_profile):
    # Only get streams someone is currently subscribed to
    subs_filter = Subscription.objects.filter(active=True).values('recipient_id')
    stream_ids = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=subs_filter).values('type_id')
    streams = sorted(stream.name for stream in
                     Stream.objects.filter(id__in = stream_ids,
                                           realm=user_profile.realm))
    return json_success({"streams": streams})

default_stream_color = "#c2c2c2"

def get_stream_color(sub):
    try:
        return StreamColor.objects.get(subscription=sub).color
    except StreamColor.DoesNotExist:
        return default_stream_color

def gather_subscriptions(user_profile):
    # This is a little awkward because the StreamColor table has foreign keys
    # to Subscription, but not vice versa, and not all Subscriptions have a
    # StreamColor.
    #
    # We could do this with a single OUTER JOIN query but Django's ORM does
    # not provide a simple way to specify one.

    # For now, don't display the subscription for your ability to receive personals.
    subs = Subscription.objects.filter(
        user_profile    = user_profile,
        active          = True,
        recipient__type = Recipient.STREAM)
    with_color = StreamColor.objects.filter(subscription__in = subs).select_related()
    no_color = subs.exclude(id__in = with_color.values('subscription_id')).select_related()

    result = [(get_display_recipient(sc.subscription.recipient), sc.color)
              for sc in with_color]
    result.extend((get_display_recipient(sub.recipient), default_stream_color)
                  for sub in no_color)

    return sorted(result)
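
# gather_subscriptions returns a sorted list of (stream name, color) pairs,
# e.g. [("commits", "#c2c2c2")] (example entry only); streams without a
# StreamColor row fall back to default_stream_color.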

@authenticated_api_view
def api_list_subscriptions(request, user_profile):
    return json_success({"subscriptions": gather_subscriptions(user_profile)})

@authenticated_json_post_view
def json_list_subscriptions(request, user_profile):
    return json_success({"subscriptions": gather_subscriptions(user_profile)})

@authenticated_api_view
def api_remove_subscriptions(request, user_profile):
    return remove_subscriptions_backend(request, user_profile)

@authenticated_json_post_view
def json_remove_subscriptions(request, user_profile):
    return remove_subscriptions_backend(request, user_profile)

@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = POST("subscriptions", simplejson.loads)):
    if not isinstance(streams_raw, list):
        return json_error("'subscriptions' argument must be a list")

    streams = []
    for stream_name in set(stream_name.strip() for stream_name in streams_raw):
        stream = get_stream(stream_name, user_profile.realm)
        if stream is None:
            return json_error("Stream %s does not exist" % stream_name)
        streams.append(stream)

    result = dict(removed=[], not_subscribed=[])
    for stream in streams:
        did_remove = do_remove_subscription(user_profile, stream)
        if did_remove:
            result["removed"].append(stream.name)
        else:
            result["not_subscribed"].append(stream.name)

    return json_success(result)

def valid_stream_name(name):
    return name != ""

@authenticated_api_view
def api_add_subscriptions(request, user_profile):
    return add_subscriptions_backend(request, user_profile)

@authenticated_json_post_view
def json_add_subscriptions(request, user_profile):
    return add_subscriptions_backend(request, user_profile)

@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = POST('subscriptions', simplejson.loads)):
    if not isinstance(streams_raw, list):
        return json_error("'subscriptions' argument must be a list")

    stream_names = []
    for stream_name in streams_raw:
        stream_name = stream_name.strip()
        if len(stream_name) > 30:
            return json_error("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name,))
        stream_names.append(stream_name)

    result = dict(subscribed=[], already_subscribed=[])
    for stream_name in set(stream_names):
        stream = create_stream_if_needed(user_profile.realm, stream_name)
        did_subscribe = do_add_subscription(user_profile, stream)
        if did_subscribe:
            result["subscribed"].append(stream_name)
        else:
            result["already_subscribed"].append(stream_name)

    return json_success(result)
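
# Illustrative exchange (stream names made up): POSTing
# subscriptions='["design", "social"]' produces a result like
# {"subscribed": ["design", "social"], "already_subscribed": []}, with each
# name landing in whichever bucket do_add_subscription reports.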

@authenticated_json_post_view
@has_request_variables
def json_change_settings(request, user_profile, full_name=POST,
                         old_password=POST, new_password=POST,
                         confirm_password=POST,
                         # enable_desktop_notifications needs to default to False
                         # because browsers POST nothing for an unchecked checkbox
                         enable_desktop_notifications=POST(converter=lambda x: x == "on",
                                                           default=False)):
    if new_password != "":
        if new_password != confirm_password:
            return json_error("New password must match confirmation password!")
        if not authenticate(username=user_profile.user.email, password=old_password):
            return json_error("Wrong password!")
        do_change_password(user_profile.user, new_password)

    result = {}
    if user_profile.full_name != full_name and full_name.strip() != "":
        do_change_full_name(user_profile, full_name.strip())
        result['full_name'] = full_name

    if user_profile.enable_desktop_notifications != enable_desktop_notifications:
        do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications)
        result['enable_desktop_notifications'] = enable_desktop_notifications

    return json_success(result)

@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream=POST):
    if not valid_stream_name(stream):
        return json_error("Invalid characters in stream name")
    stream = get_stream(stream, user_profile.realm)
    result = {"exists": bool(stream)}
    if stream is not None:
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        result["subscribed"] = Subscription.objects.filter(user_profile=user_profile,
                                                           recipient=recipient,
                                                           active=True).exists()
    return json_success(result)

def set_stream_color(user_profile, stream_name, color):
    stream = get_stream(stream_name, user_profile.realm)
    if not stream:
        return json_error("Invalid stream %s" % (stream_name,))
    recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
    subscription = Subscription.objects.filter(user_profile=user_profile,
                                               recipient=recipient, active=True)
    if not subscription.exists():
        return json_error("Not subscribed to stream %s" % (stream_name,))

    stream_color, _ = StreamColor.objects.get_or_create(subscription=subscription[0])
    # TODO: sanitize color.
    stream_color.color = color
    stream_color.save()

class SubscriptionProperties(object):
    """
    A class for managing GET and POST requests for subscription properties. The
    name for a request handler is <request type>_<property name>.

    Requests must have already been authenticated before being processed here.

    Requests that set or change subscription properties should typically log the
    change through log_event.
    """
    def __call__(self, request, user_profile, property):
        property_method = getattr(self, "%s_%s" % (request.method.lower(), property), None)
        if not property_method:
            return json_error("Unknown property or invalid verb for %s" % (property,))

        return property_method(request, user_profile)

    def request_property(self, request_dict, property):
        return request_dict.get(property, "").strip()

    def get_stream_colors(self, request, user_profile):
        return json_success({"stream_colors": gather_subscriptions(user_profile)})

    def post_stream_colors(self, request, user_profile):
        stream_name = self.request_property(request.POST, "stream_name")
        if not stream_name:
            return json_error("Missing stream_name")
        color = self.request_property(request.POST, "color")
        if not color:
            return json_error("Missing color")

        set_stream_color(user_profile, stream_name, color)
        log_subscription_property_change(user_profile.user.email, "stream_color",
                                         {"stream_name": stream_name, "color": color})
        return json_success()
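
# Dispatch example: a GET with property=stream_colors is routed by __call__ to
# get_stream_colors above, and a POST with the same property to
# post_stream_colors; supporting a new property only requires adding
# get_<property> and/or post_<property> methods to this class.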

subscription_properties = SubscriptionProperties()

def make_property_call(request, query_dict, user_profile):
    property = query_dict.get("property", "").strip()
    if not property:
        return json_error("Missing property")

    return subscription_properties(request, user_profile, property.lower())

def make_get_property_call(request, user_profile):
    return make_property_call(request, request.GET, user_profile)

def make_post_property_call(request, user_profile):
    return make_property_call(request, request.POST, user_profile)

@authenticated_json_view
def json_subscription_property(request, user_profile):
    """
    This is the entry point to accessing or changing subscription
    properties. Authentication happens here.

    Add a handler for a new subscription property in SubscriptionProperties.
    """
    if request.method == "GET":
        return make_get_property_call(request, user_profile)
    elif request.method == "POST":
        return make_post_property_call(request, user_profile)
    else:
        return json_error("Invalid verb")

@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(request, username=POST, password=POST):
    user = authenticate(username=username, password=password)
    if user is None:
        return json_error("Your username or password is incorrect.", status=403)
    if not user.is_active:
        return json_error("Your account has been disabled.", status=403)
    return json_success({"api_key": user.userprofile.api_key})

@authenticated_json_post_view
@has_request_variables
def json_fetch_api_key(request, user_profile, password=POST):
    if not request.user.check_password(password):
        return json_error("Your username or password is incorrect.")
    return json_success({"api_key": user_profile.api_key})

class ActivityTable(object):
    def __init__(self, client_name, queries, default_tab=False):
        self.default_tab = default_tab
        self.has_pointer = False
        self.rows = {}
        for url, query_name in queries:
            if 'pointer' in query_name:
                self.has_pointer = True
            for record in UserActivity.objects.filter(
                    query=url,
                    client__name=client_name):
                row = self.rows.setdefault(record.user_profile.user.email, {})
                row[query_name + '_count'] = record.count
                row[query_name + '_last' ] = record.last_visit

        for row in self.rows.values():
            # kind of a hack
            last_action = max(v for v in row.values() if isinstance(v, datetime.datetime))
            age = now() - last_action
            if age < datetime.timedelta(minutes=10):
                row['class'] = 'recently_active'
            elif age >= datetime.timedelta(days=1):
                row['class'] = 'long_inactive'
            row['age'] = age

    def sorted_rows(self):
        return sorted(self.rows.iteritems(), key=lambda (k,r): r['age'])
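
# Each row built above maps keys like "send_message_count"/"send_message_last"
# to counts and last-visit times, plus an "age" timedelta and an optional
# "class" of "recently_active" or "long_inactive" for the activity template.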

def can_view_activity(request):
    return request.user.userprofile.realm.domain == 'humbughq.com'

@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def get_activity(request):
    if not can_view_activity(request):
        return HttpResponseRedirect(reverse('zephyr.views.login_page'))

    web_queries = (
        ("/json/get_updates",    "get_updates"),
        ("/json/send_message",   "send_message"),
        ("/json/update_pointer", "update_pointer"),
    )

    api_queries = (
        ("/api/v1/get_messages", "get_updates"),
        ("/api/v1/send_message", "send_message"),
    )

    return render_to_response('zephyr/activity.html',
        { 'data': {
            'Website': ActivityTable('website',       web_queries, default_tab=True),
            'Mirror':  ActivityTable('zephyr_mirror', api_queries),
            'API':     ActivityTable('API',           api_queries)
        }}, context_instance=RequestContext(request))

@authenticated_api_view
@has_request_variables
def api_github_landing(request, user_profile, event=POST,
                       payload=POST(converter=simplejson.loads)):
    # TODO: this should all be moved to an external bot

    repository = payload['repository']

    if event == 'pull_request':
        pull_req = payload['pull_request']

        subject = "%s: pull request %d" % (repository['name'],
                                           pull_req['number'])
        content = ("Pull request from %s [%s](%s):\n\n %s\n\n> %s"
                   % (pull_req['user']['login'],
                      payload['action'],
                      pull_req['html_url'],
                      pull_req['title'],
                      pull_req['body']))
    elif event == 'push':
        short_ref = re.sub(r'^refs/heads/', '', payload['ref'])
        subject = repository['name']
        if re.match(r'^0+$', payload['after']):
            content = "%s deleted branch %s" % (payload['pusher']['name'],
                                                short_ref)
        elif len(payload['commits']) == 0:
            content = ("%s [force pushed](%s) to branch %s. Head is now %s"
                       % (payload['pusher']['name'],
                          payload['compare'],
                          short_ref,
                          payload['after'][:7]))
        else:
            content = ("%s [pushed](%s) to branch %s\n\n"
                       % (payload['pusher']['name'],
                          payload['compare'],
                          short_ref))
            for commit in payload['commits']:
                short_id = commit['id'][:7]
                (short_commit_msg, _, _) = commit['message'].partition("\n")
                content += "* [%s](%s): %s\n" % (short_id, commit['url'],
                                                 short_commit_msg)
    else:
        # We don't handle other events even though we get notified
        # about them
        return json_success()

    if len(subject) > MAX_SUBJECT_LENGTH:
        subject = subject[:57].rstrip() + '...'

    return send_message_backend(request, user_profile, get_client("github_bot"),
                                message_type_name="stream",
                                message_to=["commits"],
                                forged=False, subject_name=subject,
                                message_content=content)