from __future__ import absolute_import

from django.conf import settings
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render_to_response, redirect
from django.template import RequestContext, loader
from django.utils.timezone import now
from django.utils.html import mark_safe
from django.utils.cache import patch_cache_control
from django.core.exceptions import ValidationError
from django.core import validators
from django.contrib.auth.views import login as django_login_page, \
    logout_then_login as django_logout_then_login
from django.db.models import Q, F
from django.core.mail import send_mail, mail_admins, EmailMessage
from django.db import transaction
from zerver.models import Message, UserProfile, Stream, Subscription, \
    Recipient, Realm, UserMessage, bulk_get_recipients, \
    PreregistrationUser, get_client, MitUser, UserActivity, UserActivityInterval, \
    get_stream, bulk_get_streams, UserPresence, \
    get_recipient, valid_stream_name, to_dict_cache_key_id, \
    extract_message_dict, stringify_message_dict, parse_usermessage_flags, \
    email_to_domain, email_to_username, get_realm, completely_open, \
    is_super_user, AppleDeviceToken, get_active_user_dicts_in_realm
from zerver.lib.actions import bulk_remove_subscriptions, \
    do_change_password, create_mirror_user_if_needed, compute_irc_user_fullname, \
    compute_jabber_user_fullname, do_change_full_name, \
    do_change_enable_desktop_notifications, do_change_enter_sends, do_change_enable_sounds, \
    do_activate_user, do_create_user, check_send_message, \
    do_change_subscription_property, internal_send_message, \
    create_stream_if_needed, gather_subscriptions, \
    update_user_presence, bulk_add_subscriptions, do_update_message_flags, \
    recipient_for_emails, extract_recipients, do_events_register, \
    get_status_dict, do_change_enable_offline_email_notifications, \
    do_update_message, internal_prep_message, \
    do_send_messages, get_default_subs, do_deactivate, \
    user_email_is_unique, do_invite_users, do_refer_friend, compute_mit_user_fullname, \
    do_add_alert_words, do_remove_alert_words, do_set_alert_words, get_subscriber_emails, \
    do_set_muted_topics, do_rename_stream, \
    notify_for_streams_by_default, do_change_enable_offline_push_notifications
from zerver.lib.create_user import random_api_key
from zerver.lib.push_notifications import num_push_devices_for_user
from zerver.forms import RegistrationForm, HomepageForm, ToSForm, CreateBotForm, \
    is_inactive
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django_openid_auth.views import default_render_failure, login_complete
from openid.consumer.consumer import SUCCESS as openid_SUCCESS
from openid.extensions import ax
from zerver.lib import bugdown
from zerver.lib.alert_words import user_alert_words

from zerver.decorator import require_post, \
    authenticated_api_view, authenticated_json_post_view, \
    has_request_variables, authenticated_json_view, \
    to_non_negative_int, json_to_dict, json_to_list, json_to_bool, \
    JsonableError, get_user_profile_by_email, process_as_post, REQ, \
    zulip_internal
from zerver.lib.query import last_n
from zerver.lib.avatar import avatar_url
from zerver.lib.upload import upload_message_image_through_web_client, upload_avatar_image, \
    get_signed_upload_url
from zerver.lib.response import json_success, json_error, json_response, json_method_not_allowed
from zerver.lib.cache import generic_bulk_cached_fetch
from zerver.lib.unminify import SourceMap
from zerver.lib.queue import queue_json_publish
from zerver.lib.utils import statsd, generate_random_token
from zerver.lib.timestamp import timestamp_to_datetime
from zerver import tornado_callbacks
from django.db import connection

from confirmation.models import Confirmation

import subprocess
import calendar
import datetime
import itertools
import ujson
import simplejson
import re
import urllib
import base64
import time
import logging
import os
from collections import defaultdict

from zerver.lib.rest import rest_dispatch as _rest_dispatch
rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))

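# rest_dispatch (see zerver/lib/rest.py) is the entry point for the REST-style
# URLs: urls.py names a handler per HTTP method, and the wrapper above resolves
# that name against this module's globals(), so a single URL pattern can fan
# out to separate GET/POST/PATCH/DELETE views defined below.
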
def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
    """Converts plaintext stream names to a list of Streams, validating input in the process

    For each stream name, we validate it to ensure it meets our
    requirements for a proper stream name: that is, that it is shorter
    than Stream.MAX_NAME_LENGTH characters and passes
    valid_stream_name.

    This function in autocreate mode should be atomic: either an exception will be raised
    during a precheck, or all the streams specified will have been created if applicable.

    @param streams_raw The list of stream names to process
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    @param invite_only Whether newly created streams should have the invite_only bit set
    """
    existing_streams = []
    created_streams = []
    # Validate all streams, getting extant ones, then get-or-creating the rest.
    stream_set = set(stream_name.strip() for stream_name in streams_raw)
    rejects = []
    for stream_name in stream_set:
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            raise JsonableError("Invalid stream name (%s)." % (stream_name,))

    existing_stream_map = bulk_get_streams(user_profile.realm, stream_set)

    for stream_name in stream_set:
        stream = existing_stream_map.get(stream_name.lower())
        if stream is None:
            rejects.append(stream_name)
        else:
            existing_streams.append(stream)
    if autocreate:
        for stream_name in rejects:
            stream, created = create_stream_if_needed(user_profile.realm,
                                                      stream_name,
                                                      invite_only=invite_only)
            if created:
                created_streams.append(stream)
            else:
                existing_streams.append(stream)
    elif rejects:
        raise JsonableError("Stream(s) (%s) do not exist" % ", ".join(rejects))

    return existing_streams, created_streams

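# Illustrative call (the stream names here are made up): the subscription
# endpoints do roughly
#   existing, created = list_to_streams(["social", "commits"], user_profile,
#                                       autocreate=True)
# and then bulk-subscribe the requesting user to existing + created.
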
def send_signup_message(sender, signups_stream, user_profile,
                        internal=False, realm=None):
    if internal:
        # When this is done using manage.py vs. the web interface
        internal_blurb = " **INTERNAL SIGNUP** "
    else:
        internal_blurb = " "

    # Send notification to realm notifications stream if it exists
    # Don't send notification for the first user in a realm
    user_dicts = get_active_user_dicts_in_realm(user_profile.realm)
    realm_user_count = len([user_dict for user_dict in user_dicts if not user_dict["is_bot"]])
    if user_profile.realm.notifications_stream is not None and realm_user_count > 1:
        internal_send_message(sender, "stream",
                              user_profile.realm.notifications_stream.name,
                              "New users", "%s just signed up for Zulip. Say hello!" % \
                                  (user_profile.full_name,),
                              realm=user_profile.realm)

    internal_send_message(sender,
            "stream", signups_stream, user_profile.realm.domain,
            "%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
                user_profile.full_name,
                user_profile.email,
                internal_blurb,
                realm_user_count,
                )
            )

def notify_new_user(user_profile, internal=False):
    if settings.NEW_USER_BOT is not None:
        send_signup_message(settings.NEW_USER_BOT, "signups", user_profile, internal)
    statsd.gauge("users.signups.%s" % (user_profile.realm.domain.replace('.', '_')), 1, delta=True)

class PrincipalError(JsonableError):
    def __init__(self, principal):
        self.principal = principal

    def to_json_error_msg(self):
        return ("User not authorized to execute queries on behalf of '%s'"
                % (self.principal,))

def principal_to_user_profile(agent, principal):
    principal_doesnt_exist = False
    try:
        principal_user_profile = get_user_profile_by_email(principal)
    except UserProfile.DoesNotExist:
        principal_doesnt_exist = True

    if (principal_doesnt_exist
        or agent.realm != principal_user_profile.realm):
        # We have to make sure we don't leak information about which users
        # are registered for Zulip in a different realm.  We could do
        # something a little more clever and check the domain part of the
        # principal to maybe give a better error message
        raise PrincipalError(principal)

    return principal_user_profile

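# (A "principal" is the email address of the user on whose behalf a request
# acts, e.g. when one user or bot subscribes other users to a stream;
# principal_to_user_profile refuses to resolve emails outside the agent's
# realm so that cross-realm account existence is not leaked.)
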
@require_post
@has_request_variables
def beta_signup_submission(request, name=REQ, email=REQ,
                           company=REQ, count=REQ, product=REQ):
    content = """Name: %s
Email: %s
Company: %s
# users: %s
Currently using: %s""" % (name, email, company, count, product,)
    subject = "Interest in Zulip: %s" % (company,)
    from_email = '"%s" <zulip+signups@zulip.com>' % (name,)
    to_email = '"Zulip Signups" <zulip+signups@zulip.com>'
    headers = {'Reply-To': '"%s" <%s>' % (name, email,)}
    msg = EmailMessage(subject, content, from_email, [to_email], headers=headers)
    msg.send()
    return json_success()

@require_post
def accounts_register(request):
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    mit_beta_user = isinstance(confirmation.content_object, MitUser)

    validators.validate_email(email)
    # If someone invited you, you are joining their realm regardless
    # of your e-mail address.
    #
    # MitUsers can't be referred and don't have a referred_by field.
    if not mit_beta_user and prereg_user.referred_by:
        domain = prereg_user.referred_by.realm.domain
    elif not mit_beta_user and prereg_user.realm:
        # You have a realm set, even though nobody referred you. This
        # happens if you sign up through a special URL for an open
        # realm.
        domain = prereg_user.realm.domain
    else:
        domain = email_to_domain(email)

    try:
        if mit_beta_user:
            # MIT users already exist, but are supposed to be inactive.
            is_inactive(email)
        else:
            # Other users should not already exist at all.
            user_email_is_unique(email)
    except ValidationError:
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))

    if request.POST.get('from_confirmation'):
        if domain == "mit.edu":
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                    initial={'full_name': hesiod_name if "@" not in hesiod_name else ""})
        else:
            form = RegistrationForm()
    else:
        form = RegistrationForm(request.POST)
        if form.is_valid():
            password = form.cleaned_data['password']
            full_name = form.cleaned_data['full_name']
            short_name = email_to_username(email)
            (realm, _) = Realm.objects.get_or_create(domain=domain)
            first_in_realm = len(UserProfile.objects.filter(realm=realm)) == 0

            # FIXME: sanitize email addresses and fullname
            if mit_beta_user:
                try:
                    user_profile = get_user_profile_by_email(email)
                except UserProfile.DoesNotExist:
                    user_profile = do_create_user(email, password, realm, full_name, short_name)
                do_activate_user(user_profile)
                do_change_password(user_profile, password)
                do_change_full_name(user_profile, full_name)
            else:
                user_profile = do_create_user(email, password, realm, full_name, short_name)
                # We want to add the default subs list iff there were no subs
                # specified when the user was invited.
                streams = prereg_user.streams.all()
                if len(streams) == 0:
                    streams = get_default_subs(user_profile)
                bulk_add_subscriptions(streams, [user_profile])

                # Give you the last 100 messages on your streams, so you have
                # something to look at in your home view once you finish the
                # tutorial.
                one_week_ago = now() - datetime.timedelta(weeks=1)
                recipients = Recipient.objects.filter(type=Recipient.STREAM,
                                                      type_id__in=[stream.id for stream in streams])
                messages = Message.objects.filter(recipient_id__in=recipients, pub_date__gt=one_week_ago).order_by("-id")[0:100]
                if len(messages) > 0:
                    ums_to_create = [UserMessage(user_profile=user_profile, message=message,
                                                 flags=UserMessage.flags.read)
                                     for message in messages]

                    UserMessage.objects.bulk_create(ums_to_create)

            if prereg_user.referred_by is not None and settings.NEW_USER_BOT is not None:
                # This is a cross-realm private message.
                internal_send_message(settings.NEW_USER_BOT,
                        "private", prereg_user.referred_by.email, user_profile.realm.domain,
                        "%s <`%s`> accepted your invitation to join Zulip!" % (
                            user_profile.full_name,
                            user_profile.email,
                            )
                        )
            # Mark any other PreregistrationUsers that are STATUS_ACTIVE as inactive
            # so we can find the PreregistrationUser that we are actually working
            # with here
            PreregistrationUser.objects.filter(email=email) \
                                       .exclude(id=prereg_user.id) \
                                       .update(status=0)

            notify_new_user(user_profile)
            queue_json_publish(
                    "signups",
                    {
                        'EMAIL': email,
                        'merge_vars': {
                            'NAME': full_name,
                            'REALM': domain,
                            'OPTIN_IP': request.META['REMOTE_ADDR'],
                            'OPTIN_TIME': datetime.datetime.isoformat(datetime.datetime.now()),
                            },
                        },
                    lambda event: None)

            login(request, authenticate(username=email, password=password))

            if first_in_realm:
                return HttpResponseRedirect(reverse('zerver.views.initial_invite_page'))
            else:
                return HttpResponseRedirect(reverse('zerver.views.home'))

    return render_to_response('zerver/register.html',
            {'form': form,
             'company_name': domain,
             'email': email,
             'key': key,
             'gafyd_name': request.POST.get('gafyd_name', False),
            },
        context_instance=RequestContext(request))

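# Registration flow, for reference: accounts_home (below) creates a
# PreregistrationUser and emails a Confirmation link; following that link
# POSTs back to accounts_register above with the confirmation key, which
# creates (or, for MIT users, activates) the real UserProfile.
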
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def accounts_accept_terms(request):
    email = request.user.email
    domain = email_to_domain(email)
    if request.method == "POST":
        form = ToSForm(request.POST)
        if form.is_valid():
            full_name = form.cleaned_data['full_name']
            send_mail('Terms acceptance for ' + full_name,
                    loader.render_to_string('zerver/tos_accept_body.txt',
                        {'name': full_name,
                         'email': email,
                         'ip': request.META['REMOTE_ADDR'],
                         'browser': request.META['HTTP_USER_AGENT']}),
                    settings.EMAIL_HOST_USER,
                    ["all@zulip.com"])
            do_change_full_name(request.user, full_name)
            return redirect(home)

    else:
        form = ToSForm()
    return render_to_response('zerver/accounts_accept_terms.html',
        { 'form': form, 'company_name': domain, 'email': email },
        context_instance=RequestContext(request))

from zerver.lib.ccache import make_ccache

@authenticated_json_view
@has_request_variables
def webathena_kerberos_login(request, user_profile,
                             cred=REQ(default=None)):
    if cred is None:
        return json_error("Could not find Kerberos credential")
    if user_profile.realm.domain != "mit.edu":
        return json_error("Webathena login only for mit.edu realm")

    try:
        parsed_cred = ujson.loads(cred)
        user = parsed_cred["cname"]["nameString"][0]
        if user == "golem":
            # Hack for an mit.edu user whose Kerberos username doesn't
            # match what he zephyrs as
            user = "ctl"
        assert(user == user_profile.email.split("@")[0])
        ccache = make_ccache(parsed_cred)
    except Exception:
        return json_error("Invalid Kerberos cache")

    # TODO: Send these data via (say) rabbitmq
    try:
        subprocess.check_call(["ssh", "zulip@zmirror2.zulip.net", "--",
                               "/home/zulip/zulip/bots/process_ccache",
                               user,
                               user_profile.api_key,
                               base64.b64encode(ccache)])
    except Exception:
        logging.exception("Error updating the user's ccache")
        return json_error("We were unable to set up mirroring for you")

    return json_success()

def api_endpoint_docs(request):
    raw_calls = open('templates/zerver/api_content.json', 'r').read()
    calls = ujson.loads(raw_calls)
    langs = set()
    for call in calls:
        response = call['example_response']
        if '\n' not in response:
            # For 1-line responses, pretty-print them
            extended_response = response.replace(", ", ",\n ")
        else:
            extended_response = response
        call['rendered_response'] = bugdown.convert("~~~ .py\n" + extended_response + "\n~~~\n", "default")
        for example_type in ('request', 'response'):
            for lang in call.get('example_' + example_type, []):
                langs.add(lang)
    return render_to_response(
            'zerver/api_endpoints.html', {
                'content': calls,
                'langs': langs,
                },
        context_instance=RequestContext(request))

@authenticated_json_post_view
@has_request_variables
def json_invite_users(request, user_profile, invitee_emails=REQ):
    if not invitee_emails:
        return json_error("You must specify at least one email address.")

    invitee_emails = set(re.split(r'[, \n]', invitee_emails))

    stream_names = request.POST.getlist('stream')
    if not stream_names:
        return json_error("You must specify at least one stream for invitees to join.")

    streams = []
    for stream_name in stream_names:
        stream = get_stream(stream_name, user_profile.realm)
        if stream is None:
            return json_error("Stream does not exist: %s. No invites were sent." % (stream_name,))
        streams.append(stream)

    ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams)

    if ret_error is not None:
        return json_error(data=error_data, msg=ret_error)
    else:
        return json_success()

def create_homepage_form(request, user_info=None):
    if user_info:
        return HomepageForm(user_info, domain=request.session.get("domain"))
    # An empty fields dict is not treated the same way as not
    # providing it.
    return HomepageForm(domain=request.session.get("domain"))

def handle_openid_errors(request, issue, openid_response=None):
    if issue == "Unknown user":
        if openid_response is not None and openid_response.status == openid_SUCCESS:
            ax_response = ax.FetchResponse.fromSuccessResponse(openid_response)
            google_email = openid_response.getSigned('http://openid.net/srv/ax/1.0', 'value.email')
            full_name = " ".join((
                ax_response.get('http://axschema.org/namePerson/first')[0],
                ax_response.get('http://axschema.org/namePerson/last')[0]))
            form = create_homepage_form(request, user_info={'email': google_email})
            request.verified_email = None
            if form.is_valid():
                # Construct a PreregistrationUser object and send the user over to
                # the confirmation view.
                prereg_user = create_preregistration_user(google_email, request)
                return redirect("".join((
                    "/",
                    # Split this so we only get the part after the /
                    Confirmation.objects.get_link_for_object(prereg_user).split("/", 3)[3],
                    '?gafyd_name=',
                    # urllib does not handle Unicode, so coerce to an encoded byte string
                    # Explanation: http://stackoverflow.com/a/5605354/90777
                    urllib.quote_plus(full_name.encode('utf8')))))
            else:
                return render_to_response('zerver/accounts_home.html', {'form': form})
    return default_render_failure(request, issue)

def process_openid_login(request):
    return login_complete(request, render_failure=handle_openid_errors)

def login_page(request, **kwargs):
    template_response = django_login_page(request, **kwargs)
    try:
        template_response.context_data['email'] = request.GET['email']
    except KeyError:
        pass
    return template_response

@authenticated_json_post_view
@has_request_variables
def json_bulk_invite_users(request, user_profile, invitee_emails=REQ(converter=json_to_list)):
    invitee_emails = set(invitee_emails)
    streams = get_default_subs(user_profile)

    ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams)

    if ret_error is not None:
        return json_error(data=error_data, msg=ret_error)
    else:
        return json_success()

@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def initial_invite_page(request):
    user = request.user
    # Only show the bulk-invite page for the first user in a realm
    domain_count = len(UserProfile.objects.filter(realm=user.realm))
    if domain_count > 1:
        return redirect('zerver.views.home')

    params = {'company_name': user.realm.domain}

    if user.realm.restricted_to_domain:
        params['invite_suffix'] = user.realm.domain

    return render_to_response('zerver/initial_invite_page.html', params,
                              context_instance=RequestContext(request))

@require_post
def logout_then_login(request, **kwargs):
    return django_logout_then_login(request, kwargs)

def create_preregistration_user(email, request):
    domain = request.session.get("domain")
    if not completely_open(domain):
        domain = None
    # MIT users who are not explicitly signing up for an open realm
    # require special handling (They may already have an (inactive)
    # account, for example)
    if email_to_domain(email) == "mit.edu" and not domain:
        prereg_user, created = MitUser.objects.get_or_create(email=email)
    else:
        prereg_user = PreregistrationUser(email=email, realm=get_realm(domain))
        prereg_user.save()

    request.session["domain"] = None

    return prereg_user

def accounts_home_with_domain(request, domain):
    if completely_open(domain):
        # You can sign up for a completely open realm through a
        # special registration path that contains the domain in the
        # URL. We store this information in the session rather than
        # elsewhere because we don't have control over URL or form
        # data for folks registering through OpenID.
        request.session["domain"] = domain
        return accounts_home(request)
    else:
        return HttpResponseRedirect(reverse('zerver.views.accounts_home'))

def accounts_home(request):
    if request.method == 'POST':
        form = create_homepage_form(request, user_info=request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            prereg_user = create_preregistration_user(email, request)
            Confirmation.objects.send_confirmation(prereg_user, email)
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email}))
        try:
            email = request.POST['email']
            # Note: We don't check for uniqueness
            is_inactive(email)
        except ValidationError:
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))
    else:
        form = create_homepage_form(request)
    return render_to_response('zerver/accounts_home.html',
                              {'form': form, 'current_url': request.get_full_path},
                              context_instance=RequestContext(request))

def approximate_unread_count(user_profile):
    not_in_home_view_recipients = [sub.recipient.id for sub in \
                                       Subscription.objects.filter(
                                           user_profile=user_profile, in_home_view=False)]

    # Don't include messages that aren't in your home view, as they might never
    # be read.
    return UserMessage.objects.filter(
        user_profile=user_profile, message_id__gt=user_profile.pointer).exclude(
        message__recipient__type=Recipient.STREAM,
        message__recipient__id__in=not_in_home_view_recipients).count()

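# ("Approximate" because the pointer is a cursor position, not a strict
# read/unread boundary: every message after the pointer is simply assumed
# unread, which is cheap to count but can over- or under-estimate.)
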
def sent_time_in_epoch_seconds(user_message):
    # user_message is a UserMessage object.
    if not user_message:
        return None
    # We have USE_TZ = True, so our datetime objects are timezone-aware.
    # Return the epoch seconds in UTC.
    return calendar.timegm(user_message.message.pub_date.utctimetuple())

@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def home(request):
    # We need to modify the session object every two weeks or it will expire.
    # This line makes reloading the page a sufficient action to keep the
    # session alive.
    request.session.modified = True

    user_profile = request.user
    request._email = request.user.email
    request.client = get_client("website")

    register_ret = do_events_register(user_profile, request.client,
                                      apply_markdown=True)
    user_has_messages = (register_ret['max_message_id'] != -1)

    # Reset our don't-spam-users-with-email counter since the
    # user has since logged in
    if user_profile.last_reminder is not None:
        user_profile.last_reminder = None
        user_profile.save(update_fields=["last_reminder"])

    # Brand new users get the tutorial
    needs_tutorial = settings.TUTORIAL_ENABLED and \
        user_profile.tutorial_status != UserProfile.TUTORIAL_FINISHED

    if user_profile.pointer == -1 and user_has_messages:
        # Put the new user's pointer at the bottom
        #
        # This improves performance, because we limit backfilling of messages
        # before the pointer.  It's also likely that someone joining an
        # organization is interested in recent messages more than the very
        # first messages on the system.

        register_ret['pointer'] = register_ret['max_message_id']
        user_profile.last_pointer_updater = request.session.session_key

    if user_profile.pointer == -1:
        latest_read = None
    else:
        try:
            latest_read = UserMessage.objects.get(user_profile=user_profile,
                                                  message__id=user_profile.pointer)
        except UserMessage.DoesNotExist:
            # Don't completely fail if your saved pointer ID is invalid
            logging.warning("%s has invalid pointer %s" % (user_profile.email, user_profile.pointer))
            latest_read = None

    # Pass parameters to the client-side JavaScript code.
    # These end up in a global JavaScript Object named 'page_params'.
    page_params = simplejson.encoder.JSONEncoderForHTML().encode(dict(
        local_server = settings.LOCAL_SERVER,
        debug_mode = settings.DEBUG,
        test_suite = settings.TEST_SUITE,
        poll_timeout = settings.POLL_TIMEOUT,
        have_initial_messages = user_has_messages,
        stream_list = register_ret['subscriptions'],
        unsubbed_info = register_ret['unsubscribed'],
        email_dict = register_ret['email_dict'],
        people_list = register_ret['realm_users'],
        initial_pointer = register_ret['pointer'],
        initial_presences = register_ret['presences'],
        initial_servertime = time.time(), # Used for calculating relative presence age
        fullname = user_profile.full_name,
        email = user_profile.email,
        domain = user_profile.realm.domain,
        realm_name = user_profile.realm.name,
        enter_sends = user_profile.enter_sends,
        referrals = register_ret['referrals'],
        realm_emoji = register_ret['realm_emoji'],
        needs_tutorial = needs_tutorial,
        desktop_notifications_enabled =
            user_profile.enable_desktop_notifications,
        sounds_enabled =
            user_profile.enable_sounds,
        enable_offline_email_notifications =
            user_profile.enable_offline_email_notifications,
        enable_offline_push_notifications =
            user_profile.enable_offline_push_notifications,
        event_queue_id = register_ret['queue_id'],
        last_event_id = register_ret['last_event_id'],
        max_message_id = register_ret['max_message_id'],
        unread_count = approximate_unread_count(user_profile),
        furthest_read_time = sent_time_in_epoch_seconds(latest_read),
        staging = settings.STAGING_DEPLOYED or not settings.DEPLOYED,
        alert_words = register_ret['alert_words'],
        muted_topics = register_ret['muted_topics'],
        show_admin = user_profile.show_admin,
        notify_for_streams_by_default = notify_for_streams_by_default(user_profile),
        has_mobile_devices = num_push_devices_for_user(user_profile) > 0
    ))

    statsd.incr('views.home')
    show_invites = True

    # For the CUSTOMER4 student realm, only let instructors (who have
    # @customer4.invalid addresses) invite new users.
    if ((user_profile.realm.domain == "users.customer4.invalid") and
        (not user_profile.email.lower().endswith("@customer4.invalid"))):
        show_invites = False

    response = render_to_response('zerver/index.html',
                                  {'user_profile': user_profile,
                                   'page_params': page_params,
                                   'avatar_url': avatar_url(user_profile),
                                   'nofontface': is_buggy_ua(request.META["HTTP_USER_AGENT"]),
                                   'show_debug':
                                       settings.DEBUG and ('show_debug' in request.GET),
                                   'show_invites': show_invites,
                                   'show_admin': user_profile.show_admin,
                                   'show_webathena': user_profile.realm.domain == "mit.edu",
                                   },
                                  context_instance=RequestContext(request))
    patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
    return response

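# (home is the main app-loading view: do_events_register sets up the event
# queue that the client will long-poll via the Tornado server, and page_params
# seeds the client-side JavaScript with everything needed for the initial
# render, before the first event arrives.)
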
def is_buggy_ua(agent):
    """Discriminate CSS served to clients based on User Agent

    Due to QTBUG-3467, @font-face is not supported in QtWebKit.
    This may get fixed in the future, but for right now we can
    just serve the more conservative CSS to all our desktop apps.
    """
    return ("Humbug Desktop/" in agent or "Zulip Desktop/" in agent) and \
        "Macintosh" not in agent

def get_pointer_backend(request, user_profile):
    return json_success({'pointer': user_profile.pointer})

@authenticated_api_view
def api_update_pointer(request, user_profile):
    return update_pointer_backend(request, user_profile)

@authenticated_json_post_view
def json_update_pointer(request, user_profile):
    return update_pointer_backend(request, user_profile)

@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=REQ(converter=to_non_negative_int)):
    if pointer <= user_profile.pointer:
        return json_success()

    try:
        UserMessage.objects.get(
            user_profile=user_profile,
            message__id=pointer
        )
    except UserMessage.DoesNotExist:
        raise JsonableError("Invalid message ID")

    prev_pointer = user_profile.pointer
    user_profile.pointer = pointer
    user_profile.save(update_fields=["pointer"])

    if request.client.name.lower() in ['android', 'iphone']:
        # TODO (leo)
        # Until we handle the new read counts in the mobile apps natively,
        # this is a shim that will mark as read any messages up until the
        # pointer move
        UserMessage.objects.filter(user_profile=user_profile,
                                   message__id__gt=prev_pointer,
                                   message__id__lte=pointer,
                                   flags=~UserMessage.flags.read) \
                           .update(flags=F('flags').bitor(UserMessage.flags.read))

    if settings.TORNADO_SERVER:
        tornado_callbacks.send_notification(dict(
            type = 'pointer_update',
            user = user_profile.id,
            new_pointer = pointer))

    return json_success()

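# (The mobile shim in update_pointer_backend works bitwise: assuming the read
# flag is the low-order bit (value 1), a row with flags 0b100 becomes 0b101
# after F('flags').bitor(UserMessage.flags.read), while rows already marked
# read are excluded up front by the flags=~UserMessage.flags.read filter.)
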
@authenticated_json_post_view
def json_get_old_messages(request, user_profile):
    return get_old_messages_backend(request, user_profile)

@authenticated_api_view
@has_request_variables
def api_get_old_messages(request, user_profile,
                         apply_markdown=REQ(default=False,
                                            converter=ujson.loads)):
    return get_old_messages_backend(request, user_profile,
                                    apply_markdown=apply_markdown)

class BadNarrowOperator(Exception):
    def __init__(self, desc):
        self.desc = desc

    def to_json_error_msg(self):
        return 'Invalid narrow operator: ' + self.desc

class NarrowBuilder(object):
    def __init__(self, user_profile, prefix):
        self.user_profile = user_profile
        self.prefix = prefix

    def __call__(self, query, operator, operand):
        # We have to be careful here because we're letting users call a method
        # by name! The prefix 'by_' prevents it from colliding with builtin
        # Python __magic__ stuff.
        method_name = 'by_' + operator.replace('-', '_')
        if method_name == 'by_search':
            return self.do_search(query, operand)
        method = getattr(self, method_name, None)
        if method is None:
            raise BadNarrowOperator('unknown operator ' + operator)
        return query.filter(method(operand))

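    # (Dispatch example: the narrow term ["pm-with", "foo@example.com"] maps
    # to self.by_pm_with("foo@example.com"); the Q object each by_* method
    # returns is ANDed into the query via query.filter().)
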
    # Wrapper for Q() which adds self.prefix to all the keys
    def pQ(self, **kwargs):
        return Q(**dict((self.prefix + key, kwargs[key]) for key in kwargs.keys()))

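    # (For example, with prefix "message__", self.pQ(sender=user) builds
    # Q(message__sender=user), so the same by_* methods work whether the
    # base query is over Message rows or UserMessage rows.)
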
    def by_is(self, operand):
        if operand == 'private':
            return (self.pQ(recipient__type=Recipient.PERSONAL) |
                    self.pQ(recipient__type=Recipient.HUDDLE))
        elif operand == 'starred':
            return Q(flags=UserMessage.flags.starred)
        elif operand == 'mentioned':
            return Q(flags=UserMessage.flags.mentioned)
        elif operand == 'alerted':
            return Q(flags=UserMessage.flags.mentioned)
        raise BadNarrowOperator("unknown 'is' operand " + operand)

    def by_stream(self, operand):
        stream = get_stream(operand, self.user_profile.realm)
        if stream is None:
            raise BadNarrowOperator('unknown stream ' + operand)

        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to "social" to also show messages to /^(un)*social(.d)*$/
            # (unsocial, ununsocial, social.d, etc)
            m = re.search(r'^(?:un)*(.+?)(?:\.d)*$', stream.name, re.IGNORECASE)
            if m:
                base_stream_name = m.group(1)
            else:
                base_stream_name = stream.name

            matching_streams = Stream.objects.filter(realm=self.user_profile.realm,
                                                     name__iregex=r'^(un)*%s(\.d)*$' % (re.escape(base_stream_name),))
            matching_stream_ids = [matching_stream.id for matching_stream in matching_streams]
            recipients = bulk_get_recipients(Recipient.STREAM, matching_stream_ids).values()
            return self.pQ(recipient__in=recipients)

        recipient = get_recipient(Recipient.STREAM, type_id=stream.id)
        return self.pQ(recipient=recipient)

    def by_topic(self, operand):
        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to topic "foo" to also show messages to /^foo(.d)*$/
            # (foo, foo.d, foo.d.d, etc)
            m = re.search(r'^(.*?)(?:\.d)*$', operand, re.IGNORECASE)
            if m:
                base_topic = m.group(1)
            else:
                base_topic = operand

            # Additionally, MIT users expect the empty instance and
            # instance "personal" to be the same.
            if base_topic in ('', 'personal', '(instance "")'):
                regex = r'^(|personal|\(instance ""\))(\.d)*$'
            else:
                regex = r'^%s(\.d)*$' % (re.escape(base_topic),)

            return self.pQ(subject__iregex=regex)

        return self.pQ(subject__iexact=operand)

    def by_sender(self, operand):
        try:
            sender = get_user_profile_by_email(operand)
        except UserProfile.DoesNotExist:
            raise BadNarrowOperator('unknown user ' + operand)

        return self.pQ(sender=sender)

    def by_near(self, operand):
        return Q()

    def by_id(self, operand):
        return self.pQ(id=operand)

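    # (by_near is deliberately a no-op filter: a "near" operand only changes
    # which anchor the client requests, not which messages match.)
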
    def by_pm_with(self, operand):
        if ',' in operand:
            # Huddle
            try:
                emails = [e.strip() for e in operand.split(',')]
                recipient = recipient_for_emails(emails, False,
                                                 self.user_profile, self.user_profile)
            except ValidationError:
                raise BadNarrowOperator('unknown recipient ' + operand)
            return self.pQ(recipient=recipient)
        else:
            # Personal message
            self_recipient = get_recipient(Recipient.PERSONAL, type_id=self.user_profile.id)
            if operand == self.user_profile.email:
                # Personals with self
                return self.pQ(recipient__type=Recipient.PERSONAL,
                               sender=self.user_profile, recipient=self_recipient)

            # Personals with other user; include both directions.
            try:
                narrow_profile = get_user_profile_by_email(operand)
            except UserProfile.DoesNotExist:
                raise BadNarrowOperator('unknown user ' + operand)

            narrow_recipient = get_recipient(Recipient.PERSONAL, narrow_profile.id)
            return ((self.pQ(sender=narrow_profile) & self.pQ(recipient=self_recipient)) |
                    (self.pQ(sender=self.user_profile) & self.pQ(recipient=narrow_recipient)))

    def do_search(self, query, operand):
        if "postgres" in settings.DATABASES["default"]["ENGINE"]:
            tsquery = "plainto_tsquery('zulip.english_us_search', %s)"
            where = "search_tsvector @@ " + tsquery
            match_content = "ts_headline('zulip.english_us_search', rendered_content, " \
                + tsquery + ", 'StartSel=\"<span class=\"\"highlight\"\">\", StopSel=</span>, " \
                "HighlightAll=TRUE')"
            # We HTML-escape the subject in Postgres to avoid doing a server round-trip
            match_subject = "ts_headline('zulip.english_us_search', escape_html(subject), " \
                + tsquery + ", 'StartSel=\"<span class=\"\"highlight\"\">\", StopSel=</span>, " \
                "HighlightAll=TRUE')"

            # Do quoted string matching.  We really want phrase
            # search here so we can ignore punctuation and do
            # stemming, but there isn't a standard phrase search
            # mechanism in Postgres
            for term in re.findall(r'"[^"]+"|\S+', operand):
                if term[0] == '"' and term[-1] == '"':
                    term = term[1:-1]
                    query = query.filter(self.pQ(content__icontains=term) |
                                         self.pQ(subject__icontains=term))

            return query.extra(select={'match_content': match_content,
                                       'match_subject': match_subject},
                               where=[where],
                               select_params=[operand, operand], params=[operand])
        else:
            for word in operand.split():
                query = query.filter(self.pQ(content__icontains=word) |
                                     self.pQ(subject__icontains=word))
            return query

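# (Search sketch: on Postgres, a narrow like ["search", 'deploy "burning man"']
# becomes a search_tsvector @@ plainto_tsquery(...) WHERE clause for stemmed
# matching on the whole operand, plus an icontains filter for the quoted
# phrase; the ts_headline() selects produce the highlighted match_content and
# match_subject snippets returned to the client.)
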
def narrow_parameter(json):
    # FIXME: A hack to support old mobile clients
    if json == '{}':
        return None

    data = json_to_list(json)
    for elem in data:
        if not isinstance(elem, list):
            raise ValueError("element is not a list")
        if (len(elem) != 2
            or any(not isinstance(x, str) and not isinstance(x, unicode)
                   for x in elem)):
            raise ValueError("element is not a string pair")
    return data

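# (A well-formed narrow parameter is a JSON-encoded list of [operator,
# operand] string pairs, e.g. '[["stream", "social"], ["topic", "lunch"]]'.)
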
def is_public_stream(request, stream, realm):
    if not valid_stream_name(stream):
        raise JsonableError("Invalid stream name")
    stream = get_stream(stream, realm)
    if stream is None:
        return False
    return stream.is_public()

@has_request_variables
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             apply_markdown=REQ(default=True,
                                                converter=ujson.loads)):
    include_history = False
    if narrow is not None:
        for operator, operand in narrow:
            if operator == "stream":
                if is_public_stream(request, operand, user_profile.realm):
                    include_history = True
        # Disable historical messages if the user is narrowing to show
        # only starred messages (or anything else that's a property on
        # the UserMessage table).  There cannot be historical messages
        # in these cases anyway.
        for operator, operand in narrow:
            if operator == "is" and operand == "starred":
                include_history = False

    if include_history:
        prefix = ""
        query = Message.objects.only("id").order_by('id')
    else:
        prefix = "message__"
        # Conceptually this query should be
        #   UserMessage.objects.filter(user_profile=user_profile).order_by('message')
        #
        # However, our do_search code above requires that there be a
        # unique 'rendered_content' row in the query, so we need to
        # somehow get the 'message' table into the query without
        # actually fetching all the rows from the message table (since
        # doing so would cause Django to consume a lot of resources
        # rendering them). The following achieves these objectives.
        query = UserMessage.objects.select_related("message").only("flags", "id", "message__id") \
            .filter(user_profile=user_profile).order_by('message')

# Add some metadata to our logging data for narrows
|
|
|
|
if narrow is not None:
|
|
|
|
operator_data = ",".join(operator for (operator, operand) in narrow)
|
|
|
|
request._extra_log_data = "[%s]" % (operator_data,)
|
|
|
|
|
2013-04-23 19:26:29 +02:00
|
|
|
    num_extra_messages = 1
    is_search = False

    if narrow is None:
        use_raw_query = True
    else:
        use_raw_query = False
        num_extra_messages = 0
        build = NarrowBuilder(user_profile, prefix)
        for operator, operand in narrow:
            if operator == 'search':
                is_search = True
            query = build(query, operator, operand)

    def add_prefix(**kwargs):
        return dict((prefix + key, kwargs[key]) for key in kwargs.keys())

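    # For example (illustrative values): with prefix = "message__",
    # add_prefix(id__lte=100) returns {"message__id__lte": 100}, so the same
    # keyword filter works against both the Message queryset (prefix = "")
    # and the UserMessage queryset built above.
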
    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message. If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    before_result = []
    after_result = []
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        if use_raw_query:
            cursor = connection.cursor()
            # These queries should always be equivalent to what we
            # would do in the !use_raw_query case. In this case we
            # don't actually need the zerver_message join at all.
            cursor.execute("SELECT message_id, flags FROM zerver_usermessage "
                           "WHERE user_profile_id = %s and message_id <= %s " +
                           "ORDER BY message_id DESC LIMIT %s", [user_profile.id, before_anchor, num_before])
            before_result = reversed(cursor.fetchall())
        else:
            before_result = last_n(num_before, query.filter(**add_prefix(id__lte=before_anchor)))
    if num_after != 0:
        if use_raw_query:
            cursor = connection.cursor()
            # These queries should always be equivalent to what we
            # would do in the !use_raw_query case. In this case we
            # don't actually need the zerver_message join at all.
            cursor.execute("SELECT message_id, flags FROM zerver_usermessage "
                           "WHERE user_profile_id = %s and message_id >= %s " +
                           "ORDER BY message_id LIMIT %s", [user_profile.id, anchor, num_after])
            after_result = cursor.fetchall()
        else:
            after_result = query.filter(**add_prefix(id__gte=anchor))[:num_after]
    query_result = list(before_result) + list(after_result)

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history. The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it. We attempt to
    # bulk-fetch rendered message dicts from memcached using the
    # 'messages' list.
    search_fields = dict()
    message_ids = []
    user_message_flags = {}
    if use_raw_query:
        for row in query_result:
            (message_id, flags_val) = row
            user_message_flags[message_id] = parse_usermessage_flags(flags_val)
            message_ids.append(message_id)
    elif include_history:
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__in=query_result))
        for message in query_result:
            message_ids.append(message.id)
            if user_message_flags.get(message.id) is None:
                user_message_flags[message.id] = ["read", "historical"]
            if is_search:
                search_fields[message.id] = dict([('match_subject', message.match_subject),
                                                  ('match_content', message.match_content)])
    else:
        user_message_flags = dict((user_message.message_id, user_message.flags_list())
                                  for user_message in query_result)
        for user_message in query_result:
            message_ids.append(user_message.message_id)
            if is_search:
                search_fields[user_message.message_id] = \
                    dict([('match_subject', user_message.match_subject),
                          ('match_content', user_message.match_content)])

    cache_transformer = lambda row: Message.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              Message.get_raw_db_rows,
                                              message_ids,
                                              id_fetcher=id_fetcher,
                                              cache_transformer=cache_transformer,
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)

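# Sketch of a request served by get_old_messages_backend (the parameter
# values and the exact URL path are illustrative):
#
#   GET /json/get_old_messages?anchor=1000&num_before=50&num_after=0
#       &narrow=[["stream","social"]]
#
# returns up to 50 messages with id <= 1000 matching the narrow, each
# annotated with the requesting user's UserMessage flags.
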
def generate_client_id():
    return generate_random_token(32)

@authenticated_json_post_view
def json_get_profile(request, user_profile):
    return get_profile_backend(request, user_profile)

@authenticated_api_view
def api_get_profile(request, user_profile):
    return get_profile_backend(request, user_profile)

def get_profile_backend(request, user_profile):
    result = dict(pointer = user_profile.pointer,
                  client_id = generate_client_id(),
                  max_message_id = -1)

    messages = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
    if messages:
        result['max_message_id'] = messages[0].id

    return json_success(result)

@authenticated_json_post_view
def json_update_flags(request, user_profile):
    return update_message_flags(request, user_profile)

@has_request_variables
def update_message_flags(request, user_profile, messages=REQ('messages', converter=json_to_list),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', converter=json_to_bool, default=False)):
    do_update_message_flags(user_profile, operation, flag, messages, all)
    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})

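# Illustrative POST body for update_message_flags (field values made up):
#
#   messages=[1,2,3]&op=add&flag=starred
#
# which asks do_update_message_flags() to star messages 1-3 for this user.
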
@authenticated_api_view
def api_send_message(request, user_profile):
    return send_message_backend(request, user_profile)

@authenticated_json_post_view
def json_send_message(request, user_profile):
    return send_message_backend(request, user_profile)

@authenticated_json_post_view
@has_request_variables
def json_change_enter_sends(request, user_profile,
                            enter_sends=REQ('enter_sends', json_to_bool)):
    do_change_enter_sends(user_profile, enter_sends)
    return json_success()

def is_super_user_api(request):
    return request.user.is_authenticated() and is_super_user(request.user)

def mit_to_mit(user_profile, email):
    # Are the sender and recipient both @mit.edu addresses?
    # We have to handle this specially, inferring the domain from the
    # e-mail address, because the recipient may not exist in Zulip
    # and we may need to make a stub MIT user on the fly.
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    domain = email_to_domain(email)

    return user_profile.realm.domain == "mit.edu" and domain == "mit.edu"

def same_realm_irc_user(user_profile, email):
    # Check whether the target email address is an IRC user in the
    # same realm as user_profile, i.e. if the domain were example.com,
    # the IRC user would need to be username@irc.example.com
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    domain = email_to_domain(email)

    return user_profile.realm.domain == domain.replace("irc.", "")

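# Example: bob@irc.example.com is accepted for a user in the example.com
# realm.  Note that str.replace() removes every "irc." substring, not just a
# leading "irc." label, so the check is slightly looser than the comment
# above implies.
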
def same_realm_user(user_profile, email):
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    domain = email_to_domain(email)

    return user_profile.realm.domain == domain

def create_mirrored_message_users(request, user_profile, recipients):
    if "sender" not in request.POST:
        return (False, None)

    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        for email in recipients:
            referenced_users.add(email.lower())

    if request.client.name == "zephyr_mirror":
        user_check = mit_to_mit
        fullname_function = compute_mit_user_fullname
    elif request.client.name == "irc_mirror":
        user_check = same_realm_irc_user
        fullname_function = compute_irc_user_fullname
    elif request.client.name == "jabber_mirror":
        user_check = same_realm_user
        fullname_function = compute_jabber_user_fullname
    else:
        # Unrecognized mirroring client
        return (False, None)

    for email in referenced_users:
        # Check that all referenced users are in our realm:
        if not user_check(user_profile, email):
            return (False, None)

    # Create users for the referenced users, if needed.
    for email in referenced_users:
        create_mirror_user_if_needed(user_profile.realm, email, fullname_function)

    sender = get_user_profile_by_email(sender_email)
    return (True, sender)

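# Rough flow for a zephyr_mirror request (addresses illustrative): the mirror
# POSTs sender=alice@mit.edu along with the usual message fields; each
# referenced @mit.edu address is vetted by mit_to_mit(), stub users are
# created as needed, and the returned profile becomes the message's sender.
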
@authenticated_json_post_view
@has_request_variables
def json_tutorial_status(request, user_profile, status=REQ('status')):
    if status == 'started':
        user_profile.tutorial_status = UserProfile.TUTORIAL_STARTED
    elif status == 'finished':
        user_profile.tutorial_status = UserProfile.TUTORIAL_FINISHED
    user_profile.save(update_fields=["tutorial_status"])

    return json_success()

@authenticated_json_post_view
def json_update_message(request, user_profile):
    return update_message_backend(request, user_profile)

@authenticated_json_post_view
@has_request_variables
def json_fetch_raw_message(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int)):
    try:
        message = Message.objects.get(id=message_id)
    except Message.DoesNotExist:
        return json_error("No such message")

    if message.sender != user_profile:
        return json_error("Message was not sent by you")

    return json_success({"raw_content": message.content})

@has_request_variables
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_mode=REQ(default="change-one"),
                           content=REQ(default=None)):
    if subject is None and content is None:
        return json_error("Nothing to change")
    do_update_message(user_profile, message_id, subject, propagate_mode, content)
    return json_success()

# We do not @require_login for send_message_backend, since it is used
# both from the API and the web service. Code calling
# send_message_backend should either check the API key or check that
# the user is logged in.
@has_request_variables
def send_message_backend(request, user_profile,
                         message_type_name = REQ('type'),
                         message_to = REQ('to', converter=extract_recipients),
                         forged = REQ(default=False),
                         subject_name = REQ('subject', lambda x: x.strip(), None),
                         message_content = REQ('content'),
                         domain = REQ('domain', default=None)):
    client = request.client
    is_super_user = is_super_user_api(request)
    if forged and not is_super_user:
        return json_error("User not authorized for this query")

    realm = None
    if domain and domain != user_profile.realm.domain:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error("User not authorized for this query")
        realm = get_realm(domain)
        if not realm:
            return json_error("Unknown domain " + domain)

    if client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror"]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream (any stream for the Zephyr and Jabber
        # mirrors, but only streams with names starting with a "#" for
        # IRC mirrors)
        #
        # The security checks are split between the below code
        # (especially create_mirrored_message_users which checks the
        # same-realm constraint) and recipient_for_emails (which
        # checks that PMs are received by the forwarding user)
        if "sender" not in request.POST:
            return json_error("Missing sender")
        if message_type_name != "private" and not is_super_user:
            return json_error("User not authorized for this query")
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error("Invalid mirrored message")
        if client.name == "zephyr_mirror" and user_profile.realm.domain != "mit.edu":
            return json_error("Invalid mirrored realm")
        if (client.name == "irc_mirror" and message_type_name != "private" and
            not message_to[0].startswith("#")):
            return json_error("IRC stream names must start with #")
        sender = mirror_sender
    else:
        sender = user_profile

    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm)
    return json_success({"id": ret})

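# Illustrative POST bodies for send_message_backend (values made up):
#
#   type=stream&to=["social"]&subject=lunch&content=anyone in?
#   type=private&to=["alice@example.com","bob@example.com"]&content=hi
#
# 'subject' is only meaningful for stream messages.
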
@has_request_variables
def render_message_backend(request, user_profile, content=REQ):
    rendered_content = bugdown.convert(content, user_profile.realm.domain)
    return json_success({"rendered": rendered_content})

@authenticated_api_view
def api_get_public_streams(request, user_profile):
    return get_public_streams_backend(request, user_profile)

@authenticated_json_post_view
def json_get_public_streams(request, user_profile):
    return get_public_streams_backend(request, user_profile)

# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(request, user_profile,
                        include_public=REQ(converter=json_to_bool, default=True),
                        include_subscribed=REQ(converter=json_to_bool, default=True),
                        include_all_active=REQ(converter=json_to_bool, default=False)):
    if include_all_active and not is_super_user_api(request):
        return json_error("User not authorized for this query")

    # Listing public streams is disabled for some users (e.g. a
    # contractor for CUSTOMER5) and for the mit.edu realm.
    include_public = include_public and not (user_profile.public_streams_disabled or
                                             user_profile.realm.domain == "mit.edu")

    # Only get streams someone is currently subscribed to
    subs_filter = Subscription.objects.filter(active=True).values('recipient_id')
    stream_ids = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=subs_filter).values('type_id')

    # Start out with all active streams in the realm
    query = Stream.objects.filter(id__in = stream_ids, realm=user_profile.realm)

    if not include_all_active:
        user_subs = Subscription.objects.select_related("recipient").filter(
            active=True, user_profile=user_profile,
            recipient__type=Recipient.STREAM)

        if include_subscribed:
            recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
        if include_public:
            invite_only_check = Q(invite_only=False)

        if include_subscribed and include_public:
            query = query.filter(recipient_check | invite_only_check)
        elif include_public:
            query = query.filter(invite_only_check)
        elif include_subscribed:
            query = query.filter(recipient_check)
        else:
            # We're including nothing, so don't bother hitting the DB.
            query = []

    streams = sorted({"name": stream.name} for stream in query)
    return json_success({"streams": streams})

def get_public_streams_backend(request, user_profile):
    return get_streams_backend(request, user_profile, include_public=True,
                               include_subscribed=False, include_all_active=False)

@authenticated_json_post_view
@has_request_variables
def json_rename_stream(request, user_profile, old_name=REQ, new_name=REQ):
    if not user_profile.has_perm('administer', user_profile.realm):
        return json_error("Insufficient permission to rename stream")

    return json_success(do_rename_stream(user_profile.realm, old_name, new_name))

@authenticated_api_view
def api_list_subscriptions(request, user_profile):
    return list_subscriptions_backend(request, user_profile)

def list_subscriptions_backend(request, user_profile):
    return json_success({"subscriptions": gather_subscriptions(user_profile)[0]})

@transaction.commit_on_success
@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(converter=json_to_list, default=[]),
                                 add=REQ(converter=json_to_list, default=[])):
    if not add and not delete:
        return json_error('Nothing to do. Specify at least one of "add" or "delete".')

    json_dict = {}
    for method, items in ((add_subscriptions_backend, add), (remove_subscriptions_backend, delete)):
        response = method(request, user_profile, streams_raw=items)
        if response.status_code != 200:
            transaction.rollback()
            return response
        json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)

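# Note the all-or-nothing behavior: @transaction.commit_on_success plus the
# explicit transaction.rollback() above means a failure in the delete half
# also undoes any adds performed earlier in the same request.
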
@authenticated_api_view
def api_remove_subscriptions(request, user_profile):
    return remove_subscriptions_backend(request, user_profile)

@authenticated_json_post_view
def json_remove_subscriptions(request, user_profile):
    return remove_subscriptions_backend(request, user_profile)

@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions", json_to_list)):

    streams, _ = list_to_streams(streams_raw, user_profile)

    result = dict(removed=[], not_subscribed=[])
    (removed, not_subscribed) = bulk_remove_subscriptions([user_profile], streams)
    for (subscriber, stream) in removed:
        result["removed"].append(stream.name)
    for (subscriber, stream) in not_subscribed:
        result["not_subscribed"].append(stream.name)

    return json_success(result)

@authenticated_api_view
def api_add_subscriptions(request, user_profile):
    return add_subscriptions_backend(request, user_profile)

@authenticated_json_post_view
def json_add_subscriptions(request, user_profile):
    return add_subscriptions_backend(request, user_profile)

def filter_stream_authorization(user_profile, streams):
    streams_subscribed = set()
    recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])
    subs = Subscription.objects.filter(user_profile=user_profile,
                                       recipient__in=recipients_map.values(),
                                       active=True)

    for sub in subs:
        streams_subscribed.add(sub.recipient.type_id)

    unauthorized_streams = []
    for stream in streams:
        # The user is authorized for his own streams
        if stream.id in streams_subscribed:
            continue

        # The user is not authorized for invite_only streams, and if
        # the user has public streams disabled, nothing is authorized
        if stream.invite_only or user_profile.public_streams_disabled:
            unauthorized_streams.append(stream)

    streams = [stream for stream in streams if
               stream.id not in set(stream.id for stream in unauthorized_streams)]
    return streams, unauthorized_streams

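# Example (stream objects illustrative): for a user subscribed only to "eng",
#
#   filter_stream_authorization(user_profile, [eng, secret_invite_only])
#       => ([eng], [secret_invite_only])
#
# i.e. invite-only streams the user isn't already on come back unauthorized.
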
@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = REQ("subscriptions", json_to_list),
                              invite_only = REQ(converter=json_to_bool, default=False),
                              announce = REQ(converter=json_to_bool, default=False),
                              principals = REQ(converter=json_to_list, default=None),
                              authorization_errors_fatal = REQ(converter=json_to_bool, default=True)):

    stream_names = []
    for stream in streams_raw:
        if not isinstance(stream, dict):
            return json_error("Malformed request")
        stream_name = stream["name"].strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            return json_error("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name,))
        stream_names.append(stream_name)

    existing_streams, created_streams = \
        list_to_streams(stream_names, user_profile, autocreate=True, invite_only=invite_only)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error("Unable to access stream (%s)." % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if principals is not None:
        if user_profile.realm.domain == 'mit.edu' and not all(stream.invite_only for stream in streams):
            return json_error("You can only invite other mit.edu users to invite-only streams.")
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = [user_profile]

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers)

    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    private_streams = dict((stream.name, stream.invite_only) for stream in streams)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if principals and result["subscribed"]:
        for email, subscriptions in result["subscribed"].iteritems():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue

            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream '%s'"
                       % (user_profile.full_name,
                          " **invite-only**" if private_streams[subscriptions[0]] else "",
                          subscriptions[0]))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name,))
                for stream in subscriptions:
                    msg += "* %s%s\n" % (
                        stream,
                        " (**invite-only**)" if private_streams[stream] else "")

            if len([s for s in subscriptions if not private_streams[s]]) > 0:
                msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."
            notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                       "private", email, "", msg))

    if announce and len(created_streams) > 0:
        notifications_stream = user_profile.realm.notifications_stream
        if notifications_stream is not None:
            if len(created_streams) > 1:
                stream_msg = "the following streams: %s" % \
                    (", ".join('`%s`' % (s.name,) for s in created_streams),)
            else:
                stream_msg = "a new stream `%s`" % (created_streams[0].name,)
            msg = ("%s just created %s. To join, click the gear "
                   "in the left-side streams list.") % (user_profile.full_name, stream_msg)
            notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                       "stream",
                                                       notifications_stream.name, "Streams", msg,
                                                       realm=notifications_stream.realm))
        else:
            msg = ("Hi there! %s just created a new stream '%s'. "
                   "To join, click the gear in the left-side streams list."
                   % (user_profile.full_name, created_streams[0].name))
            for realm_user_dict in get_active_user_dicts_in_realm(user_profile.realm):
                # Don't announce to yourself or to people you explicitly added
                # (who will get the notification above instead).
                # Note: principals may be None here, so guard the membership test.
                if realm_user_dict['email'] in (principals or []) or realm_user_dict['email'] == user_profile.email:
                    continue
                notifications.append(internal_prep_message(settings.NOTIFICATION_BOT,
                                                           "private",
                                                           realm_user_dict['email'], "", msg))

    if len(notifications) > 0:
        do_send_messages(notifications)

    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [stream.name for stream in unauthorized_streams]
    return json_success(result)

@authenticated_api_view
def api_get_members(request, user_profile):
    return get_members_backend(request, user_profile)

@authenticated_json_post_view
def json_get_members(request, user_profile):
    return get_members_backend(request, user_profile)

def get_members_backend(request, user_profile):
    members = [{"full_name": profile.full_name,
                "email": profile.email} for profile in \
                   UserProfile.objects.select_related().filter(realm=user_profile.realm)]
    return json_success({'members': members})

@authenticated_api_view
def api_get_subscribers(request, user_profile):
    return get_subscribers_backend(request, user_profile)

@authenticated_json_post_view
def json_get_subscribers(request, user_profile):
    return get_subscribers_backend(request, user_profile)

@authenticated_json_post_view
@has_request_variables
def json_upload_file(request, user_profile, private=REQ(converter=json_to_bool, default=None)):
    if len(request.FILES) == 0:
        return json_error("You must specify a file to upload")
    if len(request.FILES) != 1:
        return json_error("You may only upload one file at a time")

    user_file = request.FILES.values()[0]
    uri = upload_message_image_through_web_client(request, user_file, user_profile, private=private)
    return json_success({'uri': uri})

@has_request_variables
def get_uploaded_file(request, user_profile, realm_id, filename,
                      redir=REQ(converter=json_to_bool, default=True)):
    if settings.LOCAL_UPLOADS_DIR is not None:
        return HttpResponseForbidden() # Should have been served by nginx

    # Internal users can access all uploads so we can receive attachments in cross-realm messages
    if user_profile.realm.id == int(realm_id) or user_profile.realm.domain == 'zulip.com':
        uri = get_signed_upload_url("%s/%s" % (realm_id, filename))
        if redir:
            return redirect(uri)
        else:
            return json_success({'uri': uri})
    else:
        return HttpResponseForbidden()

@has_request_variables
def get_subscribers_backend(request, user_profile, stream_name=REQ('stream')):
    stream = get_stream(stream_name, user_profile.realm)
    if stream is None:
        raise JsonableError("Stream does not exist: %s" % (stream_name,))

    subscribers = get_subscriber_emails(stream, user_profile)

    return json_success({'subscribers': subscribers})

@authenticated_json_post_view
@has_request_variables
def json_change_settings(request, user_profile,
                         full_name=REQ,
                         old_password=REQ,
                         new_password=REQ,
                         confirm_password=REQ):
    if new_password != "" or confirm_password != "":
        if new_password != confirm_password:
            return json_error("New password must match confirmation password!")
        if not authenticate(username=user_profile.email, password=old_password):
            return json_error("Wrong password!")
        do_change_password(user_profile, new_password)

    result = {}
    if user_profile.full_name != full_name and full_name.strip() != "":
        if user_profile.realm.domain == "users.customer4.invalid":
            # At the request of the facilitators, CUSTOMER4
            # students can't change their names. Failing silently is
            # fine -- they can't do it through the UI, so they'd have
            # to be trying to break the rules.
            pass
        else:
            new_full_name = full_name.strip()
            if len(new_full_name) > UserProfile.MAX_NAME_LENGTH:
                return json_error("Name too long!")
            do_change_full_name(user_profile, new_full_name)
            result['full_name'] = new_full_name

    return json_success(result)

@authenticated_json_post_view
@has_request_variables
def json_change_notify_settings(request, user_profile,
                                # enable_desktop_notification needs to default to False
                                # because browsers POST nothing for an unchecked checkbox
                                enable_desktop_notifications=REQ(converter=lambda x: x == "on",
                                                                 default=False),
                                enable_sounds=REQ(converter=lambda x: x == "on",
                                                  default=False),
                                enable_offline_email_notifications=REQ(converter=lambda x: x == "on",
                                                                       default=False),
                                enable_offline_push_notifications=REQ(converter=lambda x: x == "on",
                                                                      default=False)):

    result = {}

    if user_profile.enable_desktop_notifications != enable_desktop_notifications:
        do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications)
        result['enable_desktop_notifications'] = enable_desktop_notifications

    if user_profile.enable_sounds != enable_sounds:
        do_change_enable_sounds(user_profile, enable_sounds)
        result['enable_sounds'] = enable_sounds

    if user_profile.enable_offline_email_notifications != enable_offline_email_notifications:
        do_change_enable_offline_email_notifications(user_profile, enable_offline_email_notifications)
        result['enable_offline_email_notifications'] = enable_offline_email_notifications

    if user_profile.enable_offline_push_notifications != enable_offline_push_notifications:
        do_change_enable_offline_push_notifications(user_profile, enable_offline_push_notifications)
        result['enable_offline_push_notifications'] = enable_offline_push_notifications

    return json_success(result)

@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream=REQ):
    return stream_exists_backend(request, user_profile, stream)

def stream_exists_backend(request, user_profile, stream_name):
    if not valid_stream_name(stream_name):
        return json_error("Invalid characters in stream name")
    stream = get_stream(stream_name, user_profile.realm)
    result = {"exists": bool(stream)}
    if stream is not None:
        recipient = get_recipient(Recipient.STREAM, stream.id)
        result["subscribed"] = Subscription.objects.filter(user_profile=user_profile,
                                                           recipient=recipient,
                                                           active=True).exists()
        return json_success(result) # results are ignored for HEAD requests
    return json_response(data=result, status=404)

def get_subscription_or_die(stream_name, user_profile):
    stream = get_stream(stream_name, user_profile.realm)
    if not stream:
        # Use stream_name here: stream is None, so stream.name would crash.
        raise JsonableError("Invalid stream %s" % (stream_name,))
    recipient = get_recipient(Recipient.STREAM, stream.id)
    subscription = Subscription.objects.filter(user_profile=user_profile,
                                               recipient=recipient, active=True)

    if not subscription.exists():
        raise JsonableError("Not subscribed to stream %s" % (stream_name,))

    return subscription

@authenticated_json_view
@has_request_variables
def json_subscription_property(request, user_profile, stream_name=REQ,
                               property=REQ):
    """
    This is the entry point to accessing or changing subscription
    properties.
    """
    property_converters = dict(color=lambda x: x,
                               in_home_view=json_to_bool,
                               notifications=json_to_bool)
    if property not in property_converters:
        return json_error("Unknown subscription property: %s" % (property,))

    sub = get_subscription_or_die(stream_name, user_profile)[0]
    if request.method == "GET":
        return json_success({'stream_name': stream_name,
                             'value': getattr(sub, property)})
    elif request.method == "POST":
        @has_request_variables
        def do_set_property(request,
                            value=REQ(converter=property_converters[property])):
            do_change_subscription_property(user_profile, sub, stream_name,
                                            property, value)
        do_set_property(request)
        return json_success()
    else:
        return json_error("Invalid verb")

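# Illustrative usage (parameter values made up):
#
#   GET  stream_name=social&property=color
#       => {'stream_name': 'social', 'value': '#c2c2c2', ...}
#   POST stream_name=social&property=in_home_view&value=false
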
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(request, username=REQ, password=REQ):
    user_profile = authenticate(username=username, password=password)
    if user_profile is None:
        return json_error("Your username or password is incorrect.", status=403)
    if not user_profile.is_active:
        return json_error("Your account has been disabled.", status=403)
    return json_success({"api_key": user_profile.api_key})

@authenticated_json_post_view
@has_request_variables
def json_fetch_api_key(request, user_profile, password=REQ):
    if not user_profile.check_password(password):
        return json_error("Your username or password is incorrect.")
    return json_success({"api_key": user_profile.api_key})

class ActivityTable(object):
    def __init__(self, realm, client_name, queries):
        self.realm = realm
        self.summary_mode = realm is None
        self.has_pointer = False
        self.rows = {}

        def do_url(query_name, url):
            fields = [
                'user_profile__realm__domain',
                'user_profile__full_name',
                'user_profile__email',
                'count',
                'last_visit'
            ]

            records = UserActivity.objects.filter(
                query=url,
                client__name__startswith=client_name,
                user_profile__realm__domain=realm
            )
            records = records.select_related().only(*fields)

            count_field = query_name + '_count'
            last_visit_field = query_name + '_last'

            for record in records:
                domain = record.user_profile.realm.domain
                email = record.user_profile.email
                full_name = record.user_profile.full_name
                count = record.count
                last_visit = record.last_visit

                row = self.rows.setdefault(email,
                                           {'realm': domain,
                                            'full_name': full_name,
                                            'email': email,
                                            'type': 'user'})
                row[count_field] = count
                row[last_visit_field] = last_visit

        for query_name, urls in queries:
            if 'pointer' in query_name:
                self.has_pointer = True
            for url in urls:
                do_url(query_name, url)

        for row in self.rows.values():
            # kind of a hack
            last_action = max(v for v in row.values() if isinstance(v, datetime.datetime))
            age = now() - last_action
            if age < datetime.timedelta(minutes=10):
                row['class'] = 'recently_active'
            elif age >= datetime.timedelta(days=1):
                row['class'] = 'long_inactive'
            row['age'] = age

    def sorted_rows(self):
        keyfunc = lambda (k, r): (r['realm'], -1 * r.get('send_message_count', 0))
        return sorted(self.rows.iteritems(), key=keyfunc)

    def content(self):
        return loader.render_to_string('zerver/activity_table.html', dict(table=self))

def dictfetchall(cursor):
    "Returns all rows from a cursor as a dict"
    desc = cursor.description
    return [
        dict(zip([col[0] for col in desc], row))
        for row in cursor.fetchall()
    ]

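# For example, for a cursor that just ran "SELECT 1 AS id, 'x' AS name",
# dictfetchall(cursor) returns [{'id': 1, 'name': 'x'}] rather than the
# positional tuples that cursor.fetchall() would give.
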
def realm_summary_table(realm_minutes):
    query = '''
        SELECT
            realm.domain,
            coalesce(user_counts.active_user_count, 0) active_user_count,
            (
                SELECT
                    count(*)
                FROM zerver_userprofile up
                WHERE up.realm_id = realm.id
                AND is_active
                AND not is_bot
            ) user_profile_count,
            (
                SELECT
                    count(*)
                FROM zerver_userprofile up
                WHERE up.realm_id = realm.id
                AND is_active
                AND is_bot
            ) bot_count
        FROM zerver_realm realm
        LEFT OUTER JOIN
            (
                SELECT
                    up.realm_id realm_id,
                    count(distinct(ua.user_profile_id)) active_user_count
                FROM zerver_useractivity ua
                JOIN zerver_userprofile up
                    ON up.id = ua.user_profile_id
                WHERE
                    query in (
                        '/json/send_message',
                        '/json/update_pointer'
                    )
                AND
                    last_visit > now() - interval '1 day'
                GROUP BY realm_id
            ) user_counts
            ON user_counts.realm_id = realm.id
        WHERE
            realm.domain not in ('zulip.com', 'customer4.invalid')
        AND EXISTS (
                SELECT *
                FROM zerver_useractivity ua
                JOIN zerver_userprofile up
                    ON up.id = ua.user_profile_id
                WHERE
                    query in (
                        '/json/send_message',
                        '/json/update_pointer'
                    )
                AND
                    up.realm_id = realm.id
                AND
                    last_visit > now() - interval '2 week'
        )
        ORDER BY active_user_count DESC, domain ASC
        '''

    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    # augment data with realm_minutes
    total_hours = 0
    for row in rows:
        domain = row['domain']
        minutes = realm_minutes.get(domain, 0)
        hours = minutes / 60.0
        total_hours += hours
        row['hours'] = str(int(hours))
        try:
            row['hours_per_user'] = '%.1f' % (hours / row['active_user_count'],)
        except ZeroDivisionError:
            # Realms with no active users simply don't get this column.
            pass

    # create totals
    total_active_user_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    for row in rows:
        total_active_user_count += int(row['active_user_count'])
        total_user_profile_count += int(row['user_profile_count'])
        total_bot_count += int(row['bot_count'])

    rows.append(dict(
        domain='Total',
        active_user_count=total_active_user_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours)
    ))

    def meets_goal(row):
        # We don't count toward company goals for obvious reasons, and
        # customer4.invalid is essentially a dup for users.customer4.invalid.
        if row['domain'] in ['zulip.com', 'customer4.invalid']:
            return False
        return row['active_user_count'] >= 5

    num_active_sites = len(filter(meets_goal, rows))

    content = loader.render_to_string(
        'zerver/realm_summary_table.html',
        dict(rows=rows, num_active_sites=num_active_sites)
    )
    return dict(content=content)

def user_activity_intervals():
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - datetime.timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = datetime.timedelta(0)

    all_intervals = UserActivityInterval.objects.filter(
        end__gte=day_start,
        start__lte=day_end
    ).select_related(
        'user_profile',
        'user_profile__realm'
    ).only(
        'start',
        'end',
        'user_profile__email',
        'user_profile__realm__domain'
    ).order_by(
        'user_profile__realm__domain',
        'user_profile__email'
    )

    by_domain = lambda row: row.user_profile.realm.domain
    by_email = lambda row: row.user_profile.email

    realm_minutes = {}

    for domain, realm_intervals in itertools.groupby(all_intervals, by_domain):
        realm_duration = datetime.timedelta(0)
        output += '<hr>%s\n' % (domain,)
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = datetime.timedelta(0)
            for interval in intervals:
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += " %-*s%s\n" % (37, email, duration, )

        realm_minutes[domain] = realm_duration.total_seconds() / 60

    output += "\nTotal Duration: %s\n" % (total_duration,)
    output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
    output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
    content = mark_safe('<pre>' + output + '</pre>')
    return dict(content=content), realm_minutes

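# Note the clamping above: an interval that started before the 24-hour window
# only counts from day_start, and one still open at day_end only counts up to
# day_end, so no user can accumulate more than 24 hours.
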
2013-10-22 19:36:08 +02:00
|
|
|
def sent_messages_report(realm):
|
2013-10-22 15:19:07 +02:00
|
|
|
title = 'Recently sent messages for ' + realm
|
|
|
|
|
|
|
|
cols = [
|
|
|
|
'Date',
|
|
|
|
'Count'
|
|
|
|
]
|
|
|
|
|
|
|
|
query = '''
|
|
|
|
select
|
|
|
|
series.day::date,
|
|
|
|
q.cnt
|
|
|
|
from (
|
|
|
|
select generate_series(
|
|
|
|
(now()::date - interval '2 week'),
|
|
|
|
now()::date,
|
|
|
|
interval '1 day'
|
|
|
|
) as day
|
|
|
|
) as series
|
|
|
|
left join (
|
|
|
|
select
|
|
|
|
pub_date::date pub_date,
|
|
|
|
count(*) cnt
|
|
|
|
from zerver_message m
|
|
|
|
join zerver_userprofile up on up.id = m.sender_id
|
|
|
|
join zerver_realm r on r.id = up.realm_id
|
|
|
|
where
|
|
|
|
r.domain = %s
|
|
|
|
and
|
|
|
|
pub_date > now() - interval '2 week'
|
|
|
|
group by
|
|
|
|
pub_date::date
|
|
|
|
order by
|
|
|
|
pub_date::date
|
|
|
|
) q on
|
|
|
|
series.day = q.pub_date
|
|
|
|
'''
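
    # The generate_series/left join above is what keeps zero-traffic days
    # in the report: a day with no matching zerver_message rows still gets
    # a row in the result, just with a NULL count.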
    cursor = connection.cursor()
    cursor.execute(query, [realm])
    rows = cursor.fetchall()
    cursor.close()

    data = dict(
        rows=rows,
        cols=cols,
        title=title
    )

    content = loader.render_to_string(
        'zerver/ad_hoc_query.html',
        dict(data=data)
    )

    return content

def ad_hoc_queries():
    def get_page(query, cols, title):
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        cursor.close()

        data = dict(
            rows=rows,
            cols=cols,
            title=title
        )

        content = loader.render_to_string(
            'zerver/ad_hoc_query.html',
            dict(data=data)
        )

        return dict(
            content=content,
            title=title
        )

    pages = []
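
    # "At risk" below means an active, human (non-bot) user whose most
    # recent visit was between one and seven days ago: recently engaged,
    # but quiet for long enough to be worth noticing.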
    ###

    title = 'At risk users'

    query = '''
        select
            realm.domain,
            cast(floor(extract(epoch from age(now(), max(last_visit))) / 3600) as int) as age,
            up.email,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where up.is_active
        and (not up.is_bot)
        and domain not in (
            'users.customer4.invalid',
            'ios_appreview.zulip.com',
            'mit.edu'
        )
        and email not like '%%+%%'
        group by up.email, realm.domain
        having max(last_visit) between
            now() - interval '7 day' and
            now() - interval '1 day'
        order by domain, max(last_visit)
    '''

    cols = [
        'Domain',
        'Hours since activity',
        'Email',
        'Hits',
        'Last visit'
    ]

    pages.append(get_page(query, cols, title))
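
    # The Android page below counts message sends (query =
    # 'send_message_backend') per Android user, limited to users whose
    # Android client was active within the last two weeks.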
    ###

    title = 'Android usage'

    query = '''
        select
            realm.domain,
            up.id user_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'Android'
        and
            query = 'send_message_backend'
        group by domain, up.id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by domain, up.id, client.name
    '''

    cols = [
        'Domain',
        'User id',
        'Name',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Desktop users'

    query = '''
        select
            realm.domain,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by domain, client.name
        having max(last_visit) > now() - interval '2 week'
        order by domain, client.name
    '''

    cols = [
        'Domain',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Pure API'

    query = '''
        select
            realm.domain,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            query = '/api/v1/send_message'
        and
            client.name = 'API'
        and
            domain != 'zulip.com'
        group by domain
        having max(last_visit) > now() - interval '2 week'
        order by domain
    '''

    cols = [
        'Domain',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))
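
    # The two "Integrations" pages below classify webhook traffic: sends
    # that hit URLs containing 'external' take their client name from
    # split_part(query, '/', 5), which extracts the integration name from
    # a path like /api/v1/external/<integration>; everything else falls
    # back to the registered client name.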
    ###

    title = 'Integrations by domain'

    query = '''
        select
            realm.domain,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query = 'send_message_backend'
            and client.name not in ('Android', 'API', 'API: Python')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by domain, client_name
        having max(last_visit) > now() - interval '2 week'
        order by domain, client_name
    '''

    cols = [
        'Domain',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by client'

    query = '''
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.domain,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query = 'send_message_backend'
            and client.name not in ('Android', 'API', 'API: Python')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, domain
        having max(last_visit) > now() - interval '2 week'
        order by client_name, domain
    '''

    cols = [
        'Client',
        'Domain',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    return pages
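
# /activity with no realm parameter renders the installation-wide report
# (counts, durations, and the ad hoc pages above); with a realm it renders
# the per-realm ActivityTable views plus a sent-message history.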

@zulip_internal
@has_request_variables
def get_activity(request, realm=REQ(default=None)):
    web_queries = (
        ("get_updates",    ["/json/get_updates", "/json/get_events"]),
        ("send_message",   ["/json/send_message"]),
        ("update_pointer", ["/json/update_pointer"]),
    )

    api_queries = (
        ("get_updates",  ["/api/v1/get_messages", "/api/v1/messages/latest", "/api/v1/events"]),
        ("send_message", ["/api/v1/send_message"]),
    )

    if realm is None:
        duration_content, realm_minutes = user_activity_intervals()
        counts_content = realm_summary_table(realm_minutes)
        data = [
            ('Counts', counts_content),
            ('Durations', duration_content),
        ]
        for page in ad_hoc_queries():
            data.append((page['title'], page))
        title = 'Activity'
    else:
        data = [
            ('Website', ActivityTable(realm, 'website', web_queries)),
            ('Desktop', ActivityTable(realm, 'desktop', web_queries)),
            ('API', ActivityTable(realm, 'API', api_queries)),
            ('Android', ActivityTable(realm, 'Android', api_queries)),
            ('History', dict(content=sent_messages_report(realm))),
        ]
        title = '%s activity' % (realm,)

    return render_to_response(
        'zerver/activity.html',
        dict(data=data, realm=realm, title=title),
        context_instance=RequestContext(request)
    )

# This function feeds the /realm_activity report, which will eventually
# replace the per-realm report accessible through /activity; the old
# reports stay around until the new ones are polished.  The old report did
# 24 different queries to get per-realm user data; this approach fetches
# all the data at once and slices and dices it in Python to accommodate
# our slightly quirky data model.
def get_user_activity_records_for_realm(realm, is_bot):
    fields = [
        'user_profile__full_name',
        'user_profile__email',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    records = UserActivity.objects.filter(
        user_profile__realm__domain=realm,
        user_profile__is_bot=is_bot
    )
    records = records.order_by("user_profile__email", "-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records
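
# Note that the user_profile__email ordering above is load-bearing:
# realm_user_summary_table() groups these records with itertools.groupby,
# which only merges runs of adjacent items.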

def get_user_activity_records_for_email(email):
    fields = [
        'user_profile__full_name',
        'query',
        'client__name',
        'count',
        'last_visit'
    ]

    records = UserActivity.objects.filter(
        user_profile__email=email
    )
    records = records.order_by("-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records

def raw_user_activity_table(records):
    cols = [
        'query',
        'client',
        'count',
        'last_visit'
    ]

    def row(record):
        return [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit)
        ]

    rows = map(row, records)

    title = 'Raw Data'

    data = dict(
        rows=rows,
        cols=cols,
        title=title
    )

    content = loader.render_to_string(
        'zerver/ad_hoc_query.html',
        dict(data=data)
    )
    return content
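
# The summary returned below maps each action ('use', 'send', 'pointer',
# 'website', 'desktop', plus every raw client name) to a running
# dict(count=..., last_visit=...).  The one non-action key is 'name',
# the user's full name, which is why the table builders skip it.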

def get_user_activity_summary(records):
    summary = {}
    def update(action, record):
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit
            )
        else:
            summary[action]['count'] += record.count
            summary[action]['last_visit'] = max(
                summary[action]['last_visit'],
                record.last_visit
            )

    if records:
        summary['name'] = records[0].user_profile.full_name

    for record in records:
        client = record.client.name
        query = record.query

        update('use', record)

        if client.startswith('desktop'):
            update('desktop', record)
        if client == 'website':
            update('website', record)
        if 'send_message' in query:
            update('send', record)
        if query in ['/json/update_pointer', '/api/v1/update_pointer']:
            update('pointer', record)
        update(client, record)

    return summary

def format_date_for_activity_reports(date):
    if date:
        return date.strftime('%Y-%m-%d %H:%M')
    else:
        return ''
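
# Flatten the per-user summaries into one realm-wide table with a row per
# (user, client) pair; the synthetic keys in exclude_keys are skipped so
# that only real client names show up as rows.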

def realm_client_table(user_summaries):
    exclude_keys = [
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ]

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = '<a href="/user_activity/%s/">%s</a>' % (email, email)
        email_link = mark_safe(email_link)
        name = user_summary['name']
        for k, v in user_summary.items():
            if k in exclude_keys:
                continue
            client = k
            count = v['count']
            last_visit = v['last_visit']
            row = [
                format_date_for_activity_reports(last_visit),
                client,
                name,
                email_link,
                count,
            ]
            rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'Last visit',
        'Client',
        'Name',
        'Email',
        'Count',
    ]

    title = 'Clients'

    data = dict(
        rows=rows,
        cols=cols,
        title=title
    )

    content = loader.render_to_string(
        'zerver/ad_hoc_query.html',
        dict(data=data)
    )
    return content

def user_activity_summary_table(user_summary):
    rows = []
    for k, v in user_summary.items():
        if k == 'name':
            continue
        client = k
        count = v['count']
        last_visit = v['last_visit']
        row = [
            format_date_for_activity_reports(last_visit),
            client,
            count,
        ]
        rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'last_visit',
        'client',
        'count',
    ]

    title = 'User Activity'

    data = dict(
        rows=rows,
        cols=cols,
        title=title
    )

    content = loader.render_to_string(
        'zerver/ad_hoc_query.html',
        dict(data=data)
    )
    return content
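
# One row per user: name, email link, and total messages sent, followed by
# last-visit timestamps for 'use', 'send', 'pointer', 'desktop', 'iPhone',
# and 'Android'.  by_used_time() sorts on row[3], the "Heard from" column.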

def realm_user_summary_table(all_records):
    user_records = {}

    def by_email(record):
        return record.user_profile.email

    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary, k):
        if k in user_summary:
            return user_summary[k]['last_visit'].strftime('%Y-%m-%d %H:%M')
        else:
            return ''

    def get_count(user_summary, k):
        if k in user_summary:
            return user_summary[k]['count']
        else:
            return ''

    rows = []
    for email, user_summary in user_records.items():
        email_link = '<a href="/user_activity/%s/">%s</a>' % (email, email)
        email_link = mark_safe(email_link)
        sent_count = get_count(user_summary, 'send')
        row = [user_summary['name'], email_link, sent_count]
        for field in ['use', 'send', 'pointer', 'desktop', 'iPhone', 'Android']:
            val = get_last_visit(user_summary, field)
            row.append(val)
        rows.append(row)

    def by_used_time(row):
        return row[3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        'Name',
        'Email',
        'Total sent',
        'Heard from',
        'Message sent',
        'Pointer motion',
        'Desktop',
        'iPhone',
        'Android'
    ]

    title = 'Summary'

    data = dict(
        rows=rows,
        cols=cols,
        title=title
    )

    content = loader.render_to_string(
        'zerver/ad_hoc_query.html',
        dict(data=data)
    )

    return user_records, content

@zulip_internal
def get_realm_activity(request, realm):
    data = []
    all_records = {}
    all_user_records = {}

    for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
        all_records = get_user_activity_records_for_realm(realm, is_bot)
        all_records = list(all_records)

        user_records, content = realm_user_summary_table(all_records)
        all_user_records.update(user_records)

        user_content = dict(content=content)

        data += [(page_title, user_content)]

    page_title = 'Clients'
    content = realm_client_table(all_user_records)
    data += [(page_title, dict(content=content))]

    page_title = 'History'
    content = sent_messages_report(realm)
    data += [(page_title, dict(content=content))]

    # Capture the realm name for the page title before clearing realm,
    # which the template expects to be None for these report pages.
    title = realm
    realm = None

    return render_to_response(
        'zerver/activity.html',
        dict(data=data, realm=realm, title=title),
        context_instance=RequestContext(request)
    )

@zulip_internal
def get_user_activity(request, email):
    records = get_user_activity_records_for_email(email)

    data = []
    user_summary = get_user_activity_summary(records)
    content = user_activity_summary_table(user_summary)

    user_content = dict(content=content)
    data += [('Summary', user_content)]

    content = raw_user_activity_table(records)
    user_content = dict(content=content)
    data += [('Info', user_content)]

    realm = None
    title = email
    return render_to_response(
        'zerver/activity.html',
        dict(data=data, realm=realm, title=title),
        context_instance=RequestContext(request)
    )

def get_status_list(requesting_user_profile):
    return {'presences': get_status_dict(requesting_user_profile),
            'server_timestamp': time.time()}

@authenticated_json_post_view
@has_request_variables
def json_update_active_status(request, user_profile, status=REQ,
                              new_user_input=REQ(converter=json_to_bool, default=False)):
    status_val = UserPresence.status_from_string(status)
    if status_val is None:
        raise JsonableError("Invalid presence status: %s" % (status,))
    else:
        update_user_presence(user_profile, request.client, now(), status_val,
                             new_user_input)

    ret = get_status_list(user_profile)
    if user_profile.realm.domain == "mit.edu":
        try:
            activity = UserActivity.objects.get(user_profile = user_profile,
                                                query="get_events_backend",
                                                client__name="zephyr_mirror")

            ret['zephyr_mirror_active'] = \
                (activity.last_visit.replace(tzinfo=None) >
                 datetime.datetime.utcnow() - datetime.timedelta(minutes=5))
        except UserActivity.DoesNotExist:
            ret['zephyr_mirror_active'] = False

    return json_success(ret)

@authenticated_json_post_view
def json_get_active_statuses(request, user_profile):
    return json_success(get_status_list(user_profile))

# Read the source map information for decoding JavaScript backtraces
js_source_map = None
if not (settings.DEBUG or settings.TEST_SUITE):
    js_source_map = SourceMap(os.path.join(
        settings.DEPLOY_ROOT, 'prod-static/source-map'))

@authenticated_json_post_view
@has_request_variables
def json_report_send_time(request, user_profile,
                          time=REQ(converter=to_non_negative_int)):
    logging.info("End-to-end send time: %dms (%s)" % (time, user_profile.email))
    return json_success()

@authenticated_json_post_view
@has_request_variables
def json_report_error(request, user_profile, message=REQ, stacktrace=REQ,
                      ui_message=REQ(converter=json_to_bool), user_agent=REQ,
                      href=REQ, log=REQ,
                      more_info=REQ(converter=json_to_dict, default=None)):
    subject = "error for %s" % (user_profile.email,)
    if ui_message:
        subject = "User-visible browser " + subject
    else:
        subject = "Browser " + subject

    if js_source_map:
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    body = ("Message:\n%s\n\nStacktrace:\n%s\n\nUser agent: %s\nhref: %s\n"
            "User saw error in UI: %s\n"
            % (message, stacktrace, user_agent, href, ui_message))

    body += "Server path: %s\n" % (settings.DEPLOY_ROOT,)
    try:
        body += "Deployed version: %s" % (
            subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"]),)
    except Exception:
        body += "Could not determine current git commit ID.\n"

    if more_info is not None:
        body += "\nAdditional information:"
        for (key, value) in more_info.iteritems():
            body += "\n %s: %s" % (key, value)

    body += "\n\nLog:\n%s" % (log,)

    mail_admins(subject, body)
    return json_success()

@authenticated_json_post_view
def json_events_register(request, user_profile):
    return events_register_backend(request, user_profile)

# Does not need to be authenticated because it's called from rest_dispatch
@has_request_variables
def api_events_register(request, user_profile,
                        apply_markdown=REQ(default=False, converter=json_to_bool),
                        all_public_streams=REQ(default=False, converter=json_to_bool)):
    return events_register_backend(request, user_profile,
                                   apply_markdown=apply_markdown,
                                   all_public_streams=all_public_streams)

@has_request_variables
def events_register_backend(request, user_profile, apply_markdown=True,
                            all_public_streams=False,
                            event_types=REQ(converter=json_to_list, default=None),
                            queue_lifespan_secs=REQ(converter=int, default=0)):
    ret = do_events_register(user_profile, request.client, apply_markdown,
                             event_types, queue_lifespan_secs, all_public_streams)
    return json_success(ret)

@authenticated_json_post_view
def json_messages_in_narrow(request, user_profile):
    return messages_in_narrow_backend(request, user_profile)

@has_request_variables
def messages_in_narrow_backend(request, user_profile,
                               msg_ids = REQ(converter=json_to_list),
                               narrow = REQ(converter=narrow_parameter)):
    # Note that this function will only work on messages the user
    # actually received

    query = UserMessage.objects.select_related("message") \
                               .filter(user_profile=user_profile, message__id__in=msg_ids)
    build = NarrowBuilder(user_profile, "message__")
    for operator, operand in narrow:
        query = build(query, operator, operand)

    return json_success({"messages": dict((msg.message.id,
                                           {'match_subject': msg.match_subject,
                                            'match_content': msg.match_content})
                                          for msg in query.iterator())})

def deactivate_user_backend(request, user_profile, email):
    try:
        target = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(target):
        return json_error('Insufficient permission')

    do_deactivate(target)
    return json_success({})

@has_request_variables
def patch_bot_backend(request, user_profile, email, full_name=REQ):
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    do_change_full_name(bot, full_name)

    bot_avatar_url = None

    if len(request.FILES) == 0:
        pass
    elif len(request.FILES) == 1:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, bot.email)
        avatar_source = UserProfile.AVATAR_FROM_USER
        bot.avatar_source = avatar_source
        bot.save(update_fields=["avatar_source"])
        bot_avatar_url = avatar_url(bot)
    else:
        return json_error("You may only upload one file at a time")

    json_result = dict(
        full_name = full_name,
        avatar_url = bot_avatar_url
    )
    return json_success(json_result)

@authenticated_json_post_view
def json_set_avatar(request, user_profile):
    if len(request.FILES) != 1:
        return json_error("You must upload exactly one avatar.")

    user_file = request.FILES.values()[0]
    upload_avatar_image(user_file, user_profile, user_profile.email)
    user_profile.avatar_source = UserProfile.AVATAR_FROM_USER
    user_profile.save(update_fields=["avatar_source"])
    user_avatar_url = avatar_url(user_profile)

    json_result = dict(
        avatar_url = user_avatar_url
    )
    return json_success(json_result)

@has_request_variables
def regenerate_api_key(request, user_profile):
    user_profile.api_key = random_api_key()
    user_profile.save(update_fields=["api_key"])
    json_result = dict(
        api_key = user_profile.api_key
    )
    return json_success(json_result)

@has_request_variables
def regenerate_bot_api_key(request, user_profile, email):
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    bot.api_key = random_api_key()
    bot.save(update_fields=["api_key"])
    json_result = dict(
        api_key = bot.api_key
    )
    return json_success(json_result)

@authenticated_json_post_view
@has_request_variables
def json_create_bot(request, user_profile, full_name=REQ, short_name=REQ):
    short_name += "-bot"
    email = short_name + "@" + user_profile.realm.domain
    form = CreateBotForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        # We validate client-side as well
        return json_error('Bad name or username')

    try:
        get_user_profile_by_email(email)
        return json_error("Username already in use")
    except UserProfile.DoesNotExist:
        pass

    if len(request.FILES) == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif len(request.FILES) != 1:
        return json_error("You may only upload one file at a time")
    else:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, email)
        avatar_source = UserProfile.AVATAR_FROM_USER

    bot_profile = do_create_user(email, '', user_profile.realm, full_name,
                                 short_name, True, True,
                                 user_profile, avatar_source)
    json_result = dict(
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile)
    )
    return json_success(json_result)

@authenticated_json_post_view
def json_get_bots(request, user_profile):
    bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
                                              bot_owner=user_profile)
    bot_profiles = bot_profiles.order_by('date_joined')

    def bot_info(bot_profile):
        return dict(
            username = bot_profile.email,
            full_name = bot_profile.full_name,
            api_key = bot_profile.api_key,
            avatar_url = avatar_url(bot_profile)
        )

    return json_success({'bots': map(bot_info, bot_profiles)})

@authenticated_json_post_view
@has_request_variables
def json_refer_friend(request, user_profile, email=REQ):
    if not email:
        return json_error("No email address specified")
    if user_profile.invites_granted - user_profile.invites_used <= 0:
        return json_error("Insufficient invites")

    do_refer_friend(user_profile, email)

    return json_success()

def list_alert_words(request, user_profile):
    return json_success({'alert_words': user_alert_words(user_profile)})

@authenticated_json_post_view
@has_request_variables
def json_set_alert_words(request, user_profile,
                         alert_words=REQ(converter=json_to_list, default=[])):
    do_set_alert_words(user_profile, alert_words)
    return json_success()

@has_request_variables
def set_alert_words(request, user_profile,
                    alert_words=REQ(converter=json_to_list, default=[])):
    do_set_alert_words(user_profile, alert_words)
    return json_success()

@has_request_variables
def add_alert_words(request, user_profile,
                    alert_words=REQ(converter=json_to_list, default=[])):
    do_add_alert_words(user_profile, alert_words)
    return json_success()

@has_request_variables
def remove_alert_words(request, user_profile,
                       alert_words=REQ(converter=json_to_list, default=[])):
    do_remove_alert_words(user_profile, alert_words)
    return json_success()

@authenticated_json_post_view
@has_request_variables
def json_set_muted_topics(request, user_profile,
                          muted_topics=REQ(converter=json_to_list, default=[])):
    do_set_muted_topics(user_profile, muted_topics)
    return json_success()

@has_request_variables
def add_apns_device_token(request, user_profile, token=REQ):
    if token == '' or len(token) > 255:
        return json_error('Empty or invalid length APNS token')

    # The iOS app receives the token on each startup, so refresh the
    # timestamp on the existing row rather than creating a duplicate.
    token, created = AppleDeviceToken.objects.get_or_create(user=user_profile, token=token)
    if not created:
        token.last_updated = now()
        token.save(update_fields=['last_updated'])

    return json_success()

@has_request_variables
def remove_apns_device_token(request, user_profile, token=REQ):
    if token == '' or len(token) > 255:
        return json_error('Empty or invalid length APNS token')

    try:
        apns_token = AppleDeviceToken.objects.get(token=token)
        apns_token.delete()
    except AppleDeviceToken.DoesNotExist:
        return json_error("APNS token does not exist")

    return json_success()