2013-04-23 18:51:17 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2012-09-19 19:39:34 +02:00
|
|
|
from django.conf import settings
|
2012-08-28 18:44:51 +02:00
|
|
|
from django.contrib.auth import authenticate, login
|
|
|
|
from django.contrib.auth.decorators import login_required
|
|
|
|
from django.core.urlresolvers import reverse
|
2013-02-05 22:21:07 +01:00
|
|
|
from django.http import HttpResponseRedirect
|
2013-06-17 18:01:22 +02:00
|
|
|
from django.shortcuts import render_to_response, redirect
|
2013-01-08 23:26:40 +01:00
|
|
|
from django.template import RequestContext, loader
|
2013-02-07 20:54:43 +01:00
|
|
|
from django.utils.timezone import now
|
2013-08-16 22:55:50 +02:00
|
|
|
from django.utils.cache import patch_cache_control
|
2012-09-29 00:49:34 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2012-12-11 23:42:32 +01:00
|
|
|
from django.core import validators
|
2013-03-05 23:45:02 +01:00
|
|
|
from django.contrib.auth.views import login as django_login_page, \
|
|
|
|
logout_then_login as django_logout_then_login
|
2013-03-06 20:53:54 +01:00
|
|
|
from django.db.models import Q, F
|
2013-07-25 21:15:04 +02:00
|
|
|
from django.core.mail import send_mail, mail_admins, EmailMessage
|
2013-03-21 20:18:44 +01:00
|
|
|
from django.db import transaction
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.models import Message, UserProfile, Stream, Subscription, \
|
2013-06-27 23:57:45 +02:00
|
|
|
Recipient, Realm, UserMessage, bulk_get_recipients, \
|
2013-04-01 16:57:50 +02:00
|
|
|
PreregistrationUser, get_client, MitUser, UserActivity, \
|
2013-06-27 22:52:05 +02:00
|
|
|
MAX_SUBJECT_LENGTH, get_stream, bulk_get_streams, UserPresence, \
|
2013-06-18 20:02:45 +02:00
|
|
|
get_recipient, valid_stream_name, to_dict_cache_key, to_dict_cache_key_id, \
|
2013-07-18 18:48:56 +02:00
|
|
|
extract_message_dict, stringify_message_dict, parse_usermessage_flags, \
|
2013-08-27 19:17:08 +02:00
|
|
|
email_to_domain, email_to_username, get_realm, completely_open, \
|
|
|
|
is_super_user, get_active_user_profiles_by_realm
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.actions import do_remove_subscription, bulk_remove_subscriptions, \
|
2013-02-27 23:18:38 +01:00
|
|
|
do_change_password, create_mit_user_if_needed, do_change_full_name, \
|
2013-05-03 21:49:01 +02:00
|
|
|
do_change_enable_desktop_notifications, do_change_enter_sends, do_change_enable_sounds, \
|
2013-04-08 18:27:07 +02:00
|
|
|
do_send_confirmation_email, do_activate_user, do_create_user, check_send_message, \
|
2013-07-16 21:56:20 +02:00
|
|
|
do_change_subscription_property, internal_send_message, \
|
2013-02-08 23:44:15 +01:00
|
|
|
create_stream_if_needed, gather_subscriptions, subscribed_to_stream, \
|
2013-08-08 20:38:24 +02:00
|
|
|
update_user_presence, bulk_add_subscriptions, do_update_message_flags, \
|
2013-06-27 16:41:58 +02:00
|
|
|
recipient_for_emails, extract_recipients, do_events_register, \
|
2013-05-08 15:28:27 +02:00
|
|
|
get_status_dict, do_change_enable_offline_email_notifications, \
|
2013-05-10 22:56:22 +02:00
|
|
|
do_update_onboarding_steps, do_update_message, internal_prep_message, \
|
2013-07-08 17:57:04 +02:00
|
|
|
do_send_messages, do_add_subscription, get_default_subs, do_deactivate, \
|
2013-09-03 22:41:17 +02:00
|
|
|
user_email_is_unique, do_invite_users, do_refer_friend, compute_mit_user_fullname, \
|
2013-09-06 21:52:12 +02:00
|
|
|
do_add_alert_words, do_remove_alert_words, do_set_alert_words, get_subscribers, \
|
2013-09-10 11:46:18 +02:00
|
|
|
update_user_activity_interval, do_set_muted_topics, do_rename_stream
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.create_user import random_api_key
|
|
|
|
from zerver.forms import RegistrationForm, HomepageForm, ToSForm, CreateBotForm, \
|
2013-08-12 00:47:28 +02:00
|
|
|
is_inactive, isnt_mit, not_mit_mailing_list
|
2013-06-19 23:10:13 +02:00
|
|
|
from django.views.decorators.csrf import csrf_exempt, csrf_protect
|
2013-06-27 20:03:51 +02:00
|
|
|
from django_openid_auth.views import default_render_failure, login_complete
|
2013-04-23 23:46:12 +02:00
|
|
|
from openid.consumer.consumer import SUCCESS as openid_SUCCESS
|
|
|
|
from openid.extensions import ax
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib import bugdown
|
2013-09-03 22:41:17 +02:00
|
|
|
from zerver.lib.alert_words import user_alert_words
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.decorator import require_post, \
|
2012-12-02 20:51:51 +01:00
|
|
|
authenticated_api_view, authenticated_json_post_view, \
|
2013-05-08 20:16:16 +02:00
|
|
|
has_request_variables, authenticated_json_view, \
|
2013-01-31 21:12:53 +01:00
|
|
|
to_non_negative_int, json_to_dict, json_to_list, json_to_bool, \
|
2013-06-27 20:03:51 +02:00
|
|
|
JsonableError, get_user_profile_by_email, \
|
|
|
|
authenticated_rest_api_view, process_as_post, REQ, rate_limit_user
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.query import last_n
|
|
|
|
from zerver.lib.avatar import avatar_url
|
2013-09-16 20:59:54 +02:00
|
|
|
from zerver.lib.upload import upload_message_image_through_web_client, upload_avatar_image
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.response import json_success, json_error, json_response, json_method_not_allowed
|
|
|
|
from zerver.lib.cache import cache_get_many, cache_set_many, \
|
2013-06-27 21:55:42 +02:00
|
|
|
generic_bulk_cached_fetch
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.unminify import SourceMap
|
|
|
|
from zerver.lib.queue import queue_json_publish
|
2013-08-08 16:50:58 +02:00
|
|
|
from zerver.lib.utils import statsd, generate_random_token
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver import tornado_callbacks
|
2013-06-19 20:43:45 +02:00
|
|
|
from django.db import connection
|
2013-01-23 23:24:44 +01:00
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
from confirmation.models import Confirmation
|
|
|
|
|
2013-07-18 17:22:24 +02:00
|
|
|
import subprocess
|
2013-08-09 20:26:35 +02:00
|
|
|
import calendar
|
2012-08-28 18:44:51 +02:00
|
|
|
import datetime
|
2013-06-18 23:55:55 +02:00
|
|
|
import ujson
|
2012-08-28 18:44:51 +02:00
|
|
|
import simplejson
|
2012-09-07 19:20:04 +02:00
|
|
|
import re
|
2012-10-04 20:27:49 +02:00
|
|
|
import urllib
|
2012-10-26 22:02:51 +02:00
|
|
|
import base64
|
2013-05-06 17:14:59 +02:00
|
|
|
import time
|
2013-05-20 19:09:18 +02:00
|
|
|
import logging
|
2013-03-28 18:53:44 +01:00
|
|
|
from os import path
|
2013-01-31 21:06:59 +01:00
|
|
|
from collections import defaultdict
|
2012-10-16 21:15:01 +02:00
|
|
|
|
2013-01-30 22:40:00 +01:00
|
|
|
def list_to_streams(streams_raw, user_profile, autocreate=False, invite_only=False):
    """Convert plaintext stream names to Stream objects, validating input.

    Each requested name must be no longer than Stream.MAX_NAME_LENGTH and
    must pass valid_stream_name; otherwise a JsonableError is raised.

    In autocreate mode this should be atomic: either an exception is raised
    during a precheck, or all the streams specified will have been created
    if applicable.

    @param streams_raw The list of stream names to process
    @param user_profile The user for whom we are retrieving the streams
    @param autocreate Whether we should create streams if they don't already exist
    @param invite_only Whether newly created streams should have the invite_only bit set
    """
    existing_streams = []
    created_streams = []

    # Normalize and validate every requested name before touching the DB.
    requested_names = set(name.strip() for name in streams_raw)
    for name in requested_names:
        if len(name) > Stream.MAX_NAME_LENGTH:
            raise JsonableError("Stream name (%s) too long." % (name,))
        if not valid_stream_name(name):
            raise JsonableError("Invalid stream name (%s)." % (name,))

    # One bulk query for all extant streams; names that miss are collected
    # so we can either create them (autocreate) or report them as errors.
    stream_map = bulk_get_streams(user_profile.realm, requested_names)

    missing = []
    for name in requested_names:
        found = stream_map.get(name.lower())
        if found is None:
            missing.append(name)
        else:
            existing_streams.append(found)

    if autocreate:
        for name in missing:
            stream, was_created = create_stream_if_needed(user_profile.realm,
                                                          name,
                                                          invite_only=invite_only)
            if was_created:
                created_streams.append(stream)
            else:
                existing_streams.append(stream)
    elif missing:
        raise JsonableError("Stream(s) (%s) do not exist" % ", ".join(missing))

    return existing_streams, created_streams
|
2013-01-30 22:40:00 +01:00
|
|
|
|
2013-01-17 20:24:07 +01:00
|
|
|
def send_signup_message(sender, signups_stream, user_profile, internal=False):
    """Announce a new user's signup on the given signups stream."""
    # Signups driven from manage.py (rather than the web interface) get a
    # marker in the announcement.
    internal_blurb = " **INTERNAL SIGNUP** " if internal else " "

    active_count = UserProfile.objects.filter(realm=user_profile.realm,
                                              is_active=True).count()
    internal_send_message(sender,
            "stream", signups_stream, user_profile.realm.domain,
            "%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
                user_profile.full_name,
                user_profile.email,
                internal_blurb,
                active_count,
                )
            )
|
|
|
|
|
2013-01-17 20:24:07 +01:00
|
|
|
def notify_new_user(user_profile, internal=False):
    """Announce a signup on the signups stream and bump the statsd gauge."""
    send_signup_message("new-user-bot@zulip.com", "signups", user_profile, internal)
    realm_key = user_profile.realm.domain.replace('.', '_')
    statsd.gauge("users.signups.%s" % (realm_key,), 1, delta=True)
|
2013-01-17 20:24:07 +01:00
|
|
|
|
2013-01-09 22:47:09 +01:00
|
|
|
class PrincipalError(JsonableError):
    """Raised when an agent is not allowed to act on behalf of `principal`."""
    def __init__(self, principal):
        # The email address of the principal we refused to act for.
        self.principal = principal

    def to_json_error_msg(self):
        return ("User not authorized to execute queries on behalf of '%s'"
                % (self.principal,))
|
|
|
|
|
|
|
|
def principal_to_user_profile(agent, principal):
    """Resolve the email `principal` to a UserProfile in `agent`'s realm.

    Raises PrincipalError both when the principal does not exist and when
    it belongs to a different realm: we have to make sure we don't leak
    information about which users are registered for Zulip in another
    realm.  We could do something a little more clever and check the
    domain part of the principal to maybe give a better error message.
    """
    # EAFP: look the user up and raise directly, rather than tracking a
    # "doesn't exist" flag through a combined condition as before.
    try:
        principal_user_profile = get_user_profile_by_email(principal)
    except UserProfile.DoesNotExist:
        raise PrincipalError(principal)

    if agent.realm != principal_user_profile.realm:
        raise PrincipalError(principal)

    return principal_user_profile
|
|
|
|
|
2013-03-21 20:15:27 +01:00
|
|
|
# HTTP methods that rest_dispatch knows how to route to a view function.
METHODS = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'PATCH')
|
|
|
|
|
2013-07-26 22:30:55 +02:00
|
|
|
# Import the Tornado REST views that are used by rest_dispatch
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.tornadoviews import get_events_backend, get_updates_backend
|
2013-07-26 22:30:55 +02:00
|
|
|
|
2013-05-14 22:39:36 +02:00
|
|
|
@csrf_exempt
def rest_dispatch(request, **kwargs):
    """Dispatch to a REST API endpoint.

    This calls the function named in kwargs[request.method], if that request
    method is supported, and after wrapping that function to:

    * protect against CSRF (if the user is already authenticated through
      a Django session)
    * authenticate via an API key (otherwise)
    * coerce PUT/PATCH/DELETE into having POST-like semantics for
      retrieving variables

    Any keyword args that are *not* HTTP methods are passed through to the
    target function.

    Note that we search views.py globals for the function to call, so never
    make a urls.py pattern put user input into a variable called GET, POST,
    etc.
    """
    # Map of HTTP method -> name of the view function that handles it.
    supported_methods = {}
    # duplicate kwargs so we can mutate the original as we go
    for arg in list(kwargs):
        if arg in METHODS:
            supported_methods[arg] = kwargs[arg]
            del kwargs[arg]
    if request.method in supported_methods.keys():
        # Look the handler up by name in this module's globals.
        target_function = globals()[supported_methods[request.method]]
        # We want to support authentication by both cookies (web client)
        # and API keys (API clients). In the former case, we want to
        # do a check to ensure that CSRF etc is honored, but in the latter
        # we can skip all of that.
        #
        # Security implications of this portion of the code are minimal,
        # as we should worst-case fail closed if we miscategorise a request.
        if request.user.is_authenticated():
            # Authenticated via sessions framework, only CSRF check needed
            target_function = csrf_protect(authenticated_json_view(target_function))
        else:
            # Wrap function with decorator to authenticate the user before
            # proceeding
            target_function = authenticated_rest_api_view(target_function)
        if request.method not in ["GET", "POST"]:
            # process_as_post needs to be the outer decorator, because
            # otherwise we might access and thus cache a value for
            # request.REQUEST.
            target_function = process_as_post(target_function)
        return target_function(request, **kwargs)
    # Unsupported method: 405 with the Allow list of methods we do support.
    return json_method_not_allowed(supported_methods.keys())
|
|
|
|
|
2013-05-17 21:45:37 +02:00
|
|
|
@require_post
@has_request_variables
def beta_signup_submission(request, name=REQ, email=REQ,
                           company=REQ, count=REQ, product=REQ):
    """Email the signups list about a beta-interest form submission."""
    body = """Name: %s
Email: %s
Company: %s
# users: %s
Currently using: %s""" % (name, email, company, count, product,)
    subject = "Interest in Zulip: %s" % (company,)
    sender = '"%s" <humbug+signups@humbughq.com>' % (name,)
    recipient = '"Zulip Signups" <humbug+signups@humbughq.com>'
    # Replies should go to the person who filled out the form, not to us.
    reply_headers = {'Reply-To' : '"%s" <%s>' % (name, email,)}

    message = EmailMessage(subject, body, sender, [recipient], headers=reply_headers)
    message.send()
    return json_success()
|
|
|
|
|
2012-09-28 22:47:05 +02:00
|
|
|
@require_post
def accounts_register(request):
    """Complete a signup that arrived via a confirmation link.

    The POSTed `key` identifies a Confirmation whose content_object is
    either a PreregistrationUser or (for mit.edu) a MitUser.  On the first
    visit (`from_confirmation` set) we render the registration form; on
    form submission we create or activate the account, subscribe the user
    to streams, backfill recent messages, announce the signup, and log the
    user in.
    """
    key = request.POST['key']
    confirmation = Confirmation.objects.get(confirmation_key=key)
    prereg_user = confirmation.content_object
    email = prereg_user.email
    mit_beta_user = isinstance(confirmation.content_object, MitUser)

    validators.validate_email(email)
    # If someone invited you, you are joining their realm regardless
    # of your e-mail address.
    #
    # MitUsers can't be referred and don't have a referred_by field.
    if not mit_beta_user and prereg_user.referred_by:
        domain = prereg_user.referred_by.realm.domain
    elif not mit_beta_user and prereg_user.realm:
        # You have a realm set, even though nobody referred you. This
        # happens if you sign up through a special URL for an open
        # realm.
        domain = prereg_user.realm.domain
    else:
        domain = email_to_domain(email)

    try:
        if mit_beta_user:
            # MIT users already exist, but are supposed to be inactive.
            is_inactive(email)
        else:
            # Other users should not already exist at all.
            user_email_is_unique(email)
    except ValidationError:
        # Account already exists; bounce to the login page with the email
        # pre-filled.
        return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))

    if request.POST.get('from_confirmation'):
        # First visit from the confirmation email: show an unbound form,
        # pre-filling the full name from Hesiod for MIT users.
        if domain == "mit.edu":
            hesiod_name = compute_mit_user_fullname(email)
            form = RegistrationForm(
                initial={'full_name': hesiod_name if "@" not in hesiod_name else ""})
        else:
            form = RegistrationForm()
    else:
        form = RegistrationForm(request.POST)
        if form.is_valid():
            password = form.cleaned_data['password']
            full_name = form.cleaned_data['full_name']
            short_name = email_to_username(email)
            (realm, _) = Realm.objects.get_or_create(domain=domain)
            # Computed before do_create_user so the creator themselves
            # doesn't count.
            first_in_realm = len(UserProfile.objects.filter(realm=realm)) == 0

            # FIXME: sanitize email addresses and fullname
            if mit_beta_user:
                # The MIT account may or may not already exist (inactive);
                # create it if needed, then activate and set credentials.
                try:
                    user_profile = get_user_profile_by_email(email)
                except UserProfile.DoesNotExist:
                    user_profile = do_create_user(email, password, realm, full_name, short_name)
                do_activate_user(user_profile)
                do_change_password(user_profile, password)
                do_change_full_name(user_profile, full_name)
            else:
                user_profile = do_create_user(email, password, realm, full_name, short_name)
                # We want to add the default subs list iff there were no subs
                # specified when the user was invited.
                streams = prereg_user.streams.all()
                if len(streams) == 0:
                    streams = get_default_subs(user_profile)
                for stream in streams:
                    do_add_subscription(user_profile, stream)

                # Give you the last 100 messages on your streams, so you have
                # something to look at in your home view once you finish the
                # tutorial.
                recipients = Recipient.objects.filter(type=Recipient.STREAM,
                                                      type_id__in=[stream.id for stream in streams])
                messages = Message.objects.filter(recipient_id__in=recipients).order_by("-id")[0:100]
                if len(messages) > 0:
                    # Backfilled messages start out marked as read.
                    ums_to_create = [UserMessage(user_profile=user_profile, message=message,
                                                 flags=UserMessage.flags.read)
                                     for message in messages]

                    UserMessage.objects.bulk_create(ums_to_create)

                if prereg_user.referred_by is not None:
                    # This is a cross-realm private message.
                    internal_send_message("new-user-bot@zulip.com",
                            "private", prereg_user.referred_by.email, user_profile.realm.domain,
                            "%s <`%s`> accepted your invitation to join Zulip!" % (
                                user_profile.full_name,
                                user_profile.email,
                                )
                            )
            # Mark any other PreregistrationUsers that are STATUS_ACTIVE as inactive
            # so we can find the PreregistrationUser that we are actually working
            # with here
            PreregistrationUser.objects.filter(email=email) \
                                       .exclude(id=prereg_user.id) \
                                       .update(status=0)

            notify_new_user(user_profile)
            # Enqueue for the mailing-list sync consumer; the callback is a
            # no-op because nothing needs to happen on publish.
            queue_json_publish(
                    "signups",
                    {
                        'EMAIL': email,
                        'merge_vars': {
                            'NAME': full_name,
                            'REALM': domain,
                            'OPTIN_IP': request.META['REMOTE_ADDR'],
                            'OPTIN_TIME': datetime.datetime.isoformat(datetime.datetime.now()),
                            },
                        },
                    lambda event: None)

            login(request, authenticate(username=email, password=password))

            if first_in_realm:
                return HttpResponseRedirect(reverse('zerver.views.initial_invite_page'))
            else:
                return HttpResponseRedirect(reverse('zerver.views.home'))

    # Reached on first visit or when the submitted form failed validation.
    return render_to_response('zerver/register.html',
            {'form': form,
             'company_name': domain,
             'email': email,
             'key': key,
             'gafyd_name': request.POST.get('gafyd_name', False),
            },
            context_instance=RequestContext(request))
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-01-08 23:26:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def accounts_accept_terms(request):
    """Show and process the Terms of Service acceptance form.

    On a valid POST, emails an acceptance record to the admins, updates the
    user's full name, and redirects home; otherwise (GET or invalid form)
    re-renders the form.
    """
    email = request.user.email
    domain = email_to_domain(email)
    if request.method == "POST":
        form = ToSForm(request.POST)
        if form.is_valid():
            full_name = form.cleaned_data['full_name']
            # Record the acceptance (name, email, IP, browser) by email.
            send_mail('Terms acceptance for ' + full_name,
                    loader.render_to_string('zerver/tos_accept_body.txt',
                        {'name': full_name,
                         'email': email,
                         'ip': request.META['REMOTE_ADDR'],
                         'browser': request.META['HTTP_USER_AGENT']}),
                    "humbug@humbughq.com",
                    ["all@zulip.com"])
            do_change_full_name(request.user, full_name)
            return redirect(home)

    else:
        form = ToSForm()
    # GET, or POST with an invalid form: render (possibly bound) form.
    return render_to_response('zerver/accounts_accept_terms.html',
        { 'form': form, 'company_name': domain, 'email': email },
        context_instance=RequestContext(request))
|
|
|
|
|
2013-08-23 20:49:06 +02:00
|
|
|
from zerver.lib.ccache import make_ccache
|
|
|
|
|
|
|
|
@authenticated_json_view
@has_request_variables
def webathena_kerberos_login(request, user_profile,
                             cred=REQ(default=None)):
    """Accept a Webathena Kerberos credential and set up zephyr mirroring.

    Only available to mit.edu users.  Validates that the credential's
    principal matches the user's email username, converts it to a ccache,
    and ships it to the zephyr mirroring host over ssh.
    """
    if cred is None:
        return json_error("Could not find Kerberos credential")
    if not user_profile.realm.domain == "mit.edu":
        return json_error("Webathena login only for mit.edu realm")

    try:
        parsed_cred = ujson.loads(cred)
        user = parsed_cred["cname"]["nameString"][0]
        if user == "golem":
            # Hack for an mit.edu user whose Kerberos username doesn't
            # match what he zephyrs as
            user = "ctl"
        # The credential must belong to this user (username part of email).
        assert(user == user_profile.email.split("@")[0])
        ccache = make_ccache(parsed_cred)
    except Exception:
        # Any parse/validation failure is reported uniformly to the client.
        return json_error("Invalid Kerberos cache")

    # TODO: Send these data via (say) rabbitmq
    try:
        subprocess.check_call(["ssh", "humbug@zmirror2.zulip.net", "--",
                               "/home/humbug/humbug/bots/process_ccache",
                               user,
                               user_profile.api_key,
                               base64.b64encode(ccache)])
    except Exception:
        logging.exception("Error updating the user's ccache")
        return json_error("We were unable to setup mirroring for you")

    return json_success()
|
|
|
|
|
2013-04-17 17:24:07 +02:00
|
|
|
def api_endpoint_docs(request):
    """Render the API endpoint documentation page.

    Loads the example calls from templates/zerver/api_content.json, renders
    each example response through bugdown, and collects the set of example
    languages for the template.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the previous open(...).read() leaked it until garbage collection).
    with open('templates/zerver/api_content.json', 'r') as api_file:
        raw_calls = api_file.read()
    calls = ujson.loads(raw_calls)
    langs = set()
    for call in calls:
        response = call['example_response']
        if '\n' not in response:
            # For 1-line responses, pretty-print them
            extended_response = response.replace(", ", ",\n ")
        else:
            extended_response = response
        call['rendered_response'] = bugdown.convert("~~~ .py\n" + extended_response + "\n~~~\n", "default")
        for example_type in ('request', 'response'):
            for lang in call.get('example_' + example_type, []):
                langs.add(lang)
    return render_to_response(
            'zerver/api_endpoints.html', {
                'content': calls,
                'langs': langs,
                },
            context_instance=RequestContext(request))
|
|
|
|
|
2012-12-11 23:42:32 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_invite_users(request, user_profile, invitee_emails=REQ):
    """Invite the given comma/space/newline-separated emails to the listed
    streams.

    Returns json_error if the inviter is an MIT user, no emails or streams
    were given, or any named stream does not exist; otherwise delegates to
    do_invite_users and reports its result.
    """
    # Validation
    try:
        isnt_mit(user_profile.email)
    # Use the modern `as` exception syntax (the old `except X, e` form was
    # removed in Python 3; `as` works on Python 2.6+ as well).
    except ValidationError as e:
        return json_error(e.message)

    if not invitee_emails:
        return json_error("You must specify at least one email address.")

    invitee_emails = set(re.split(r'[, \n]', invitee_emails))

    stream_names = request.POST.getlist('stream')
    if not stream_names:
        return json_error("You must specify at least one stream for invitees to join.")

    streams = []
    for stream_name in stream_names:
        stream = get_stream(stream_name, user_profile.realm)
        if stream is None:
            return json_error("Stream does not exist: %s. No invites were sent." % stream_name)
        streams.append(stream)

    ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams)

    if ret_error is not None:
        return json_error(data=error_data, msg=ret_error)
    else:
        return json_success()
|
2012-12-11 23:42:32 +01:00
|
|
|
|
2013-08-07 17:59:45 +02:00
|
|
|
def create_homepage_form(request, user_info=None):
    """Build a HomepageForm, carrying any open-realm domain stored in the
    session; bind it to user_info when provided."""
    domain = request.session.get("domain")
    # An empty fields dict is not treated the same way as not
    # providing it, so only pass user_info when it is truthy.
    if user_info:
        return HomepageForm(user_info, domain=domain)
    return HomepageForm(domain=domain)
|
|
|
|
|
2013-04-23 23:46:12 +02:00
|
|
|
def handle_openid_errors(request, issue, openid_response=None):
    """Custom OpenID failure handler.

    "Unknown user" with a successful OpenID response means a Google user we
    don't have an account for: pull their email and name from the AX
    attributes and route them into the signup flow.  Everything else falls
    through to the default failure page.
    """
    if issue == "Unknown user":
        if openid_response is not None and openid_response.status == openid_SUCCESS:
            ax_response = ax.FetchResponse.fromSuccessResponse(openid_response)
            google_email = openid_response.getSigned('http://openid.net/srv/ax/1.0', 'value.email')
            full_name = " ".join((
                ax_response.get('http://axschema.org/namePerson/first')[0],
                ax_response.get('http://axschema.org/namePerson/last')[0]))
            form = create_homepage_form(request, user_info={'email': google_email})
            request.verified_email = None
            if form.is_valid():
                # Construct a PreregistrationUser object and send the user over to
                # the confirmation view.
                prereg_user = create_preregistration_user(google_email, request)
                return redirect("".join((
                    "/",
                    # Split this so we only get the part after the /
                    Confirmation.objects.get_link_for_object(prereg_user).split("/", 3)[3],
                    '?gafyd_name=',
                    # urllib does not handle Unicode, so coerce to encoded byte string
                    # Explanation: http://stackoverflow.com/a/5605354/90777
                    urllib.quote_plus(full_name.encode('utf8')))))
            else:
                return render_to_response('zerver/accounts_home.html', {'form': form})
    return default_render_failure(request, issue)
|
|
|
|
|
|
|
|
def process_openid_login(request):
    """Finish an OpenID login, routing failures to our custom handler."""
    return login_complete(request, render_failure=handle_openid_errors)
|
|
|
|
|
2012-09-29 00:49:34 +02:00
|
|
|
def login_page(request, **kwargs):
    """Wrap Django's login view, pre-filling the email field from the
    ?email= query parameter when present."""
    template_response = django_login_page(request, **kwargs)
    email = request.GET.get('email')
    if email is not None:
        template_response.context_data['email'] = email
    return template_response
|
|
|
|
|
2013-07-11 21:40:52 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_bulk_invite_users(request, user_profile, invitee_emails=REQ(converter=json_to_list)):
    """Invite a JSON list of emails to the inviter's default streams."""
    unique_emails = set(invitee_emails)
    default_streams = get_default_subs(user_profile)

    ret_error, error_data = do_invite_users(user_profile, unique_emails, default_streams)

    if ret_error is None:
        return json_success()
    return json_error(data=error_data, msg=ret_error)
|
|
|
|
|
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def initial_invite_page(request):
    """Show the bulk-invite page to the very first user in a realm;
    everyone else is redirected home."""
    user = request.user
    # Only show the bulk-invite page for the first user in a realm.
    # Use a COUNT query rather than len(queryset), which would fetch and
    # materialize every UserProfile row just to count them.
    domain_count = UserProfile.objects.filter(realm=user.realm).count()
    if domain_count > 1:
        return redirect('zerver.views.home')

    params = {'company_name': user.realm.domain}

    # Restricted realms can only invite addresses in their own domain, so
    # pre-fill the suffix in the invite form.
    if user.realm.restricted_to_domain:
        params['invite_suffix'] = user.realm.domain

    return render_to_response('zerver/initial_invite_page.html', params,
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-03-05 23:45:02 +01:00
|
|
|
@require_post
def logout_then_login(request, **kwargs):
    """Log the user out, then redirect to the login page.

    Bug fix: the kwargs dict must be expanded when forwarded.  The previous
    code passed it positionally, binding the dict to Django's login_url
    parameter (harmless only while kwargs was always empty).
    """
    return django_logout_then_login(request, **kwargs)
|
|
|
|
|
2013-08-02 20:32:56 +02:00
|
|
|
def create_preregistration_user(email, request):
    """Create and return the pending-signup record for this email.

    A realm domain stashed in the session (by accounts_home_with_domain
    or the OpenID flow) is honored only if that realm is completely
    open; otherwise it is ignored.
    """
    domain = request.session.get("domain")
    if not completely_open(domain):
        domain = None

    # MIT users who are not explicitly signing up for an open realm
    # require special handling (They may already have an (inactive)
    # account, for example)
    is_special_mit_signup = (email_to_domain(email) == "mit.edu" and not domain)
    if is_special_mit_signup:
        prereg_user = MitUser.objects.get_or_create(email=email)[0]
    else:
        prereg_user = PreregistrationUser(email=email, realm=get_realm(domain))
        prereg_user.save()

    # Clear the stashed domain so it cannot leak into a later,
    # unrelated signup in the same session.
    request.session["domain"] = None

    return prereg_user
|
|
|
|
|
|
|
|
def accounts_home_with_domain(request, domain):
    """Signup entry point for URLs that embed a realm domain.

    Non-open realms are bounced to the generic signup page.
    """
    if not completely_open(domain):
        return HttpResponseRedirect(reverse('zerver.views.accounts_home'))

    # You can sign up for a completely open realm through a
    # special registration path that contains the domain in the
    # URL. We store this information in the session rather than
    # elsewhere because we don't have control over URL or form
    # data for folks registering through OpenID.
    request.session["domain"] = domain
    return accounts_home(request)
|
2013-08-02 20:32:56 +02:00
|
|
|
|
2012-08-28 18:44:51 +02:00
|
|
|
def accounts_home(request):
    """Render (GET) or process (POST) the signup/home form.

    On a valid POST, create the preregistration record, send the
    confirmation email, and redirect to the 'email sent' page.  If the
    form is invalid, we additionally check whether the email already
    belongs to an account: if is_inactive raises (presumably because
    the account is already active -- confirm against its definition),
    we redirect to the login page with the email pre-filled instead of
    re-showing the signup form with errors.
    """
    if request.method == 'POST':
        form = create_homepage_form(request, user_info=request.POST)
        if form.is_valid():
            email = form.cleaned_data['email']
            prereg_user = create_preregistration_user(email, request)
            Confirmation.objects.send_confirmation(prereg_user, email)
            return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email}))
        # Form was invalid: fall through and decide whether this is an
        # "already registered" email that should go to login instead.
        try:
            email = request.POST['email']
            # Note: We don't check for uniqueness
            is_inactive(email)
        except ValidationError:
            # Pre-fill the login form with the attempted address.
            return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.quote_plus(email))
    else:
        form = create_homepage_form(request)
    return render_to_response('zerver/accounts_home.html',
                              {'form': form, 'current_url': request.get_full_path},
                              context_instance=RequestContext(request))
|
|
|
|
|
2013-09-04 21:02:11 +02:00
|
|
|
def approximate_unread_count(user_profile):
    """Estimate the user's unread count as the number of UserMessage
    rows past their pointer, excluding streams hidden from the home
    view (those messages might never be read).
    """
    # Fetch only the recipient_id column via values_list; the previous
    # list comprehension over sub.recipient.id issued one extra query
    # per subscription to load each related Recipient object.
    not_in_home_view_recipients = list(
        Subscription.objects.filter(
            user_profile=user_profile,
            in_home_view=False).values_list('recipient_id', flat=True))

    # Don't include messages that aren't in your home view, as they might never
    # be read.
    return UserMessage.objects.filter(
        user_profile=user_profile, message_id__gt=user_profile.pointer).exclude(
        message__recipient__type=Recipient.STREAM,
        message__recipient__id__in=not_in_home_view_recipients).count()
|
2013-08-09 20:26:35 +02:00
|
|
|
|
|
|
|
def sent_time_in_epoch_seconds(user_message):
    """Return the send time of user_message's message as UTC epoch
    seconds, or None when no UserMessage was supplied.
    """
    if not user_message:
        return None
    # We have USE_TZ = True, so pub_date is timezone-aware; convert it
    # to a UTC time tuple and from there to epoch seconds.
    pub_date = user_message.message.pub_date
    return calendar.timegm(pub_date.utctimetuple())
|
|
|
|
|
2012-10-29 19:56:40 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def home(request):
    """Render the main webapp page for a logged-in user.

    Registers an event queue for this client, computes the initial
    client state (serialized into the 'page_params' JavaScript object),
    and returns the index template with caching disabled.
    """
    # We need to modify the session object every two weeks or it will expire.
    # This line makes reloading the page a sufficient action to keep the
    # session alive.
    request.session.modified = True

    user_profile = request.user

    # Register an event queue so the client can long-poll for updates;
    # the returned dict also carries the initial state snapshot.
    register_ret = do_events_register(user_profile, get_client("website"),
                                      apply_markdown=True)
    user_has_messages = (register_ret['max_message_id'] != -1)

    # Reset our don't-spam-users-with-email counter since the
    # user has since logged in
    if not user_profile.last_reminder is None:
        user_profile.last_reminder = None
        user_profile.save(update_fields=["last_reminder"])

    # Brand new users get the tutorial
    needs_tutorial = settings.TUTORIAL_ENABLED and \
        user_profile.tutorial_status != UserProfile.TUTORIAL_FINISHED

    if user_profile.pointer == -1 and user_has_messages:
        # Put the new user's pointer at the bottom
        #
        # This improves performance, because we limit backfilling of messages
        # before the pointer.  It's also likely that someone joining an
        # organization is interested in recent messages more than the very
        # first messages on the system.
        register_ret['pointer'] = register_ret['max_message_id']
        # NOTE(review): last_pointer_updater is assigned but not saved
        # in this function -- confirm that is intentional.
        user_profile.last_pointer_updater = request.session.session_key

    # Find the UserMessage at the pointer (if any) so we can tell the
    # client how far the user had read.
    if user_profile.pointer == -1:
        latest_read = None
    else:
        try:
            latest_read = UserMessage.objects.get(user_profile=user_profile,
                                                  message__id=user_profile.pointer)
        except UserMessage.DoesNotExist:
            # Don't completely fail if your saved pointer ID is invalid
            logging.warning("%s has invalid pointer %s" % (user_profile.email, user_profile.pointer))
            latest_read = None

    # Pass parameters to the client-side JavaScript code.
    # These end up in a global JavaScript Object named 'page_params'.
    page_params = simplejson.encoder.JSONEncoderForHTML().encode(dict(
        debug_mode            = settings.DEBUG,
        poll_timeout          = settings.POLL_TIMEOUT,
        have_initial_messages = user_has_messages,
        stream_list           = register_ret['subscriptions'],
        unsubbed_info         = register_ret['unsubscribed'],
        people_list           = register_ret['realm_users'],
        initial_pointer       = register_ret['pointer'],
        initial_presences     = register_ret['presences'],
        initial_servertime    = time.time(), # Used for calculating relative presence age
        fullname              = user_profile.full_name,
        email                 = user_profile.email,
        domain                = user_profile.realm.domain,
        enter_sends           = user_profile.enter_sends,
        referrals             = register_ret['referrals'],
        realm_emoji           = register_ret['realm_emoji'],
        needs_tutorial        = needs_tutorial,
        desktop_notifications_enabled =
            user_profile.enable_desktop_notifications,
        sounds_enabled =
            user_profile.enable_sounds,
        enable_offline_email_notifications =
            user_profile.enable_offline_email_notifications,
        event_queue_id        = register_ret['queue_id'],
        last_event_id         = register_ret['last_event_id'],
        max_message_id        = register_ret['max_message_id'],
        unread_count          = approximate_unread_count(user_profile),
        furthest_read_time    = sent_time_in_epoch_seconds(latest_read),
        onboarding_steps      = ujson.loads(user_profile.onboarding_steps),
        staging               = settings.STAGING_DEPLOYED or not settings.DEPLOYED,
        alert_words           = register_ret['alert_words'],
        show_admin            = user_profile.show_admin
    ))

    statsd.incr('views.home')

    # Only non-MIT users get the invite UI by default.
    try:
        isnt_mit(user_profile.email)
        show_invites = True
    except ValidationError:
        show_invites = False

    # For the CUSTOMER4 student realm, only let instructors (who have
    # @customer4.invalid addresses) invite new users.
    if ((user_profile.realm.domain == "users.customer4.invalid") and
        (not user_profile.email.lower().endswith("@customer4.invalid"))):
        show_invites = False

    response = render_to_response('zerver/index.html',
                                  {'user_profile': user_profile,
                                   'page_params' : page_params,
                                   'avatar_url': avatar_url(user_profile),
                                   'nofontface': is_buggy_ua(request.META["HTTP_USER_AGENT"]),
                                   'show_debug':
                                       settings.DEBUG and ('show_debug' in request.GET),
                                   'show_invites': show_invites,
                                   'show_admin': user_profile.show_admin,
                                   'show_webathena': user_profile.realm.domain == "mit.edu",
                                   },
                                  context_instance=RequestContext(request))
    # The app state is dynamic; never let browsers or proxies cache it.
    patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
    return response
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2013-07-01 20:13:26 +02:00
|
|
|
def is_buggy_ua(agent):
    """Return whether this User Agent should get the conservative CSS.

    Due to QTBUG-3467, @font-face is not supported in QtWebKit.
    This may get fixed in the future, but for right now we can
    just serve the more conservative CSS to all our desktop apps.
    """
    # Mac builds are exempt; everything else matching one of our
    # desktop-app UA strings gets the no-@font-face CSS.
    if "Macintosh" in agent:
        return False
    return "Humbug Desktop/" in agent or "Zulip Desktop/" in agent
|
2013-07-01 20:13:26 +02:00
|
|
|
|
2013-03-21 20:16:57 +01:00
|
|
|
def get_pointer_backend(request, user_profile):
    """Return the user's current pointer (a message ID) as JSON."""
    return json_success({'pointer': user_profile.pointer})
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_update_pointer(request, user_profile):
    """API-authenticated wrapper around update_pointer_backend."""
    return update_pointer_backend(request, user_profile)
|
2012-10-21 19:33:14 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_update_pointer(request, user_profile):
    """Browser (JSON POST) wrapper around update_pointer_backend."""
    return update_pointer_backend(request, user_profile)
|
2012-10-21 19:33:14 +02:00
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def update_pointer_backend(request, user_profile,
                           pointer=REQ(converter=to_non_negative_int)):
    """Advance the user's pointer to the given message ID.

    Moving the pointer backwards is a silent no-op.  The target message
    must exist in the user's UserMessage rows, otherwise we raise a
    JsonableError.  For mobile clients, messages between the old and
    new pointer are also marked as read, and if a Tornado server is
    configured we notify it so other connected clients see the update.
    """
    # Pointer only ever moves forward; ignore stale updates.
    if pointer <= user_profile.pointer:
        return json_success()

    # Validate that the user actually received the target message.
    try:
        UserMessage.objects.get(
            user_profile=user_profile,
            message__id=pointer
        )
    except UserMessage.DoesNotExist:
        raise JsonableError("Invalid message ID")

    prev_pointer = user_profile.pointer
    user_profile.pointer = pointer
    user_profile.save(update_fields=["pointer"])

    if request.client.name.lower() in ['android', 'iphone']:
        # TODO (leo)
        # Until we handle the new read counts in the mobile apps natively,
        # this is a shim that will mark as read any messages up until the
        # pointer move
        UserMessage.objects.filter(user_profile=user_profile,
                                   message__id__gt=prev_pointer,
                                   message__id__lte=pointer,
                                   flags=~UserMessage.flags.read) \
                           .update(flags=F('flags').bitor(UserMessage.flags.read))

    # Let Tornado broadcast the new pointer to the user's other clients.
    if settings.TORNADO_SERVER:
        tornado_callbacks.send_notification(dict(
            type        = 'pointer_update',
            user        = user_profile.id,
            new_pointer = pointer))

    return json_success()
|
2012-08-28 18:44:51 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_get_old_messages(request, user_profile):
    """Browser (JSON POST) wrapper around get_old_messages_backend."""
    return get_old_messages_backend(request, user_profile)
|
2012-10-26 16:42:03 +02:00
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
@has_request_variables
def api_get_old_messages(request, user_profile,
                         apply_markdown=REQ(default=False,
                                            converter=ujson.loads)):
    """API wrapper around get_old_messages_backend.

    Unlike the browser endpoint, apply_markdown defaults to False here,
    so API clients get unrendered content unless they opt in.
    """
    return get_old_messages_backend(request, user_profile,
                                    apply_markdown=apply_markdown)
|
2012-10-26 16:42:03 +02:00
|
|
|
|
2012-12-19 23:58:02 +01:00
|
|
|
class BadNarrowOperator(Exception):
    """Raised when a client-supplied narrow specification contains an
    operator or operand we don't understand.

    The offending description is kept so it can be reported back to the
    client via to_json_error_msg().
    """

    def __init__(self, desc):
        self.desc = desc

    def to_json_error_msg(self):
        # Rendered into the JSON error response for the request.
        return 'Invalid narrow operator: ' + self.desc
|
|
|
|
|
|
|
|
class NarrowBuilder(object):
    """Translate a narrow specification ([operator, operand] pairs) into
    Django queryset filters.

    The same builder works against either the Message table directly or
    the UserMessage table (joined to Message): 'prefix' is prepended to
    every Message-field lookup (e.g. 'message__' when filtering
    UserMessage rows), while UserMessage-only lookups (flags) are
    emitted without the prefix.
    """
    def __init__(self, user_profile, prefix):
        self.user_profile = user_profile
        self.prefix = prefix

    def __call__(self, query, operator, operand):
        """Apply one narrow term to 'query' and return the new queryset."""
        # We have to be careful here because we're letting users call a method
        # by name! The prefix 'by_' prevents it from colliding with builtin
        # Python __magic__ stuff.
        method_name = 'by_' + operator.replace('-', '_')
        # 'search' needs the whole queryset (it adds SQL via .extra()),
        # so it can't go through the Q-object path below.
        if method_name == 'by_search':
            return self.do_search(query, operand)
        method = getattr(self, method_name, None)
        if method is None:
            raise BadNarrowOperator('unknown operator ' + operator)
        return query.filter(method(operand))

    # Wrapper for Q() which adds self.prefix to all the keys
    def pQ(self, **kwargs):
        return Q(**dict((self.prefix + key, kwargs[key]) for key in kwargs.keys()))

    def by_is(self, operand):
        """Filter by message property: private, starred, mentioned, alerted."""
        if operand == 'private':
            return (self.pQ(recipient__type=Recipient.PERSONAL) |
                    self.pQ(recipient__type=Recipient.HUDDLE))
        elif operand == 'starred':
            return Q(flags=UserMessage.flags.starred)
        elif operand == 'mentioned':
            return Q(flags=UserMessage.flags.mentioned)
        elif operand == 'alerted':
            # NOTE(review): 'alerted' reuses the mentioned flag -- confirm
            # this aliasing is intentional.
            return Q(flags=UserMessage.flags.mentioned)
        raise BadNarrowOperator("unknown 'is' operand " + operand)

    def by_stream(self, operand):
        """Filter to one stream (with fuzzy matching for mit.edu realms)."""
        stream = get_stream(operand, self.user_profile.realm)
        if stream is None:
            raise BadNarrowOperator('unknown stream ' + operand)

        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to "social" to also show messages to /^(un)*social(.d)*$/
            # (unsocial, ununsocial, social.d, etc)
            m = re.search(r'^(?:un)*(.+?)(?:\.d)*$', stream.name, re.IGNORECASE)
            if m:
                base_stream_name = m.group(1)
            else:
                base_stream_name = stream.name

            matching_streams = Stream.objects.filter(realm=self.user_profile.realm,
                                                     name__iregex=r'^(un)*%s(\.d)*$' % (re.escape(base_stream_name),))
            matching_stream_ids = [matching_stream.id for matching_stream in matching_streams]
            recipients = bulk_get_recipients(Recipient.STREAM, matching_stream_ids).values()
            return self.pQ(recipient__in=recipients)

        recipient = get_recipient(Recipient.STREAM, type_id=stream.id)
        return self.pQ(recipient=recipient)

    def by_topic(self, operand):
        """Filter to one topic (with fuzzy matching for mit.edu realms)."""
        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to topic "foo" to also show messages to /^foo(.d)*$/
            # (foo, foo.d, foo.d.d, etc)
            m = re.search(r'^(.*?)(?:\.d)*$', operand, re.IGNORECASE)
            if m:
                base_topic = m.group(1)
            else:
                base_topic = operand

            # Additionally, MIT users expect the empty instance and
            # instance "personal" to be the same.
            if base_topic in ('', 'personal', '(instance "")'):
                regex = r'^(|personal|\(instance ""\))(\.d)*$'
            else:
                regex = r'^%s(\.d)*$' % (re.escape(base_topic),)

            return self.pQ(subject__iregex=regex)

        return self.pQ(subject__iexact=operand)

    def by_sender(self, operand):
        """Filter by sender email (case-insensitive)."""
        return self.pQ(sender__email__iexact=operand)

    def by_near(self, operand):
        # 'near' applies no server-side filter; it returns an empty Q()
        # so the term is accepted without restricting the queryset.
        return Q()

    def by_id(self, operand):
        """Filter to a single message ID."""
        return self.pQ(id=operand)

    def by_pm_with(self, operand):
        """Filter to private messages with the given user(s).

        A comma-separated operand means a huddle; a single email means
        one-on-one messages in either direction (or with yourself).
        """
        if ',' in operand:
            # Huddle
            try:
                emails = [e.strip() for e in operand.split(',')]
                recipient = recipient_for_emails(emails, False,
                                                 self.user_profile, self.user_profile)
            except ValidationError:
                raise BadNarrowOperator('unknown recipient ' + operand)
            return self.pQ(recipient=recipient)
        else:
            # Personal message
            self_recipient = get_recipient(Recipient.PERSONAL, type_id=self.user_profile.id)
            if operand == self.user_profile.email:
                # Personals with self
                return self.pQ(recipient__type=Recipient.PERSONAL,
                               sender=self.user_profile, recipient=self_recipient)

            # Personals with other user; include both directions.
            try:
                narrow_profile = get_user_profile_by_email(operand)
            except UserProfile.DoesNotExist:
                raise BadNarrowOperator('unknown user ' + operand)

            narrow_recipient = get_recipient(Recipient.PERSONAL, narrow_profile.id)
            return ((self.pQ(sender=narrow_profile) & self.pQ(recipient=self_recipient)) |
                    (self.pQ(sender=self.user_profile) & self.pQ(recipient=narrow_recipient)))

    def do_search(self, query, operand):
        """Apply full-text search: Postgres tsearch when available,
        otherwise a portable icontains fallback.
        """
        if "postgres" in settings.DATABASES["default"]["ENGINE"]:
            tsquery = "plainto_tsquery('humbug.english_us_search', %s)"
            where = "search_tsvector @@ " + tsquery
            match_content = "ts_headline('humbug.english_us_search', rendered_content, " \
                + tsquery + ", 'StartSel=\"<span class=\"\"highlight\"\">\", StopSel=</span>, " \
                "HighlightAll=TRUE')"
            # We HTML-escape the subject in Postgres to avoid doing a server round-trip
            match_subject = "ts_headline('humbug.english_us_search', escape_html(subject), " \
                + tsquery + ", 'StartSel=\"<span class=\"\"highlight\"\">\", StopSel=</span>, " \
                "HighlightAll=TRUE')"

            # Do quoted string matching.  We really want phrase
            # search here so we can ignore punctuation and do
            # stemming, but there isn't a standard phrase search
            # mechanism in Postgres
            for term in re.findall('"[^"]+"|\S+', operand):
                if term[0] == '"' and term[-1] == '"':
                    term = term[1:-1]
                query = query.filter(self.pQ(content__icontains=term) |
                                     self.pQ(subject__icontains=term))

            return query.extra(select={'match_content': match_content,
                                       'match_subject': match_subject},
                               where=[where],
                               select_params=[operand, operand], params=[operand])
        else:
            for word in operand.split():
                query = query.filter(self.pQ(content__icontains=word) |
                                     self.pQ(subject__icontains=word))
            return query
|
2013-01-30 20:59:56 +01:00
|
|
|
|
2012-12-19 23:58:02 +01:00
|
|
|
|
2013-01-02 21:43:49 +01:00
|
|
|
def narrow_parameter(json):
    """Parse and validate the 'narrow' request parameter.

    Returns None for the legacy '{}' value, otherwise a list of
    [operator, operand] string pairs; raises ValueError on any
    malformed element.
    """
    # FIXME: A hack to support old mobile clients
    if json == '{}':
        return None

    data = json_to_list(json)
    for elem in data:
        if not isinstance(elem, list):
            raise ValueError("element is not a list")
        well_formed = (len(elem) == 2 and
                       all(isinstance(item, (str, unicode)) for item in elem))
        if not well_formed:
            raise ValueError("element is not a string pair")
    return data
|
|
|
|
|
2013-04-03 22:08:01 +02:00
|
|
|
def is_public_stream(request, stream, realm):
    """Return whether 'stream' names an existing public stream in 'realm'.

    Raises JsonableError for a syntactically invalid stream name;
    a nonexistent stream is simply not public.
    """
    if not valid_stream_name(stream):
        raise JsonableError("Invalid stream name")
    stream_obj = get_stream(stream, realm)
    return stream_obj is not None and stream_obj.is_public()
|
2013-01-16 00:52:57 +01:00
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             apply_markdown=REQ(default=True,
                                                converter=ujson.loads)):
    """Fetch up to num_before + num_after messages around message ID
    'anchor', optionally restricted by a narrow specification.

    Query strategy: with no narrow, a hand-written raw SQL query over
    UserMessage joined to Message; with a narrow, a Django queryset
    built by NarrowBuilder, over either the Message table directly
    (public-stream narrows, where history the user didn't receive is
    included) or the UserMessage table.  Message dicts themselves are
    bulk-fetched from the cache by ID.
    """
    # Decide whether the narrow permits showing messages the user never
    # received (public-stream history).
    include_history = False
    if narrow is not None:
        for operator, operand in narrow:
            if operator == "stream":
                if is_public_stream(request, operand, user_profile.realm):
                    include_history = True
        # Disable historical messages if the user is narrowing to show
        # only starred messages (or anything else that's a property on
        # the UserMessage table).  There cannot be historical messages
        # in these cases anyway.
        for operator, operand in narrow:
            if operator == "is" and operand == "starred":
                include_history = False

    if include_history:
        prefix = ""
        query = Message.objects.only("id").order_by('id')
    else:
        prefix = "message__"
        # Conceptually this query should be
        #   UserMessage.objects.filter(user_profile=user_profile).order_by('message')
        #
        # However, our do_search code above requires that there be a
        # unique 'rendered_content' row in the query, so we need to
        # somehow get the 'message' table into the query without
        # actually fetching all the rows from the message table (since
        # doing so would cause Django to consume a lot of resources
        # rendering them).  The following achieves these objectives.
        query = UserMessage.objects.select_related("message").only("flags", "id", "message__id") \
            .filter(user_profile=user_profile).order_by('message')

    num_extra_messages = 1
    is_search = False

    if narrow is None:
        # No narrow: use the fast raw-SQL path below.
        use_raw_query = True
    else:
        use_raw_query = False
        num_extra_messages = 0
        build = NarrowBuilder(user_profile, prefix)
        for operator, operand in narrow:
            if operator == 'search':
                is_search = True
            query = build(query, operator, operand)

    # Helper to target either Message fields directly or through the
    # UserMessage -> Message join, depending on 'prefix'.
    def add_prefix(**kwargs):
        return dict((prefix + key, kwargs[key]) for key in kwargs.keys())

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message.  If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    before_result = []
    after_result = []
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        if use_raw_query:
            cursor = connection.cursor()
            # These queries should always be the same as what we would do
            # in the !include_history case.
            cursor.execute("SELECT zerver_message.id, zerver_usermessage.flags FROM " +
                           "zerver_usermessage INNER JOIN zerver_message ON " +
                           "zerver_message.id = zerver_usermessage.message_id " +
                           "WHERE zerver_usermessage.user_profile_id = %s and zerver_message.id <= %s " +
                           "ORDER BY message_id DESC LIMIT %s", [user_profile.id, before_anchor, num_before])
            before_result = reversed(cursor.fetchall())
        else:
            before_result = last_n(num_before, query.filter(**add_prefix(id__lte=before_anchor)))
    if num_after != 0:
        if use_raw_query:
            cursor = connection.cursor()
            # These queries should always be the same as what we would do
            # in the !include_history case.
            cursor.execute("SELECT zerver_message.id, zerver_usermessage.flags FROM " +
                           "zerver_usermessage INNER JOIN zerver_message ON " +
                           "zerver_message.id = zerver_usermessage.message_id " +
                           "WHERE zerver_usermessage.user_profile_id = %s and zerver_message.id >= %s " +
                           "ORDER BY message_id LIMIT %s", [user_profile.id, anchor, num_after])
            after_result = cursor.fetchall()
        else:
            after_result = query.filter(**add_prefix(id__gte=anchor))[:num_after]
    query_result = list(before_result) + list(after_result)

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from memcached using the
    # 'messages' list.
    search_fields = dict()
    message_ids = []
    user_message_flags = {}
    if use_raw_query:
        # Raw rows are (message_id, packed flags) tuples.
        for row in query_result:
            (message_id, flags_val) = row
            user_message_flags[message_id] = parse_usermessage_flags(flags_val)
            message_ids.append(message_id)
    elif include_history:
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__in=query_result))
        for message in query_result:
            message_ids.append(message.id)
            # Messages the user never received are marked as historical
            # (and read) for the client.
            if user_message_flags.get(message.id) is None:
                user_message_flags[message.id] = ["read", "historical"]
            if is_search:
                search_fields[message.id] = dict([('match_subject', message.match_subject),
                                                  ('match_content', message.match_content)])
    else:
        user_message_flags = dict((user_message.message_id, user_message.flags_list())
                                  for user_message in query_result)
        for user_message in query_result:
            message_ids.append(user_message.message_id)
            if is_search:
                search_fields[user_message.message_id] = \
                    dict([('match_subject', user_message.match_subject),
                          ('match_content', user_message.match_content)])

    # Bulk-fetch the rendered message dicts from the cache, falling
    # back to the database (and re-populating the cache) for misses.
    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              lambda needed_ids: Message.objects.select_related().filter(id__in=needed_ids),
                                              message_ids,
                                              cache_transformer=lambda x: x.to_dict_uncached(apply_markdown),
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
|
2012-10-24 21:07:43 +02:00
|
|
|
|
2012-10-26 22:02:51 +02:00
|
|
|
def generate_client_id():
    """Return a fresh 32-character random token identifying a client session."""
    return generate_random_token(32)
|
2012-10-26 22:02:51 +02:00
|
|
|
|
2013-02-20 03:21:27 +01:00
|
|
|
@authenticated_json_post_view
def json_get_profile(request, user_profile):
    """JSON endpoint wrapper around get_profile_backend."""
    return get_profile_backend(request, user_profile)
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_get_profile(request, user_profile):
    """API endpoint wrapper around get_profile_backend."""
    return get_profile_backend(request, user_profile)
|
|
|
|
|
|
|
|
def get_profile_backend(request, user_profile):
    """Return the caller's pointer, a fresh client_id, and the id of the
    newest message the user has received (-1 if they have none)."""
    result = {'pointer': user_profile.pointer,
              'client_id': generate_client_id(),
              'max_message_id': -1}

    # Only fetch the single newest message the user has received.
    latest = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
    if latest:
        result['max_message_id'] = latest[0].id

    return json_success(result)
|
2012-10-21 03:53:03 +02:00
|
|
|
|
2013-03-06 21:04:53 +01:00
|
|
|
@authenticated_json_post_view
def json_update_flags(request, user_profile):
    """JSON endpoint wrapper around update_message_flags."""
    return update_message_flags(request, user_profile)
|
|
|
|
|
2013-03-06 21:04:53 +01:00
|
|
|
@has_request_variables
def update_message_flags(request, user_profile, messages=REQ('messages', converter=json_to_list),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', converter=json_to_bool, default=False)):
    """Apply a flag operation ('op') with the given flag to the listed
    message ids (or to all messages when 'all' is true), then echo the
    affected message list back to the caller."""
    do_update_message_flags(user_profile, operation, flag, messages, all)
    return json_success(dict(result='success',
                             messages=messages,
                             msg=''))
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_send_message(request, user_profile):
    """API endpoint wrapper around send_message_backend."""
    return send_message_backend(request, user_profile)
|
2012-10-01 21:36:44 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_send_message(request, user_profile):
    """JSON endpoint wrapper around send_message_backend."""
    return send_message_backend(request, user_profile)
|
2012-09-06 21:52:03 +02:00
|
|
|
|
2013-02-27 23:18:38 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_enter_sends(request, user_profile,
                            enter_sends=REQ('enter_sends', json_to_bool)):
    """Toggle whether pressing Enter sends a message for this user."""
    do_change_enter_sends(user_profile, enter_sends)
    return json_success()
|
|
|
|
|
2013-05-08 15:28:27 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_update_onboarding_steps(request, user_profile,
                                 onboarding_steps=REQ(converter=json_to_list,
                                                      default=[])):
    """Persist the caller's completed onboarding steps."""
    do_update_onboarding_steps(user_profile, onboarding_steps)
    return json_success()
|
|
|
|
|
2012-10-03 22:32:50 +02:00
|
|
|
def is_super_user_api(request):
    """Return whether the request comes from an authenticated superuser."""
    if not request.user.is_authenticated():
        return False
    return is_super_user(request.user)
|
2012-09-21 16:40:46 +02:00
|
|
|
|
2013-02-05 22:56:04 +01:00
|
|
|
def mit_to_mit(user_profile, email):
    """Are the sender and recipient both @mit.edu addresses?

    We have to handle this specially, inferring the domain from the
    e-mail address, because the recipient may not exist in Zulip yet
    and we may need to make a stub MIT user on the fly.  Malformed
    addresses are simply rejected.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    return (user_profile.realm.domain == "mit.edu"
            and email_to_domain(email) == "mit.edu")
|
2013-02-05 22:56:04 +01:00
|
|
|
|
2012-11-15 16:44:08 +01:00
|
|
|
def create_mirrored_message_users(request, user_profile, recipients):
    """Validate and provision the users referenced by a mirrored message.

    Returns (True, sender_profile) on success, (False, None) when the
    request has no sender or any referenced address fails the mit.edu
    check.  Stub MIT accounts are created on the fly as needed.
    """
    if "sender" not in request.POST:
        return (False, None)

    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        referenced_users.update(email.lower() for email in recipients)

    # Check that all referenced users are in our realm:
    for email in referenced_users:
        if not mit_to_mit(user_profile, email):
            return (False, None)

    # Create users for the referenced users, if needed.
    for email in referenced_users:
        create_mit_user_if_needed(user_profile.realm, email)

    return (True, get_user_profile_by_email(sender_email))
|
2012-09-06 22:00:39 +02:00
|
|
|
|
2013-04-04 22:30:28 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_tutorial_status(request, user_profile, status=REQ('status')):
    """Record the caller's tutorial progress ('started' or 'finished')."""
    transitions = {'started': UserProfile.TUTORIAL_STARTED,
                   'finished': UserProfile.TUTORIAL_FINISHED}
    if status in transitions:
        user_profile.tutorial_status = transitions[status]
    # An unrecognized status still reaches save(), which is then a no-op
    # write of the unchanged field (same as the original if/elif chain).
    user_profile.save(update_fields=["tutorial_status"])

    return json_success()
|
|
|
|
|
2013-05-14 21:18:11 +02:00
|
|
|
@authenticated_json_post_view
def json_update_message(request, user_profile):
    """JSON endpoint wrapper around update_message_backend."""
    return update_message_backend(request, user_profile)
|
|
|
|
|
2013-05-15 00:22:16 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_fetch_raw_message(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int)):
    """Return the unrendered content of one of the caller's own messages."""
    try:
        msg = Message.objects.get(id=message_id)
    except Message.DoesNotExist:
        return json_error("No such message")

    # Only the original sender may fetch the raw markdown.
    if msg.sender != user_profile:
        return json_error("Message was not sent by you")

    return json_success({"raw_content": msg.content})
|
|
|
|
|
2013-05-14 21:18:11 +02:00
|
|
|
@has_request_variables
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_subject=REQ(default=False),
                           content=REQ(default=None)):
    """Edit the subject and/or content of an existing message.

    At least one of subject or content must be supplied; when
    propagate_subject is set the subject change is applied more widely.
    """
    if subject is None and content is None:
        return json_error("Nothing to change")

    do_update_message(user_profile, message_id, subject, propagate_subject, content)
    return json_success()
|
|
|
|
|
2012-10-03 21:31:44 +02:00
|
|
|
# We do not @require_login for send_message_backend, since it is used
# both from the API and the web service. Code calling
# send_message_backend should either check the API key or check that
# the user is logged in.
@has_request_variables
def send_message_backend(request, user_profile,
                         message_type_name = REQ('type'),
                         message_to = REQ('to', converter=extract_recipients),
                         forged = REQ(default=False),
                         subject_name = REQ('subject', lambda x: x.strip(), None),
                         message_content = REQ('content'),
                         domain = REQ('domain', default=None)):
    """Send a message on behalf of user_profile (or, for superuser
    mirrors, on behalf of a validated third-party sender).

    Returns json_success({"id": <new message id>}) or json_error with a
    human-readable reason.  Forging a sender and cross-realm sending
    (via 'domain') are restricted to superusers.
    """
    client = request.client
    is_super_user = is_super_user_api(request)
    # Forged senders are only acceptable from trusted (superuser) bots.
    if forged and not is_super_user:
        return json_error("User not authorized for this query")

    realm = None
    if domain:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error("User not authorized for this query")
        realm = get_realm(domain)
        if not realm:
            return json_error("Unknown domain " + domain)

    if client.name == "zephyr_mirror":
        # Here's how security works for non-superuser mirroring:
        #
        # The message must be (1) a private message (2) that
        # is both sent and received exclusively by other users in your
        # realm which (3) must be the MIT realm and (4) you must have
        # received the message.
        #
        # If that's the case, we let it through, but we still have the
        # security flaw that we're trusting your Hesiod data for users
        # you report having sent you a message.
        if "sender" not in request.POST:
            return json_error("Missing sender")
        if message_type_name != "private" and not is_super_user:
            return json_error("User not authorized for this query")
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error("Invalid mirrored message")
        if user_profile.realm.domain != "mit.edu":
            return json_error("Invalid mirrored realm")
        # The validated third party becomes the actual sender.
        sender = mirror_sender
    else:
        sender = user_profile

    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm)
    return json_success({"id": ret})
|
2013-03-18 18:21:22 +01:00
|
|
|
|
2013-07-30 23:25:00 +02:00
|
|
|
@has_request_variables
def render_message_backend(request, user_profile, content=REQ):
    """Run message content through bugdown and return the rendered markup."""
    rendered = bugdown.convert(content, user_profile.realm.domain)
    return json_success({"rendered": rendered})
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_get_public_streams(request, user_profile):
    """API endpoint wrapper around get_public_streams_backend."""
    return get_public_streams_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_public_streams(request, user_profile):
    """JSON endpoint wrapper around get_public_streams_backend."""
    return get_public_streams_backend(request, user_profile)
|
|
|
|
|
2013-08-22 17:37:02 +02:00
|
|
|
# By default, lists all streams that the user has access to --
# i.e. public streams plus invite-only streams that the user is on
@has_request_variables
def get_streams_backend(request, user_profile,
                        include_public=REQ(converter=json_to_bool, default=True),
                        include_subscribed=REQ(converter=json_to_bool, default=True),
                        include_all_active=REQ(converter=json_to_bool, default=False)):
    """List streams visible to the caller, as {"streams": [{"name": ...}]}.

    include_all_active (superusers only) skips the visibility filters
    entirely; otherwise the result is the union of the caller's
    subscriptions (include_subscribed) and non-invite-only streams
    (include_public).
    """
    if include_all_active and not is_super_user_api(request):
        return json_error("User not authorized for this query")

    # Listing public streams is disabled for some users (e.g. a
    # contractor for CUSTOMER5) and for the mit.edu realm.
    include_public = include_public and not (user_profile.public_streams_disabled or
                                             user_profile.realm.domain == "mit.edu")

    # Only get streams someone is currently subscribed to
    subs_filter = Subscription.objects.filter(active=True).values('recipient_id')
    stream_ids = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=subs_filter).values('type_id')

    # Start out with all active streams in the realm
    query = Stream.objects.filter(id__in = stream_ids, realm=user_profile.realm)

    if not include_all_active:
        user_subs = Subscription.objects.select_related("recipient").filter(
            active=True, user_profile=user_profile,
            recipient__type=Recipient.STREAM)

        # Build each Q filter only when its flag is set; the combined
        # if/elif below only references the filters that were created.
        if include_subscribed:
            recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
        if include_public:
            invite_only_check = Q(invite_only=False)

        if include_subscribed and include_public:
            query = query.filter(recipient_check | invite_only_check)
        elif include_public:
            query = query.filter(invite_only_check)
        elif include_subscribed:
            query = query.filter(recipient_check)
        else:
            # We're including nothing, so don't bother hitting the DB.
            query = []

    streams = sorted({"name": stream.name} for stream in query)
    return json_success({"streams": streams})
|
|
|
|
|
2013-08-22 17:37:02 +02:00
|
|
|
def get_public_streams_backend(request, user_profile):
    """Convenience wrapper: list only the realm's public streams."""
    return get_streams_backend(request, user_profile,
                               include_public=True,
                               include_subscribed=False,
                               include_all_active=False)
|
|
|
|
|
2013-09-10 11:46:18 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_rename_stream(request, user_profile, old_name=REQ, new_name=REQ):
    """Rename a stream; restricted to realm administrators."""
    if not user_profile.has_perm('administer', user_profile.realm):
        return json_error("Insufficient permission to rename stream")

    return json_success(do_rename_stream(user_profile.realm, old_name, new_name))
|
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_list_subscriptions(request, user_profile):
    """API endpoint wrapper around list_subscriptions_backend."""
    return list_subscriptions_backend(request, user_profile)
|
|
|
|
|
|
|
|
def list_subscriptions_backend(request, user_profile):
    """Return only the caller's active stream subscriptions."""
    active_subs = gather_subscriptions(user_profile)[0]
    return json_success({"subscriptions": active_subs})
|
2012-10-11 19:31:21 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_list_subscriptions(request, user_profile):
    """Return both the caller's active and formerly-subscribed streams."""
    all_subs = gather_subscriptions(user_profile)
    return json_success({"subscriptions": all_subs[0],
                         "unsubscribed": all_subs[1]})
|
2012-09-18 16:30:25 +02:00
|
|
|
|
2013-03-21 20:18:44 +01:00
|
|
|
@transaction.commit_on_success
@has_request_variables
def update_subscriptions_backend(request, user_profile,
                                 delete=REQ(converter=json_to_list, default=[]),
                                 add=REQ(converter=json_to_list, default=[])):
    """Atomically add and/or remove stream subscriptions for the caller.

    Delegates to add_subscriptions_backend and
    remove_subscriptions_backend; if either delegate fails, the whole
    transaction is rolled back and that delegate's error response is
    returned unchanged.  On success the two delegates' JSON payloads
    are merged into one response.
    """
    if not add and not delete:
        return json_error('Nothing to do. Specify at least one of "add" or "delete".')

    json_dict = {}
    # Order matters: adds are applied before deletes.
    for method, items in ((add_subscriptions_backend, add), (remove_subscriptions_backend, delete)):
        response = method(request, user_profile, streams_raw=items)
        if response.status_code != 200:
            # Undo any DB work done by the earlier (successful) call.
            transaction.rollback()
            return response
        json_dict.update(ujson.loads(response.content))
    return json_success(json_dict)
|
|
|
|
|
2012-11-16 20:15:03 +01:00
|
|
|
@authenticated_api_view
def api_remove_subscriptions(request, user_profile):
    """API endpoint wrapper around remove_subscriptions_backend."""
    return remove_subscriptions_backend(request, user_profile)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_remove_subscriptions(request, user_profile):
    """JSON endpoint wrapper around remove_subscriptions_backend."""
    return remove_subscriptions_backend(request, user_profile)
|
|
|
|
|
2012-11-15 17:36:20 +01:00
|
|
|
@has_request_variables
def remove_subscriptions_backend(request, user_profile,
                                 streams_raw = REQ("subscriptions", json_to_list)):
    """Unsubscribe the caller from the named streams.

    The response lists which stream names were actually removed and
    which the caller was not subscribed to in the first place.
    """
    streams, _ = list_to_streams(streams_raw, user_profile)

    (removed, not_subscribed) = bulk_remove_subscriptions([user_profile], streams)
    result = dict(removed=[stream.name for (subscriber, stream) in removed],
                  not_subscribed=[stream.name for (subscriber, stream) in not_subscribed])

    return json_success(result)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2012-11-01 20:11:52 +01:00
|
|
|
@authenticated_api_view
def api_add_subscriptions(request, user_profile):
    """API endpoint wrapper around add_subscriptions_backend."""
    return add_subscriptions_backend(request, user_profile)
|
2012-10-11 21:34:17 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
def json_add_subscriptions(request, user_profile):
    """JSON endpoint wrapper around add_subscriptions_backend."""
    return add_subscriptions_backend(request, user_profile)
|
2012-08-30 20:00:04 +02:00
|
|
|
|
2013-08-15 22:29:58 +02:00
|
|
|
def filter_stream_authorization(user_profile, streams):
    """Split *streams* into (authorized, unauthorized) for *user_profile*.

    A stream is authorized if the user is actively subscribed to it, or
    if it is neither invite-only nor blocked by the user's
    public_streams_disabled flag.
    """
    recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])
    subs = Subscription.objects.filter(user_profile=user_profile,
                                       recipient__in=recipients_map.values(),
                                       active=True)
    streams_subscribed = set(sub.recipient.type_id for sub in subs)

    unauthorized_streams = [
        # The user is always authorized for streams they are already on;
        # beyond that, invite-only streams are off limits, as is
        # everything when public streams are disabled for this user.
        stream for stream in streams
        if stream.id not in streams_subscribed
        and (stream.invite_only or user_profile.public_streams_disabled)
    ]

    unauthorized_ids = set(stream.id for stream in unauthorized_streams)
    authorized = [stream for stream in streams if stream.id not in unauthorized_ids]
    return authorized, unauthorized_streams
|
|
|
|
|
2012-11-02 19:57:17 +01:00
|
|
|
@has_request_variables
def add_subscriptions_backend(request, user_profile,
                              streams_raw = REQ("subscriptions", json_to_list),
                              invite_only = REQ(converter=json_to_bool, default=False),
                              announce = REQ(converter=json_to_bool, default=False),
                              principals = REQ(converter=json_to_list, default=None),
                              authorization_errors_fatal = REQ(converter=json_to_bool, default=True)):
    """Subscribe users to streams, creating streams as needed.

    streams_raw is a list of {"name": ...} dicts.  When principals is
    given, those users (rather than the caller) are subscribed; when
    announce is set, newly created streams are announced to the rest of
    the realm.  Returns per-user "subscribed"/"already_subscribed" maps,
    plus "unauthorized" stream names when authorization_errors_fatal is
    false.
    """
    # Validate and normalize the requested stream names up front.
    stream_names = []
    for stream in streams_raw:
        if not isinstance(stream, dict):
            return json_error("Malformed request")
        stream_name = stream["name"].strip()
        if len(stream_name) > Stream.MAX_NAME_LENGTH:
            return json_error("Stream name (%s) too long." % (stream_name,))
        if not valid_stream_name(stream_name):
            return json_error("Invalid stream name (%s)." % (stream_name,))
        stream_names.append(stream_name)

    existing_streams, created_streams = \
        list_to_streams(stream_names, user_profile, autocreate=True, invite_only=invite_only)
    authorized_streams, unauthorized_streams = \
        filter_stream_authorization(user_profile, existing_streams)
    if len(unauthorized_streams) > 0 and authorization_errors_fatal:
        return json_error("Unable to access stream (%s)." % unauthorized_streams[0].name)
    # Newly created streams are also authorized for the creator
    streams = authorized_streams + created_streams

    if principals is not None:
        if user_profile.realm.domain == 'mit.edu' and not all(stream.invite_only for stream in streams):
            return json_error("You can only invite other mit.edu users to invite-only streams.")
        subscribers = set(principal_to_user_profile(user_profile, principal) for principal in principals)
    else:
        subscribers = [user_profile]

    (subscribed, already_subscribed) = bulk_add_subscriptions(streams, subscribers)

    result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list))
    for (subscriber, stream) in subscribed:
        result["subscribed"][subscriber.email].append(stream.name)
    for (subscriber, stream) in already_subscribed:
        result["already_subscribed"][subscriber.email].append(stream.name)

    private_streams = dict((stream.name, stream.invite_only) for stream in streams)

    # Inform the user if someone else subscribed them to stuff,
    # or if a new stream was created with the "announce" option.
    notifications = []
    if principals and result["subscribed"]:
        for email, subscriptions in result["subscribed"].iteritems():
            if email == user_profile.email:
                # Don't send a Zulip if you invited yourself.
                continue

            if len(subscriptions) == 1:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the%s stream '%s'"
                       % (user_profile.full_name,
                          " **invite-only**" if private_streams[subscriptions[0]] else "",
                          subscriptions[0]))
            else:
                msg = ("Hi there! We thought you'd like to know that %s just "
                       "subscribed you to the following streams: \n\n"
                       % (user_profile.full_name,))
                for stream in subscriptions:
                    msg += "* %s%s\n" % (
                        stream,
                        " (**invite-only**)" if private_streams[stream] else "")

            if len([s for s in subscriptions if not private_streams[s]]) > 0:
                msg += "\nYou can see historical content on a non-invite-only stream by narrowing to it."
            notifications.append(internal_prep_message("notification-bot@zulip.com",
                                                       "private", email, "", msg))

    if announce and len(created_streams) > 0:
        for realm_user in get_active_user_profiles_by_realm(user_profile.realm):
            # Don't announce to yourself or to people you explicitly added
            # (who will get the notification above instead).
            # BUG FIX: principals defaults to None, and ``x in None``
            # raises TypeError -- guard the membership test so that
            # announce=True works without an explicit principals list.
            if (principals is not None and realm_user.email in principals) or \
                    realm_user.email == user_profile.email:
                continue
            msg = ("Hi there! %s just created a new stream '%s'. "
                   "To join, click the gear in the left-side streams list."
                   % (user_profile.full_name, created_streams[0].name))
            notifications.append(internal_prep_message("notification-bot@zulip.com",
                                                       "private",
                                                       realm_user.email, "", msg))

    if len(notifications) > 0:
        do_send_messages(notifications)

    # Convert the defaultdicts back to plain dicts for serialization.
    result["subscribed"] = dict(result["subscribed"])
    result["already_subscribed"] = dict(result["already_subscribed"])
    if not authorization_errors_fatal:
        result["unauthorized"] = [stream.name for stream in unauthorized_streams]
    return json_success(result)
|
2012-09-05 23:38:20 +02:00
|
|
|
|
2013-02-11 17:20:16 +01:00
|
|
|
@authenticated_api_view
def api_get_members(request, user_profile):
    """API endpoint wrapper around get_members_backend."""
    return get_members_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_members(request, user_profile):
    """JSON endpoint wrapper around get_members_backend."""
    return get_members_backend(request, user_profile)
|
|
|
|
|
|
|
|
def get_members_backend(request, user_profile):
    """List the full name and email of every user in the caller's realm."""
    profiles = UserProfile.objects.select_related().filter(realm=user_profile.realm)
    members = [{"full_name": profile.full_name, "email": profile.email}
               for profile in profiles]
    return json_success({'members': members})
|
|
|
|
|
2013-01-03 00:41:46 +01:00
|
|
|
@authenticated_api_view
def api_get_subscribers(request, user_profile):
    """API endpoint wrapper around get_subscribers_backend."""
    return get_subscribers_backend(request, user_profile)
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_subscribers(request, user_profile):
    """JSON endpoint wrapper around get_subscribers_backend."""
    return get_subscribers_backend(request, user_profile)
|
|
|
|
|
2013-03-14 22:12:25 +01:00
|
|
|
@authenticated_json_post_view
def json_upload_file(request, user_profile):
    """Accept exactly one uploaded file and return the URI it is stored at."""
    file_count = len(request.FILES)
    if file_count == 0:
        return json_error("You must specify a file to upload")
    if file_count != 1:
        return json_error("You may only upload one file at a time")

    user_file = request.FILES.values()[0]
    uri = upload_message_image_through_web_client(request, user_file, user_profile)
    return json_success({'uri': uri})
|
2013-03-14 22:12:25 +01:00
|
|
|
|
2013-01-03 00:41:46 +01:00
|
|
|
@has_request_variables
def get_subscribers_backend(request, user_profile, stream_name=REQ('stream')):
    """List the emails of a stream's subscribers, as visible to the caller."""
    subscribers = get_subscribers(stream_name, user_profile.realm, user_profile)
    emails = [subscriber.email for subscriber in subscribers]
    return json_success({'subscribers': emails})
|
2013-01-03 00:41:46 +01:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_change_settings(request, user_profile, full_name=REQ,
                         old_password=REQ, new_password=REQ,
                         confirm_password=REQ,
                         # enable_desktop_notification needs to default to False
                         # because browsers POST nothing for an unchecked checkbox
                         enable_desktop_notifications=REQ(converter=lambda x: x == "on",
                                                          default=False),
                         enable_sounds=REQ(converter=lambda x: x == "on",
                                           default=False),
                         enable_offline_email_notifications=REQ(converter=lambda x: x == "on",
                                                                default=False)):
    """Apply the settings form: optional password change, name change,
    and the three notification toggles.

    Only settings that actually changed are echoed back in the result
    dict, so the client can update its local state.
    """
    # A password change is requested iff either password field is non-empty.
    if new_password != "" or confirm_password != "":
        if new_password != confirm_password:
            return json_error("New password must match confirmation password!")
        # Require the current password before accepting the new one.
        if not authenticate(username=user_profile.email, password=old_password):
            return json_error("Wrong password!")
        do_change_password(user_profile, new_password)

    result = {}
    if user_profile.full_name != full_name and full_name.strip() != "":
        if user_profile.realm.domain == "users.customer4.invalid":
            # At the request of the facilitators, CUSTOMER4
            # students can't change their names. Failing silently is
            # fine -- they can't do it through the UI, so they'd have
            # to be trying to break the rules.
            pass
        else:
            new_full_name = full_name.strip()
            if len(new_full_name) > UserProfile.MAX_NAME_LENGTH:
                return json_error("Name too long!")
            do_change_full_name(user_profile, new_full_name)
            result['full_name'] = new_full_name

    if user_profile.enable_desktop_notifications != enable_desktop_notifications:
        do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications)
        result['enable_desktop_notifications'] = enable_desktop_notifications

    if user_profile.enable_sounds != enable_sounds:
        do_change_enable_sounds(user_profile, enable_sounds)
        result['enable_sounds'] = enable_sounds

    if user_profile.enable_offline_email_notifications != enable_offline_email_notifications:
        do_change_enable_offline_email_notifications(user_profile, enable_offline_email_notifications)
        result['enable_offline_email_notifications'] = enable_offline_email_notifications

    return json_success(result)
|
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_stream_exists(request, user_profile, stream=REQ):
    """JSON endpoint wrapper around stream_exists_backend."""
    return stream_exists_backend(request, user_profile, stream)
|
|
|
|
|
|
|
|
def stream_exists_backend(request, user_profile, stream_name):
    """Report whether *stream_name* exists and, if so, whether the caller
    is subscribed to it.  A missing stream yields a 404 JSON response."""
    if not valid_stream_name(stream_name):
        return json_error("Invalid characters in stream name")

    stream = get_stream(stream_name, user_profile.realm)
    result = {"exists": bool(stream)}
    if stream is None:
        return json_response(data=result, status=404)

    recipient = get_recipient(Recipient.STREAM, stream.id)
    result["subscribed"] = Subscription.objects.filter(user_profile=user_profile,
                                                       recipient=recipient,
                                                       active=True).exists()
    return json_success(result)  # results are ignored for HEAD requests
|
2012-10-17 20:43:52 +02:00
|
|
|
|
2013-01-18 18:25:36 +01:00
|
|
|
def get_subscription_or_die(stream_name, user_profile):
    """Return the active Subscription queryset for stream_name.

    Raises JsonableError if the stream does not exist in the user's
    realm, or if the user has no active subscription to it.
    """
    stream = get_stream(stream_name, user_profile.realm)
    if not stream:
        # Bug fix: this previously formatted stream.name, but stream is
        # None on this path, so the handler crashed with AttributeError
        # instead of returning the intended error; use the requested name.
        raise JsonableError("Invalid stream %s" % (stream_name,))
    recipient = get_recipient(Recipient.STREAM, stream.id)
    subscription = Subscription.objects.filter(user_profile=user_profile,
                                               recipient=recipient, active=True)

    if not subscription.exists():
        raise JsonableError("Not subscribed to stream %s" % (stream_name,))

    return subscription
|
|
|
|
|
2012-12-02 22:58:00 +01:00
|
|
|
@authenticated_json_view
@has_request_variables
def json_subscription_property(request, user_profile, stream_name=REQ,
                               property=REQ):
    """
    This is the entry point to accessing or changing subscription
    properties.

    GET returns the property's current value; POST sets it, converting
    the request's "value" parameter with the property's converter.
    """
    # Whitelist of settable properties, mapped to converters for their
    # JSON-encoded values.
    property_converters = dict(color=lambda x: x,
                               in_home_view=json_to_bool,
                               notifications=json_to_bool)
    if property not in property_converters:
        return json_error("Unknown subscription property: %s" % (property,))

    sub = get_subscription_or_die(stream_name, user_profile)[0]
    if request.method == "GET":
        return json_success({'stream_name': stream_name,
                             'value': getattr(sub, property)})
    elif request.method == "POST":
        # Defined inline so @has_request_variables can extract "value"
        # using the converter selected for this particular property.
        @has_request_variables
        def do_set_property(request,
                            value=REQ(converter=property_converters[property])):
            do_change_subscription_property(user_profile, sub, stream_name,
                                            property, value)
        do_set_property(request)
        return json_success()
    else:
        return json_error("Invalid verb")
|
2012-12-01 04:37:18 +01:00
|
|
|
|
2012-10-17 22:36:49 +02:00
|
|
|
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(request, username=REQ, password=REQ):
    """Exchange a username/password pair for the account's API key."""
    user_profile = authenticate(username=username, password=password)
    if user_profile is not None and user_profile.is_active:
        return json_success({"api_key": user_profile.api_key})
    if user_profile is None:
        return json_error("Your username or password is incorrect.", status=403)
    return json_error("Your account has been disabled.", status=403)
|
2012-10-17 22:26:59 +02:00
|
|
|
|
2012-12-02 20:51:51 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_fetch_api_key(request, user_profile, password=REQ):
    """Return the logged-in user's API key after re-confirming their password."""
    if user_profile.check_password(password):
        return json_success({"api_key": user_profile.api_key})
    return json_error("Your username or password is incorrect.")
|
2012-11-09 00:09:58 +01:00
|
|
|
|
2012-12-04 22:14:44 +01:00
|
|
|
class ActivityTable(object):
    """Per-user activity summary for one client, aggregated from UserActivity.

    Rows are keyed by email and carry per-query counts/last-visit times
    plus an 'age' (time since the most recent visit) and a CSS 'class'.
    """
    def __init__(self, client_name, queries, default_tab=False):
        self.default_tab = default_tab
        self.has_pointer = False
        self.rows = {}

        def record_url(query_name, url):
            # Fold every matching UserActivity record into self.rows.
            records = UserActivity.objects.filter(
                query=url,
                client__name__startswith=client_name).select_related()
            for record in records:
                profile = record.user_profile
                row = self.rows.setdefault(profile.email,
                                           {'realm': profile.realm.domain,
                                            'full_name': profile.full_name,
                                            'email': profile.email})
                row[query_name + '_count'] = record.count
                row[query_name + '_last' ] = record.last_visit

        for query_name, urls in queries:
            if 'pointer' in query_name:
                self.has_pointer = True
            for url in urls:
                record_url(query_name, url)

        for row in self.rows.values():
            # kind of a hack: the only datetime values in a row are the
            # per-query last-visit timestamps, so max() of them is the
            # user's most recent action.
            last_action = max(v for v in row.values()
                              if isinstance(v, datetime.datetime))
            age = now() - last_action
            if age < datetime.timedelta(minutes=10):
                row['class'] = 'recently_active'
            elif age >= datetime.timedelta(days=1):
                row['class'] = 'long_inactive'
            row['age'] = age

    def sorted_rows(self):
        """Return (email, row) pairs ordered most-recently-active first."""
        return sorted(self.rows.iteritems(), key=lambda item: item[1]['age'])
|
2012-12-04 23:54:14 +01:00
|
|
|
|
2012-12-05 19:32:09 +01:00
|
|
|
def can_view_activity(request):
    # The activity dashboard is restricted to accounts in the zulip.com
    # (staff) realm.
    return request.user.realm.domain == 'zulip.com'
|
2012-12-05 19:32:09 +01:00
|
|
|
|
2012-11-09 00:09:58 +01:00
|
|
|
@login_required(login_url = settings.HOME_NOT_LOGGED_IN)
def get_activity(request):
    """Render the internal activity dashboard; staff-realm users only."""
    if not can_view_activity(request):
        return HttpResponseRedirect(reverse('zerver.views.login_page'))

    # (label, urls) pairs aggregated per user by ActivityTable.
    web_queries = (
        ("get_updates", ["/json/get_updates", "/json/get_events"]),
        ("send_message", ["/json/send_message"]),
        ("update_pointer", ["/json/update_pointer"]),
    )

    api_queries = (
        ("get_updates", ["/api/v1/get_messages", "/api/v1/messages/latest", "/api/v1/events"]),
        ("send_message", ["/api/v1/send_message"]),
    )

    data = {
        'Website': ActivityTable('website', web_queries, default_tab=True),
        'Mirror': ActivityTable('zephyr_mirror', api_queries),
        'API': ActivityTable('API', api_queries),
        'Android': ActivityTable('Android', api_queries),
        'iPhone': ActivityTable('iPhone', api_queries),
    }
    return render_to_response('zerver/activity.html', {'data': data},
                              context_instance=RequestContext(request))
|
2012-11-19 23:52:36 +01:00
|
|
|
|
2013-02-11 21:47:45 +01:00
|
|
|
def get_status_list(requesting_user_profile):
    """Bundle realm presence data with the server clock (for skew handling)."""
    presences = get_status_dict(requesting_user_profile)
    return {'presences': presences,
            'server_timestamp': time.time()}
|
2013-02-11 21:47:45 +01:00
|
|
|
|
2013-02-08 23:44:15 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_update_active_status(request, user_profile, status=REQ,
                              new_user_input=REQ(converter=json_to_bool, default=False)):
    """Record the client's presence status and return the realm's presence list.

    Raises JsonableError for an unrecognized status string.
    """
    status_val = UserPresence.status_from_string(status)
    if status_val is None:
        raise JsonableError("Invalid presence status: %s" % (status,))
    else:
        update_user_presence(user_profile, request.client, now(), status_val,
                             new_user_input)

    ret = get_status_list(user_profile)
    if user_profile.realm.domain == "mit.edu":
        # For MIT users, also report whether their zephyr mirror has
        # polled recently so the UI can warn when the mirror is down.
        try:
            # We renamed /api/v1/get_messages to /api/v1/events
            try:
                activity = UserActivity.objects.get(user_profile = user_profile,
                                                    query="/api/v1/events",
                                                    client__name="zephyr_mirror")
            except UserActivity.DoesNotExist:
                # Fall back to the pre-rename URL for older mirrors.
                activity = UserActivity.objects.get(user_profile = user_profile,
                                                    query="/api/v1/get_messages",
                                                    client__name="zephyr_mirror")

            # Mirror counts as active if it has polled within 5 minutes.
            ret['zephyr_mirror_active'] = \
                (activity.last_visit.replace(tzinfo=None) >
                 datetime.datetime.utcnow() - datetime.timedelta(minutes=5))
        except UserActivity.DoesNotExist:
            ret['zephyr_mirror_active'] = False

    return json_success(ret)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2013-02-11 21:47:45 +01:00
|
|
|
@authenticated_json_post_view
def json_get_active_statuses(request, user_profile):
    """Return presence information for the user's realm."""
    return json_success(get_status_list(user_profile))
|
2013-03-11 20:54:27 +01:00
|
|
|
|
2013-03-28 18:53:44 +01:00
|
|
|
# Read the source map information for decoding JavaScript backtraces
js_source_map = None
# Only load the map in production-like deployments; under DEBUG or the
# test suite the minified bundle (and its source map) may not exist.
if not (settings.DEBUG or settings.TEST_SUITE):
    js_source_map = SourceMap(path.join(
        settings.DEPLOY_ROOT, 'prod-static/source-map'))
|
2013-03-28 18:53:44 +01:00
|
|
|
|
2013-03-11 20:54:27 +01:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_report_error(request, user_profile, message=REQ, stacktrace=REQ,
                      ui_message=REQ(converter=json_to_bool), user_agent=REQ,
                      href=REQ,
                      more_info=REQ(converter=json_to_dict, default=None)):
    """Email the admins a JavaScript error reported by the web client.

    ui_message distinguishes errors the user actually saw in the UI
    from ones that were only logged.
    """
    subject = "error for %s" % (user_profile.email,)
    if ui_message:
        subject = "User-visible browser " + subject
    else:
        subject = "Browser " + subject

    if js_source_map:
        # Translate minified-JS stack positions back to source positions.
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    body = ("Message:\n%s\n\nStacktrace:\n%s\n\nUser agent: %s\nhref: %s\n"
            "User saw error in UI: %s\n"
            % (message, stacktrace, user_agent, href, ui_message))

    body += "Server path: %s\n" % (settings.DEPLOY_ROOT,)
    try:
        # Identify the running deployment by its HEAD commit.
        body += "Deployed version: %s" % (
            subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"]),)
    except Exception:
        # Best-effort: e.g. git not installed or not a git checkout.
        body += "Could not determine current git commit ID.\n"

    if more_info is not None:
        body += "\nAdditional information:"
        for (key, value) in more_info.iteritems():
            body += "\n %s: %s" % (key, value)

    mail_admins(subject, body)
    return json_success()
|
2013-03-14 23:21:53 +01:00
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_events_register(request, user_profile):
    """JSON endpoint: thin wrapper delegating to events_register_backend."""
    return events_register_backend(request, user_profile)
|
|
|
|
|
2013-04-02 22:42:51 +02:00
|
|
|
# Does not need to be authenticated because it's called from rest_dispatch
@has_request_variables
def api_events_register(request, user_profile,
                        apply_markdown=REQ(default=False, converter=json_to_bool)):
    """API endpoint wrapper; API clients default to raw (non-rendered) content."""
    return events_register_backend(request, user_profile,
                                   apply_markdown=apply_markdown)
|
|
|
|
|
|
|
|
@has_request_variables
def events_register_backend(request, user_profile, apply_markdown=True,
                            event_types=REQ(converter=json_to_list, default=None),
                            queue_lifespan_secs=REQ(converter=int, default=0)):
    """Register an event queue for the client and return its initial state."""
    registration = do_events_register(user_profile, request.client,
                                      apply_markdown, event_types,
                                      queue_lifespan_secs)
    return json_success(registration)
|
2013-04-24 17:18:49 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_messages_in_narrow(request, user_profile):
    """JSON endpoint: thin wrapper delegating to messages_in_narrow_backend."""
    return messages_in_narrow_backend(request, user_profile)
|
|
|
|
|
|
|
|
@has_request_variables
def messages_in_narrow_backend(request, user_profile, msg_ids = REQ(converter=json_to_list),
                               narrow = REQ(converter=narrow_parameter)):
    """Filter msg_ids down to the messages matching narrow.

    Note that this function will only work on messages the user
    actually received.
    """
    query = UserMessage.objects.select_related("message") \
                               .filter(user_profile=user_profile, message__id__in=msg_ids)
    build = NarrowBuilder(user_profile, "message__")
    for operator, operand in narrow:
        query = build(query, operator, operand)

    matches = {}
    for msg in query.iterator():
        matches[msg.message.id] = {'match_subject': msg.match_subject,
                                   'match_content': msg.match_content}
    return json_success({"messages": matches})
|
2013-05-03 00:26:53 +02:00
|
|
|
|
2013-07-08 23:34:43 +02:00
|
|
|
def deactivate_user_backend(request, user_profile, email):
    """Deactivate the account identified by email.

    The requester must have administrative rights over the target user.
    """
    try:
        target_user = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return json_error('No such user')

    if not user_profile.can_admin_user(target_user):
        return json_error('Insufficient permission')

    do_deactivate(target_user)
    return json_success({})
|
|
|
|
|
2013-07-16 22:25:34 +02:00
|
|
|
@has_request_variables
def patch_bot_backend(request, user_profile, email, full_name=REQ):
    """Update a bot's full name and, if a file was uploaded, its avatar.

    The requester must have administrative rights over the bot.
    """
    # TODO:
    #   1) Validate data
    #   2) Support avatar changes
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        # Was a bare "except:", which also swallowed unrelated errors
        # (including programming bugs) and mislabeled them "No such
        # user"; catch only the expected lookup failure, matching
        # deactivate_user_backend.
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    do_change_full_name(bot, full_name)

    bot_avatar_url = None

    if len(request.FILES) == 0:
        pass
    elif len(request.FILES) == 1:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, bot.email)
        avatar_source = UserProfile.AVATAR_FROM_USER
        bot.avatar_source = avatar_source
        bot.save(update_fields=["avatar_source"])
        bot_avatar_url = avatar_url(bot)
    else:
        return json_error("You may only upload one file at a time")

    json_result = dict(
        full_name = full_name,
        avatar_url = bot_avatar_url
    )
    return json_success(json_result)
|
|
|
|
|
2013-08-08 16:49:32 +02:00
|
|
|
@has_request_variables
def regenerate_api_key(request, user_profile):
    """Replace the requesting user's API key with a freshly generated one."""
    new_key = random_api_key()
    user_profile.api_key = new_key
    user_profile.save(update_fields=["api_key"])
    return json_success(dict(api_key=new_key))
|
|
|
|
|
2013-07-19 17:45:06 +02:00
|
|
|
@has_request_variables
def regenerate_bot_api_key(request, user_profile, email):
    """Generate and return a new API key for one of the user's bots.

    The requester must have administrative rights over the bot.
    """
    try:
        bot = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        # Was a bare "except:"; only the expected lookup failure should
        # map to "No such user" — anything else should propagate.
        return json_error('No such user')

    if not user_profile.can_admin_user(bot):
        return json_error('Insufficient permission')

    bot.api_key = random_api_key()
    bot.save(update_fields=["api_key"])
    json_result = dict(
        api_key = bot.api_key
    )
    return json_success(json_result)
|
|
|
|
|
2013-05-03 00:26:53 +02:00
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_create_bot(request, user_profile, full_name=REQ, short_name=REQ):
    """Create a bot owned by the requesting user, optionally with an avatar."""
    short_name += "-bot"
    email = short_name + "@" + user_profile.realm.domain
    form = CreateBotForm({'full_name': full_name, 'email': email})
    if not form.is_valid():
        # We validate client-side as well
        return json_error('Bad name or username')

    try:
        get_user_profile_by_email(email)
        return json_error("Username already in use")
    except UserProfile.DoesNotExist:
        pass

    num_files = len(request.FILES)
    if num_files == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif num_files != 1:
        return json_error("You may only upload one file at a time")
    else:
        user_file = request.FILES.values()[0]
        upload_avatar_image(user_file, user_profile, email)
        avatar_source = UserProfile.AVATAR_FROM_USER

    bot_profile = do_create_user(email, '', user_profile.realm, full_name,
                                 short_name, True, True,
                                 user_profile, avatar_source)
    return json_success(dict(
        api_key=bot_profile.api_key,
        avatar_url=avatar_url(bot_profile)
    ))
|
2013-05-03 00:26:53 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_get_bots(request, user_profile):
    """List the requesting user's active bots, oldest first."""
    bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
                                              bot_owner=user_profile) \
                                      .order_by('date_joined')

    bots = [dict(username = bot.email,
                 full_name = bot.full_name,
                 api_key = bot.api_key,
                 avatar_url = avatar_url(bot))
            for bot in bot_profiles]

    return json_success({'bots': bots})
|
2013-07-26 16:51:02 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_refer_friend(request, user_profile, email=REQ):
    """Spend one of the user's invites to refer a friend by email."""
    if not email:
        return json_error("No email address specified")

    remaining_invites = user_profile.invites_granted - user_profile.invites_used
    if remaining_invites <= 0:
        return json_error("Insufficient invites")

    do_refer_friend(user_profile, email)

    return json_success()
|
2013-09-03 22:41:17 +02:00
|
|
|
|
|
|
|
def list_alert_words(request, user_profile):
    """Return the user's current alert words."""
    return json_success({'alert_words': user_alert_words(user_profile)})
|
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_set_alert_words(request, user_profile,
                         alert_words=REQ(converter=json_to_list, default=[])):
    """JSON endpoint: replace the user's alert words with the given list."""
    do_set_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def set_alert_words(request, user_profile,
                    alert_words=REQ(converter=json_to_list, default=[])):
    """API endpoint: replace the user's alert words with the given list."""
    do_set_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def add_alert_words(request, user_profile,
                    alert_words=REQ(converter=json_to_list, default=[])):
    """API endpoint: add the given words to the user's alert words."""
    do_add_alert_words(user_profile, alert_words)
    return json_success()
|
|
|
|
|
|
|
|
@has_request_variables
def remove_alert_words(request, user_profile,
                       alert_words=REQ(converter=json_to_list, default=[])):
    """API endpoint: remove the given words from the user's alert words."""
    do_remove_alert_words(user_profile, alert_words)
    return json_success()
|
2013-09-10 00:06:24 +02:00
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_set_muted_topics(request, user_profile,
                          muted_topics=REQ(converter=json_to_list, default=[])):
    """JSON endpoint: replace the user's set of muted topics."""
    do_set_muted_topics(user_profile, muted_topics)
    return json_success()
|