2013-12-12 18:36:32 +01:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2016-05-25 15:02:02 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2013-12-12 18:36:32 +01:00
|
|
|
from django.conf import settings
|
|
|
|
from django.core import validators
|
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.db import connection
|
|
|
|
from django.db.models import Q
|
2016-06-06 00:32:39 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
|
|
|
from six import text_type
|
|
|
|
from typing import Optional
|
|
|
|
|
2013-12-12 18:36:32 +01:00
|
|
|
from zerver.decorator import authenticated_api_view, authenticated_json_post_view, \
|
2014-02-14 15:48:42 +01:00
|
|
|
has_request_variables, REQ, JsonableError, \
|
2013-12-12 18:36:32 +01:00
|
|
|
to_non_negative_int, to_non_negative_float
|
2013-11-26 00:41:24 +01:00
|
|
|
from django.utils.html import escape as escape_html
|
2013-12-12 18:36:32 +01:00
|
|
|
from django.views.decorators.csrf import csrf_exempt
|
|
|
|
from zerver.lib import bugdown
|
|
|
|
from zerver.lib.actions import recipient_for_emails, do_update_message_flags, \
|
|
|
|
compute_mit_user_fullname, compute_irc_user_fullname, compute_jabber_user_fullname, \
|
|
|
|
create_mirror_user_if_needed, check_send_message, do_update_message, \
|
|
|
|
extract_recipients
|
|
|
|
from zerver.lib.cache import generic_bulk_cached_fetch
|
|
|
|
from zerver.lib.query import last_n
|
|
|
|
from zerver.lib.response import json_success, json_error
|
|
|
|
from zerver.lib.utils import statsd
|
2014-02-14 15:48:42 +01:00
|
|
|
from zerver.lib.validator import \
|
|
|
|
check_list, check_int, check_dict, check_string, check_bool
|
2014-02-25 22:22:35 +01:00
|
|
|
from zerver.models import Message, UserProfile, Stream, Subscription, \
|
2013-12-12 18:36:32 +01:00
|
|
|
Recipient, UserMessage, bulk_get_recipients, get_recipient, \
|
|
|
|
get_user_profile_by_email, get_stream, valid_stream_name, \
|
|
|
|
parse_usermessage_flags, to_dict_cache_key_id, extract_message_dict, \
|
2015-09-20 19:50:06 +02:00
|
|
|
stringify_message_dict, \
|
2014-02-12 22:09:34 +01:00
|
|
|
resolve_email_to_domain, get_realm, get_active_streams, \
|
|
|
|
bulk_get_streams
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2013-12-10 23:32:29 +01:00
|
|
|
import sqlalchemy
|
|
|
|
from sqlalchemy import func
|
|
|
|
from sqlalchemy.sql import select, join, column, literal_column, literal, and_, \
|
2014-02-12 22:09:34 +01:00
|
|
|
or_, not_, union_all, alias
|
2013-12-10 23:32:29 +01:00
|
|
|
|
2013-12-12 18:36:32 +01:00
|
|
|
import re
|
|
|
|
import ujson
|
|
|
|
|
|
|
|
from zerver.lib.rest import rest_dispatch as _rest_dispatch
|
2015-11-01 17:14:53 +01:00
|
|
|
from six.moves import map
|
2015-11-01 17:15:17 +01:00
|
|
|
import six
|
2013-12-12 18:36:32 +01:00
|
|
|
# CSRF-exempt wrapper around the shared REST dispatcher; globals() is passed
# so _rest_dispatch can resolve view functions in this module by name.
rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs)))
|
|
|
|
|
2013-12-10 23:32:29 +01:00
|
|
|
# This is a Pool that doesn't close connections. Therefore it can be used with
# existing Django database connections.
class NonClosingPool(sqlalchemy.pool.NullPool):
    """NullPool variant that never closes the underlying connection.

    Django owns the lifecycle of its database connections; this pool lets
    SQLAlchemy borrow them without ever closing them out from under Django.
    """
    def status(self):
        # Human-readable pool identifier (SQLAlchemy pool API).
        return "NonClosingPool"

    def _do_return_conn(self, conn):
        # Deliberate no-op: NullPool would close the connection here, but the
        # connection belongs to Django and must stay open.
        pass

    def recreate(self):
        # Rebuild a pool of the same class with the same configuration
        # (mirrors NullPool.recreate, preserving our subclass).
        return self.__class__(creator=self._creator, # type: ignore # __class__
                              recycle=self._recycle,
                              use_threadlocal=self._use_threadlocal,
                              reset_on_return=self._reset_on_return,
                              echo=self.echo,
                              logging_name=self._orig_logging_name,
                              _dispatch=self.dispatch)
|
|
|
|
|
|
|
|
# Lazily-created module-level engine shared by all requests.
sqlalchemy_engine = None
def get_sqlalchemy_connection():
    """Return a SQLAlchemy connection that wraps Django's own DB connection.

    The engine is created once (module-global) with a creator that hands
    back Django's live psycopg2 connection, pooled via NonClosingPool so
    SQLAlchemy never closes it. Autocommit is disabled so transaction
    control stays with Django.
    """
    global sqlalchemy_engine
    if sqlalchemy_engine is None:
        def get_dj_conn():
            # Make sure Django has an open connection, then reuse it.
            connection.ensure_connection()
            return connection.connection
        sqlalchemy_engine = sqlalchemy.create_engine('postgresql://',
                                                     creator=get_dj_conn,
                                                     poolclass=NonClosingPool,
                                                     pool_reset_on_return=False)
    sa_connection = sqlalchemy_engine.connect()
    sa_connection.execution_options(autocommit=False)
    return sa_connection
|
|
|
|
|
2016-04-21 21:47:01 +02:00
|
|
|
class BadNarrowOperator(JsonableError):
    """Raised when a narrow term has an unknown operator or invalid operand.

    Rendered to the client as a JSON error with the given HTTP status code.
    """
    def __init__(self, desc, status_code=400):
        # desc: human-readable description of the bad operator/operand.
        self.desc = desc
        # status_code: HTTP status used when serializing this error.
        self.status_code = status_code

    def to_json_error_msg(self):
        # Translated message shown to the API client.
        return _('Invalid narrow operator: {}').format(self.desc)
|
2013-12-12 18:36:32 +01:00
|
|
|
|
|
|
|
# When you add a new operator to this, also update zerver/lib/narrow.py
class NarrowBuilder(object):
    """Translates narrow terms into SQLAlchemy WHERE conditions.

    Each supported operator has a by_<operator> method that takes the
    running query, the operand, and a maybe_negate wrapper, and returns
    the query with the corresponding condition applied.
    """
    def __init__(self, user_profile, msg_id_column):
        # The user whose perspective (realm, subscriptions) we filter by.
        self.user_profile = user_profile
        # Column expression for the message id in the outer query
        # (differs between the Message and UserMessage query shapes).
        self.msg_id_column = msg_id_column
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-27 20:09:59 +01:00
|
|
|
def add_term(self, query, term):
|
2013-12-12 18:36:32 +01:00
|
|
|
# We have to be careful here because we're letting users call a method
|
|
|
|
# by name! The prefix 'by_' prevents it from colliding with builtin
|
|
|
|
# Python __magic__ stuff.
|
2014-02-10 21:45:53 +01:00
|
|
|
operator = term['operator']
|
|
|
|
operand = term['operand']
|
2014-02-11 21:36:59 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
negated = term.get('negated', False)
|
2014-02-11 21:36:59 +01:00
|
|
|
|
2013-12-12 18:36:32 +01:00
|
|
|
method_name = 'by_' + operator.replace('-', '_')
|
|
|
|
method = getattr(self, method_name, None)
|
|
|
|
if method is None:
|
|
|
|
raise BadNarrowOperator('unknown operator ' + operator)
|
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
if negated:
|
|
|
|
maybe_negate = not_
|
|
|
|
else:
|
|
|
|
maybe_negate = lambda cond: cond
|
|
|
|
|
|
|
|
return method(query, operand, maybe_negate)
|
|
|
|
|
2014-03-05 18:41:01 +01:00
|
|
|
def by_has(self, query, operand, maybe_negate):
|
|
|
|
if operand not in ['attachment', 'image', 'link']:
|
|
|
|
raise BadNarrowOperator("unknown 'has' operand " + operand)
|
|
|
|
col_name = 'has_' + operand
|
|
|
|
cond = column(col_name)
|
|
|
|
return query.where(maybe_negate(cond))
|
|
|
|
|
2014-02-27 23:57:16 +01:00
|
|
|
def by_in(self, query, operand, maybe_negate):
|
|
|
|
if operand == 'home':
|
|
|
|
conditions = exclude_muting_conditions(self.user_profile, [])
|
|
|
|
return query.where(and_(*conditions))
|
|
|
|
elif operand == 'all':
|
|
|
|
return query
|
|
|
|
|
|
|
|
raise BadNarrowOperator("unknown 'in' operand " + operand)
|
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_is(self, query, operand, maybe_negate):
        """Narrow by message state: is:private, is:starred,
        is:mentioned / is:alerted.

        is:private joins against zerver_recipient to test the recipient
        type; the others test bitmask flags on the UserMessage row.
        """
        if operand == 'private':
            # Join to the recipient table so we can inspect its type.
            query = query.select_from(join(query.froms[0], "zerver_recipient",
                                           column("recipient_id") ==
                                           literal_column("zerver_recipient.id")))
            cond = or_(column("type") == Recipient.PERSONAL,
                       column("type") == Recipient.HUDDLE)
            return query.where(maybe_negate(cond))
        elif operand == 'starred':
            # Bitwise test of the starred flag on UserMessage.flags.
            cond = column("flags").op("&")(UserMessage.flags.starred.mask) != 0
            return query.where(maybe_negate(cond))
        elif operand == 'mentioned' or operand == 'alerted':
            # Both operands share the mentioned flag.
            cond = column("flags").op("&")(UserMessage.flags.mentioned.mask) != 0
            return query.where(maybe_negate(cond))
        raise BadNarrowOperator("unknown 'is' operand " + operand)
|
|
|
|
|
2014-01-07 22:15:22 +01:00
|
|
|
_alphanum = frozenset(
|
|
|
|
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789')
|
|
|
|
|
|
|
|
def _pg_re_escape(self, pattern):
|
|
|
|
"""
|
|
|
|
Escape user input to place in a regex
|
|
|
|
|
|
|
|
Python's re.escape escapes unicode characters in a way which postgres
|
|
|
|
fails on, u'\u03bb' to u'\\\u03bb'. This function will correctly escape
|
|
|
|
them for postgres, u'\u03bb' to u'\\u03bb'.
|
|
|
|
"""
|
|
|
|
s = list(pattern)
|
|
|
|
for i, c in enumerate(s):
|
|
|
|
if c not in self._alphanum:
|
|
|
|
if c == '\000':
|
|
|
|
s[1] = '\\000'
|
|
|
|
elif ord(c) >= 128:
|
|
|
|
# convert the character to hex postgres regex will take
|
|
|
|
# \uXXXX
|
|
|
|
s[i] = '\\u{:0>4x}'.format(ord(c))
|
|
|
|
else:
|
|
|
|
s[i] = '\\' + c
|
|
|
|
return ''.join(s)
|
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_stream(self, query, operand, maybe_negate):
        """Narrow to a stream by name, matching on recipient_id.

        On the mit.edu realm, zephyr-mirroring conventions mean related
        streams (un-prefixed and .d-suffixed variants) are matched too.
        """
        stream = get_stream(operand, self.user_profile.realm)
        if stream is None:
            raise BadNarrowOperator('unknown stream ' + operand)

        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to "social" to also show messages to /^(un)*social(.d)*$/
            # (unsocial, ununsocial, social.d, etc)
            m = re.search(r'^(?:un)*(.+?)(?:\.d)*$', stream.name, re.IGNORECASE)
            if m:
                base_stream_name = m.group(1)
            else:
                base_stream_name = stream.name

            # Find every active stream matching the un*/.d* family.
            matching_streams = get_active_streams(self.user_profile.realm).filter(
                name__iregex=r'^(un)*%s(\.d)*$' % (self._pg_re_escape(base_stream_name),))
            matching_stream_ids = [matching_stream.id for matching_stream in matching_streams]
            recipients_map = bulk_get_recipients(Recipient.STREAM, matching_stream_ids)
            cond = column("recipient_id").in_([recipient.id for recipient in recipients_map.values()])
            return query.where(maybe_negate(cond))

        # Normal realms: a single stream maps to a single recipient row.
        recipient = get_recipient(Recipient.STREAM, type_id=stream.id)
        cond = column("recipient_id") == recipient.id
        return query.where(maybe_negate(cond))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_topic(self, query, operand, maybe_negate):
        """Narrow to a topic (the `subject` column), case-insensitively.

        On the mit.edu realm, .d-suffixed variants of the topic are
        matched too, and the empty instance is equated with "personal".
        """
        if self.user_profile.realm.domain == "mit.edu":
            # MIT users expect narrowing to topic "foo" to also show messages to /^foo(.d)*$/
            # (foo, foo.d, foo.d.d, etc)
            m = re.search(r'^(.*?)(?:\.d)*$', operand, re.IGNORECASE)
            if m:
                base_topic = m.group(1)
            else:
                base_topic = operand

            # Additionally, MIT users expect the empty instance and
            # instance "personal" to be the same.
            if base_topic in ('', 'personal', '(instance "")'):
                regex = r'^(|personal|\(instance ""\))(\.d)*$'
            else:
                regex = r'^%s(\.d)*$' % (self._pg_re_escape(base_topic),)

            # Case-insensitive Postgres regex match on the topic.
            cond = column("subject").op("~*")(regex)
            return query.where(maybe_negate(cond))

        # Normal realms: simple case-insensitive equality.
        cond = func.upper(column("subject")) == func.upper(literal(operand))
        return query.where(maybe_negate(cond))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
def by_sender(self, query, operand, maybe_negate):
|
2013-12-12 18:36:32 +01:00
|
|
|
try:
|
|
|
|
sender = get_user_profile_by_email(operand)
|
|
|
|
except UserProfile.DoesNotExist:
|
|
|
|
raise BadNarrowOperator('unknown user ' + operand)
|
|
|
|
|
2014-02-11 23:33:24 +01:00
|
|
|
cond = column("sender_id") == literal(sender.id)
|
2014-02-12 19:09:11 +01:00
|
|
|
return query.where(maybe_negate(cond))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_near(self, query, operand, maybe_negate):
        # near:<id> only anchors where the client scrolls; it adds no SQL
        # condition, so the query is returned unchanged.
        return query
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
def by_id(self, query, operand, maybe_negate):
|
2014-02-11 23:33:24 +01:00
|
|
|
cond = self.msg_id_column == literal(operand)
|
2014-02-12 19:09:11 +01:00
|
|
|
return query.where(maybe_negate(cond))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_pm_with(self, query, operand, maybe_negate):
        """Narrow to private messages with the given recipients.

        A comma-separated operand means a huddle (group PM); a single
        email means 1:1 personals, matched in both directions.
        """
        if ',' in operand:
            # Huddle
            try:
                emails = [e.strip() for e in operand.split(',')]
                recipient = recipient_for_emails(emails, False,
                                                 self.user_profile, self.user_profile)
            except ValidationError:
                raise BadNarrowOperator('unknown recipient ' + operand)
            cond = column("recipient_id") == recipient.id
            return query.where(maybe_negate(cond))
        else:
            # Personal message
            self_recipient = get_recipient(Recipient.PERSONAL, type_id=self.user_profile.id)
            if operand == self.user_profile.email:
                # Personals with self
                cond = and_(column("sender_id") == self.user_profile.id,
                            column("recipient_id") == self_recipient.id)
                return query.where(maybe_negate(cond))

            # Personals with other user; include both directions.
            try:
                narrow_profile = get_user_profile_by_email(operand)
            except UserProfile.DoesNotExist:
                raise BadNarrowOperator('unknown user ' + operand)

            narrow_recipient = get_recipient(Recipient.PERSONAL, narrow_profile.id)
            # Messages they sent to us, OR messages we sent to them.
            cond = or_(and_(column("sender_id") == narrow_profile.id,
                            column("recipient_id") == self_recipient.id),
                       and_(column("sender_id") == self.user_profile.id,
                            column("recipient_id") == narrow_recipient.id))
            return query.where(maybe_negate(cond))
|
2013-12-10 23:32:29 +01:00
|
|
|
|
2014-02-12 19:09:11 +01:00
|
|
|
    def by_search(self, query, operand, maybe_negate):
        """Full-text search narrow.

        Adds match-location columns for highlighting, applies ILIKE
        matching for quoted phrases, and finally filters on the tsvector
        index for the whole operand.
        """
        tsquery = func.plainto_tsquery(literal("zulip.english_us_search"), literal(operand))
        ts_locs_array = func.ts_match_locs_array
        # Extra columns give the client the match offsets for highlighting.
        query = query.column(ts_locs_array(literal("zulip.english_us_search"),
                                           column("rendered_content"),
                                           tsquery).label("content_matches"))
        # We HTML-escape the subject in Postgres to avoid doing a server round-trip
        query = query.column(ts_locs_array(literal("zulip.english_us_search"),
                                           func.escape_html(column("subject")),
                                           tsquery).label("subject_matches"))

        # Do quoted string matching.  We really want phrase
        # search here so we can ignore punctuation and do
        # stemming, but there isn't a standard phrase search
        # mechanism in Postgres
        for term in re.findall('"[^"]+"|\S+', operand):
            if term[0] == '"' and term[-1] == '"':
                term = term[1:-1]
                term = '%' + connection.ops.prep_for_like_query(term) + '%'
                cond = or_(column("content").ilike(term),
                           column("subject").ilike(term))
                query = query.where(maybe_negate(cond))

        # The main full-text condition against the tsvector index.
        cond = column("search_tsvector").op("@@")(tsquery)
        return query.where(maybe_negate(cond))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2013-11-26 00:41:24 +01:00
|
|
|
def highlight_string(string, locs):
    """Insert highlight spans into `string` at the given match locations.

    `locs` is a list of (offset, length) pairs; the code treats them as
    byte offsets into the UTF-8 encoding of the text (presumably as
    produced by ts_match_locs_array — confirm against by_search).
    Returns a unicode string.

    Fixes two defects in the previous version: on Python 3 it
    concatenated a str accumulator with bytes slices (TypeError), and
    the repeated += concatenation was quadratic.
    """
    # Normalize to bytes so (offset, length) pairs index correctly even
    # for multibyte UTF-8 text. On Python 2, str is already bytes and
    # only unicode input gets encoded — same behavior as before.
    if not isinstance(string, bytes):
        string = string.encode('utf-8')

    highlight_start = b'<span class="highlight">'
    highlight_stop = b'</span>'
    pieces = []
    pos = 0
    for (offset, length) in locs:
        pieces.append(string[pos:offset])
        pieces.append(highlight_start)
        pieces.append(string[offset:offset + length])
        pieces.append(highlight_stop)
        pos = offset + length
    pieces.append(string[pos:])
    return b''.join(pieces).decode('utf-8')
|
2013-11-26 00:41:24 +01:00
|
|
|
|
2013-12-10 23:32:29 +01:00
|
|
|
def get_search_fields(rendered_content, subject, content_matches, subject_matches):
    """Build the match_content/match_subject fields for a search result,
    highlighting the matched ranges. The subject is HTML-escaped here
    (content is already rendered HTML)."""
    return dict(match_content=highlight_string(rendered_content, content_matches),
                match_subject=highlight_string(escape_html(subject), subject_matches))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
|
|
|
def narrow_parameter(json):
    """Parse the `narrow` request parameter into a list of term dicts.

    Accepts both the modern dict format ({'operator', 'operand',
    optional 'negated'}) and a legacy [operator, operand] pair format.
    Returns None for the legacy literal '{}', else a list of dicts with
    keys operator/operand/negated.
    """
    # FIXME: A hack to support old mobile clients
    if json == '{}':
        return None

    data = ujson.loads(json)
    if not isinstance(data, list):
        raise ValueError("argument is not a list")

    def convert_term(elem):
        # We have to support a legacy tuple format.
        if isinstance(elem, list):
            if (len(elem) != 2
                or any(not isinstance(x, str) and not isinstance(x, six.text_type)
                       for x in elem)):
                raise ValueError("element is not a string pair")
            return dict(operator=elem[0], operand=elem[1])

        if isinstance(elem, dict):
            validator = check_dict([
                ('operator', check_string),
                ('operand', check_string),
            ])

            error = validator('elem', elem)
            if error:
                raise JsonableError(error)

            # whitelist the fields we care about for now
            return dict(
                operator=elem['operator'],
                operand=elem['operand'],
                negated=elem.get('negated', False),
            )

        raise ValueError("element is not a dictionary")

    return list(map(convert_term, data))
|
2013-12-12 18:36:32 +01:00
|
|
|
|
2014-01-08 18:37:41 +01:00
|
|
|
def is_public_stream(stream, realm):
    """Return True if `stream` (a name) exists in `realm` and is public.

    Raises JsonableError for syntactically invalid stream names; returns
    False for valid names that don't exist in the realm.
    """
    if not valid_stream_name(stream):
        raise JsonableError(_("Invalid stream name"))
    stream = get_stream(stream, realm)
    if stream is None:
        return False
    return stream.is_public()
|
|
|
|
|
2014-02-13 16:24:06 +01:00
|
|
|
|
|
|
|
def ok_to_include_history(narrow, realm):
    """Decide whether this narrow may include Message rows with no
    corresponding UserMessage row (public-stream history).

    We must return True only when the narrow definitely restricts the
    query to a particular public stream on the user's realm; being too
    permissive here could pollute results with other realms' messages.
    """
    if narrow is None:
        return False

    include_history = False
    for term in narrow:
        # Only a non-negated narrow to a public stream makes history safe.
        if term['operator'] != "stream" or term.get('negated', False):
            continue
        if is_public_stream(term['operand'], realm):
            include_history = True

    # Narrowing on any UserMessage property (is:...) rules historical
    # messages out: they have no UserMessage row to match anyway.
    for term in narrow:
        if term['operator'] == "is":
            include_history = False

    return include_history
|
|
|
|
|
2014-02-25 15:41:32 +01:00
|
|
|
def get_stream_name_from_narrow(narrow):
    """Return the lowercased operand of the first 'stream' term in
    `narrow`, or None if the narrow has no stream term."""
    return next((term['operand'].lower()
                 for term in narrow
                 if term['operator'] == 'stream'), None)
|
|
|
|
|
|
|
|
def exclude_muting_conditions(user_profile, narrow):
    """Build SQLAlchemy conditions that exclude muted streams and muted
    topics for this user, scoped to the narrow when it names a stream.

    Returns a (possibly empty) list of conditions to AND into a query.
    """
    conditions = []
    stream_name = get_stream_name_from_narrow(narrow)

    if stream_name is None:
        # Not narrowed to one stream: exclude every stream the user has
        # removed from their home view (muted streams).
        rows = Subscription.objects.filter(
            user_profile=user_profile,
            active=True,
            in_home_view=False,
            recipient__type=Recipient.STREAM
        ).values('recipient_id')
        muted_recipient_ids = [row['recipient_id'] for row in rows]
        condition = not_(column("recipient_id").in_(muted_recipient_ids))
        conditions.append(condition)

    muted_topics = ujson.loads(user_profile.muted_topics)
    if muted_topics:
        if stream_name is not None:
            # Only topics muted on the narrowed stream matter.
            muted_topics = [m for m in muted_topics if m[0].lower() == stream_name]
            if not muted_topics:
                return conditions

        muted_streams = bulk_get_streams(user_profile.realm,
                                         [muted[0] for muted in muted_topics])
        muted_recipients = bulk_get_recipients(Recipient.STREAM,
                                               [stream.id for stream in six.itervalues(muted_streams)])
        # Map lowercased stream name -> recipient id for condition building.
        recipient_map = dict((s.name.lower(), muted_recipients[s.id].id)
                             for s in six.itervalues(muted_streams))

        # Drop mutes referencing streams that no longer exist.
        muted_topics = [m for m in muted_topics if m[0].lower() in recipient_map]

        if muted_topics:
            def mute_cond(muted):
                # One (stream, topic) mute -> recipient AND case-insensitive topic.
                stream_cond = column("recipient_id") == recipient_map[muted[0].lower()]
                topic_cond = func.upper(column("subject")) == func.upper(muted[1])
                return and_(stream_cond, topic_cond)

            condition = not_(or_(*list(map(mute_cond, muted_topics))))
            return conditions + [condition]

    return conditions
|
2014-02-24 23:00:58 +01:00
|
|
|
|
2014-02-13 16:24:06 +01:00
|
|
|
@has_request_variables
def get_old_messages_backend(request, user_profile,
                             anchor = REQ(converter=int),
                             num_before = REQ(converter=to_non_negative_int),
                             num_after = REQ(converter=to_non_negative_int),
                             narrow = REQ('narrow', converter=narrow_parameter, default=None),
                             use_first_unread_anchor = REQ(default=False, converter=ujson.loads),
                             apply_markdown=REQ(default=True,
                                                converter=ujson.loads)):
    """Fetch up to num_before + num_after messages around `anchor`,
    optionally filtered by `narrow` terms.

    Builds a raw SQLAlchemy query against zerver_message and/or
    zerver_usermessage (depending on whether public-stream history may
    be included), applies the narrow, optionally repositions the anchor
    to the first unread message, then bulk-fetches rendered message
    dicts (via cache) and attaches per-user flags and search highlights.
    """
    include_history = ok_to_include_history(narrow, user_profile.realm)

    # Choose the base table(s): zerver_message alone when history is
    # allowed, zerver_usermessage (optionally joined to zerver_message
    # for search) otherwise.
    if include_history and not use_first_unread_anchor:
        query = select([column("id").label("message_id")], None, "zerver_message")
        inner_msg_id_col = literal_column("zerver_message.id")
    elif narrow is None:
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       "zerver_usermessage")
        inner_msg_id_col = column("message_id")
    else:
        # TODO: Don't do this join if we're not doing a search
        query = select([column("message_id"), column("flags")],
                       column("user_profile_id") == literal(user_profile.id),
                       join("zerver_usermessage", "zerver_message",
                            literal_column("zerver_usermessage.message_id") ==
                            literal_column("zerver_message.id")))
        inner_msg_id_col = column("message_id")

    num_extra_messages = 1
    is_search = False

    if narrow is not None:
        # Add some metadata to our logging data for narrows
        verbose_operators = []
        for term in narrow:
            if term['operator'] == "is":
                verbose_operators.append("is:" + term['operand'])
            else:
                verbose_operators.append(term['operator'])
        request._log_data['extra'] = "[%s]" % (",".join(verbose_operators),)

        # Build the query for the narrow
        num_extra_messages = 0
        builder = NarrowBuilder(user_profile, inner_msg_id_col)
        for term in narrow:
            if term['operator'] == 'search' and not is_search:
                query = query.column("subject").column("rendered_content")
                is_search = True
            query = builder.add_term(query, term)

    # We add 1 to the number of messages requested if no narrow was
    # specified to ensure that the resulting list always contains the
    # anchor message.  If a narrow was specified, the anchor message
    # might not match the narrow anyway.
    if num_after != 0:
        num_after += num_extra_messages
    else:
        num_before += num_extra_messages

    sa_conn = get_sqlalchemy_connection()
    if use_first_unread_anchor:
        # Reposition the anchor at the user's first unread message.
        condition = column("flags").op("&")(UserMessage.flags.read.mask) == 0

        # We exclude messages on muted topics when finding the first unread
        # message in this narrow
        muting_conditions = exclude_muting_conditions(user_profile, narrow)
        if muting_conditions:
            condition = and_(condition, *muting_conditions)

        first_unread_query = query.where(condition)
        first_unread_query = first_unread_query.order_by(inner_msg_id_col.asc()).limit(1)
        first_unread_result = list(sa_conn.execute(first_unread_query).fetchall())
        if len(first_unread_result) > 0:
            anchor = first_unread_result[0][0]
        else:
            # No unread messages: an anchor far past any real message id.
            anchor = 10000000000000000

    before_query = None
    after_query = None
    if num_before != 0:
        before_anchor = anchor
        if num_after != 0:
            # Don't include the anchor in both the before query and the after query
            before_anchor = anchor - 1
        before_query = query.where(inner_msg_id_col <= before_anchor) \
                            .order_by(inner_msg_id_col.desc()).limit(num_before)
    if num_after != 0:
        after_query = query.where(inner_msg_id_col >= anchor) \
                           .order_by(inner_msg_id_col.asc()).limit(num_after)

    if num_before == 0 and num_after == 0:
        # This can happen when a narrow is specified.
        after_query = query.where(inner_msg_id_col == anchor)

    # Combine the before/after halves and re-sort ascending by id.
    if before_query is not None:
        if after_query is not None:
            query = union_all(before_query.self_group(), after_query.self_group())
        else:
            query = before_query
    else:
        query = after_query
    main_query = alias(query)
    query = select(main_query.c, None, main_query).order_by(column("message_id").asc())
    # This is a hack to tag the query we use for testing
    query = query.prefix_with("/* get_old_messages */")
    query_result = list(sa_conn.execute(query).fetchall())

    # The following is a little messy, but ensures that the code paths
    # are similar regardless of the value of include_history.  The
    # 'user_messages' dictionary maps each message to the user's
    # UserMessage object for that message, which we will attach to the
    # rendered message dict before returning it.  We attempt to
    # bulk-fetch rendered message dicts from remote cache using the
    # 'messages' list.
    search_fields = dict() # type: Dict[int, Dict[str, text_type]]
    message_ids = [] # type: List[int]
    user_message_flags = {} # type: Dict[int, List[str]]
    if include_history:
        message_ids = [row[0] for row in query_result]

        # TODO: This could be done with an outer join instead of two queries
        user_message_flags = dict((user_message.message_id, user_message.flags_list()) for user_message in
                                  UserMessage.objects.filter(user_profile=user_profile,
                                                             message__id__in=message_ids))
        for row in query_result:
            message_id = row[0]
            if user_message_flags.get(message_id) is None:
                # Messages with no UserMessage row are historical for
                # this user and treated as read.
                user_message_flags[message_id] = ["read", "historical"]
            if is_search:
                (_, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)
    else:
        for row in query_result:
            message_id = row[0]
            flags = row[1]
            user_message_flags[message_id] = parse_usermessage_flags(flags)

            message_ids.append(message_id)

            if is_search:
                (_, _, subject, rendered_content, content_matches, subject_matches) = row
                search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                              content_matches, subject_matches)

    cache_transformer = lambda row: Message.build_dict_from_raw_db_row(row, apply_markdown)
    id_fetcher = lambda row: row['id']

    # Bulk-fetch rendered message dicts, consulting the cache first.
    message_dicts = generic_bulk_cached_fetch(lambda message_id: to_dict_cache_key_id(message_id, apply_markdown),
                                              Message.get_raw_db_rows,
                                              message_ids,
                                              id_fetcher=id_fetcher,
                                              cache_transformer=cache_transformer,
                                              extractor=extract_message_dict,
                                              setter=stringify_message_dict)

    message_list = []
    for message_id in message_ids:
        msg_dict = message_dicts[message_id]
        msg_dict.update({"flags": user_message_flags[message_id]})
        msg_dict.update(search_fields.get(message_id, {}))
        message_list.append(msg_dict)

    statsd.incr('loaded_old_messages', len(message_list))
    ret = {'messages': message_list,
           "result": "success",
           "msg": ""}
    return json_success(ret)
|
|
|
|
|
|
|
|
@has_request_variables
def update_message_flags(request, user_profile,
                         messages=REQ('messages', validator=check_list(check_int)),
                         operation=REQ('op'), flag=REQ('flag'),
                         all=REQ('all', validator=check_bool, default=False),
                         stream_name=REQ('stream_name', default=None),
                         topic_name=REQ('topic_name', default=None)):
    """Add or remove a flag (e.g. "read") on a set of the user's messages.

    The target messages are given either explicitly via `messages`, or
    implicitly via `all` and/or a stream (plus optional topic) narrow;
    do_update_message_flags interprets that combination.

    Raises JsonableError for an unknown stream or topic, or when a topic
    is supplied without its stream.
    """
    request._log_data["extra"] = "[%s %s]" % (operation, flag)
    stream = None
    if stream_name is not None:
        stream = get_stream(stream_name, user_profile.realm)
        if not stream:
            raise JsonableError(_('No such stream \'%s\'') % (stream_name,))
    if topic_name:
        # Bug fix: previously, supplying topic_name without stream_name
        # crashed below with AttributeError on stream.id (an opaque 500);
        # report a clean client error instead.
        if stream is None:
            raise JsonableError(_("Narrowing by topic requires a stream"))
        topic_exists = UserMessage.objects.filter(user_profile=user_profile,
                                                  message__recipient__type_id=stream.id,
                                                  message__recipient__type=Recipient.STREAM,
                                                  message__subject__iexact=topic_name).exists()
        if not topic_exists:
            raise JsonableError(_('No such topic \'%s\'') % (topic_name,))
    do_update_message_flags(user_profile, operation, flag, messages, all, stream, topic_name)
    return json_success({'result': 'success',
                         'messages': messages,
                         'msg': ''})
|
|
|
|
|
|
|
|
def create_mirrored_message_users(request, user_profile, recipients):
    """Validate and provision the users referenced by a mirrored message.

    Returns (True, sender_profile) when the mirroring client is
    recognized and every referenced address passes that client's
    same-realm check (creating stub accounts on the fly as needed);
    returns (False, None) otherwise.
    """
    if "sender" not in request.POST:
        return (False, None)

    sender_email = request.POST["sender"].strip().lower()
    referenced_users = set([sender_email])
    if request.POST['type'] == 'private':
        for recipient_email in recipients:
            referenced_users.add(recipient_email.lower())

    # Per-client policy: (same-realm check, stub-account fullname generator).
    mirror_policies = {
        "zephyr_mirror": (same_realm_zephyr_user, compute_mit_user_fullname),
        "irc_mirror": (same_realm_irc_user, compute_irc_user_fullname),
        "jabber_mirror": (same_realm_jabber_user, compute_jabber_user_fullname),
        "JabberMirror": (same_realm_jabber_user, compute_jabber_user_fullname),
    }
    policy = mirror_policies.get(request.client.name)
    if policy is None:
        # Unrecognized mirroring client
        return (False, None)
    user_check, fullname_function = policy

    # Every referenced user must belong to our realm.
    for email in referenced_users:
        if not user_check(user_profile, email):
            return (False, None)

    # Create stub users for any referenced addresses that don't exist yet.
    for email in referenced_users:
        create_mirror_user_if_needed(user_profile.realm, email, fullname_function)

    sender = get_user_profile_by_email(sender_email)
    return (True, sender)
|
|
|
|
|
2014-03-05 17:54:37 +01:00
|
|
|
def same_realm_zephyr_user(user_profile, email):
    # type: (UserProfile, text_type) -> bool
    """Return True if both the mirroring user and `email` are @mit.edu.

    We infer the domain from the e-mail address rather than looking up an
    account, because the recipient may not yet exist in Zulip and we may
    need to make a stub MIT user on the fly.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    sender_is_mit = (user_profile.realm.domain == "mit.edu")
    recipient_is_mit = (resolve_email_to_domain(email) == "mit.edu")
    return sender_is_mit and recipient_is_mit
|
|
|
|
|
|
|
|
def same_realm_irc_user(user_profile, email):
    # type: (UserProfile, text_type) -> bool
    """Return True if `email` is an IRC user mirrored into user_profile's
    realm: for a realm of example.com, the IRC user would be
    username@irc.example.com.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    # Strip the "irc." prefix from the address's domain and compare.
    stripped_domain = resolve_email_to_domain(email).replace("irc.", "")
    return stripped_domain == user_profile.realm.domain
|
|
|
|
|
2014-03-05 17:51:35 +01:00
|
|
|
def same_realm_jabber_user(user_profile, email):
    # type: (UserProfile, text_type) -> bool
    """Return True if `email` belongs to user_profile's realm for the
    purposes of Jabber mirroring.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return False

    domain = resolve_email_to_domain(email)
    realm_domain = user_profile.realm.domain

    # The ist.mit.edu realm uses mit.edu email addresses so that their
    # accounts can receive mail.
    if realm_domain == 'ist.mit.edu' and domain == 'mit.edu':
        return True

    return realm_domain == domain
|
|
|
|
|
2014-03-05 17:51:35 +01:00
|
|
|
|
2013-12-12 18:36:32 +01:00
|
|
|
@authenticated_api_view
def api_send_message(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    # API-key-authenticated entry point; delegates to the shared backend.
    return send_message_backend(request, user_profile)
|
|
|
|
|
|
|
|
# We do not @require_login for send_message_backend, since it is used
|
|
|
|
# both from the API and the web service. Code calling
|
|
|
|
# send_message_backend should either check the API key or check that
|
|
|
|
# the user is logged in.
|
|
|
|
@has_request_variables
def send_message_backend(request, user_profile,
                         message_type_name = REQ('type'),
                         message_to = REQ('to', converter=extract_recipients, default=[]),
                         forged = REQ(default=False),
                         subject_name = REQ('subject', lambda x: x.strip(), None),
                         message_content = REQ('content'),
                         domain = REQ('domain', default=None),
                         local_id = REQ(default=None),
                         queue_id = REQ(default=None)):
    """Send a message on behalf of user_profile (or, for mirroring
    clients, on behalf of a validated mirrored sender).

    Forging (`forged`) and cross-realm sending (`domain` differing from
    the sender's realm) are restricted to API superusers such as the
    email gateway bot.  Returns the new message's id on success.
    """
    client = request.client
    is_super_user = request.user.is_api_super_user
    if forged and not is_super_user:
        return json_error(_("User not authorized for this query"))

    # realm stays None unless a superuser explicitly sends cross-realm;
    # check_send_message falls back to the sender's realm in that case.
    realm = None
    if domain and domain != user_profile.realm.domain:
        if not is_super_user:
            # The email gateway bot needs to be able to send messages in
            # any realm.
            return json_error(_("User not authorized for this query"))
        realm = get_realm(domain)
        if not realm:
            return json_error(_("Unknown domain %s") % (domain,))

    if client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]:
        # Here's how security works for mirroring:
        #
        # For private messages, the message must be (1) both sent and
        # received exclusively by users in your realm, and (2)
        # received by the forwarding user.
        #
        # For stream messages, the message must be (1) being forwarded
        # by an API superuser for your realm and (2) being sent to a
        # mirrored stream (any stream for the Zephyr and Jabber
        # mirrors, but only streams with names starting with a "#" for
        # IRC mirrors)
        #
        # The security checks are split between the below code
        # (especially create_mirrored_message_users which checks the
        # same-realm constraint) and recipient_for_emails (which
        # checks that PMs are received by the forwarding user)
        if "sender" not in request.POST:
            return json_error(_("Missing sender"))
        if message_type_name != "private" and not is_super_user:
            return json_error(_("User not authorized for this query"))
        (valid_input, mirror_sender) = \
            create_mirrored_message_users(request, user_profile, message_to)
        if not valid_input:
            return json_error(_("Invalid mirrored message"))
        if client.name == "zephyr_mirror" and user_profile.realm.domain != "mit.edu":
            return json_error(_("Invalid mirrored realm"))
        if (client.name == "irc_mirror" and message_type_name != "private" and
            not message_to[0].startswith("#")):
            return json_error(_("IRC stream names must start with #"))
        # The message is attributed to the mirrored sender, not the
        # forwarding bot.
        sender = mirror_sender
    else:
        sender = user_profile

    ret = check_send_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, forged=forged,
                             forged_timestamp = request.POST.get('time'),
                             forwarder_user_profile=user_profile, realm=realm,
                             local_id=local_id, sender_queue_id=queue_id)
    return json_success({"id": ret})
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_update_message(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    # Session-authenticated JSON wrapper around update_message_backend.
    return update_message_backend(request, user_profile)
|
|
|
|
|
|
|
|
@has_request_variables
def update_message_backend(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int),
                           subject=REQ(default=None),
                           propagate_mode=REQ(default="change_one"),
                           content=REQ(default=None)):
    # type: (HttpRequest, UserProfile, int, Optional[text_type], Optional[str], Optional[text_type]) -> HttpResponse
    """Edit the subject and/or content of an existing message.

    At least one of `subject` and `content` must be provided; the actual
    edit (including propagation to related messages) is delegated to
    do_update_message.
    """
    if all(arg is None for arg in (subject, content)):
        return json_error(_("Nothing to change"))
    do_update_message(user_profile, message_id, subject, propagate_mode, content)
    return json_success()
|
|
|
|
|
|
|
|
@authenticated_json_post_view
@has_request_variables
def json_fetch_raw_message(request, user_profile,
                           message_id=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int) -> HttpResponse
    """Return the raw (unrendered) content of one of the user's own messages."""
    try:
        msg = Message.objects.get(id=message_id)
    except Message.DoesNotExist:
        return json_error(_("No such message"))

    # Only the original sender may fetch the raw content.
    if msg.sender == user_profile:
        return json_success({"raw_content": msg.content})
    return json_error(_("Message was not sent by you"))
|
|
|
|
|
|
|
|
@has_request_variables
def render_message_backend(request, user_profile, content=REQ()):
    # type: (HttpRequest, UserProfile, text_type) -> HttpResponse
    """Render `content` through bugdown for the user's realm and return
    the resulting HTML."""
    return json_success({"rendered": bugdown.convert(content, user_profile.realm.domain)})
|
|
|
|
|
|
|
|
@authenticated_json_post_view
def json_messages_in_narrow(request, user_profile):
    # type: (HttpRequest, UserProfile) -> HttpResponse
    # Session-authenticated JSON wrapper around messages_in_narrow_backend.
    return messages_in_narrow_backend(request, user_profile)
|
|
|
|
|
|
|
|
@has_request_variables
def messages_in_narrow_backend(request, user_profile,
                               msg_ids = REQ(validator=check_list(check_int)),
                               narrow = REQ(converter=narrow_parameter)):
    """Given a list of message ids, return search/highlight fields for
    the subset of those messages that match `narrow`."""
    # Note that this function will only work on messages the user
    # actually received

    # TODO: We assume that the narrow is a search. For now this works because
    # the browser only ever calls this function for searches, since it can't
    # apply that narrow operator itself.

    # Restrict to this user's UserMessage rows for the requested ids,
    # joining to zerver_message for subject/rendered_content.
    query = select([column("message_id"), column("subject"), column("rendered_content")],
                   and_(column("user_profile_id") == literal(user_profile.id),
                        column("message_id").in_(msg_ids)),
                   join("zerver_usermessage", "zerver_message",
                        literal_column("zerver_usermessage.message_id") ==
                        literal_column("zerver_message.id")))

    builder = NarrowBuilder(user_profile, column("message_id"))
    for term in narrow:
        query = builder.add_term(query, term)

    sa_conn = get_sqlalchemy_connection()
    query_result = list(sa_conn.execute(query).fetchall())

    search_fields = dict()
    for row in query_result:
        # NOTE(review): the select above names only three columns, so this
        # five-way unpack relies on the search narrow term appending
        # content_matches/subject_matches columns in builder.add_term —
        # confirm against NarrowBuilder.
        (message_id, subject, rendered_content, content_matches, subject_matches) = row
        search_fields[message_id] = get_search_fields(rendered_content, subject,
                                                      content_matches, subject_matches)

    return json_success({"messages": search_fields})
|