2013-04-23 18:51:17 +02:00
|
|
|
from __future__ import absolute_import
|
2015-11-01 17:11:06 +01:00
|
|
|
from __future__ import print_function
|
2016-05-25 06:55:14 +02:00
|
|
|
from typing import (
|
2017-03-03 19:01:52 +01:00
|
|
|
AbstractSet, Any, AnyStr, Callable, Dict, Iterable, List, Mapping, MutableMapping,
|
2017-03-19 23:29:29 +01:00
|
|
|
Optional, Sequence, Set, Text, Tuple, TypeVar, Union, cast,
|
2016-05-25 06:55:14 +02:00
|
|
|
)
|
2013-04-23 18:51:17 +02:00
|
|
|
|
2016-12-13 19:18:08 +01:00
|
|
|
from django.utils.html import escape
|
2016-05-25 15:02:02 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2013-01-10 22:01:33 +01:00
|
|
|
from django.conf import settings
|
2013-07-08 17:57:04 +02:00
|
|
|
from django.core import validators
|
2017-02-15 17:26:22 +01:00
|
|
|
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat
|
2016-10-04 16:49:16 +02:00
|
|
|
from zerver.lib.bugdown import (
|
|
|
|
BugdownRenderingException,
|
2017-03-13 14:42:03 +01:00
|
|
|
version as bugdown_version,
|
|
|
|
url_embed_preview_enabled_for_realm
|
2016-10-04 16:49:16 +02:00
|
|
|
)
|
2016-10-04 15:40:02 +02:00
|
|
|
from zerver.lib.cache import (
|
|
|
|
to_dict_cache_key,
|
|
|
|
to_dict_cache_key_id,
|
|
|
|
)
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.context_managers import lockfile
|
2016-10-04 15:52:26 +02:00
|
|
|
from zerver.lib.message import (
|
2016-10-12 02:19:45 +02:00
|
|
|
access_message,
|
2016-10-04 15:52:26 +02:00
|
|
|
MessageDict,
|
|
|
|
message_to_dict,
|
2016-10-04 18:32:46 +02:00
|
|
|
render_markdown,
|
2016-10-04 15:52:26 +02:00
|
|
|
)
|
2017-02-21 03:41:20 +01:00
|
|
|
from zerver.lib.realm_icon import realm_icon_url
|
2016-09-28 00:03:13 +02:00
|
|
|
from zerver.models import Realm, RealmEmoji, Stream, UserProfile, UserActivity, RealmAlias, \
|
2017-02-15 04:35:10 +01:00
|
|
|
Subscription, Recipient, Message, Attachment, UserMessage, RealmAuditLog, \
|
2016-05-25 06:55:14 +02:00
|
|
|
Client, DefaultStream, UserPresence, Referral, PushDeviceToken, MAX_SUBJECT_LENGTH, \
|
2013-03-26 18:51:55 +01:00
|
|
|
MAX_MESSAGE_LENGTH, get_client, get_stream, get_recipient, get_huddle, \
|
2013-05-14 21:18:11 +02:00
|
|
|
get_user_profile_by_id, PreregistrationUser, get_display_recipient, \
|
2017-01-04 05:30:48 +01:00
|
|
|
get_realm, bulk_get_recipients, \
|
2016-01-12 16:24:34 +01:00
|
|
|
email_allowed_for_realm, email_to_username, display_recipient_cache_key, \
|
2016-10-04 15:40:02 +02:00
|
|
|
get_user_profile_by_email, get_stream_cache_key, \
|
2014-01-24 23:30:53 +01:00
|
|
|
UserActivityInterval, get_active_user_dicts_in_realm, get_active_streams, \
|
2017-03-06 03:05:04 +01:00
|
|
|
realm_filters_for_realm, RealmFilter, ScheduledJob, get_owned_bot_dicts, \
|
|
|
|
get_old_unclaimed_attachments, get_cross_realm_emails, \
|
2017-01-20 12:27:38 +01:00
|
|
|
Reaction, EmailChangeStatus
|
2014-01-24 23:30:53 +01:00
|
|
|
|
2016-09-14 18:02:24 +02:00
|
|
|
from zerver.lib.alert_words import alert_words_in_realm
|
2017-02-16 22:35:57 +01:00
|
|
|
from zerver.lib.avatar import avatar_url
|
Optimize user presence/activity query.
The get_status_dict_by_realm helper gets called whenever our
realm user_presences cache expires, and it used to query these fields:
"zerver_userpresence"."id", "zerver_userpresence"."user_profile_id", "zerver_userpresence"."client_id", "zerver_userpresence"."timestamp", "zerver_userpresence"."status", "zerver_userprofile"."id", "zerver_userprofile"."password", "zerver_userprofile"."last_login", "zerver_userprofile"."is_superuser", "zerver_userprofile"."email", "zerver_userprofile"."is_staff", "zerver_userprofile"."is_active", "zerver_userprofile"."is_bot", "zerver_userprofile"."date_joined", "zerver_userprofile"."bot_owner_id", "zerver_userprofile"."full_name", "zerver_userprofile"."short_name", "zerver_userprofile"."pointer", "zerver_userprofile"."last_pointer_updater", "zerver_userprofile"."realm_id", "zerver_userprofile"."api_key", "zerver_userprofile"."enable_desktop_notifications", "zerver_userprofile"."enable_sounds", "zerver_userprofile"."enter_sends", "zerver_userprofile"."enable_offline_email_notifications", "zerver_userprofile"."last_reminder", "zerver_userprofile"."rate_limits", "zerver_userprofile"."avatar_source", "zerver_userprofile"."tutorial_status", "zerver_userprofile"."onboarding_steps", "zerver_userprofile"."invites_granted", "zerver_userprofile"."invites_used", "zerver_userprofile"."alert_words", "zerver_userprofile"."muted_topics", "zerver_client"."id", "zerver_client"."name"
Now it queries just the fields it needs:
"zerver_client"."name", "zerver_userpresence"."status", "zerver_userpresence"."timestamp", "zerver_userprofile"."email" FROM "zerver_userpresence"
Also, get_status_dict_by_realm is now namespaced under UserPresence as a static method.
(imported from commit be1266844b6bd28b6c615594796713c026a850a1)
2013-09-14 23:59:03 +02:00
|
|
|
|
2016-10-27 15:54:49 +02:00
|
|
|
from django.db import transaction, IntegrityError, connection
|
2013-05-24 16:56:00 +02:00
|
|
|
from django.db.models import F, Q
|
2016-05-25 06:55:14 +02:00
|
|
|
from django.db.models.query import QuerySet
|
2013-03-18 18:57:34 +01:00
|
|
|
from django.core.exceptions import ValidationError
|
2014-01-24 22:29:17 +01:00
|
|
|
from django.core.mail import EmailMessage
|
2017-02-25 21:02:13 +01:00
|
|
|
from django.utils import timezone
|
2013-04-08 18:27:07 +02:00
|
|
|
|
2017-01-20 12:27:38 +01:00
|
|
|
from confirmation.models import Confirmation, EmailChangeConfirmation
|
2015-11-01 17:14:25 +01:00
|
|
|
import six
|
2015-11-01 17:14:31 +01:00
|
|
|
from six.moves import filter
|
2015-11-01 17:14:53 +01:00
|
|
|
from six.moves import map
|
2015-11-01 17:15:05 +01:00
|
|
|
from six.moves import range
|
2016-05-25 15:55:13 +02:00
|
|
|
from six import unichr
|
2013-04-08 18:27:07 +02:00
|
|
|
|
2014-02-26 20:02:43 +01:00
|
|
|
from zerver.lib.create_user import random_api_key
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
|
|
|
|
from zerver.lib.queue import queue_json_publish
|
|
|
|
from zerver.lib.create_user import create_user
|
|
|
|
from zerver.lib import bugdown
|
2013-08-22 17:45:23 +02:00
|
|
|
from zerver.lib.cache import cache_with_key, cache_set, \
|
2013-09-17 20:29:43 +02:00
|
|
|
user_profile_by_email_cache_key, cache_set_many, \
|
2016-07-08 02:24:59 +02:00
|
|
|
cache_delete, cache_delete_many
|
2016-05-29 16:52:55 +02:00
|
|
|
from zerver.decorator import statsd_increment
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.utils import log_statsd_event, statsd
|
|
|
|
from zerver.lib.html_diff import highlight_html_differences
|
2013-09-03 22:41:17 +02:00
|
|
|
from zerver.lib.alert_words import user_alert_words, add_user_alert_words, \
|
|
|
|
remove_user_alert_words, set_user_alert_words
|
2014-01-24 22:29:17 +01:00
|
|
|
from zerver.lib.notifications import clear_followup_emails_queue
|
2013-12-10 16:28:16 +01:00
|
|
|
from zerver.lib.narrow import check_supported_events_narrow_filter
|
2016-05-29 16:52:55 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
2017-03-08 11:46:12 +01:00
|
|
|
from zerver.lib.sessions import delete_user_sessions
|
2016-07-11 03:07:37 +02:00
|
|
|
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id, \
|
|
|
|
claim_attachment, delete_message_image
|
2016-09-20 06:45:52 +02:00
|
|
|
from zerver.lib.str_utils import NonBinaryStr, force_str
|
2017-03-03 23:15:18 +01:00
|
|
|
from zerver.tornado.event_queue import request_event_queue, send_event
|
2013-01-23 23:24:44 +01:00
|
|
|
|
2013-08-12 00:21:54 +02:00
|
|
|
import DNS
|
2013-06-18 23:55:55 +02:00
|
|
|
import ujson
|
2013-01-10 22:01:33 +01:00
|
|
|
import time
|
|
|
|
import traceback
|
|
|
|
import re
|
2013-01-14 20:09:25 +01:00
|
|
|
import datetime
|
|
|
|
import os
|
|
|
|
import platform
|
2013-03-18 18:57:34 +01:00
|
|
|
import logging
|
2014-01-23 18:09:34 +01:00
|
|
|
import itertools
|
2013-04-05 00:13:03 +02:00
|
|
|
from collections import defaultdict
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2016-05-25 06:55:14 +02:00
|
|
|
# This will be used to type annotate parameters in a function if the function
|
|
|
|
# works on both str and unicode in python 2 but in python 3 it only works on str.
|
2016-12-14 20:04:21 +01:00
|
|
|
SizedTextIterable = Union[Sequence[Text], AbstractSet[Text]]
|
2016-05-25 06:55:14 +02:00
|
|
|
|
2016-08-03 23:37:12 +02:00
|
|
|
STREAM_ASSIGNMENT_COLORS = [
|
|
|
|
"#76ce90", "#fae589", "#a6c7e5", "#e79ab5",
|
|
|
|
"#bfd56f", "#f4ae55", "#b0a5fd", "#addfe5",
|
|
|
|
"#f5ce6e", "#c2726a", "#94c849", "#bd86e5",
|
|
|
|
"#ee7e4a", "#a6dcbf", "#95a5fd", "#53a063",
|
|
|
|
"#9987e1", "#e4523d", "#c2c2c2", "#4f8de4",
|
|
|
|
"#c6a8ad", "#e7cc4d", "#c8bebf", "#a47462"]
|
|
|
|
|
2013-01-10 22:01:33 +01:00
|
|
|
# Store an event in the log for re-importing messages
|
|
|
|
def log_event(event):
    # type: (MutableMapping[str, Any]) -> None
    """Append an event to the on-disk event log for later re-import.

    No-op when ``settings.EVENT_LOG_DIR`` is unset.  A ``timestamp``
    field is filled in when the caller did not provide one.
    """
    if settings.EVENT_LOG_DIR is None:
        return

    event.setdefault("timestamp", time.time())

    if not os.path.exists(settings.EVENT_LOG_DIR):
        os.mkdir(settings.EVENT_LOG_DIR)

    template = os.path.join(settings.EVENT_LOG_DIR,
                            '%s.' + platform.node() +
                            timezone.now().strftime('.%Y-%m-%d'))

    # Hold the lock file while appending so concurrent writers on this
    # host don't interleave partial lines.
    with lockfile(template % ('lock',)):
        with open(template % ('events',), 'a') as log:
            log.write(force_str(ujson.dumps(event) + u'\n'))
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2013-08-22 16:36:42 +02:00
|
|
|
def active_user_ids(realm):
    # type: (Realm) -> List[int]
    """Return the ids of every active user (humans and bots) in `realm`."""
    return list(row['id'] for row in get_active_user_dicts_in_realm(realm))
|
2013-08-22 16:36:42 +02:00
|
|
|
|
2016-11-04 07:02:24 +01:00
|
|
|
def can_access_stream_user_ids(stream):
    # type: (Stream) -> Set[int]
    """Return the ids of users who can access the attributes of a
    stream, such as its name/description.  Public streams are visible
    to the whole realm; private streams only to their subscribers."""
    if not stream.is_public():
        return private_stream_user_ids(stream)
    return set(active_user_ids(stream.realm))
|
|
|
|
|
|
|
|
def private_stream_user_ids(stream):
    # type: (Stream) -> Set[int]
    """Return the ids of users actively subscribed to `stream`."""
    # TODO: Find similar queries elsewhere and de-duplicate this code.
    rows = Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream.id,
        active=True,
    ).values('user_profile_id')
    return {row['user_profile_id'] for row in rows}
|
2014-01-27 23:43:05 +01:00
|
|
|
|
2014-02-26 00:12:14 +01:00
|
|
|
def bot_owner_userids(user_profile):
    # type: (UserProfile) -> Sequence[int]
    """Return the user ids that should receive events about this bot.

    A bot wired to a private (invite-only) default stream is only
    visible to its owner; any other bot is announced to the whole
    realm.
    """
    is_private_bot = (
        user_profile.default_sending_stream and user_profile.default_sending_stream.invite_only or
        user_profile.default_events_register_stream and user_profile.default_events_register_stream.invite_only)
    if is_private_bot:
        # Return a list here (resolving the old TODO about the tuple)
        # so both branches yield the same kind of Sequence.
        return [user_profile.bot_owner_id]
    else:
        return active_user_ids(user_profile.realm)
|
|
|
|
|
2015-10-13 21:30:22 +02:00
|
|
|
def realm_user_count(realm):
    # type: (Realm) -> int
    """Count the active human (non-bot) users in `realm`."""
    humans = UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
    return humans.count()
|
2015-10-13 21:30:22 +02:00
|
|
|
|
2016-10-27 15:54:49 +02:00
|
|
|
def get_topic_history_for_stream(user_profile, recipient):
    # type: (UserProfile, Recipient) -> List[Tuple[str, int]]
    """Return ``[(topic_name, unread_count), ...]`` for the stream
    behind `recipient`, ordered newest-topic-first.  Topic names are
    merged case-insensitively; the first spelling encountered wins."""
    # We tested the below query on some large prod datasets, and we never
    # saw more than 50ms to execute it, so we think that's acceptable,
    # but we will monitor it, and we may later optimize it further.
    query = '''
    SELECT topic, read, count(*)
    FROM (
        SELECT
            ("zerver_usermessage"."flags" & 1) as read,
            "zerver_message"."subject" as topic,
            "zerver_message"."id" as message_id
        FROM "zerver_usermessage"
        INNER JOIN "zerver_message" ON (
            "zerver_usermessage"."message_id" = "zerver_message"."id"
        ) WHERE (
            "zerver_usermessage"."user_profile_id" = %s AND
            "zerver_message"."recipient_id" = %s
        ) ORDER BY "zerver_usermessage"."message_id" DESC
    ) messages_for_stream
    GROUP BY topic, read
    ORDER BY max(message_id) desc
    '''
    cursor = connection.cursor()
    cursor.execute(query, [user_profile.id, recipient.id])
    rows = cursor.fetchall()
    cursor.close()

    # Map lowercased topic -> canonical (first-seen) spelling.
    canonical_names = dict()  # type: Dict[str, str]
    # Unread message counts, keyed by canonical topic name.
    unread_counts = dict()  # type: Dict[str, int]
    ordered_topics = []
    for topic_name, read, count in rows:
        key = topic_name.lower()
        if key not in canonical_names:
            canonical_names[key] = topic_name
        name = canonical_names[key]
        if name not in unread_counts:
            unread_counts[name] = 0
            ordered_topics.append(name)
        if not read:
            unread_counts[name] += count

    return [(name, unread_counts[name]) for name in ordered_topics]
|
|
|
|
|
2015-10-13 21:30:22 +02:00
|
|
|
def send_signup_message(sender, signups_stream, user_profile,
                        internal=False, realm=None):
    # type: (UserProfile, Text, UserProfile, bool, Optional[Realm]) -> None
    """Announce a signup to the realm's notifications stream (when one
    exists and this is not the realm's first user) and to the Zulip
    administrative realm's signups stream."""
    internal_blurb = " **INTERNAL SIGNUP** " if internal else " "

    user_count = realm_user_count(user_profile.realm)
    notifications_stream = user_profile.realm.notifications_stream
    # Send notification to realm notifications stream if it exists;
    # don't send notification for the first user in a realm.
    if notifications_stream is not None and user_count > 1:
        internal_send_message(
            user_profile.realm,
            sender,
            "stream",
            notifications_stream.name,
            "New users", "%s just signed up for Zulip. Say hello!" % (
                user_profile.full_name,)
        )

    # We also send a notification to the Zulip administrative realm
    admin_realm = get_user_profile_by_email(sender).realm
    signup_text = "%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
        user_profile.full_name,
        user_profile.email,
        internal_blurb,
        user_count,
    )
    internal_send_message(
        admin_realm,
        sender,
        "stream",
        signups_stream,
        user_profile.realm.string_id,
        signup_text,
    )
|
2015-10-13 21:30:22 +02:00
|
|
|
|
|
|
|
def notify_new_user(user_profile, internal=False):
    # type: (UserProfile, bool) -> None
    """Send signup announcements (if a new-user bot is configured) and
    bump the per-realm signup gauge in statsd."""
    new_user_bot = settings.NEW_USER_BOT
    if new_user_bot is not None:
        send_signup_message(new_user_bot, "signups", user_profile, internal)
    statsd.gauge("users.signups.%s" % (user_profile.realm.string_id), 1, delta=True)
|
2015-10-13 21:30:22 +02:00
|
|
|
|
2016-11-11 05:19:21 +01:00
|
|
|
def add_new_user_history(user_profile, streams):
    # type: (UserProfile, Iterable[Stream]) -> None
    """Give you the last 100 messages on your public streams, so you have
    something to look at in your home view once you finish the
    tutorial."""
    one_week_ago = timezone.now() - datetime.timedelta(weeks=1)
    public_stream_ids = [stream.id for stream in streams if not stream.invite_only]
    recipients = Recipient.objects.filter(type=Recipient.STREAM,
                                          type_id__in=public_stream_ids)
    recent_messages = Message.objects.filter(recipient_id__in=recipients,
                                             pub_date__gt=one_week_ago).order_by("-id")
    message_ids_to_use = list(reversed(recent_messages.values_list('id', flat=True)[0:100]))
    if not message_ids_to_use:
        return

    # Handle the race condition where a message arrives between
    # bulk_add_subscriptions above and the Message query just above
    already_ids = set(UserMessage.objects.filter(
        message_id__in=message_ids_to_use,
        user_profile=user_profile).values_list("message_id", flat=True))
    ums_to_create = [UserMessage(user_profile=user_profile, message_id=message_id,
                                 flags=UserMessage.flags.read)
                     for message_id in message_ids_to_use
                     if message_id not in already_ids]

    UserMessage.objects.bulk_create(ums_to_create)
|
2016-11-11 05:19:21 +01:00
|
|
|
|
2015-10-13 21:30:22 +02:00
|
|
|
# Does the processing for a new user account:
|
|
|
|
# * Subscribes to default/invitation streams
|
|
|
|
# * Fills in some recent historical messages
|
|
|
|
# * Notifies other users in realm and Zulip about the signup
|
|
|
|
# * Deactivates PreregistrationUser objects
|
|
|
|
# * subscribe the user to newsletter if newsletter_data is specified
|
|
|
|
def process_new_human_user(user_profile, prereg_user=None, newsletter_data=None):
    # type: (UserProfile, Optional[PreregistrationUser], Optional[Dict[str, str]]) -> None
    """Run the post-creation steps for a new human account: subscribe to
    invited/default streams, backfill recent history, notify the inviter
    and the wider realm, deactivate leftover PreregistrationUser rows,
    and optionally enqueue a newsletter signup."""
    mit_beta_user = user_profile.realm.is_zephyr_mirror_realm
    try:
        streams = prereg_user.streams.all()
    except AttributeError:
        # This will catch both the case where prereg_user is None and where it
        # is a MitUser.
        streams = []

    # If the user's invitation didn't explicitly list some streams, we
    # add the default streams
    if len(streams) == 0:
        streams = get_default_subs(user_profile)
    bulk_add_subscriptions(streams, [user_profile])

    add_new_user_history(user_profile, streams)

    # mit_beta_users don't have a referred_by field
    if not mit_beta_user and prereg_user is not None and prereg_user.referred_by is not None \
            and settings.NOTIFICATION_BOT is not None:
        # This is a cross-realm private message.
        internal_send_message(
            user_profile.realm,
            settings.NOTIFICATION_BOT,
            "private",
            prereg_user.referred_by.email,
            user_profile.realm.string_id,
            "%s <`%s`> accepted your invitation to join Zulip!" % (
                user_profile.full_name,
                user_profile.email,
            )
        )
    # Mark any other PreregistrationUsers that are STATUS_ACTIVE as
    # inactive so we can keep track of the PreregistrationUser we
    # actually used for analytics
    if prereg_user is not None:
        PreregistrationUser.objects.filter(email__iexact=user_profile.email).exclude(
            id=prereg_user.id).update(status=0)
    else:
        PreregistrationUser.objects.filter(email__iexact=user_profile.email).update(status=0)

    notify_new_user(user_profile)

    if newsletter_data is not None:
        # If the user was created automatically via the API, we may
        # not want to register them for the newsletter
        queue_json_publish(
            "signups",
            {
                'email_address': user_profile.email,
                'merge_fields': {
                    'NAME': user_profile.full_name,
                    'REALM_ID': user_profile.realm_id,
                    'OPTIN_IP': newsletter_data["IP"],
                    'OPTIN_TIME': datetime.datetime.isoformat(timezone.now().replace(microsecond=0)),
                },
            },
            lambda event: None)
|
2015-10-13 21:30:22 +02:00
|
|
|
|
2013-07-22 21:26:44 +02:00
|
|
|
def notify_created_user(user_profile):
    # type: (UserProfile) -> None
    """Broadcast a realm_user/add event for a freshly created user to
    every active user in the realm."""
    person = dict(email=user_profile.email,
                  user_id=user_profile.id,
                  is_admin=user_profile.is_realm_admin,
                  full_name=user_profile.full_name,
                  avatar_url=avatar_url(user_profile),
                  is_bot=user_profile.is_bot)
    send_event(dict(type="realm_user", op="add", person=person),
               active_user_ids(user_profile.realm))
|
2013-07-22 21:26:44 +02:00
|
|
|
|
2014-02-26 00:12:14 +01:00
|
|
|
def notify_created_bot(user_profile):
    # type: (UserProfile) -> None
    """Broadcast a realm_bot/add event to the users who can see this bot."""

    def stream_name(stream):
        # type: (Optional[Stream]) -> Optional[Text]
        return stream.name if stream else None

    bot = dict(email=user_profile.email,
               user_id=user_profile.id,
               full_name=user_profile.full_name,
               is_active=user_profile.is_active,
               api_key=user_profile.api_key,
               default_sending_stream=stream_name(user_profile.default_sending_stream),
               default_events_register_stream=stream_name(user_profile.default_events_register_stream),
               default_all_public_streams=user_profile.default_all_public_streams,
               avatar_url=avatar_url(user_profile),
               )

    # Set the owner key only when the bot has an owner.
    # The default bots don't have an owner. So don't
    # set the owner key while reactivating them.
    if user_profile.bot_owner is not None:
        bot['owner'] = user_profile.bot_owner.email

    send_event(dict(type="realm_bot", op="add", bot=bot),
               bot_owner_userids(user_profile))
|
|
|
|
|
2013-01-10 22:01:33 +01:00
|
|
|
def do_create_user(email, password, realm, full_name, short_name,
                   active=True, bot_type=None, bot_owner=None, tos_version=None,
                   avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                   default_sending_stream=None, default_events_register_stream=None,
                   default_all_public_streams=None, prereg_user=None,
                   newsletter_data=None):
    # type: (Text, Text, Realm, Text, Text, bool, Optional[int], Optional[UserProfile], Optional[Text], Text, Optional[Stream], Optional[Stream], bool, Optional[PreregistrationUser], Optional[Dict[str, str]]) -> UserProfile
    """Create a user (human or bot), record the creation in the realm
    audit log and analytics counts, and run the appropriate
    notification/onboarding flow.  Returns the new UserProfile."""
    user_profile = create_user(email=email, password=password, realm=realm,
                               full_name=full_name, short_name=short_name,
                               active=active, bot_type=bot_type, bot_owner=bot_owner,
                               tos_version=tos_version, avatar_source=avatar_source,
                               default_sending_stream=default_sending_stream,
                               default_events_register_stream=default_events_register_stream,
                               default_all_public_streams=default_all_public_streams)

    # Record the creation for auditing and usage analytics.
    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_created', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)
    if bot_type:
        notify_created_bot(user_profile)
    else:
        # Humans additionally get stream subscriptions, message history,
        # and signup announcements.
        process_new_human_user(user_profile, prereg_user=prereg_user,
                               newsletter_data=newsletter_data)
    return user_profile
|
|
|
|
|
2014-01-07 18:04:26 +01:00
|
|
|
def active_humans_in_realm(realm):
    # type: (Realm) -> Sequence[UserProfile]
    """Return a queryset of the realm's active, non-bot users."""
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
|
|
|
|
|
2014-01-28 15:11:10 +01:00
|
|
|
def do_set_realm_name(realm, name):
    # type: (Realm, Text) -> None
    """Rename the realm and broadcast the change to all active users."""
    realm.name = name
    realm.save(update_fields=['name'])
    send_event(dict(type="realm",
                    op="update",
                    property='name',
                    value=name),
               active_user_ids(realm))
|
2014-01-28 15:11:10 +01:00
|
|
|
|
2017-03-18 20:19:44 +01:00
|
|
|
def do_set_realm_description(realm, description):
    # type: (Realm, Text) -> None
    """Update the realm's description and notify all active users."""
    realm.description = description
    realm.save(update_fields=['description'])
    send_event(dict(type='realm',
                    op='update',
                    property='description',
                    value=description),
               active_user_ids(realm))
|
|
|
|
|
2015-08-20 02:38:32 +02:00
|
|
|
def do_set_realm_restricted_to_domain(realm, restricted):
    # type: (Realm, bool) -> None
    """Toggle whether signups are restricted to the realm's domain and
    notify all active users."""
    realm.restricted_to_domain = restricted
    realm.save(update_fields=['restricted_to_domain'])
    send_event(dict(type="realm",
                    op="update",
                    property='restricted_to_domain',
                    value=restricted),
               active_user_ids(realm))
|
|
|
|
|
|
|
|
def do_set_realm_invite_required(realm, invite_required):
    # type: (Realm, bool) -> None
    """Toggle whether an invitation is required to join the realm and
    notify all active users."""
    realm.invite_required = invite_required
    realm.save(update_fields=['invite_required'])
    send_event(dict(type="realm",
                    op="update",
                    property='invite_required',
                    value=invite_required),
               active_user_ids(realm))
|
|
|
|
|
2015-08-20 21:25:30 +02:00
|
|
|
def do_set_realm_invite_by_admins_only(realm, invite_by_admins_only):
    # type: (Realm, bool) -> None
    """Toggle whether only admins may send invitations and notify all
    active users."""
    realm.invite_by_admins_only = invite_by_admins_only
    realm.save(update_fields=['invite_by_admins_only'])
    send_event(dict(type="realm",
                    op="update",
                    property='invite_by_admins_only',
                    value=invite_by_admins_only),
               active_user_ids(realm))
|
|
|
|
|
2017-03-13 14:42:03 +01:00
|
|
|
def do_set_realm_inline_image_preview(realm, inline_image_preview):
    # type: (Realm, bool) -> None
    """Toggle inline image previews for the realm and notify all active
    users."""
    realm.inline_image_preview = inline_image_preview
    realm.save(update_fields=['inline_image_preview'])
    send_event(dict(type="realm",
                    op="update",
                    property='inline_image_preview',
                    value=inline_image_preview),
               active_user_ids(realm))
|
|
|
|
|
|
|
|
def do_set_realm_inline_url_embed_preview(realm, inline_url_embed_preview):
    # type: (Realm, bool) -> None
    """Toggle inline URL embed previews for the realm and notify all
    active users."""
    realm.inline_url_embed_preview = inline_url_embed_preview
    realm.save(update_fields=['inline_url_embed_preview'])
    send_event(dict(type="realm",
                    op="update",
                    property='inline_url_embed_preview',
                    value=inline_url_embed_preview),
               active_user_ids(realm))
|
|
|
|
|
2016-11-02 21:51:56 +01:00
|
|
|
def do_set_realm_authentication_methods(realm, authentication_methods):
    # type: (Realm, Dict[str, bool]) -> None
    """Update the realm's enabled-authentication-backends bitfield from a
    {backend_name: enabled} mapping and broadcast the resulting settings
    to all active users."""
    for backend_name, enabled in list(authentication_methods.items()):
        bit_index = getattr(realm.authentication_methods, backend_name).number
        realm.authentication_methods.set_bit(bit_index, int(enabled))
    realm.save(update_fields=['authentication_methods'])
    send_event(dict(type="realm",
                    op="update_dict",
                    property='default',
                    data=dict(authentication_methods=realm.authentication_methods_dict())),
               active_user_ids(realm))
|
|
|
|
|
2016-05-12 10:28:00 +02:00
|
|
|
def do_set_realm_create_stream_by_admins_only(realm, create_stream_by_admins_only):
    # type: (Realm, bool) -> None
    """Toggle whether only admins may create streams and notify all
    active users."""
    realm.create_stream_by_admins_only = create_stream_by_admins_only
    realm.save(update_fields=['create_stream_by_admins_only'])
    send_event(dict(type="realm",
                    op="update",
                    property='create_stream_by_admins_only',
                    value=create_stream_by_admins_only),
               active_user_ids(realm))
|
|
|
|
|
2016-12-20 15:41:30 +01:00
|
|
|
def do_set_realm_add_emoji_by_admins_only(realm, add_emoji_by_admins_only):
    # type: (Realm, bool) -> None
    """Toggle whether only admins may add custom emoji and notify all
    active users."""
    realm.add_emoji_by_admins_only = add_emoji_by_admins_only
    realm.save(update_fields=['add_emoji_by_admins_only'])
    send_event(dict(type="realm",
                    op="update",
                    property='add_emoji_by_admins_only',
                    value=add_emoji_by_admins_only),
               active_user_ids(realm))
|
|
|
|
|
2016-07-08 02:25:55 +02:00
|
|
|
def do_set_realm_message_editing(realm, allow_message_editing, message_content_edit_limit_seconds):
    # type: (Realm, bool, int) -> None
    """Update the realm's message-editing policy (enabled flag plus edit
    time limit) and broadcast both values to all active users."""
    realm.allow_message_editing = allow_message_editing
    realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
    realm.save(update_fields=['allow_message_editing', 'message_content_edit_limit_seconds'])
    send_event(dict(type="realm",
                    op="update_dict",
                    property="default",
                    data=dict(allow_message_editing=allow_message_editing,
                              message_content_edit_limit_seconds=message_content_edit_limit_seconds)),
               active_user_ids(realm))
|
|
|
|
|
2016-08-04 17:32:41 +02:00
|
|
|
def do_set_realm_default_language(realm, default_language):
    # type: (Realm, Text) -> None
    """Update the realm's default UI language and notify all active
    users."""
    realm.default_language = default_language
    realm.save(update_fields=['default_language'])
    send_event(dict(type="realm",
                    op="update",
                    property="default_language",
                    value=default_language),
               active_user_ids(realm))
|
|
|
|
|
2016-11-29 08:57:35 +01:00
|
|
|
def do_set_realm_waiting_period_threshold(realm, threshold):
    # type: (Realm, int) -> None
    """Update realm.waiting_period_threshold, persist it, and broadcast a
    realm/update event to every active user in the realm."""
    realm.waiting_period_threshold = threshold
    realm.save(update_fields=['waiting_period_threshold'])
    send_event(
        dict(type="realm",
             op="update",
             property='waiting_period_threshold',
             value=threshold),
        active_user_ids(realm))
|
|
|
|
|
2014-01-07 18:04:26 +01:00
|
|
|
def do_deactivate_realm(realm):
    # type: (Realm) -> None
    """
    Deactivate this realm. Do NOT deactivate the users -- we need to be able to
    tell the difference between users that were intentionally deactivated,
    e.g. by a realm admin, and users who can't currently use Zulip because their
    realm has been deactivated.
    """
    if realm.deactivated:
        # Already done; nothing to do.
        return

    realm.deactivated = True
    realm.save(update_fields=["deactivated"])

    # The users stay active, but their sessions are deleted so they get
    # bumped to the login screen, where they'll get a realm deactivation
    # notice when they try to log in.
    for human in active_humans_in_realm(realm):
        delete_user_sessions(human)
|
2014-01-28 17:29:00 +01:00
|
|
|
|
2016-04-21 17:58:22 +02:00
|
|
|
def do_reactivate_realm(realm):
    # type: (Realm) -> None
    """Mark a previously deactivated realm as active again."""
    realm.deactivated = False
    realm.save(update_fields=["deactivated"])
|
|
|
|
|
2017-03-09 00:17:23 +01:00
|
|
|
def do_deactivate_user(user_profile, _cascade=True):
    # type: (UserProfile, bool) -> None
    """Deactivate a user: mark them inactive, delete their sessions,
    audit-log the event, notify clients, and (by default) also
    deactivate any active bots they own.

    `_cascade=False` is used for the recursive calls on owned bots so we
    don't cascade a second level.
    """
    if not user_profile.is_active:
        # Already deactivated; make this a no-op.
        return

    user_profile.is_active = False
    user_profile.save(update_fields=["is_active"])

    # Log the user out everywhere immediately.
    delete_user_sessions(user_profile)

    event_time = timezone.now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_deactivated', event_time=event_time)
    # Decrement the active-users analytics stat (humans and bots are
    # tracked as separate subgroups via is_bot).
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time, increment=-1)

    # Tell all active users in the realm that this user was removed.
    event = dict(type="realm_user", op="remove",
                 person=dict(email=user_profile.email,
                             user_id=user_profile.id,
                             full_name=user_profile.full_name))
    send_event(event, active_user_ids(user_profile.realm))

    if user_profile.is_bot:
        # Bot owners also get a realm_bot/remove event for their bot.
        event = dict(type="realm_bot", op="remove",
                     bot=dict(email=user_profile.email,
                              user_id=user_profile.id,
                              full_name=user_profile.full_name))
        send_event(event, bot_owner_userids(user_profile))

    if _cascade:
        # Deactivate this user's active bots, non-recursively.
        bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
                                                  bot_owner=user_profile)
        for profile in bot_profiles:
            do_deactivate_user(profile, _cascade=False)
|
2013-03-29 15:35:37 +01:00
|
|
|
|
2013-11-22 01:12:53 +01:00
|
|
|
def do_deactivate_stream(stream, log=True):
    # type: (Stream, bool) -> None
    """Deactivate a stream: unsubscribe everyone, rename it out of the
    way (freeing the name for reuse), drop it from default streams and
    the cache, and notify affected users.

    NOTE(review): the `log` parameter is accepted but never used in this
    body; it appears to be kept for call-site compatibility -- confirm.
    """
    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    # Deactivate every active subscription to this stream in bulk.
    Subscription.objects.select_related('user_profile').filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream.id,
        active=True).update(active=False)

    was_invite_only = stream.invite_only
    stream.deactivated = True
    # Mark it invite-only so the deactivated stream isn't visible/joinable.
    stream.invite_only = True
    # Preserve as much as possible the original stream name while giving it a
    # special prefix that both indicates that the stream is deactivated and
    # frees up the original name for reuse.
    old_name = stream.name
    new_name = ("!DEACTIVATED:" + old_name)[:Stream.MAX_NAME_LENGTH]
    for i in range(20):
        existing_deactivated_stream = get_stream(new_name, stream.realm)
        if existing_deactivated_stream:
            # This stream has already been deactivated, keep prepending !s until
            # we have a unique stream name or you've hit a rename limit.
            new_name = ("!" + new_name)[:Stream.MAX_NAME_LENGTH]
        else:
            break

    # If you don't have a unique name at this point, this will fail later in the
    # code path.

    stream.name = new_name[:Stream.MAX_NAME_LENGTH]
    stream.save()

    # A deactivated stream must not remain a default stream for new users.
    DefaultStream.objects.filter(realm=stream.realm, stream=stream).delete()

    # Remove the old stream information from remote cache.
    old_cache_key = get_stream_cache_key(old_name, stream.realm)
    cache_delete(old_cache_key)

    # Notify clients using the *original* name/privacy, since that's what
    # they know the stream as.
    stream_dict = stream.to_dict()
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete",
                 streams=[stream_dict])
    send_event(event, affected_user_ids)
|
2014-03-02 06:46:54 +01:00
|
|
|
|
2013-03-08 19:58:18 +01:00
|
|
|
def do_change_user_email(user_profile, new_email):
    # type: (UserProfile, Text) -> None
    """Update a user's email address, notify all active realm users of the
    change, and record it in the realm audit log."""
    user_profile.email = new_email
    user_profile.save(update_fields=["email"])

    person = dict(user_id=user_profile.id,
                  new_email=new_email)
    send_event(dict(type='realm_user', op='update', person=person),
               active_user_ids(user_profile.realm))

    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type='user_email_changed',
                                 event_time=timezone.now())
|
2013-02-10 22:45:25 +01:00
|
|
|
|
2017-01-20 12:27:38 +01:00
|
|
|
def do_start_email_change_process(user_profile, new_email):
    # type: (UserProfile, Text) -> None
    """Begin the email-change flow: record an EmailChangeStatus row and
    send a confirmation email to the new address."""
    old_email = user_profile.email
    user_profile.email = new_email

    context = {
        'support_email': settings.ZULIP_ADMINISTRATOR,
        'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS,
        'realm': user_profile.realm,
        'old_email': old_email,
        'new_email': new_email,
    }

    with transaction.atomic():
        change_status = EmailChangeStatus.objects.create(new_email=new_email,
                                                         old_email=old_email,
                                                         user_profile=user_profile,
                                                         realm=user_profile.realm)

        EmailChangeConfirmation.objects.send_confirmation(
            change_status, new_email,
            additional_context=context,
            host=user_profile.realm.host,
        )
|
|
|
|
|
2013-10-17 17:19:44 +02:00
|
|
|
def compute_irc_user_fullname(email):
    # type: (NonBinaryStr) -> NonBinaryStr
    """Derive a display name for an IRC-mirrored user: the local part of
    the email address tagged with " (IRC)"."""
    local_part = email.partition("@")[0]
    return local_part + " (IRC)"
|
|
|
|
|
2013-10-29 16:00:20 +01:00
|
|
|
def compute_jabber_user_fullname(email):
    # type: (NonBinaryStr) -> NonBinaryStr
    """Derive a display name for a Jabber-mirrored user: the local part of
    the email address tagged with " (XMPP)"."""
    local_part = email.partition("@")[0]
    return local_part + " (XMPP)"
|
|
|
|
|
2013-01-10 22:01:33 +01:00
|
|
|
def compute_mit_user_fullname(email):
    # type: (NonBinaryStr) -> NonBinaryStr
    """Resolve a display name for an MIT user via a Hesiod DNS lookup,
    falling back to the lower-cased email address on any failure.
    """
    try:
        # Input is either e.g. username@mit.edu or user|CROSSREALM.INVALID@mit.edu
        match_user = re.match(r'^([a-zA-Z0-9_.-]+)(\|.+)?@mit\.edu$', email.lower())
        if match_user and match_user.group(2) is None:
            # Plain MIT address: query the Hesiod passwd map over DNS TXT;
            # the fifth colon-separated field is the GECOS field, whose
            # first comma-separated entry is the full name.
            answer = DNS.dnslookup(
                "%s.passwd.ns.athena.mit.edu" % (match_user.group(1),),
                DNS.Type.TXT)
            hesiod_name = force_str(answer[0][0]).split(':')[4].split(',')[0].strip()
            if hesiod_name != "":
                return hesiod_name
        elif match_user:
            # Cross-realm form user|REALM@mit.edu: normalize to user@REALM.
            return match_user.group(1).lower() + "@" + match_user.group(2).upper()[1:]
    except DNS.Base.ServerError:
        # DNS server-side failure: silently fall through to the fallback.
        pass
    except Exception:
        # Best-effort lookup: log the traceback but never fail the caller.
        print("Error getting fullname for %s:" % (email,))
        traceback.print_exc()
    return email.lower()
|
|
|
|
|
2013-10-17 17:19:44 +02:00
|
|
|
@cache_with_key(lambda realm, email, f: user_profile_by_email_cache_key(email),
                timeout=3600*24*7)
def create_mirror_user_if_needed(realm, email, email_to_fullname):
    # type: (Realm, Text, Callable[[Text], Text]) -> UserProfile
    """Fetch the UserProfile for `email`, creating an inactive
    mirror-dummy user if none exists.  Results are cached by email for a
    week via the decorator above.
    """
    try:
        return get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        try:
            # Forge a user for this person
            return create_user(email, None, realm,
                               email_to_fullname(email), email_to_username(email),
                               active=False, is_mirror_dummy=True)
        except IntegrityError:
            # A concurrent request created this user between our lookup
            # and our insert; return the now-existing row.
            return get_user_profile_by_email(email)
|
2013-01-10 22:01:33 +01:00
|
|
|
|
|
|
|
def log_message(message):
    # type: (Message) -> None
    """Record the message in the event log, skipping messages sent by
    test clients (client name prefixed with "test:")."""
    if message.sending_client.name.startswith("test:"):
        return
    log_event(message.to_log_dict())
|
|
|
|
|
2017-01-22 05:55:30 +01:00
|
|
|
def render_incoming_message(message, content, message_users, realm):
    # type: (Message, Text, Set[UserProfile], Realm) -> Text
    """Render `content` to HTML for `message`, translating bugdown
    rendering failures into a user-facing JsonableError."""
    try:
        return render_markdown(
            message=message,
            content=content,
            realm=realm,
            realm_alert_words=alert_words_in_realm(realm),
            message_users=message_users,
        )
    except BugdownRenderingException:
        raise JsonableError(_('Unable to render message'))
|
|
|
|
|
2016-10-24 09:56:35 +02:00
|
|
|
def get_recipient_user_profiles(recipient, sender_id):
    # type: (Recipient, int) -> List[UserProfile]
    """Return the UserProfiles who should receive a message (or typing
    event) sent to `recipient` by the user with id `sender_id`.

    Raises ValueError for recipient types other than PERSONAL, STREAM,
    or HUDDLE.  (Type comment fixed: callers pass an integer user id,
    not Text.)
    """
    if recipient.type == Recipient.PERSONAL:
        # set() deduplicates the self-PM case, where sender == recipient.
        recipients = list(set([get_user_profile_by_id(recipient.type_id),
                               get_user_profile_by_id(sender_id)]))
        # For personals, you send out either 1 or 2 copies, for
        # personals to yourself or to someone else, respectively.
        assert((len(recipients) == 1) or (len(recipients) == 2))
    elif (recipient.type == Recipient.STREAM or recipient.type == Recipient.HUDDLE):
        # We use select_related()/only() here, while the PERSONAL case above uses
        # get_user_profile_by_id() to get UserProfile objects from cache. Streams will
        # typically have more recipients than PMs, so get_user_profile_by_id() would be
        # a bit more expensive here, given that we need to hit the DB anyway and only
        # care about the email from the user profile.
        fields = [
            'user_profile__id',
            'user_profile__email',
            'user_profile__enable_online_push_notifications',
            'user_profile__is_active',
            'user_profile__realm__domain'
        ]
        query = Subscription.objects.select_related("user_profile", "user_profile__realm").only(*fields).filter(
            recipient=recipient, active=True)
        recipients = [s.user_profile for s in query]
    else:
        raise ValueError('Bad recipient type')
    return recipients
|
|
|
|
|
2017-03-19 23:29:29 +01:00
|
|
|
def do_send_messages(messages_maybe_none):
    # type: (Sequence[Optional[MutableMapping[str, Any]]]) -> List[int]
    """Deliver a batch of prepared messages: render them, save Message and
    UserMessage rows in one transaction, and notify clients/queues.

    Each element is a dict with a 'message' key (a Message, or an int id
    for zephyr-mirror duplicates) plus optional delivery metadata.
    Returns the ids of all delivered messages (including already-sent
    mirrored ones), not necessarily in input order.
    """
    # Filter out messages which didn't pass internal_prep_message properly
    messages = [message for message in messages_maybe_none if message is not None]

    # Filter out zephyr mirror anomalies where the message was already sent
    already_sent_ids = [] # type: List[int]
    new_messages = [] # type: List[MutableMapping[str, Any]]
    for message in messages:
        if isinstance(message['message'], int):
            already_sent_ids.append(message['message'])
        else:
            new_messages.append(message)
    messages = new_messages

    # For consistency, changes to the default values for these gets should also be applied
    # to the default args in do_send_message
    for message in messages:
        message['rendered_content'] = message.get('rendered_content', None)
        message['no_log'] = message.get('no_log', False)
        message['stream'] = message.get('stream', None)
        message['local_id'] = message.get('local_id', None)
        message['sender_queue_id'] = message.get('sender_queue_id', None)
        message['realm'] = message.get('realm', message['message'].sender.realm)

    # Log the message to our message log for populate_db to refill
    for message in messages:
        if not message['no_log']:
            log_message(message['message'])

    for message in messages:
        message['recipients'] = get_recipient_user_profiles(message['message'].recipient,
                                                            message['message'].sender_id)
        # Only deliver the message to active user recipients
        message['active_recipients'] = [user_profile for user_profile in message['recipients']
                                        if user_profile.is_active]

    links_for_embed = set() # type: Set[Text]
    # Render our messages.
    for message in messages:
        assert message['message'].rendered_content is None
        rendered_content = render_incoming_message(
            message['message'],
            message['message'].content,
            message['active_recipients'],
            message['realm'])
        message['message'].rendered_content = rendered_content
        message['message'].rendered_content_version = bugdown_version
        # Collect URLs across the whole batch for the embed-preview queue.
        links_for_embed |= message['message'].links_for_preview

    for message in messages:
        message['message'].update_calculated_fields()

    # Save the message receipts in the database
    user_message_flags = defaultdict(dict) # type: Dict[int, Dict[int, List[str]]]
    with transaction.atomic():
        Message.objects.bulk_create([message['message'] for message in messages])
        ums = [] # type: List[UserMessage]
        for message in messages:
            ums_to_create = [UserMessage(user_profile=user_profile, message=message['message'])
                             for user_profile in message['active_recipients']]

            # These properties on the Message are set via
            # render_markdown by code in the bugdown inline patterns
            wildcard = message['message'].mentions_wildcard
            mentioned_ids = message['message'].mentions_user_ids
            ids_with_alert_words = message['message'].user_ids_with_alert_words
            is_me_message = message['message'].is_me_message

            for um in ums_to_create:
                # Messages a human sends start out read for the sender.
                if um.user_profile.id == message['message'].sender.id and \
                        message['message'].sent_by_human():
                    um.flags |= UserMessage.flags.read
                if wildcard:
                    um.flags |= UserMessage.flags.wildcard_mentioned
                if um.user_profile_id in mentioned_ids:
                    um.flags |= UserMessage.flags.mentioned
                if um.user_profile_id in ids_with_alert_words:
                    um.flags |= UserMessage.flags.has_alert_word
                if is_me_message:
                    um.flags |= UserMessage.flags.is_me_message

                user_message_flags[message['message'].id][um.user_profile_id] = um.flags_list()
            ums.extend(ums_to_create)
        UserMessage.objects.bulk_create(ums)

        # Claim attachments in message
        for message in messages:
            if Message.content_has_attachment(message['message'].content):
                do_claim_attachments(message['message'])

    for message in messages:
        # Render Markdown etc. here and store (automatically) in
        # remote cache, so that the single-threaded Tornado server
        # doesn't have to.
        user_flags = user_message_flags.get(message['message'].id, {})
        sender = message['message'].sender
        user_presences = get_status_dict(sender)
        presences = {}
        for user_profile in message['active_recipients']:
            if user_profile.email in user_presences:
                presences[user_profile.id] = user_presences[user_profile.email]

        event = dict(
            type = 'message',
            message = message['message'].id,
            message_dict_markdown = message_to_dict(message['message'], apply_markdown=True),
            message_dict_no_markdown = message_to_dict(message['message'], apply_markdown=False),
            presences = presences)
        users = [{'id': user.id,
                  'flags': user_flags.get(user.id, []),
                  'always_push_notify': user.enable_online_push_notifications}
                 for user in message['active_recipients']]
        if message['message'].recipient.type == Recipient.STREAM:
            # Note: This is where authorization for single-stream
            # get_updates happens! We only attach stream data to the
            # notify new_message request if it's a public stream,
            # ensuring that in the tornado server, non-public stream
            # messages are only associated to their subscribed users.
            if message['stream'] is None:
                message['stream'] = Stream.objects.select_related("realm").get(id=message['message'].recipient.type_id)
            if message['stream'].is_public():
                event['realm_id'] = message['stream'].realm_id
                event['stream_name'] = message['stream'].name
            if message['stream'].invite_only:
                event['invite_only'] = True
        if message['local_id'] is not None:
            event['local_id'] = message['local_id']
        if message['sender_queue_id'] is not None:
            event['sender_queue_id'] = message['sender_queue_id']
        send_event(event, users)

        if url_embed_preview_enabled_for_realm(message['message']) and links_for_embed:
            # Hand the collected URLs to the embed_links worker queue.
            event_data = {
                'message_id': message['message'].id,
                'message_content': message['message'].content,
                'message_realm_id': message['realm'].id,
                'urls': links_for_embed}
            queue_json_publish('embed_links', event_data, lambda x: None)

        if (settings.ENABLE_FEEDBACK and
                message['message'].recipient.type == Recipient.PERSONAL and
                settings.FEEDBACK_BOT in [up.email for up in message['recipients']]):
            # PMs to the feedback bot are forwarded to its queue.
            queue_json_publish(
                'feedback_messages',
                message_to_dict(message['message'], apply_markdown=False),
                lambda x: None
            )

    # Note that this does not preserve the order of message ids
    # returned. In practice, this shouldn't matter, as we only
    # mirror single zephyr messages at a time and don't otherwise
    # intermingle sending zephyr messages with other messages.
    return already_sent_ids + [message['message'].id for message in messages]
|
2013-08-08 19:37:40 +02:00
|
|
|
|
2017-03-23 03:57:38 +01:00
|
|
|
def notify_reaction_update(user_profile, message, emoji_name, op):
    # type: (UserProfile, Message, Text, Text) -> None
    """Broadcast a reaction event (`op` is "add" or "remove" per the
    callers) and refresh the cached rendering of `message`."""
    user_dict = {'user_id': user_profile.id,
                 'email': user_profile.email,
                 'full_name': user_profile.full_name}

    event = {'type': 'reaction',
             'op': op,
             'user': user_dict,
             'message_id': message.id,
             'emoji_name': emoji_name} # type: Dict[str, Any]

    # Update the cached message since new reaction is added.
    update_to_dict_cache([message])

    # Recipients for message update events, including reactions, are
    # everyone who got the original message. This means reactions
    # won't live-update in preview narrows, but it's the right
    # performance tradeoff, since otherwise we'd need to send all
    # reactions to public stream messages to every browser for every
    # client in the organization, which doesn't scale.
    #
    # However, to ensure that reactions do live-update for any user
    # who has actually participated in reacting to a message, we add a
    # "historical" UserMessage row for any user who reacts to message,
    # subscribing them to future notifications.
    ums = UserMessage.objects.filter(message=message.id)
    send_event(event, [um.user_profile_id for um in ums])
|
2016-11-30 08:14:46 +01:00
|
|
|
|
2017-03-23 03:57:38 +01:00
|
|
|
def do_add_reaction(user_profile, message, emoji_name):
    # type: (UserProfile, Message, Text) -> None
    """Persist a new Reaction row and broadcast the "add" event."""
    Reaction(user_profile=user_profile,
             message=message,
             emoji_name=emoji_name).save()
    notify_reaction_update(user_profile, message, emoji_name, "add")
|
|
|
|
|
2016-11-30 08:14:46 +01:00
|
|
|
def do_remove_reaction(user_profile, message, emoji_name):
    # type: (UserProfile, Message, Text) -> None
    """Delete the matching Reaction row and broadcast the "remove" event."""
    matching_reactions = Reaction.objects.filter(user_profile=user_profile,
                                                 message=message,
                                                 emoji_name=emoji_name)
    matching_reactions.delete()
    notify_reaction_update(user_profile, message, emoji_name, "remove")
|
2016-11-03 18:49:00 +01:00
|
|
|
|
2016-10-12 20:57:59 +02:00
|
|
|
def do_send_typing_notification(notification):
    # type: (Dict[str, Any]) -> None
    """Deliver a typing start/stop event to the active recipients."""
    sender = notification['sender']
    profiles = get_recipient_user_profiles(notification['recipient'], sender.id)
    # Only deliver the notification to active user recipients
    active_ids = [profile.id for profile in profiles if profile.is_active]
    # Include a list of recipients in the event body to help identify where the typing is happening
    event = dict(
        type = 'typing',
        op = notification['op'],
        sender = {'user_id': sender.id, 'email': sender.email},
        recipients = [{'user_id': profile.id, 'email': profile.email}
                      for profile in profiles])

    send_event(event, active_ids)
|
2016-10-12 20:57:59 +02:00
|
|
|
|
|
|
|
# check_send_typing_notification:
|
|
|
|
# Checks the typing notification and sends it
|
|
|
|
def check_send_typing_notification(sender, notification_to, operator):
    # type: (UserProfile, Sequence[Text], Text) -> None
    """Validate the typing notification request and, if valid, send it."""
    do_send_typing_notification(
        check_typing_notification(sender, notification_to, operator))
|
|
|
|
|
|
|
|
# check_typing_notification:
|
|
|
|
# Returns typing notification ready for sending with do_send_typing_notification on success
|
|
|
|
# or the error message (string) on error.
|
|
|
|
def check_typing_notification(sender, notification_to, operator):
    # type: (UserProfile, Sequence[Text], Text) -> Dict[str, Any]
    """Validate a typing-notification request and return the payload for
    do_send_typing_notification.

    Raises JsonableError for malformed input, and ValueError for stream
    recipients (not supported for typing notifications).
    """
    if len(notification_to) == 0:
        raise JsonableError(_('Missing parameter: \'to\' (recipient)'))
    if operator not in ('start', 'stop'):
        raise JsonableError(_('Invalid \'op\' value (should be start or stop)'))
    try:
        recipient = recipient_for_emails(notification_to, False,
                                         sender, sender)
    except ValidationError as e:
        assert isinstance(e.messages[0], six.string_types)
        raise JsonableError(e.messages[0])
    if recipient.type == Recipient.STREAM:
        raise ValueError('Forbidden recipient type')
    return {'sender': sender, 'recipient': recipient, 'op': operator}
|
2016-10-12 20:57:59 +02:00
|
|
|
|
2016-11-20 20:33:41 +01:00
|
|
|
def create_stream_if_needed(realm, stream_name, invite_only=False, stream_description=""):
    # type: (Realm, Text, bool, Text) -> Tuple[Stream, bool]
    """Look up a stream by case-insensitive name in `realm`, creating it if
    needed; newly created public streams are announced realm-wide.

    Returns (stream, created)."""
    defaults = {'name': stream_name,
                'description': stream_description,
                'invite_only': invite_only}
    (stream, created) = Stream.objects.get_or_create(
        realm=realm, name__iexact=stream_name, defaults=defaults)
    if created:
        Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        if not invite_only:
            event = dict(type="stream", op="create",
                         streams=[stream.to_dict()])
            send_event(event, active_user_ids(realm))
    return stream, created
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2016-11-21 00:16:52 +01:00
|
|
|
def create_streams_if_needed(realm, stream_dicts):
    # type: (Realm, List[Mapping[str, Any]]) -> Tuple[List[Stream], List[Stream]]
    """Note that stream_dict["name"] is assumed to already be stripped of
    whitespace"""
    added_streams = [] # type: List[Stream]
    existing_streams = [] # type: List[Stream]
    for stream_dict in stream_dicts:
        stream, created = create_stream_if_needed(
            realm,
            stream_dict["name"],
            invite_only=stream_dict.get("invite_only", False),
            stream_description=stream_dict.get("description", ""))
        # Route each stream into the appropriate result bucket.
        if created:
            added_streams.append(stream)
        else:
            existing_streams.append(stream)
    return added_streams, existing_streams
|
|
|
|
|
2013-10-17 17:19:44 +02:00
|
|
|
def recipient_for_emails(emails, not_forged_mirror_message,
                         forwarder_user_profile, sender):
    # type: (Iterable[Text], bool, Optional[UserProfile], UserProfile) -> Recipient
    """Resolve a list of recipient email addresses for a private message
    from `sender` into the appropriate Recipient row (PERSONAL or HUDDLE).

    Raises ValidationError if an email is unknown or deactivated, if the
    mirroring forwarder isn't among the recipients, or if recipients span
    multiple realms.
    """
    recipient_profile_ids = set()

    # We exempt cross-realm bots from the check that all the recipients
    # are in the same realm.
    realms = set()
    exempt_emails = get_cross_realm_emails()
    if sender.email not in exempt_emails:
        realms.add(sender.realm_id)

    for email in emails:
        try:
            user_profile = get_user_profile_by_email(email)
        except UserProfile.DoesNotExist:
            raise ValidationError(_("Invalid email '%s'") % (email,))
        # Inactive mirror-dummy accounts are valid recipients; other
        # inactive users and users in deactivated realms are not.
        if (not user_profile.is_active and not user_profile.is_mirror_dummy) or \
                user_profile.realm.deactivated:
            raise ValidationError(_("'%s' is no longer using Zulip.") % (email,))
        recipient_profile_ids.add(user_profile.id)
        if email not in exempt_emails:
            realms.add(user_profile.realm_id)

    if not_forged_mirror_message:
        # A mirroring forwarder may only deliver messages it is a party to.
        assert forwarder_user_profile is not None
        if forwarder_user_profile.id not in recipient_profile_ids:
            raise ValidationError(_("User not authorized for this query"))

    if len(realms) > 1:
        raise ValidationError(_("You can't send private messages outside of your organization."))

    # If the private message is just between the sender and
    # another person, force it to be a personal internally
    if (len(recipient_profile_ids) == 2 and
            sender.id in recipient_profile_ids):
        recipient_profile_ids.remove(sender.id)

    if len(recipient_profile_ids) > 1:
        # Make sure the sender is included in huddle messages
        recipient_profile_ids.add(sender.id)
        huddle = get_huddle(list(recipient_profile_ids))
        return get_recipient(Recipient.HUDDLE, huddle.id)
    else:
        return get_recipient(Recipient.PERSONAL, list(recipient_profile_ids)[0])
|
|
|
|
|
2013-08-12 20:05:57 +02:00
|
|
|
def already_sent_mirrored_message_id(message):
    # type: (Message) -> Optional[int]
    """Return the id of an existing copy of this mirrored message, or None.

    Huddle messages are matched within a 10-second window, because the
    timestamps of two mirrored copies of the same message aren't
    guaranteed to agree exactly; every other recipient type requires an
    exact timestamp match.
    """
    if message.recipient.type == Recipient.HUDDLE:
        window_seconds = 10
    else:
        window_seconds = 0
    time_window = datetime.timedelta(seconds=window_seconds)

    duplicates = Message.objects.filter(
        sender=message.sender,
        recipient=message.recipient,
        content=message.content,
        subject=message.subject,
        sending_client=message.sending_client,
        pub_date__gte=message.pub_date - time_window,
        pub_date__lte=message.pub_date + time_window)

    if duplicates.exists():
        return duplicates[0].id
    return None
|
2013-03-18 18:57:34 +01:00
|
|
|
|
2014-02-14 19:39:11 +01:00
|
|
|
def extract_recipients(s):
    # type: (Union[str, Iterable[Text]]) -> List[Text]
    """Normalize the accepted recipient formats to a de-duplicated list.

    We try to accept multiple incoming formats for recipients: a
    JSON-encoded list, a comma-separated string, or an iterable of
    strings.  See test_extract_recipients() for examples of what we allow.
    """
    try:
        parsed = ujson.loads(s)  # type: ignore # This function has a super weird union argument.
    except ValueError:
        parsed = s

    if isinstance(parsed, six.string_types):
        parsed = parsed.split(',')  # type: ignore # https://github.com/python/typeshed/pull/138

    if not isinstance(parsed, list):
        raise ValueError("Invalid data type for recipients")

    # Strip each recipient, then drop duplicates and any entry that is
    # the empty string after being stripped.
    stripped = (recipient.strip() for recipient in parsed)
    return list({recipient for recipient in stripped if recipient})
|
|
|
|
|
2013-03-18 18:57:34 +01:00
|
|
|
# check_send_message:
# Returns the id of the sent message.  Has same argspec as check_message.
def check_send_message(sender, client, message_type_name, message_to,
                       subject_name, message_content, realm=None, forged=False,
                       forged_timestamp=None, forwarder_user_profile=None, local_id=None,
                       sender_queue_id=None):
    # type: (UserProfile, Client, Text, Sequence[Text], Optional[Text], Text, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[Text], Optional[Text]) -> int
    """Validate a message and send it; return the new message's id."""
    prepared = check_message(sender, client, message_type_name, message_to,
                             subject_name, message_content, realm, forged,
                             forged_timestamp, forwarder_user_profile,
                             local_id, sender_queue_id)
    return do_send_messages([prepared])[0]
|
2013-05-10 22:56:22 +02:00
|
|
|
|
2013-08-22 23:40:27 +02:00
|
|
|
def check_stream_name(stream_name):
    # type: (Text) -> None
    """Raise a JsonableError if stream_name is blank or exceeds the length limit.

    Bug fix: the original code interpolated with % *inside* _(), i.e.
    _("Invalid stream name '%s'" % stream_name), so the already-interpolated
    string was looked up in the translation catalog and could never match a
    translation entry.  Interpolation must happen after translation.
    """
    if stream_name.strip() == "":
        raise JsonableError(_("Invalid stream name '%s'") % (stream_name,))
    if len(stream_name) > Stream.MAX_NAME_LENGTH:
        raise JsonableError(_("Stream name too long (limit: %s characters)") % (Stream.MAX_NAME_LENGTH,))
|
2013-08-22 23:40:27 +02:00
|
|
|
|
2016-11-15 05:34:20 +01:00
|
|
|
def send_pm_if_empty_stream(sender, stream, stream_name, realm):
    # type: (UserProfile, Optional[Stream], Text, Realm) -> None
    """If a bot sends a message to a stream that doesn't exist or has no
    subscribers, sends a notification to the bot owner (if not a
    cross-realm bot) so that the owner can correct the issue.

    `stream` is None when the stream doesn't exist; `stream_name` is
    always provided so the notification can name it either way.
    """
    # Zephyr-mirror and deactivated realms never get these notifications.
    if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated:
        return

    # Only applies to bots that actually have an owner to notify.
    if not sender.is_bot or sender.bot_owner is None:
        return

    # Don't send these notifications for cross-realm bot messages
    # (e.g. from EMAIL_GATEWAY_BOT) since the owner for
    # EMAIL_GATEWAY_BOT is probably the server administrator, not
    # the owner of the bot who could potentially fix the problem.
    if sender.realm != realm:
        return

    # A stream that exists and has at least one subscriber is healthy;
    # nothing to report.
    if stream is not None:
        num_subscribers = stream.num_subscribers()
        if num_subscribers > 0:
            return

    # We warn the user once every 5 minutes to avoid a flood of
    # PMs on a misconfigured integration, re-using the
    # UserProfile.last_reminder field, which is not used for bots.
    last_reminder = sender.last_reminder
    waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD)
    if last_reminder and timezone.now() - last_reminder <= waitperiod:
        return

    # Tailor the advice to whether the stream is missing or merely empty.
    if stream is None:
        error_msg = "that stream does not yet exist. To create it, "
    else:
        # num_subscribers == 0
        error_msg = "there are no subscribers to that stream. To join it, "

    content = ("Hi there! We thought you'd like to know that your bot **%s** just "
               "tried to send a message to stream `%s`, but %s"
               "click the gear in the left-side stream list." %
               (sender.full_name, stream_name, error_msg))
    # Notify the owner via a PM from the notification bot in the target realm.
    message = internal_prep_message(realm, settings.NOTIFICATION_BOT, "private",
                                    sender.bot_owner.email, "", content)
    do_send_messages([message])

    # Record when we last nagged this bot's owner, for the rate limit above.
    sender.last_reminder = timezone.now()
    sender.save(update_fields=['last_reminder'])
|
2013-09-19 22:37:24 +02:00
|
|
|
|
2013-05-10 22:56:22 +02:00
|
|
|
# check_message:
# Returns message ready for sending with do_send_message on success or the error message (string) on error.
def check_message(sender, client, message_type_name, message_to,
                  subject_name, message_content_raw, realm=None, forged=False,
                  forged_timestamp=None, forwarder_user_profile=None, local_id=None,
                  sender_queue_id=None):
    # type: (UserProfile, Client, Text, Sequence[Text], Optional[Text], Text, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[Text], Optional[Text]) -> Dict[str, Any]
    """Validate an outgoing message and build the dict do_send_messages expects.

    Raises JsonableError on any validation failure (no recipients, empty
    content, bad stream/topic, sender not authorized, etc.).  On success
    returns a dict with keys 'message', 'stream', 'local_id',
    'sender_queue_id' and 'realm'; for a zephyr-mirror duplicate it
    instead returns {'message': <id of the already-sent copy>}.
    """
    stream = None
    if not message_to and message_type_name == 'stream' and sender.default_sending_stream:
        # Use the users default stream
        message_to = [sender.default_sending_stream.name]
    if len(message_to) == 0:
        raise JsonableError(_("Message must have recipients"))
    # Trailing whitespace is dropped before the emptiness check, so a
    # whitespace-only message is rejected too.
    message_content = message_content_raw.rstrip()
    if len(message_content) == 0:
        raise JsonableError(_("Message must not be empty"))
    message_content = truncate_body(message_content)

    if realm is None:
        realm = sender.realm

    if message_type_name == 'stream':
        if len(message_to) > 1:
            raise JsonableError(_("Cannot send to multiple streams"))

        stream_name = message_to[0].strip()
        check_stream_name(stream_name)

        if subject_name is None:
            raise JsonableError(_("Missing topic"))
        subject = subject_name.strip()
        if subject == "":
            raise JsonableError(_("Topic can't be empty"))
        subject = truncate_topic(subject)

        stream = get_stream(stream_name, realm)

        # May PM the bot's owner about a missing/empty stream; does not raise.
        send_pm_if_empty_stream(sender, stream, stream_name, realm)

        if stream is None:
            raise JsonableError(_("Stream '%(stream_name)s' does not exist") % {'stream_name': escape(stream_name)})
        recipient = get_recipient(Recipient.STREAM, stream.id)

        # Authorization: exactly one of the following must hold.
        if not stream.invite_only:
            # This is a public stream
            pass
        elif subscribed_to_stream(sender, stream):
            # Or it is private, but you are subscribed
            pass
        elif sender.is_api_super_user or (forwarder_user_profile is not None and
                                          forwarder_user_profile.is_api_super_user):
            # Or this request is being done on behalf of a super user
            pass
        elif sender.is_bot and subscribed_to_stream(sender.bot_owner, stream):
            # Or you're a bot and your owner is subscribed.
            pass
        else:
            # All other cases are an error.
            raise JsonableError(_("Not authorized to send to stream '%s'") % (stream.name,))

    elif message_type_name == 'private':
        mirror_message = client and client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]
        not_forged_mirror_message = mirror_message and not forged
        try:
            recipient = recipient_for_emails(message_to, not_forged_mirror_message,
                                             forwarder_user_profile, sender)
        except ValidationError as e:
            assert isinstance(e.messages[0], six.string_types)
            raise JsonableError(e.messages[0])
    else:
        raise JsonableError(_("Invalid message type"))

    # Build the unsaved Message row.
    message = Message()
    message.sender = sender
    message.content = message_content
    message.recipient = recipient
    if message_type_name == 'stream':
        message.subject = subject
    if forged and forged_timestamp is not None:
        # Forged messages come with a timestamp
        message.pub_date = timestamp_to_datetime(forged_timestamp)
    else:
        message.pub_date = timezone.now()
    message.sending_client = client

    # We render messages later in the process.
    assert message.rendered_content is None

    if client.name == "zephyr_mirror":
        # NOTE(review): `id` shadows the builtin here; harmless in this scope.
        id = already_sent_mirrored_message_id(message)
        if id is not None:
            return {'message': id}

    return {'message': message, 'stream': stream, 'local_id': local_id,
            'sender_queue_id': sender_queue_id, 'realm': realm}
|
2013-03-18 18:57:34 +01:00
|
|
|
|
2017-01-22 05:11:53 +01:00
|
|
|
def internal_prep_message(realm, sender_email, recipient_type_name, recipients,
                          subject, content):
    # type: (Realm, Text, str, Text, Text, Text) -> Optional[Dict[str, Any]]
    """
    Create a message object and checks it, but doesn't send it or save it to the database.
    The internal function that calls this can therefore batch send a bunch of created
    messages together as one database query.
    Call do_send_messages with a list of the return values of this method.

    Returns None (after logging) if check_message rejects the message.

    Fix: the original unpacked `stream, _ = create_stream_if_needed(...)`,
    which shadowed the module-level `ugettext as _` alias; since neither
    unpacked value was used, the call is now made purely for its side effect.
    """
    if len(content) > MAX_MESSAGE_LENGTH:
        # NOTE(review): 3900 is a historical magic number, smaller than
        # MAX_MESSAGE_LENGTH — kept as-is to preserve behavior.
        content = content[0:3900] + "\n\n[message was too long and has been truncated]"

    sender = get_user_profile_by_email(sender_email)
    if realm is None:
        raise RuntimeError("None is not a valid realm for internal_prep_message!")
    parsed_recipients = extract_recipients(recipients)
    if recipient_type_name == "stream":
        # Called for its side effect: make sure the target stream exists.
        create_stream_if_needed(realm, parsed_recipients[0])

    try:
        return check_message(sender, get_client("Internal"), recipient_type_name,
                             parsed_recipients, subject, content, realm=realm)
    except JsonableError as e:
        logging.error("Error queueing internal message by %s: %s" % (sender_email, str(e)))

    return None
|
2013-05-10 22:56:22 +02:00
|
|
|
|
2017-01-22 05:23:36 +01:00
|
|
|
def internal_send_message(realm, sender_email, recipient_type_name, recipients,
                          subject, content):
    # type: (Realm, Text, str, Text, Text, Text) -> None
    """Prepare and immediately send a single internal message.

    Silently does nothing if internal_prep_message rejected the message
    (it already logged the error).
    """
    msg = internal_prep_message(realm, sender_email, recipient_type_name,
                                recipients, subject, content)
    if msg is None:
        # internal_prep_message encountered an error
        return
    do_send_messages([msg])
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2013-03-10 19:36:45 +01:00
|
|
|
def pick_color(user_profile):
    # type: (UserProfile) -> Text
    """Choose a color for this user's next stream subscription."""
    stream_subs = Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM)
    return pick_color_helper(user_profile, stream_subs)
|
|
|
|
|
|
|
|
def pick_color_helper(user_profile, subs):
    # type: (UserProfile, Iterable[Subscription]) -> Text
    """Return the first palette color not used by any active subscription.

    These colors are shared with the palette in subs.js.  When every
    palette color is already in use, cycle deterministically based on
    how many colors are taken.
    """
    used_colors = [sub.color for sub in subs if sub.active]
    for candidate in STREAM_ASSIGNMENT_COLORS:
        if candidate not in used_colors:
            return candidate
    return STREAM_ASSIGNMENT_COLORS[len(used_colors) % len(STREAM_ASSIGNMENT_COLORS)]
|
2013-03-10 19:36:45 +01:00
|
|
|
|
2013-09-30 22:03:16 +02:00
|
|
|
def validate_user_access_to_subscribers(user_profile, stream):
    # type: (Optional[UserProfile], Stream) -> None
    """ Validates whether the user can view the subscribers of a stream.  Raises a JsonableError if:
    * The user and the stream are in different realms
    * The realm is MIT and the stream is not invite only.
    * The stream is invite only, requesting_user is passed, and that user
      does not subscribe to the stream.
    """
    stream_dict = {
        "realm_id": stream.realm_id,
        "invite_only": stream.invite_only,
    }

    def check_user_subscribed():
        # type: () -> bool
        # Deferred so that the subscription lookup only runs if the
        # helper actually needs it.
        return subscribed_to_stream(cast(UserProfile, user_profile), stream)

    validate_user_access_to_subscribers_helper(user_profile, stream_dict,
                                               check_user_subscribed)
|
2013-10-02 19:46:40 +02:00
|
|
|
|
|
|
|
def validate_user_access_to_subscribers_helper(user_profile, stream_dict, check_user_subscribed):
    # type: (Optional[UserProfile], Mapping[str, Any], Callable[[], bool]) -> None
    """Variant of validate_user_access_to_subscribers that doesn't need a full Stream.

    * stream_dict must supply "realm_id" and "invite_only".
    * check_user_subscribed is a zero-argument callable reporting whether
      the user is subscribed to the stream; it is only invoked for
      invite-only streams.
    """
    if user_profile is None:
        raise ValidationError("Missing user to validate access for")

    if user_profile.realm_id != stream_dict["realm_id"]:
        raise ValidationError("Requesting user not in given realm")

    is_public = not stream_dict["invite_only"]
    if user_profile.realm.is_zephyr_mirror_realm and is_public:
        raise JsonableError(_("You cannot get subscribers for public streams in this realm"))

    if not is_public and not check_user_subscribed():
        raise JsonableError(_("Unable to retrieve subscribers for invite-only stream"))
|
2013-09-06 23:25:43 +02:00
|
|
|
|
2013-10-02 19:46:40 +02:00
|
|
|
# sub_dict is a dictionary mapping stream_id => whether the user is subscribed to that stream
def bulk_get_subscriber_user_ids(stream_dicts, user_profile, sub_dict):
    # type: (Iterable[Mapping[str, Any]], UserProfile, Mapping[int, bool]) -> Dict[int, List[int]]
    """Map each accessible stream id to the ids of its active subscribers.

    Streams the user may not access (per
    validate_user_access_to_subscribers_helper) are skipped; every stream
    in stream_dicts still gets a (possibly empty) entry in the result.
    """
    target_stream_dicts = []
    for stream_dict in stream_dicts:
        try:
            # Bind the stream id as a default argument: a bare
            # `lambda: sub_dict[stream_dict["id"]]` closes over the loop
            # variable (late binding), which is a latent bug if the
            # callable ever escapes the current iteration.
            validate_user_access_to_subscribers_helper(
                user_profile, stream_dict,
                lambda stream_id=stream_dict["id"]: sub_dict[stream_id])
        except JsonableError:
            continue
        target_stream_dicts.append(stream_dict)

    subscriptions = Subscription.objects.select_related("recipient").filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id__in=[stream["id"] for stream in target_stream_dicts],
        user_profile__is_active=True,
        active=True).values("user_profile_id", "recipient__type_id")

    result = dict((stream["id"], []) for stream in stream_dicts) # type: Dict[int, List[int]]
    for sub in subscriptions:
        result[sub["recipient__type_id"]].append(sub["user_profile_id"])

    return result
|
|
|
|
|
2013-09-30 22:03:16 +02:00
|
|
|
def get_subscribers_query(stream, requesting_user):
    # type: (Stream, Optional[UserProfile]) -> QuerySet
    # TODO: Make a generic stub for QuerySet
    """ Build a query to get the subscribers list for a stream, raising a JsonableError if:

    'realm' is optional in stream.

    The caller can refine this query with select_related(), values(), etc. depending
    on whether it wants objects or just certain fields
    """
    validate_user_access_to_subscribers(requesting_user, stream)

    # Note that non-active users may still have "active" subscriptions, because we
    # want to be able to easily reactivate them with their old subscriptions.  This
    # is why the query here has to look at the UserProfile.is_active flag.
    return Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream.id,
        user_profile__is_active=True,
        active=True)
|
2013-09-06 23:25:43 +02:00
|
|
|
|
2013-09-30 21:58:36 +02:00
|
|
|
def get_subscribers(stream, requesting_user=None):
    # type: (Stream, Optional[UserProfile]) -> List[UserProfile]
    """Return the UserProfiles subscribed to stream (access-checked)."""
    subs = get_subscribers_query(stream, requesting_user).select_related()
    return [sub.user_profile for sub in subs]
|
|
|
|
|
2013-09-30 21:58:36 +02:00
|
|
|
def get_subscriber_emails(stream, requesting_user=None):
    # type: (Stream, Optional[UserProfile]) -> List[Text]
    """Return the email addresses subscribed to stream (access-checked)."""
    rows = get_subscribers_query(stream, requesting_user).values('user_profile__email')
    return [row['user_profile__email'] for row in rows]
|
|
|
|
|
2016-07-27 04:00:46 +02:00
|
|
|
def maybe_get_subscriber_emails(stream, user_profile):
    # type: (Stream, UserProfile) -> List[Text]
    """ Alternate version of get_subscriber_emails that takes a Stream object only
    (not a name), and simply returns an empty list if unable to get a real
    subscriber list (because we're on the MIT realm). """
    try:
        return get_subscriber_emails(stream, requesting_user=user_profile)
    except JsonableError:
        return []
|
2013-09-06 23:25:43 +02:00
|
|
|
|
2013-09-25 23:11:01 +02:00
|
|
|
def notify_subscriptions_added(user_profile, sub_pairs, stream_emails, no_log=False):
    # type: (UserProfile, Iterable[Tuple[Subscription, Stream]], Callable[[Stream], List[Text]], bool) -> None
    """Notify user_profile's clients that these subscriptions were added.

    sub_pairs is (Subscription, Stream) tuples; stream_emails maps a
    Stream to the email list to report as its subscribers.
    NOTE(review): sub_pairs may be iterated twice (logging + payload),
    so it must not be a one-shot iterator when no_log is False.
    """
    if not no_log:
        log_event({'type': 'subscription_added',
                   'user': user_profile.email,
                   'names': [stream.name for sub, stream in sub_pairs],
                   'realm': user_profile.realm.string_id})

    # Send a notification to the user who subscribed.
    payload = [dict(name=stream.name,
                    stream_id=stream.id,
                    in_home_view=subscription.in_home_view,
                    invite_only=stream.invite_only,
                    color=subscription.color,
                    email_address=encode_email_address(stream),
                    desktop_notifications=subscription.desktop_notifications,
                    audible_notifications=subscription.audible_notifications,
                    description=stream.description,
                    pin_to_top=subscription.pin_to_top,
                    subscribers=stream_emails(stream))
               for (subscription, stream) in sub_pairs]
    event = dict(type="subscription", op="add",
                 subscriptions=payload)
    send_event(event, [user_profile.id])
|
2013-05-10 17:43:27 +02:00
|
|
|
|
2016-10-19 23:49:04 +02:00
|
|
|
def get_peer_user_ids_for_stream_change(stream, altered_users, subscribed_users):
    # type: (Stream, Iterable[UserProfile], Iterable[UserProfile]) -> Set[int]
    '''
    altered_users is a list of users that we are adding/removing
    subscribed_users is the list of already subscribed users

    Based on stream policy, we notify the correct bystanders, while
    not notifying altered_users (who get subscribers via another event)
    '''
    altered_ids = {user.id for user in altered_users}

    if stream.invite_only:
        # Private stream: only its current subscribers learn of changes.
        return {user.id for user in subscribed_users} - altered_ids

    # Public stream: we now do "peer_add" or "peer_remove" events even for
    # streams users were never subscribed to, in order for the
    # neversubscribed structure to stay up-to-date.
    return set(active_user_ids(stream.realm)) - altered_ids
|
|
|
|
|
2016-10-20 19:17:47 +02:00
|
|
|
def query_all_subs_by_stream(streams):
    # type: (Iterable[Stream]) -> Dict[int, List[UserProfile]]
    """Map each stream id to the active users with an active subscription to it."""
    stream_ids = [stream.id for stream in streams]
    all_subs = Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id__in=stream_ids,
        user_profile__is_active=True,
        active=True).select_related('recipient', 'user_profile')

    subs_by_stream = defaultdict(list)  # type: Dict[int, List[UserProfile]]
    for sub in all_subs:
        subs_by_stream[sub.recipient.type_id].append(sub.user_profile)
    return subs_by_stream
|
|
|
|
|
2013-06-25 19:26:58 +02:00
|
|
|
def bulk_add_subscriptions(streams, users):
|
2016-05-25 06:55:14 +02:00
|
|
|
# type: (Iterable[Stream], Iterable[UserProfile]) -> Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
|
|
|
|
recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams]) # type: Mapping[int, Recipient]
|
|
|
|
recipients = [recipient.id for recipient in recipients_map.values()] # type: List[int]
|
2013-06-25 19:26:58 +02:00
|
|
|
|
2016-05-25 06:55:14 +02:00
|
|
|
stream_map = {} # type: Dict[int, Stream]
|
2013-06-25 19:26:58 +02:00
|
|
|
for stream in streams:
|
|
|
|
stream_map[recipients_map[stream.id].id] = stream
|
2013-05-10 17:43:27 +02:00
|
|
|
|
2016-01-25 23:42:16 +01:00
|
|
|
subs_by_user = defaultdict(list) # type: Dict[int, List[Subscription]]
|
2013-06-25 19:26:58 +02:00
|
|
|
all_subs_query = Subscription.objects.select_related("user_profile")
|
|
|
|
for sub in all_subs_query.filter(user_profile__in=users,
|
|
|
|
recipient__type=Recipient.STREAM):
|
2013-05-10 17:43:27 +02:00
|
|
|
subs_by_user[sub.user_profile_id].append(sub)
|
|
|
|
|
2016-05-25 06:55:14 +02:00
|
|
|
already_subscribed = [] # type: List[Tuple[UserProfile, Stream]]
|
|
|
|
subs_to_activate = [] # type: List[Tuple[Subscription, Stream]]
|
|
|
|
new_subs = [] # type: List[Tuple[UserProfile, int, Stream]]
|
2013-05-10 17:43:27 +02:00
|
|
|
for user_profile in users:
|
2016-05-25 06:55:14 +02:00
|
|
|
needs_new_sub = set(recipients) # type: Set[int]
|
2013-05-10 17:43:27 +02:00
|
|
|
for sub in subs_by_user[user_profile.id]:
|
2013-06-25 19:26:58 +02:00
|
|
|
if sub.recipient_id in needs_new_sub:
|
|
|
|
needs_new_sub.remove(sub.recipient_id)
|
2013-05-10 17:43:27 +02:00
|
|
|
if sub.active:
|
2013-06-25 19:26:58 +02:00
|
|
|
already_subscribed.append((user_profile, stream_map[sub.recipient_id]))
|
2013-05-10 17:43:27 +02:00
|
|
|
else:
|
2013-06-25 19:26:58 +02:00
|
|
|
subs_to_activate.append((sub, stream_map[sub.recipient_id]))
|
|
|
|
# Mark the sub as active, without saving, so that
|
|
|
|
# pick_color will consider this to be an active
|
|
|
|
# subscription when picking colors
|
|
|
|
sub.active = True
|
|
|
|
for recipient_id in needs_new_sub:
|
|
|
|
new_subs.append((user_profile, recipient_id, stream_map[recipient_id]))
|
2013-05-10 17:43:27 +02:00
|
|
|
|
2016-05-25 06:55:14 +02:00
|
|
|
subs_to_add = [] # type: List[Tuple[Subscription, Stream]]
|
2013-06-25 19:26:58 +02:00
|
|
|
for (user_profile, recipient_id, stream) in new_subs:
|
2013-05-10 17:43:27 +02:00
|
|
|
color = pick_color_helper(user_profile, subs_by_user[user_profile.id])
|
2013-06-25 19:26:58 +02:00
|
|
|
sub_to_add = Subscription(user_profile=user_profile, active=True,
|
2013-09-20 15:52:21 +02:00
|
|
|
color=color, recipient_id=recipient_id,
|
2014-02-05 23:21:02 +01:00
|
|
|
desktop_notifications=user_profile.enable_stream_desktop_notifications,
|
|
|
|
audible_notifications=user_profile.enable_stream_sounds)
|
2013-06-25 19:26:58 +02:00
|
|
|
subs_by_user[user_profile.id].append(sub_to_add)
|
|
|
|
subs_to_add.append((sub_to_add, stream))
|
2014-03-02 06:46:54 +01:00
|
|
|
|
|
|
|
# TODO: XXX: This transaction really needs to be done at the serializeable
|
|
|
|
# transaction isolation level.
|
|
|
|
with transaction.atomic():
|
|
|
|
occupied_streams_before = list(get_occupied_streams(user_profile.realm))
|
|
|
|
Subscription.objects.bulk_create([sub for (sub, stream) in subs_to_add])
|
2016-05-30 07:32:56 +02:00
|
|
|
Subscription.objects.filter(id__in=[sub.id for (sub, stream) in subs_to_activate]).update(active=True)
|
2014-03-02 06:46:54 +01:00
|
|
|
occupied_streams_after = list(get_occupied_streams(user_profile.realm))
|
|
|
|
|
|
|
|
new_occupied_streams = [stream for stream in
|
|
|
|
set(occupied_streams_after) - set(occupied_streams_before)
|
|
|
|
if not stream.invite_only]
|
|
|
|
if new_occupied_streams:
|
|
|
|
event = dict(type="stream", op="occupy",
|
|
|
|
streams=[stream.to_dict()
|
|
|
|
for stream in new_occupied_streams])
|
|
|
|
send_event(event, active_user_ids(user_profile.realm))
|
2013-06-25 19:26:58 +02:00
|
|
|
|
2013-09-25 23:11:01 +02:00
|
|
|
# Notify all existing users on streams that users have joined
|
|
|
|
|
|
|
|
# First, get all users subscribed to the streams that we care about
|
|
|
|
# We fetch all subscription information upfront, as it's used throughout
|
|
|
|
# the following code and we want to minize DB queries
|
2016-10-20 19:17:47 +02:00
|
|
|
all_subs_by_stream = query_all_subs_by_stream(streams=streams)
|
2013-09-25 23:11:01 +02:00
|
|
|
|
|
|
|
def fetch_stream_subscriber_emails(stream):
|
2016-12-14 20:04:21 +01:00
|
|
|
# type: (Stream) -> List[Text]
|
2016-07-27 01:45:29 +02:00
|
|
|
if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
|
2013-09-25 23:11:01 +02:00
|
|
|
return []
|
2016-10-20 19:00:35 +02:00
|
|
|
users = all_subs_by_stream[stream.id]
|
|
|
|
return [u.email for u in users]
|
2013-09-25 23:11:01 +02:00
|
|
|
|
2016-01-25 23:42:16 +01:00
|
|
|
sub_tuples_by_user = defaultdict(list) # type: Dict[int, List[Tuple[Subscription, Stream]]]
|
2016-05-25 06:55:14 +02:00
|
|
|
new_streams = set() # type: Set[Tuple[int, int]]
|
2013-06-25 19:26:58 +02:00
|
|
|
for (sub, stream) in subs_to_add + subs_to_activate:
|
2013-06-28 17:49:51 +02:00
|
|
|
sub_tuples_by_user[sub.user_profile.id].append((sub, stream))
|
2013-09-25 23:11:01 +02:00
|
|
|
new_streams.add((sub.user_profile.id, stream.id))
|
2013-06-28 17:49:51 +02:00
|
|
|
|
2017-01-29 01:21:31 +01:00
|
|
|
# We now send several types of events to notify browsers. The
|
|
|
|
# first batch is notifications to users on invite-only streams
|
|
|
|
# that the stream exists.
|
|
|
|
for stream in streams:
|
|
|
|
new_users = [user for user in users if (user.id, stream.id) in new_streams]
|
|
|
|
|
|
|
|
# Users newly added to invite-only streams need a `create`
|
|
|
|
# notification, since they didn't have the invite-only stream
|
|
|
|
# in their browser yet.
|
|
|
|
if stream.invite_only:
|
|
|
|
event = dict(type="stream", op="create",
|
|
|
|
streams=[stream.to_dict()])
|
|
|
|
send_event(event, [user.id for user in new_users])
|
|
|
|
|
|
|
|
# The second batch is events for the users themselves that they
|
|
|
|
# were subscribed to the new streams.
|
2013-06-28 17:49:51 +02:00
|
|
|
for user_profile in users:
|
|
|
|
if len(sub_tuples_by_user[user_profile.id]) == 0:
|
|
|
|
continue
|
2013-09-13 21:11:41 +02:00
|
|
|
sub_pairs = sub_tuples_by_user[user_profile.id]
|
2013-09-25 23:11:01 +02:00
|
|
|
notify_subscriptions_added(user_profile, sub_pairs, fetch_stream_subscriber_emails)
|
2013-09-13 23:09:19 +02:00
|
|
|
|
2017-01-29 01:21:31 +01:00
|
|
|
# The second batch is events for other users who are tracking the
|
|
|
|
# subscribers lists of streams in their browser; everyone for
|
|
|
|
# public streams and only existing subscribers for private streams.
|
2013-09-13 23:09:19 +02:00
|
|
|
for stream in streams:
|
2016-07-27 01:45:29 +02:00
|
|
|
if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
|
2013-09-25 23:11:01 +02:00
|
|
|
continue
|
|
|
|
|
|
|
|
new_users = [user for user in users if (user.id, stream.id) in new_streams]
|
2016-10-19 23:49:04 +02:00
|
|
|
|
|
|
|
peer_user_ids = get_peer_user_ids_for_stream_change(
|
|
|
|
stream=stream,
|
|
|
|
altered_users=new_users,
|
|
|
|
subscribed_users=all_subs_by_stream[stream.id]
|
|
|
|
)
|
|
|
|
|
|
|
|
if peer_user_ids:
|
|
|
|
for added_user in new_users:
|
2014-02-06 21:21:21 +01:00
|
|
|
event = dict(type="subscription", op="peer_add",
|
2014-01-24 23:24:44 +01:00
|
|
|
subscriptions=[stream.name],
|
2016-10-31 20:18:32 +01:00
|
|
|
user_id=added_user.id)
|
2016-10-19 23:49:04 +02:00
|
|
|
send_event(event, peer_user_ids)
|
|
|
|
|
2016-05-30 07:32:56 +02:00
|
|
|
return ([(user_profile, stream) for (user_profile, recipient_id, stream) in new_subs] +
|
|
|
|
[(sub.user_profile, stream) for (sub, stream) in subs_to_activate],
|
2013-05-10 17:43:27 +02:00
|
|
|
already_subscribed)
|
|
|
|
|
2013-06-28 17:49:51 +02:00
|
|
|
def notify_subscriptions_removed(user_profile, streams, no_log=False):
    # type: (UserProfile, Iterable[Stream], bool) -> None
    """Tell a user's clients that they were unsubscribed from `streams`.

    Also records the removal in the server event log unless `no_log`
    is True.
    """
    if not no_log:
        log_event({'type': 'subscription_removed',
                   'user': user_profile.email,
                   'names': [stream.name for stream in streams],
                   'realm': user_profile.realm.string_id})

    # Clients identify each removed subscription by both name and id.
    removals = []
    for stream in streams:
        removals.append(dict(name=stream.name, stream_id=stream.id))
    event = dict(type="subscription", op="remove", subscriptions=removals)
    send_event(event, [user_profile.id])
|
2013-06-28 17:16:55 +02:00
|
|
|
|
|
|
|
def bulk_remove_subscriptions(users, streams):
    # type: (Iterable[UserProfile], Iterable[Stream]) -> Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
    """Unsubscribe each user in `users` from each stream in `streams`.

    Returns a pair of lists of (user_profile, stream) tuples:
    the subscriptions actually deactivated, and the requested pairs
    where the user was not subscribed in the first place.
    Side effects: deactivates Subscription rows, and sends
    "vacate", "remove" and "peer_remove" events to clients.
    """
    # Map stream id -> Recipient, then invert into recipient id -> Stream,
    # so we can translate Subscription.recipient_id back to a Stream below.
    recipients_map = bulk_get_recipients(Recipient.STREAM,
                                         [stream.id for stream in streams])  # type: Mapping[int, Recipient]
    stream_map = {}  # type: Dict[int, Stream]
    for stream in streams:
        stream_map[recipients_map[stream.id].id] = stream

    # Fetch all relevant active subscriptions in one query, grouped by user.
    subs_by_user = dict((user_profile.id, []) for user_profile in users)  # type: Dict[int, List[Subscription]]
    for sub in Subscription.objects.select_related("user_profile").filter(user_profile__in=users,
                                                                          recipient__in=list(recipients_map.values()),
                                                                          active=True):
        subs_by_user[sub.user_profile_id].append(sub)

    # Partition the requested (user, stream) pairs into those with an
    # active subscription to deactivate and those that were never subscribed.
    subs_to_deactivate = []  # type: List[Tuple[Subscription, Stream]]
    not_subscribed = []  # type: List[Tuple[UserProfile, Stream]]
    for user_profile in users:
        recipients_to_unsub = set([recipient.id for recipient in recipients_map.values()])
        for sub in subs_by_user[user_profile.id]:
            recipients_to_unsub.remove(sub.recipient_id)
            subs_to_deactivate.append((sub, stream_map[sub.recipient_id]))
        for recipient_id in recipients_to_unsub:
            not_subscribed.append((user_profile, stream_map[recipient_id]))

    # NOTE(review): `user_profile` below is the leaked loop variable from the
    # loop above, used only to reach `.realm`; presumably all `users` share
    # one realm — confirm with callers before restructuring.
    # TODO: XXX: This transaction really needs to be done at the serializeable
    # transaction isolation level.
    with transaction.atomic():
        occupied_streams_before = list(get_occupied_streams(user_profile.realm))
        Subscription.objects.filter(id__in=[sub.id for (sub, stream_name) in
                                            subs_to_deactivate]).update(active=False)
        occupied_streams_after = list(get_occupied_streams(user_profile.realm))

    # Any public stream that lost its last subscriber gets a "vacate" event.
    new_vacant_streams = [stream for stream in
                          set(occupied_streams_before) - set(occupied_streams_after)
                          if not stream.invite_only]
    if new_vacant_streams:
        event = dict(type="stream", op="vacate",
                     streams=[stream.to_dict()
                              for stream in new_vacant_streams])
        send_event(event, active_user_ids(user_profile.realm))

    # Index the deactivated subscriptions both ways: streams per user (to
    # notify the user) and users per stream (to notify peers).
    altered_user_dict = defaultdict(list)  # type: Dict[int, List[UserProfile]]
    streams_by_user = defaultdict(list)  # type: Dict[int, List[Stream]]
    for (sub, stream) in subs_to_deactivate:
        streams_by_user[sub.user_profile_id].append(stream)
        altered_user_dict[stream.id].append(sub.user_profile)

    # Tell each affected user about their own removed subscriptions.
    for user_profile in users:
        if len(streams_by_user[user_profile.id]) == 0:
            continue
        notify_subscriptions_removed(user_profile, streams_by_user[user_profile.id])

    all_subs_by_stream = query_all_subs_by_stream(streams=streams)

    # Tell peers (users who track subscriber lists) about each removal.
    for stream in streams:
        # Public zephyr-mirror streams have unmanageably large implicit
        # subscriber lists, so peer events are skipped for them.
        if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
            continue

        altered_users = altered_user_dict[stream.id]

        peer_user_ids = get_peer_user_ids_for_stream_change(
            stream=stream,
            altered_users=altered_users,
            subscribed_users=all_subs_by_stream[stream.id]
        )

        if peer_user_ids:
            # One event per removed user; clients want per-user updates.
            for removed_user in altered_users:
                event = dict(type="subscription",
                             op="peer_remove",
                             subscriptions=[stream.name],
                             user_id=removed_user.id)
                send_event(event, peer_user_ids)

    return ([(sub.user_profile, stream) for (sub, stream) in subs_to_deactivate],
            not_subscribed)
|
|
|
|
|
2013-04-08 18:01:01 +02:00
|
|
|
def log_subscription_property_change(user_email, stream_name, property, value):
    # type: (Text, Text, Text, Any) -> None
    """Record a change to one subscription property in the event log."""
    log_event({'type': 'subscription_property',
               'property': property,
               'user': user_email,
               'stream_name': stream_name,
               'value': value})
|
|
|
|
|
2017-03-05 01:30:48 +01:00
|
|
|
def do_change_subscription_property(user_profile, sub, stream,
                                    property_name, value):
    # type: (UserProfile, Subscription, Stream, Text, Any) -> None
    """Set one property on a subscription, log it, and notify the user."""
    setattr(sub, property_name, value)
    sub.save(update_fields=[property_name])
    log_subscription_property_change(user_profile.email, stream.name,
                                     property_name, value)

    # Clients key subscription updates by stream id, not name.
    update_event = dict(type="subscription",
                        op="update",
                        email=user_profile.email,
                        property=property_name,
                        value=value,
                        stream_id=stream.id,
                        name=stream.name)
    send_event(update_event, [user_profile.id])
|
2013-07-16 22:21:41 +02:00
|
|
|
|
2017-03-09 00:19:58 +01:00
|
|
|
def do_activate_user(user_profile):
    # type: (UserProfile) -> None
    """Activate a (previously inactive or mirror-dummy) account.

    Resets the password to unusable, stamps date_joined/tos_version,
    records an audit-log row and a usage statistic, and notifies clients
    of the newly created user.
    """
    user_profile.is_active = True
    user_profile.is_mirror_dummy = False
    # Activation forces a password reset flow; the old hash is discarded.
    user_profile.set_unusable_password()
    user_profile.date_joined = timezone.now()
    user_profile.tos_version = settings.TOS_VERSION
    user_profile.save(update_fields=["is_active", "date_joined", "password",
                                     "is_mirror_dummy", "tos_version"])

    # Use the activation timestamp for both audit log and stats.
    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_activated', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)
|
|
|
|
|
2013-11-16 17:11:15 +01:00
|
|
|
def do_reactivate_user(user_profile):
    # type: (UserProfile) -> None
    """Reactivate a previously deactivated account.

    Unlike do_activate_user, this is meant for re-activating existing
    users, so it doesn't reset their password, etc.
    """
    user_profile.is_active = True
    user_profile.save(update_fields=["is_active"])

    event_time = timezone.now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_reactivated', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)

    if not user_profile.is_bot:
        return
    # Bots additionally need a realm_bot event for their owner's clients.
    notify_created_bot(user_profile)
|
|
|
|
|
2017-03-14 06:07:14 +01:00
|
|
|
def do_change_password(user_profile, password, commit=True,
                       hashed_password=False):
    # type: (UserProfile, Text, bool, bool) -> None
    """Set a user's password and record the change in the audit log.

    `password` is the plaintext password, unless `hashed_password` is
    True, in which case it is an already-hashed password string that
    must be stored verbatim.  When `commit` is False the change is made
    on the in-memory object only.
    """
    if hashed_password:
        # This is a hashed password, not the password itself; store it
        # directly.  Passing it through set_password() would hash the
        # hash, producing a password nobody can ever log in with.
        user_profile.password = password
    else:
        user_profile.set_password(password)
    if commit:
        user_profile.save(update_fields=["password"])
    event_time = timezone.now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type='user_change_password',
                                 event_time=event_time)
|
2013-01-10 22:01:33 +01:00
|
|
|
|
|
|
|
def do_change_full_name(user_profile, full_name, log=True):
    # type: (UserProfile, Text, bool) -> None
    """Change a user's display name and broadcast the update."""
    user_profile.full_name = full_name
    user_profile.save(update_fields=["full_name"])
    if log:
        log_event({'type': 'user_change_full_name',
                   'user': user_profile.email,
                   'full_name': full_name})

    person = dict(email=user_profile.email,
                  user_id=user_profile.id,
                  full_name=user_profile.full_name)
    # Everyone in the realm sees the realm_user update; bot owners
    # additionally get a realm_bot update.
    realm_user_event = dict(type='realm_user', op='update', person=person)
    send_event(realm_user_event, active_user_ids(user_profile.realm))
    if user_profile.is_bot:
        realm_bot_event = dict(type='realm_bot', op='update', bot=person)
        send_event(realm_bot_event, bot_owner_userids(user_profile))
|
|
|
|
|
2017-02-24 06:36:54 +01:00
|
|
|
def do_change_bot_owner(user_profile, bot_owner, log=True):
    # type: (UserProfile, UserProfile, bool) -> None
    """Reassign a bot to a new owner and notify the relevant owners."""
    user_profile.bot_owner = bot_owner
    user_profile.save()
    if log:
        log_event({'type': 'user_change_owner',
                   'user': user_profile.email,
                   'owner': user_profile.bot_owner.email})
    bot_payload = dict(email=user_profile.email,
                       user_id=user_profile.id,
                       owner_id=user_profile.bot_owner.id,
                       )
    send_event(dict(type='realm_bot', op='update', bot=bot_payload),
               bot_owner_userids(user_profile))
|
|
|
|
|
2016-08-10 03:05:26 +02:00
|
|
|
def do_change_tos_version(user_profile, tos_version, log=True):
    # type: (UserProfile, Text, bool) -> None
    """Record which version of the Terms of Service the user accepted."""
    user_profile.tos_version = tos_version
    user_profile.save(update_fields=["tos_version"])
    if not log:
        return
    log_event({'type': 'user_change_tos_version',
               'user': user_profile.email,
               'tos_version': tos_version})
|
|
|
|
|
2014-02-26 20:02:43 +01:00
|
|
|
def do_regenerate_api_key(user_profile, log=True):
    # type: (UserProfile, bool) -> None
    """Replace the user's API key with a freshly generated one.

    For bots, the owner's clients are told the new key so their
    settings UI stays current.
    """
    user_profile.api_key = random_api_key()
    user_profile.save(update_fields=["api_key"])

    if log:
        log_event({'type': 'user_change_api_key',
                   'user': user_profile.email})

    if user_profile.is_bot:
        bot_payload = dict(email=user_profile.email,
                           user_id=user_profile.id,
                           api_key=user_profile.api_key,
                           )
        send_event(dict(type='realm_bot', op='update', bot=bot_payload),
                   bot_owner_userids(user_profile))
|
2014-02-26 20:17:19 +01:00
|
|
|
|
2017-01-28 19:05:20 +01:00
|
|
|
def do_change_avatar_fields(user_profile, avatar_source, log=True):
    # type: (UserProfile, Text, bool) -> None
    """Change a user's avatar source and broadcast the new avatar URL.

    Bumps avatar_version so clients bust their image caches.
    """
    user_profile.avatar_source = avatar_source
    user_profile.avatar_version += 1
    user_profile.save(update_fields=["avatar_source", "avatar_version"])

    if log:
        log_event({'type': 'user_change_avatar_source',
                   'user': user_profile.email,
                   'avatar_source': avatar_source})

    if user_profile.is_bot:
        bot_payload = dict(email=user_profile.email,
                           user_id=user_profile.id,
                           avatar_url=avatar_url(user_profile),
                           )
        send_event(dict(type='realm_bot', op='update', bot=bot_payload),
                   bot_owner_userids(user_profile))

    person = dict(
        email=user_profile.email,
        avatar_url=avatar_url(user_profile),
        user_id=user_profile.id
    )
    realm_user_event = dict(type='realm_user', op='update', person=person)
    send_event(realm_user_event, active_user_ids(user_profile.realm))
|
2014-02-26 21:05:10 +01:00
|
|
|
|
2017-02-21 03:41:20 +01:00
|
|
|
|
|
|
|
def do_change_icon_source(realm, icon_source, log=True):
    # type: (Realm, Text, bool) -> None
    """Change the realm icon source and push the new icon to all clients.

    Bumps icon_version so clients bust their image caches.
    """
    realm.icon_source = icon_source
    realm.icon_version += 1
    realm.save(update_fields=["icon_source", "icon_version"])

    if log:
        log_event({'type': 'realm_change_icon',
                   'realm': realm.string_id,
                   'icon_source': icon_source})

    icon_data = dict(icon_source=realm.icon_source,
                     icon_url=realm_icon_url(realm))
    event = dict(type='realm',
                 op='update_dict',
                 property="icon",
                 data=icon_data)
    send_event(event, active_user_ids(realm))
|
|
|
|
|
2014-02-13 19:39:54 +01:00
|
|
|
def _default_stream_permision_check(user_profile, stream):
    # type: (UserProfile, Optional[Stream]) -> None
    """Raise JsonableError unless the user may use `stream` as a default.

    A None stream is always allowed.  For bots, the check is done
    against the bot's owner rather than the bot itself.
    """
    if stream is None:
        # Any user can have a None default stream.
        return
    user = user_profile.bot_owner if user_profile.is_bot else user_profile
    if stream.invite_only and not subscribed_to_stream(user, stream):
        raise JsonableError(_('Insufficient permission'))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
|
|
|
def do_change_default_sending_stream(user_profile, stream, log=True):
    # type: (UserProfile, Optional[Stream], bool) -> None
    """Set the user's default sending stream (None clears it).

    Raises JsonableError if the user may not use the stream.
    """
    _default_stream_permision_check(user_profile, stream)

    user_profile.default_sending_stream = stream
    user_profile.save(update_fields=['default_sending_stream'])
    if log:
        log_event({'type': 'user_change_default_sending_stream',
                   'user': user_profile.email,
                   'stream': str(stream)})
    if user_profile.is_bot:
        stream_name = stream.name if stream else None  # type: Optional[Text]
        bot_payload = dict(email=user_profile.email,
                           user_id=user_profile.id,
                           default_sending_stream=stream_name,
                           )
        send_event(dict(type='realm_bot', op='update', bot=bot_payload),
                   bot_owner_userids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
|
|
|
def do_change_default_events_register_stream(user_profile, stream, log=True):
    # type: (UserProfile, Optional[Stream], bool) -> None
    """Set the user's default events-register stream (None clears it).

    Raises JsonableError if the user may not use the stream.
    """
    _default_stream_permision_check(user_profile, stream)

    user_profile.default_events_register_stream = stream
    user_profile.save(update_fields=['default_events_register_stream'])
    if log:
        log_event({'type': 'user_change_default_events_register_stream',
                   'user': user_profile.email,
                   'stream': str(stream)})
    if user_profile.is_bot:
        stream_name = stream.name if stream else None  # type: Optional[Text]
        bot_payload = dict(email=user_profile.email,
                           user_id=user_profile.id,
                           default_events_register_stream=stream_name,
                           )
        send_event(dict(type='realm_bot', op='update', bot=bot_payload),
                   bot_owner_userids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
|
|
|
def do_change_default_all_public_streams(user_profile, value, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle whether the user (typically a bot) defaults to all public streams."""
    user_profile.default_all_public_streams = value
    user_profile.save(update_fields=['default_all_public_streams'])
    if log:
        log_event({'type': 'user_change_default_all_public_streams',
                   'user': user_profile.email,
                   'value': str(value)})
    if user_profile.is_bot:
        bot_payload = dict(email=user_profile.email,
                           user_id=user_profile.id,
                           default_all_public_streams=user_profile.default_all_public_streams,
                           )
        send_event(dict(type='realm_bot', op='update', bot=bot_payload),
                   bot_owner_userids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
2016-02-08 03:59:38 +01:00
|
|
|
def do_change_is_admin(user_profile, value, permission='administer'):
    # type: (UserProfile, bool, str) -> None
    """Grant or revoke an admin-type permission bit.

    `permission` is "administer" (realm admin) or "api_super_user";
    anything else raises.  Only the "administer" change is broadcast
    to clients.
    """
    if permission == "administer":
        user_profile.is_realm_admin = value
        user_profile.save(update_fields=["is_realm_admin"])
    elif permission == "api_super_user":
        user_profile.is_api_super_user = value
        user_profile.save(update_fields=["is_api_super_user"])
    else:
        raise Exception("Unknown permission")

    if permission == 'administer':
        person = dict(email=user_profile.email,
                      user_id=user_profile.id,
                      is_admin=value)
        admin_event = dict(type="realm_user", op="update", person=person)
        send_event(admin_event, active_user_ids(user_profile.realm))
|
2014-01-21 19:27:22 +01:00
|
|
|
|
2016-05-18 20:23:03 +02:00
|
|
|
def do_change_bot_type(user_profile, value):
    # type: (UserProfile, int) -> None
    """Persist a new bot_type for the given bot account."""
    user_profile.bot_type = value
    user_profile.save(update_fields=["bot_type"])
|
2016-05-18 20:23:03 +02:00
|
|
|
|
2017-01-30 03:52:55 +01:00
|
|
|
def do_change_stream_invite_only(stream, invite_only):
    # type: (Stream, bool) -> None
    """Persist a new invite_only (private/public) flag for the stream."""
    stream.invite_only = invite_only
    stream.save(update_fields=['invite_only'])
|
|
|
|
|
2017-01-30 04:05:39 +01:00
|
|
|
def do_rename_stream(stream, new_name, log=True):
    # type: (Stream, Text, bool) -> Dict[str, Text]
    """Rename a stream, fix up caches, and notify clients.

    Returns a dict with the stream's new email-forwarding address.
    Side effects: rewrites the stream row, updates/invalidates several
    caches, and sends per-property "update" events to users who can
    access the stream.
    """
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    if log:
        log_event({'type': 'stream_name_change',
                   'realm': stream.realm.string_id,
                   'new_name': new_name})

    recipient = get_recipient(Recipient.STREAM, stream.id)
    # Only message ids are needed for the cache-key invalidation below.
    messages = Message.objects.filter(recipient=recipient).only("id")

    # Update the display recipient and stream, which are easy single
    # items to set.
    old_cache_key = get_stream_cache_key(old_name, stream.realm)
    new_cache_key = get_stream_cache_key(stream.name, stream.realm)
    if old_cache_key != new_cache_key:
        cache_delete(old_cache_key)
        cache_set(new_cache_key, stream)
    cache_set(display_recipient_cache_key(recipient.id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the out of
    # date field in all cases.
    # Both the apply-markdown and non-markdown variants of each cached
    # message dict must be dropped.
    cache_delete_many(
        to_dict_cache_key_id(message.id, True) for message in messages)
    cache_delete_many(
        to_dict_cache_key_id(message.id, False) for message in messages)
    new_email = encode_email_address(stream)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ['email_address', new_email],
        ['name', new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(event, can_access_stream_user_ids(stream))

    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}
|
2013-09-10 11:46:18 +02:00
|
|
|
|
2017-01-30 04:14:12 +01:00
|
|
|
def do_change_stream_description(stream, new_description):
    # type: (Stream, Text) -> None
    """Update a stream's description and notify users who can see it."""
    stream.description = new_description
    stream.save(update_fields=['description'])

    update_event = dict(
        type='stream',
        op='update',
        property='description',
        name=stream.name,
        stream_id=stream.id,
        value=new_description,
    )
    send_event(update_event, can_access_stream_user_ids(stream))
|
2014-01-22 20:20:10 +01:00
|
|
|
|
2016-10-28 07:21:53 +02:00
|
|
|
def do_create_realm(string_id, name, restricted_to_domain=None,
                    invite_required=None, org_type=None):
    # type: (Text, Text, Optional[bool], Optional[bool], Optional[int]) -> Tuple[Realm, bool]
    """Create the realm `string_id` if it does not already exist.

    Returns (realm, created).  When a realm with this string_id already
    exists it is returned unchanged and the keyword options are ignored.
    On creation this also sets up the notifications stream, posts a
    welcome message, logs the event, and pings the admin realm's
    signups stream (if configured).
    """
    realm = get_realm(string_id)
    created = not realm
    if created:
        # Only pass options the caller set explicitly, so model-level
        # defaults apply for anything left as None.
        kwargs = {}  # type: Dict[str, Any]
        if restricted_to_domain is not None:
            kwargs['restricted_to_domain'] = restricted_to_domain
        if invite_required is not None:
            kwargs['invite_required'] = invite_required
        if org_type is not None:
            kwargs['org_type'] = org_type
        # Generate a value for domain that we control
        domain = string_id + "." + settings.EXTERNAL_HOST
        realm = Realm(string_id=string_id, name=name,
                      domain=domain, **kwargs)
        realm.save()

        # Create stream once Realm object has been saved
        notifications_stream, _ = create_stream_if_needed(realm, Realm.DEFAULT_NOTIFICATION_STREAM_NAME)
        realm.notifications_stream = notifications_stream
        realm.save(update_fields=['notifications_stream'])

        # Include a welcome message in this notifications stream
        content = """Hello, and welcome to Zulip!

This is a message on stream `%s` with the topic `welcome`. We'll use this stream for
system-generated notifications.""" % (notifications_stream.name,)
        msg = internal_prep_message(realm, settings.WELCOME_BOT, 'stream',
                                    notifications_stream.name, "welcome",
                                    content)
        do_send_messages([msg])

        # Log the event
        log_event({"type": "realm_created",
                   "string_id": string_id,
                   "restricted_to_domain": restricted_to_domain,
                   "invite_required": invite_required,
                   "org_type": org_type})

        # Send a notification to the admin realm (if configured)
        if settings.NEW_USER_BOT is not None:
            signup_message = "Signups enabled"
            admin_realm = get_user_profile_by_email(settings.NEW_USER_BOT).realm
            internal_send_message(admin_realm, settings.NEW_USER_BOT, "stream",
                                  "signups", string_id, signup_message)
    return (realm, created)
|
|
|
|
|
2014-02-05 21:41:01 +01:00
|
|
|
def do_change_enable_stream_desktop_notifications(user_profile,
                                                  enable_stream_desktop_notifications,
                                                  log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle per-stream desktop notifications and notify the user's clients."""
    user_profile.enable_stream_desktop_notifications = enable_stream_desktop_notifications
    user_profile.save(update_fields=["enable_stream_desktop_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_stream_desktop_notifications',
                 setting=enable_stream_desktop_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2014-02-05 21:41:01 +01:00
|
|
|
|
|
|
|
def do_change_enable_stream_sounds(user_profile, enable_stream_sounds, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle per-stream notification sounds and notify the user's clients."""
    user_profile.enable_stream_sounds = enable_stream_sounds
    user_profile.save(update_fields=["enable_stream_sounds"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_stream_sounds',
                 setting=enable_stream_sounds)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2014-02-05 21:41:01 +01:00
|
|
|
|
2013-01-10 22:01:33 +01:00
|
|
|
def do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set the user's global desktop-notification preference, optionally
    record it in the event log, and sync the user's clients."""
    user_profile.enable_desktop_notifications = enable_desktop_notifications
    user_profile.save(update_fields=["enable_desktop_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_desktop_notifications',
                 setting=enable_desktop_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2016-12-07 17:29:12 +01:00
|
|
|
def do_change_pm_content_in_desktop_notifications(user_profile,
                                                  pm_content_in_desktop_notifications,
                                                  log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set whether private-message content is shown in the user's desktop
    notifications, optionally record it, and sync the user's clients."""
    user_profile.pm_content_in_desktop_notifications = pm_content_in_desktop_notifications
    user_profile.save(update_fields=["pm_content_in_desktop_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='pm_content_in_desktop_notifications',
                 setting=pm_content_in_desktop_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
|
|
|
|
2013-05-03 21:49:01 +02:00
|
|
|
def do_change_enable_sounds(user_profile, enable_sounds, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set the user's global notification-sound preference, optionally
    record it in the event log, and sync the user's clients."""
    user_profile.enable_sounds = enable_sounds
    user_profile.save(update_fields=["enable_sounds"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_sounds',
                 setting=enable_sounds)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2013-05-03 21:49:01 +02:00
|
|
|
|
2013-05-07 23:19:52 +02:00
|
|
|
def do_change_enable_offline_email_notifications(user_profile, offline_email_notifications, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set whether the user receives email notifications while offline,
    optionally record it in the event log, and sync the user's clients."""
    user_profile.enable_offline_email_notifications = offline_email_notifications
    user_profile.save(update_fields=["enable_offline_email_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_offline_email_notifications',
                 setting=offline_email_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2013-05-07 23:19:52 +02:00
|
|
|
|
2013-10-16 17:24:52 +02:00
|
|
|
def do_change_enable_offline_push_notifications(user_profile, offline_push_notifications, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set whether the user receives mobile push notifications while
    offline, optionally record it, and sync the user's clients."""
    user_profile.enable_offline_push_notifications = offline_push_notifications
    user_profile.save(update_fields=["enable_offline_push_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_offline_push_notifications',
                 setting=offline_push_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2013-10-16 17:24:52 +02:00
|
|
|
|
2016-12-08 21:06:23 +01:00
|
|
|
def do_change_enable_online_push_notifications(user_profile, enable_online_push_notifications, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Set whether the user receives mobile push notifications even while
    online, optionally record it, and sync the user's clients."""
    user_profile.enable_online_push_notifications = enable_online_push_notifications
    user_profile.save(update_fields=["enable_online_push_notifications"])
    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_online_push_notifications',
                 setting=enable_online_push_notifications)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2013-12-02 01:39:10 +01:00
|
|
|
def do_change_enable_digest_emails(user_profile, enable_digest_emails, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle digest emails for the user.  When disabling, any digest
    emails already enqueued for the user are also dropped.  Optionally
    records the change and syncs the user's clients."""
    user_profile.enable_digest_emails = enable_digest_emails
    user_profile.save(update_fields=["enable_digest_emails"])

    if not enable_digest_emails:
        # Remove any digest emails that have been enqueued.
        clear_followup_emails_queue(user_profile.email)

    event = dict(type='update_global_notifications',
                 user=user_profile.email,
                 notification_name='enable_digest_emails',
                 setting=enable_digest_emails)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
2013-12-02 01:39:10 +01:00
|
|
|
|
2013-12-03 21:01:37 +01:00
|
|
|
def do_change_autoscroll_forever(user_profile, autoscroll_forever, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Persist the user's autoscroll-forever preference.  Unlike the
    notification setters above, this only writes to the event log; no
    client event is pushed."""
    user_profile.autoscroll_forever = autoscroll_forever
    user_profile.save(update_fields=["autoscroll_forever"])

    if log:
        log_event(dict(type='autoscroll_forever',
                       user=user_profile.email,
                       autoscroll_forever=autoscroll_forever))
|
|
|
|
|
2013-02-27 23:18:38 +01:00
|
|
|
def do_change_enter_sends(user_profile, enter_sends):
    # type: (UserProfile, bool) -> None
    # Persist whether pressing Enter sends a message for this user.
    # NOTE(review): unlike the other setting changers in this file, this
    # neither logs an event nor pushes one to clients -- presumably each
    # client updates its own state; confirm against callers.
    user_profile.enter_sends = enter_sends
    user_profile.save(update_fields=["enter_sends"])
|
2013-02-27 23:18:38 +01:00
|
|
|
|
2014-01-16 22:48:50 +01:00
|
|
|
def do_change_default_desktop_notifications(user_profile, default_desktop_notifications):
    # type: (UserProfile, bool) -> None
    # Persist the user's default-desktop-notifications preference.  No
    # event is logged or sent to clients here, unlike the
    # do_change_enable_* functions above.
    user_profile.default_desktop_notifications = default_desktop_notifications
    user_profile.save(update_fields=["default_desktop_notifications"])
|
|
|
|
|
2015-09-20 08:17:36 +02:00
|
|
|
def do_change_twenty_four_hour_time(user_profile, setting_value, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle 24-hour clock display for the user, optionally record it,
    and sync the user's clients."""
    user_profile.twenty_four_hour_time = setting_value
    user_profile.save(update_fields=["twenty_four_hour_time"])
    event = dict(type='update_display_settings',
                 user=user_profile.email,
                 setting_name='twenty_four_hour_time',
                 setting=setting_value)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2015-08-20 23:59:44 +02:00
|
|
|
def do_change_left_side_userlist(user_profile, setting_value, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle showing the user list on the left side of the UI for this
    user, optionally record it, and sync the user's clients."""
    user_profile.left_side_userlist = setting_value
    user_profile.save(update_fields=["left_side_userlist"])
    event = dict(type='update_display_settings',
                 user=user_profile.email,
                 setting_name='left_side_userlist',
                 setting=setting_value)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2017-03-02 08:30:53 +01:00
|
|
|
def do_change_emoji_alt_code(user_profile, setting_value, log=True):
    # type: (UserProfile, bool, bool) -> None
    """Toggle rendering emoji as their text alt-codes for this user,
    optionally record it, and sync the user's clients."""
    user_profile.emoji_alt_code = setting_value
    user_profile.save(update_fields=["emoji_alt_code"])
    event = dict(type='update_display_settings',
                 user=user_profile.email,
                 setting_name='emoji_alt_code',
                 setting=setting_value)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2016-06-23 11:32:45 +02:00
|
|
|
def do_change_default_language(user_profile, setting_value, log=True):
    # type: (UserProfile, Text, bool) -> None
    """Set the user's UI language (a locale code string), optionally
    record it, and sync the user's clients."""
    user_profile.default_language = setting_value
    user_profile.save(update_fields=["default_language"])
    event = dict(type='update_display_settings',
                 user=user_profile.email,
                 setting_name='default_language',
                 setting=setting_value)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2017-03-14 10:53:09 +01:00
|
|
|
def do_change_timezone(user_profile, setting_value, log=True):
    # type: (UserProfile, Text, bool) -> None
    """Set the user's display timezone, optionally record it, and sync
    the user's clients."""
    user_profile.timezone = setting_value
    user_profile.save(update_fields=['timezone'])
    event = dict(type='update_display_settings',
                 user=user_profile.email,
                 setting_name='timezone',
                 setting=setting_value)
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
|
|
|
|
|
2016-12-08 01:43:15 +01:00
|
|
|
def set_default_streams(realm, stream_dict):
    # type: (Realm, Dict[Text, Dict[Text, Any]]) -> None
    """Replace the realm's entire default-stream set with the streams
    named by stream_dict, creating any streams that don't exist yet.

    stream_dict maps stream name -> options dict containing at least
    "invite_only" and "description" keys.  The realm's notifications
    stream (if configured) is always kept as a default.
    """
    DefaultStream.objects.filter(realm=realm).delete()
    stream_names = []
    for name, options in stream_dict.items():
        stream_names.append(name)
        # Fix: previously the throwaway was bound to `_`, shadowing the
        # ugettext alias imported at the top of this module.
        stream, _created = create_stream_if_needed(
            realm,
            name,
            invite_only=options["invite_only"],
            stream_description=options["description"])
        DefaultStream.objects.create(stream=stream, realm=realm)

    # Always include the realm's default notifications streams, if it exists
    if realm.notifications_stream is not None:
        DefaultStream.objects.get_or_create(stream=realm.notifications_stream, realm=realm)

    log_event({'type': 'default_streams',
               'realm': realm.string_id,
               'streams': stream_names})
|
|
|
|
|
2016-05-20 22:08:42 +02:00
|
|
|
def notify_default_streams(realm):
    # type: (Realm) -> None
    """Broadcast the realm's current default-stream list to all of the
    realm's active users."""
    default_streams = streams_to_dicts_sorted(get_default_streams_for_realm(realm))
    event = dict(type="default_streams", default_streams=default_streams)
    send_event(event, active_user_ids(realm))
|
|
|
|
|
2017-01-30 04:23:08 +01:00
|
|
|
def do_add_default_stream(stream):
    # type: (Stream) -> None
    """Make stream a default stream for its realm (no-op if it already
    is one) and notify clients of the new default-stream list."""
    realm = stream.realm
    already_default = DefaultStream.objects.filter(realm=realm, stream=stream).exists()
    if not already_default:
        DefaultStream.objects.create(realm=realm, stream=stream)
        notify_default_streams(realm)
|
2014-01-27 19:39:54 +01:00
|
|
|
|
2017-01-30 04:25:40 +01:00
|
|
|
def do_remove_default_stream(stream):
    # type: (Stream) -> None
    """Remove stream from its realm's default streams and notify clients
    of the new default-stream list."""
    realm = stream.realm
    DefaultStream.objects.filter(realm=realm, stream=stream).delete()
    notify_default_streams(realm)
|
2014-01-27 20:02:20 +01:00
|
|
|
|
2014-01-27 18:02:41 +01:00
|
|
|
def get_default_streams_for_realm(realm):
    # type: (Realm) -> List[Stream]
    """Return the realm's default streams as Stream objects, prefetching
    the related stream/realm rows to avoid per-row queries."""
    rows = DefaultStream.objects.select_related(
        "stream", "stream__realm").filter(realm=realm)
    return [row.stream for row in rows]
|
|
|
|
|
|
|
|
def get_default_subs(user_profile):
    # type: (UserProfile) -> List[Stream]
    """Return the streams a new user should be subscribed to.

    Default streams are currently realm-wide; this wrapper exists so we
    can later customize per-user defaults without touching callers.
    """
    return get_default_streams_for_realm(user_profile.realm)
|
2013-01-11 23:36:41 +01:00
|
|
|
|
2016-05-20 22:08:42 +02:00
|
|
|
# returns default streams in json serializeable format
def streams_to_dicts_sorted(streams):
    # type: (List[Stream]) -> List[Dict[str, Any]]
    """Serialize streams to JSON-friendly dicts, ordered by stream name."""
    dicts = [stream.to_dict() for stream in streams]
    dicts.sort(key=lambda elt: elt["name"])
    return dicts
|
|
|
|
|
2013-09-04 00:00:44 +02:00
|
|
|
def do_update_user_activity_interval(user_profile, log_time):
    # type: (UserProfile, datetime.datetime) -> None
    """Record user activity at log_time by extending the user's most
    recent UserActivityInterval when the new 15-minute window overlaps
    it, or creating a fresh interval otherwise."""
    effective_end = log_time + datetime.timedelta(minutes=15)
    # This code isn't perfect, because with various races we might end
    # up creating two overlapping intervals, but that shouldn't happen
    # often, and can be corrected for in post-processing
    try:
        # IndexError from [0] means the user has no intervals yet.
        last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
        # There are two ways our intervals could overlap:
        # (1) The start of the new interval could be inside the old interval
        # (2) The end of the new interval could be inside the old interval
        # In either case, we just extend the old interval to include the new interval.
        if ((log_time <= last.end and log_time >= last.start) or
                (effective_end <= last.end and effective_end >= last.start)):
            last.end = max(last.end, effective_end)
            last.start = min(last.start, log_time)
            last.save(update_fields=["start", "end"])
            return
    except IndexError:
        pass

    # Otherwise, the intervals don't overlap, so we should make a new one
    UserActivityInterval.objects.create(user_profile=user_profile, start=log_time,
                                        end=effective_end)
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_activity')
def do_update_user_activity(user_profile, client, query, log_time):
    # type: (UserProfile, Client, Text, datetime.datetime) -> None
    """Bump the per-(user, client, query) activity counter and record the
    time of this visit, creating the row on first use."""
    record, created = UserActivity.objects.get_or_create(
        user_profile=user_profile,
        client=client,
        query=query,
        defaults={'last_visit': log_time, 'count': 0})

    record.count += 1
    record.last_visit = log_time
    record.save(update_fields=["last_visit", "count"])
|
2013-01-11 21:16:42 +01:00
|
|
|
|
2013-04-03 22:00:02 +02:00
|
|
|
def send_presence_changed(user_profile, presence):
    # type: (UserProfile, UserPresence) -> None
    """Push a presence update for user_profile to every active user in
    the realm, keyed by the client that reported the presence."""
    presence_dict = presence.to_dict()
    event = dict(type="presence",
                 email=user_profile.email,
                 server_timestamp=time.time(),
                 presence={presence_dict['client']: presence.to_dict()})
    send_event(event, active_user_ids(user_profile.realm))
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2014-02-13 14:28:08 +01:00
|
|
|
def consolidate_client(client):
    # type: (Client) -> Client
    """Collapse client aliases onto one canonical Client for counting.

    The web app reports itself as 'website' while the desktop app
    reports 'ZulipDesktop' (it sets a custom user agent); we want both
    counted as web users, so ZulipDesktop is aliased to website.
    """
    if client.name in ['ZulipDesktop']:
        return get_client('website')
    return client
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_presence')
def do_update_user_presence(user_profile, client, log_time, status):
    # type: (UserProfile, Client, datetime.datetime, int) -> None
    """Record a presence ping from (user, client) at log_time with the
    given status (UserPresence.ACTIVE / IDLE), and broadcast a presence
    event to the realm when the user just came online."""
    client = consolidate_client(client)
    (presence, created) = UserPresence.objects.get_or_create(
        user_profile = user_profile,
        client = client,
        defaults = {'timestamp': log_time,
                    'status': status})

    # The previous record is "stale" if it is older than the browser's
    # ~1-minute ping cadence (plus slack).
    stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
    was_idle = presence.status == UserPresence.IDLE
    became_online = (status == UserPresence.ACTIVE) and (stale_status or was_idle)

    # If an object was created, it has already been saved.
    #
    # We suppress changes from ACTIVE to IDLE before stale_status is reached;
    # this protects us from the user having two clients open: one active, the
    # other idle. Without this check, we would constantly toggle their status
    # between the two states.
    #
    # NOTE(review): Python precedence parses this condition as
    # `(not created and stale_status) or was_idle or (status == presence.status)`,
    # so a freshly created row with was_idle/status-match would be re-saved.
    # That appears to be the shipped behavior; confirm before changing.
    if not created and stale_status or was_idle or status == presence.status:
        # The following block attempts to only update the "status"
        # field in the event that it actually changed. This is
        # important to avoid flushing the UserPresence cache when the
        # data it would return to a client hasn't actually changed
        # (see the UserPresence post_save hook for details).
        presence.timestamp = log_time
        update_fields = ["timestamp"]
        if presence.status != status:
            presence.status = status
            update_fields.append("status")
        presence.save(update_fields=update_fields)

    if not user_profile.realm.is_zephyr_mirror_realm and (created or became_online):
        # Push event to all users in the realm so they see the new user
        # appear in the presence list immediately, or the newly online
        # user without delay. Note that we won't send an update here for a
        # timestamp update, because we rely on the browser to ping us every 50
        # seconds for realm-wide status updates, and those updates should have
        # recent timestamps, which means the browser won't think active users
        # have gone idle. If we were more aggressive in this function about
        # sending timestamp updates, we could eliminate the ping responses, but
        # that's not a high priority for now, considering that most of our non-MIT
        # realms are pretty small.
        send_presence_changed(user_profile, presence)
|
|
|
|
|
2013-09-06 21:52:12 +02:00
|
|
|
def update_user_activity_interval(user_profile, log_time):
    # type: (UserProfile, datetime.datetime) -> None
    """Enqueue an activity-interval update for asynchronous processing;
    the fallback lambda runs it synchronously when no queue is available."""
    event = dict(user_profile_id=user_profile.id,
                 time=datetime_to_timestamp(log_time))
    queue_json_publish("user_activity_interval", event,
                       lambda e: do_update_user_activity_interval(user_profile, log_time))
|
2013-09-06 21:52:12 +02:00
|
|
|
|
|
|
|
def update_user_presence(user_profile, client, log_time, status,
                         new_user_input):
    # type: (UserProfile, Client, datetime.datetime, int, bool) -> None
    """Enqueue a presence update for asynchronous processing, and also
    record an activity interval when the ping reflects real user input."""
    event = dict(user_profile_id=user_profile.id,
                 status=status,
                 time=datetime_to_timestamp(log_time),
                 client=client.name)

    queue_json_publish("user_presence", event,
                       lambda e: do_update_user_presence(user_profile, client,
                                                         log_time, status))

    if new_user_input:
        update_user_activity_interval(user_profile, log_time)
|
|
|
|
|
2014-01-24 23:50:24 +01:00
|
|
|
def do_update_pointer(user_profile, pointer, update_flags=False):
    # type: (UserProfile, int, bool) -> None
    """Move the user's pointer to `pointer` and notify their clients.
    With update_flags=True, also mark every message between the old and
    new pointer as read (a shim for clients without native read counts)."""
    old_pointer = user_profile.pointer
    user_profile.pointer = pointer
    user_profile.save(update_fields=["pointer"])

    if update_flags:
        # Until we handle the new read counts in the Android app
        # natively, this is a shim that will mark as read any messages
        # up until the pointer move
        unread_in_range = UserMessage.objects.filter(
            user_profile=user_profile,
            message__id__gt=old_pointer,
            message__id__lte=pointer,
            flags=~UserMessage.flags.read)
        unread_in_range.update(flags=F('flags').bitor(UserMessage.flags.read))

    send_event(dict(type='pointer', pointer=pointer), [user_profile.id])
|
2014-01-24 23:50:24 +01:00
|
|
|
|
2016-05-08 15:20:51 +02:00
|
|
|
def do_update_message_flags(user_profile, operation, flag, messages, all, stream_obj, topic_name):
    # type: (UserProfile, Text, Text, Sequence[int], bool, Optional[Stream], Optional[Text]) -> int
    """Add or remove a UserMessage flag (e.g. 'read', 'starred') and
    return how many rows changed.

    Target selection, in priority order: all the user's messages (`all`,
    i.e. bankruptcy), a stream (optionally narrowed to a topic), or an
    explicit list of message ids.  `operation` is 'add' or 'remove'.
    NOTE(review): any other operation value leaves `count` unbound and
    raises NameError at the statsd call below.
    """
    # Translate the flag name into its bitmask.
    flagattr = getattr(UserMessage.flags, flag)

    if all:
        log_statsd_event('bankruptcy')
        msgs = UserMessage.objects.filter(user_profile=user_profile)
    elif stream_obj is not None:
        recipient = get_recipient(Recipient.STREAM, stream_obj.id)
        if topic_name:
            msgs = UserMessage.objects.filter(message__recipient=recipient,
                                              user_profile=user_profile,
                                              message__subject__iexact=topic_name)
        else:
            msgs = UserMessage.objects.filter(message__recipient=recipient, user_profile=user_profile)
    else:
        msgs = UserMessage.objects.filter(user_profile=user_profile,
                                          message__id__in=messages)
    # Hack to let you star any message
    if msgs.count() == 0:
        # Only the single-message starred case may proceed without an
        # existing UserMessage row.
        if not len(messages) == 1:
            raise JsonableError(_("Invalid message(s)"))
        if flag != "starred":
            raise JsonableError(_("Invalid message(s)"))
        # Validate that the user could have read the relevant message
        message = access_message(user_profile, messages[0])[0]

        # OK, this is a message that you legitimately have access
        # to via narrowing to the stream it is on, even though you
        # didn't actually receive it. So we create a historical,
        # read UserMessage message row for you to star.
        UserMessage.objects.create(user_profile=user_profile,
                                   message=message,
                                   flags=UserMessage.flags.historical | UserMessage.flags.read)

    # The filter() statements below prevent postgres from doing a lot of
    # unnecessary work, which is a big deal for users updating lots of
    # flags (e.g. bankruptcy). This patch arose from seeing slow calls
    # to POST /json/messages/flags in the logs. The filter() statements
    # are kind of magical; they are actually just testing the one bit.
    if operation == 'add':
        msgs = msgs.filter(flags=~flagattr)
        # For stream-wide updates, report the ids actually affected
        # (rebinds `messages` before it goes into the event below).
        if stream_obj:
            messages = list(msgs.values_list('message__id', flat=True))
        count = msgs.update(flags=F('flags').bitor(flagattr))
    elif operation == 'remove':
        msgs = msgs.filter(flags=flagattr)
        if stream_obj:
            messages = list(msgs.values_list('message__id', flat=True))
        count = msgs.update(flags=F('flags').bitand(~flagattr))

    event = {'type': 'update_message_flags',
             'operation': operation,
             'flag': flag,
             'messages': messages,
             'all': all}
    log_event(event)
    send_event(event, [user_profile.id])

    statsd.incr("flags.%s.%s" % (flag, operation), count)
    return count
|
2013-02-11 21:47:45 +01:00
|
|
|
|
2013-02-04 23:41:49 +01:00
|
|
|
def subscribed_to_stream(user_profile, stream):
    # type: (UserProfile, Stream) -> bool
    """Return whether user_profile has an active subscription to stream.

    Uses a single EXISTS query rather than the previous try/except
    around Subscription.objects.get(), which fetched a full row and
    could raise MultipleObjectsReturned on duplicate data.
    """
    return Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream.id).exists()
|
|
|
|
|
2013-11-22 18:33:22 +01:00
|
|
|
def truncate_content(content, max_length, truncation_message):
    # type: (Text, int, Text) -> Text
    """Return content unchanged if it fits within max_length characters;
    otherwise shorten it and append truncation_message so the result is
    exactly max_length characters long."""
    if len(content) <= max_length:
        return content
    keep = max_length - len(truncation_message)
    return content[:keep] + truncation_message
|
|
|
|
|
|
|
|
def truncate_body(body):
    # type: (Text) -> Text
    # Truncate a message body to the maximum allowed message length,
    # appending "..." when it is cut.
    return truncate_content(body, MAX_MESSAGE_LENGTH, "...")
|
|
|
|
|
|
|
|
def truncate_topic(topic):
    # type: (Text) -> Text
    # Truncate a topic (subject) to the maximum allowed subject length,
    # appending "..." when it is cut.
    return truncate_content(topic, MAX_SUBJECT_LENGTH, "...")
|
|
|
|
|
2014-01-08 19:42:45 +01:00
|
|
|
|
|
|
|
def update_user_message_flags(message, ums):
    # type: (Message, Iterable[UserMessage]) -> None
    """Recompute the per-recipient flags (alert words, mentions, wildcard
    mention, /me) on the given UserMessage rows from the rendered
    message's annotations, saving only rows whose flags actually changed."""
    wildcard = message.mentions_wildcard
    mentioned_ids = message.mentions_user_ids
    ids_with_alert_words = message.user_ids_with_alert_words
    # Hoisted out of the loop: this is a property of the message, not of
    # any individual UserMessage row (previously recomputed per row).
    is_me_message = getattr(message, 'is_me_message', False)
    changed_ums = set() # type: Set[UserMessage]

    def update_flag(um, should_set, flag):
        # type: (UserMessage, bool, int) -> None
        # Set or clear one flag bit, tracking rows that actually changed.
        if should_set:
            if not (um.flags & flag):
                um.flags |= flag
                changed_ums.add(um)
        else:
            if (um.flags & flag):
                um.flags &= ~flag
                changed_ums.add(um)

    for um in ums:
        has_alert_word = um.user_profile_id in ids_with_alert_words
        update_flag(um, has_alert_word, UserMessage.flags.has_alert_word)

        mentioned = um.user_profile_id in mentioned_ids
        update_flag(um, mentioned, UserMessage.flags.mentioned)

        update_flag(um, wildcard, UserMessage.flags.wildcard_mentioned)

        update_flag(um, is_me_message, UserMessage.flags.is_me_message)

    for um in changed_ums:
        um.save(update_fields=['flags'])
|
|
|
|
|
2016-12-08 02:26:16 +01:00
|
|
|
def update_to_dict_cache(changed_messages):
    # type: (List[Message]) -> List[int]
    """Updates the message as stored in the to_dict cache (for serving
    messages).  Both the markdown and plain-text renderings are
    refreshed; returns the ids of the messages that were updated."""
    items_for_remote_cache = {}
    message_ids = []
    for msg in changed_messages:
        message_ids.append(msg.id)
        for apply_markdown in (True, False):
            key = to_dict_cache_key(msg, apply_markdown)
            items_for_remote_cache[key] = (
                MessageDict.to_dict_uncached(msg, apply_markdown=apply_markdown),)
    cache_set_many(items_for_remote_cache)
    return message_ids
|
|
|
|
|
2016-10-27 12:06:44 +02:00
|
|
|
# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic
def do_update_embedded_data(user_profile, message, content, rendered_content):
    # type: (UserProfile, Message, Optional[Text], Optional[Text]) -> None
    """Rewrite a message's content and rendering in place (used when
    embedded data such as URL previews is filled in after delivery) and
    notify every recipient's clients.  Unlike do_update_message, no edit
    history entry is recorded."""
    event = {
        'type': 'update_message',
        'sender': user_profile.email,
        'message_id': message.id} # type: Dict[str, Any]
    changed_messages = [message]

    ums = UserMessage.objects.filter(message=message.id)

    if content is not None:
        update_user_message_flags(message, ums)
        message.content = content
        message.rendered_content = rendered_content
        message.rendered_content_version = bugdown_version
        event["content"] = content
        event["rendered_content"] = rendered_content

    log_event(event)
    # Fix: rendered_content_version is assigned above but was previously
    # missing from update_fields, so the bumped version never reached the
    # database.
    message.save(update_fields=["content", "rendered_content",
                                "rendered_content_version"])

    event['message_ids'] = update_to_dict_cache(changed_messages)

    def user_info(um):
        # type: (UserMessage) -> Dict[str, Any]
        # Per-recipient payload: the event system uses the id to route
        # and the flags so clients can update local state.
        return {
            'id': um.user_profile_id,
            'flags': um.flags_list()
        }
    send_event(event, list(map(user_info, ums)))
|
|
|
|
|
2016-07-22 23:45:24 +02:00
|
|
|
# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic
def do_update_message(user_profile, message, subject, propagate_mode, content, rendered_content):
    # type: (UserProfile, Message, Optional[Text], str, Optional[Text], Optional[Text]) -> int
    """Apply a user-initiated edit of a message's content and/or topic.

    Either `subject` (topic) or `content` may be None, meaning that
    field is unchanged.  `propagate_mode` controls whether a topic
    rename also renames other messages in the same topic
    ('change_one', 'change_later', or 'change_all').  Records an
    edit-history entry on the message, updates the to_dict caches, and
    notifies the message's recipients.  Returns the number of messages
    changed (more than one when a topic rename propagates).
    """
    event = {'type': 'update_message',
             # TODO: We probably want to remove the 'sender' field
             # after confirming it isn't used by any consumers.
             'sender': user_profile.email,
             'user_id': user_profile.id,
             'message_id': message.id}  # type: Dict[str, Any]
    edit_history_event = {
        'user_id': user_profile.id,
    }  # type: Dict[str, Any]
    changed_messages = [message]

    # Set first_rendered_content to be the oldest version of the
    # rendered content recorded; which is the current version if the
    # content hasn't been edited before. Note that because one could
    # have edited just the subject, not every edit history event
    # contains a prev_rendered_content element.
    first_rendered_content = message.rendered_content
    if message.edit_history is not None:
        edit_history = ujson.loads(message.edit_history)
        for old_edit_history_event in edit_history:
            if 'prev_rendered_content' in old_edit_history_event:
                first_rendered_content = old_edit_history_event['prev_rendered_content']

    ums = UserMessage.objects.filter(message=message.id)

    if content is not None:
        # Recompute per-user flags (e.g. mentions/alert words) for the
        # new content.
        update_user_message_flags(message, ums)

        # We are turning off diff highlighting everywhere until ticket #1532 is addressed.
        if False:
            # Don't highlight message edit diffs on prod
            rendered_content = highlight_html_differences(first_rendered_content, rendered_content)

        event['orig_content'] = message.content
        event['orig_rendered_content'] = message.rendered_content
        edit_history_event["prev_content"] = message.content
        edit_history_event["prev_rendered_content"] = message.rendered_content
        edit_history_event["prev_rendered_content_version"] = message.rendered_content_version
        message.content = content
        message.rendered_content = rendered_content
        message.rendered_content_version = bugdown_version
        event["content"] = content
        event["rendered_content"] = rendered_content
        # NOTE(review): this reads rendered_content_version *after* it
        # was bumped to bugdown_version above, so despite the 'prev_'
        # name it carries the new version — confirm consumers expect that.
        event['prev_rendered_content_version'] = message.rendered_content_version

        # Keep Attachment bookkeeping in sync when the set of
        # referenced uploads may have changed.
        prev_content = edit_history_event['prev_content']
        if Message.content_has_attachment(prev_content) or Message.content_has_attachment(message.content):
            check_attachment_reference_change(prev_content, message)

    if subject is not None:
        orig_subject = message.topic_name()
        subject = truncate_topic(subject)
        event["orig_subject"] = orig_subject
        event["propagate_mode"] = propagate_mode
        message.subject = subject
        event["stream_id"] = message.recipient.type_id
        event["subject"] = subject
        event['subject_links'] = bugdown.subject_links(message.sender.realm_id, subject)
        edit_history_event["prev_subject"] = orig_subject

        if propagate_mode in ["change_later", "change_all"]:
            propagate_query = Q(recipient = message.recipient, subject = orig_subject)
            # We only change messages up to 2 days in the past, to avoid hammering our
            # DB by changing an unbounded amount of messages
            if propagate_mode == 'change_all':
                before_bound = timezone.now() - datetime.timedelta(days=2)

                propagate_query = (propagate_query & ~Q(id = message.id) &
                                   Q(pub_date__range=(before_bound, timezone.now())))
            if propagate_mode == 'change_later':
                propagate_query = propagate_query & Q(id__gt = message.id)

            messages = Message.objects.filter(propagate_query).select_related()

            # Evaluate the query before running the update
            messages_list = list(messages)
            messages.update(subject=subject)

            for m in messages_list:
                # The cached ORM object is not changed by messages.update()
                # and the remote cache update requires the new value
                m.subject = subject

            changed_messages += messages_list

    # Record the edit timestamp and prepend this event to the
    # message's edit history (newest first).
    message.last_edit_time = timezone.now()
    event['edit_timestamp'] = datetime_to_timestamp(message.last_edit_time)
    edit_history_event['timestamp'] = event['edit_timestamp']
    if message.edit_history is not None:
        edit_history.insert(0, edit_history_event)
    else:
        edit_history = [edit_history_event]
    message.edit_history = ujson.dumps(edit_history)

    log_event(event)
    message.save(update_fields=["subject", "content", "rendered_content",
                                "rendered_content_version", "last_edit_time",
                                "edit_history"])

    # Refresh the remote cache for every changed message and notify
    # each recipient with their personal flags.
    event['message_ids'] = update_to_dict_cache(changed_messages)

    def user_info(um):
        # type: (UserMessage) -> Dict[str, Any]
        return {
            'id': um.user_profile_id,
            'flags': um.flags_list()
        }
    send_event(event, list(map(user_info, ums)))
    return len(changed_messages)
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2013-08-12 22:12:43 +02:00
|
|
|
def encode_email_address(stream):
    # type: (Stream) -> Text
    # Convenience wrapper: build the email-gateway address for `stream`
    # from its name and per-stream email token.
    return encode_email_address_helper(stream.name, stream.email_token)
|
|
|
|
|
|
|
|
def encode_email_address_helper(name, email_token):
    # type: (Text, Text) -> Text
    """Build the email-gateway address for a stream from its name and
    email token, or '' if this deployment has no email gateway.
    """
    # Some deployments may not use the email gateway
    if settings.EMAIL_GATEWAY_PATTERN == '':
        return ''

    # Given the fact that we have almost no restrictions on stream names and
    # that what characters are allowed in e-mail addresses is complicated and
    # dependent on context in the address, we opt for a very simple scheme:
    #
    # Only encode the stream name (leave the + and token alone). Encode
    # everything that isn't alphanumeric plus _ as the percent-prefixed integer
    # ordinal of that character, padded with zeroes to the maximum number of
    # bytes of a UTF-8 encoded Unicode character.
    #
    # Fix: use a raw string for the regex ("\W" relies on a deprecated
    # unrecognized-escape fallthrough; r"\W" is the correct spelling).
    encoded_name = re.sub(r"\W", lambda x: "%" + str(ord(x.group(0))).zfill(4), name)
    encoded_token = "%s+%s" % (encoded_name, email_token)
    return settings.EMAIL_GATEWAY_PATTERN % (encoded_token,)
|
2013-08-12 22:12:43 +02:00
|
|
|
|
2015-10-14 17:11:50 +02:00
|
|
|
def get_email_gateway_message_string_from_address(address):
    # type: (Text) -> Optional[Text]
    """Extract the encoded stream+token portion of a gateway address.

    Builds a regex from EMAIL_GATEWAY_PATTERN and returns the captured
    middle section, or None when `address` does not match the pattern.
    """
    pattern_parts = [re.escape(part) for part in settings.EMAIL_GATEWAY_PATTERN.split('%s')]
    if settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK:
        # Accept mails delivered to any Zulip server
        pattern_parts[-1] = settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK
    address_re = re.compile("(.*?)".join(pattern_parts))

    match = address_re.match(address)
    if match is None:
        return None
    return match.group(1)
|
|
|
|
|
|
|
|
def decode_email_address(email):
    # type: (Text) -> Tuple[Text, Text]
    """Perform the reverse of encode_email_address.

    Returns a tuple of (stream name, email token).  Raises ValueError
    if `email` does not match the deployment's gateway pattern.
    """
    msg_string = get_email_gateway_message_string_from_address(email)
    if msg_string is None:
        # Bug fix: previously this fell through and crashed with an
        # unhelpful TypeError on the `in` check below; fail with a
        # meaningful exception instead.
        raise ValueError("Malformed email-gateway address: %s" % (email,))

    if '.' in msg_string:
        # Workaround for Google Groups and other programs that don't accept emails
        # that have + signs in them (see Trac #2102)
        encoded_stream_name, token = msg_string.split('.')
    else:
        encoded_stream_name, token = msg_string.split('+')
    # Decode the %NNNN escapes produced by encode_email_address_helper.
    # (Also fixed to use a raw string for the regex.)
    stream_name = re.sub(r"%\d{4}", lambda x: unichr(int(x.group(0)[1:])), encoded_stream_name)
    return stream_name, token
|
2013-08-12 22:12:43 +02:00
|
|
|
|
2013-10-02 18:45:10 +02:00
|
|
|
# In general, it's better to avoid using .values() because it makes
# the code pretty ugly, but in this case, it has significant
# performance impact for loading / for users with large numbers of
# subscriptions, so it's worth optimizing.
def gather_subscriptions_helper(user_profile, include_subscribers=True):
    # type: (UserProfile, bool) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]]
    """Compute the user's stream subscription data.

    Returns three lists of stream dicts, each sorted by stream name:
    (subscribed, unsubscribed, never_subscribed).  When
    `include_subscribers` is True, each dict may carry a 'subscribers'
    list of user ids (omitted for invite-only streams the user has
    left, and suppressed entirely when include_subscribers is False).
    """
    # Fetch the raw subscription rows as dicts for performance.
    sub_dicts = Subscription.objects.select_related("recipient").filter(
        user_profile = user_profile,
        recipient__type = Recipient.STREAM).values(
        "recipient__type_id", "in_home_view", "color", "desktop_notifications",
        "audible_notifications", "active", "pin_to_top")

    stream_ids = set([sub["recipient__type_id"] for sub in sub_dicts])
    all_streams = get_active_streams(user_profile.realm).select_related(
        "realm").values("id", "name", "invite_only", "realm_id",
                        "realm__domain", "email_token", "description")

    # Index the streams this user has a Subscription row for, by id.
    stream_dicts = [stream for stream in all_streams if stream['id'] in stream_ids]
    stream_hash = {}
    for stream in stream_dicts:
        stream_hash[stream["id"]] = stream

    all_streams_id = [stream["id"] for stream in all_streams]

    subscribed = []
    unsubscribed = []
    never_subscribed = []

    # Deactivated streams aren't in stream_hash.
    streams = [stream_hash[sub["recipient__type_id"]] for sub in sub_dicts
               if sub["recipient__type_id"] in stream_hash]
    streams_subscribed_map = dict((sub["recipient__type_id"], sub["active"]) for sub in sub_dicts)

    # Add never subscribed streams to streams_subscribed_map
    streams_subscribed_map.update({stream['id']: False for stream in all_streams if stream not in streams})

    if include_subscribers:
        subscriber_map = bulk_get_subscriber_user_ids(all_streams, user_profile, streams_subscribed_map)
    else:
        # If we're not including subscribers, always return None,
        # which the below code needs to check for anyway.
        subscriber_map = defaultdict(lambda: None)

    sub_unsub_stream_ids = set()
    for sub in sub_dicts:
        sub_unsub_stream_ids.add(sub["recipient__type_id"])
        stream = stream_hash.get(sub["recipient__type_id"])
        if not stream:
            # This stream has been deactivated, don't include it.
            continue

        subscribers = subscriber_map[stream["id"]]  # type: Optional[List[int]]

        # Important: don't show the subscribers if the stream is invite only
        # and this user isn't on it anymore.
        if stream["invite_only"] and not sub["active"]:
            subscribers = None

        stream_dict = {'name': stream["name"],
                       'in_home_view': sub["in_home_view"],
                       'invite_only': stream["invite_only"],
                       'color': sub["color"],
                       'desktop_notifications': sub["desktop_notifications"],
                       'audible_notifications': sub["audible_notifications"],
                       'pin_to_top': sub["pin_to_top"],
                       'stream_id': stream["id"],
                       'description': stream["description"],
                       'email_address': encode_email_address_helper(stream["name"], stream["email_token"])}
        if subscribers is not None:
            stream_dict['subscribers'] = subscribers
        if sub["active"]:
            subscribed.append(stream_dict)
        else:
            unsubscribed.append(stream_dict)

    all_streams_id_set = set(all_streams_id)
    # Listing public streams are disabled for Zephyr mirroring realms.
    if user_profile.realm.is_zephyr_mirror_realm:
        never_subscribed_stream_ids = set()  # type: Set[int]
    else:
        never_subscribed_stream_ids = all_streams_id_set - sub_unsub_stream_ids
    never_subscribed_streams = [ns_stream_dict for ns_stream_dict in all_streams
                                if ns_stream_dict['id'] in never_subscribed_stream_ids]

    # Never-subscribed entries only include public streams, and carry a
    # smaller dict (no per-user settings like color/notifications).
    for stream in never_subscribed_streams:
        if not stream['invite_only']:
            stream_dict = {'name': stream['name'],
                           'invite_only': stream['invite_only'],
                           'stream_id': stream['id'],
                           'description': stream['description']}
            subscribers = subscriber_map[stream["id"]]
            if subscribers is not None:
                stream_dict['subscribers'] = subscribers
            never_subscribed.append(stream_dict)

    return (sorted(subscribed, key=lambda x: x['name']),
            sorted(unsubscribed, key=lambda x: x['name']),
            sorted(never_subscribed, key=lambda x: x['name']))
|
|
|
|
|
|
|
|
def gather_subscriptions(user_profile):
    # type: (UserProfile) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]
    """Return (subscribed, unsubscribed) stream dicts for the user,
    with each 'subscribers' list translated from user ids to emails."""
    subscribed, unsubscribed, never_subscribed = gather_subscriptions_helper(user_profile)

    # Collect every user id appearing in any subscriber list, so we can
    # resolve them all to email addresses in a single lookup.
    user_ids = set()
    for sub in subscribed + unsubscribed + never_subscribed:
        user_ids.update(sub.get('subscribers', []))
    email_dict = get_emails_from_user_ids(list(user_ids))

    for sub in subscribed + unsubscribed:
        if 'subscribers' in sub:
            sub['subscribers'] = [email_dict[user_id] for user_id in sub['subscribers']]

    return (subscribed, unsubscribed)
|
2013-03-28 18:07:03 +01:00
|
|
|
|
2013-04-05 00:13:03 +02:00
|
|
|
def get_status_dict(requesting_user_profile):
    # type: (UserProfile) -> Dict[Text, Dict[Text, Dict[str, Any]]]
    """Return the presence-status mapping for the requester's realm."""
    if requesting_user_profile.realm.presence_disabled:
        # Presence is disabled in this realm, so hand back an empty
        # (default)dict rather than querying anything.
        return defaultdict(dict)

    return UserPresence.get_status_dict_by_realm(requesting_user_profile.realm_id)
|
2013-04-05 00:13:03 +02:00
|
|
|
|
2016-11-02 23:48:47 +01:00
|
|
|
def get_cross_realm_dicts():
    # type: () -> List[Dict[str, Any]]
    """Return client-facing dicts describing the cross-realm bot users."""
    result = []
    for email in get_cross_realm_emails():
        user = get_user_profile_by_email(email)
        result.append({'email': user.email,
                       'user_id': user.id,
                       'is_admin': user.is_realm_admin,
                       'is_bot': user.is_bot,
                       'full_name': user.full_name})
    return result
|
|
|
|
|
2017-02-12 21:21:31 +01:00
|
|
|
def do_send_confirmation_email(invitee, referrer, body):
    # type: (PreregistrationUser, UserProfile, Optional[str]) -> None
    """
    Send the confirmation/welcome e-mail to an invited user.

    `invitee` is a PreregistrationUser.
    `referrer` is a UserProfile.
    `body` is optional custom text appended to the invitation.
    """
    subject_template_path = 'confirmation/invite_email.subject'
    body_template_path = 'confirmation/invite_email.txt'
    html_body_template_path = 'confirmation/invite_email.html'  # type: Optional[str]

    context = {'referrer': referrer,
               'support_email': settings.ZULIP_ADMINISTRATOR,
               'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS}

    # Zephyr mirror realms use dedicated plain-text templates (no HTML body).
    if referrer.realm.is_zephyr_mirror_realm:
        subject_template_path = 'confirmation/mituser_invite_email_subject.txt'
        body_template_path = 'confirmation/mituser_invite_email_body.txt'
        html_body_template_path = None

    Confirmation.objects.send_confirmation(
        invitee, invitee.email, additional_context=context,
        subject_template_path=subject_template_path,
        body_template_path=body_template_path,
        html_body_template_path=html_body_template_path,
        host=referrer.realm.host, custom_body=body)
|
2013-05-03 20:24:55 +02:00
|
|
|
|
2015-12-01 17:11:16 +01:00
|
|
|
def is_inactive(email):
    # type: (Text) -> None
    """Validator: raise ValidationError when an *active* account already
    uses this email; a missing or deactivated account passes silently."""
    try:
        user_profile = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return
    if user_profile.is_active:
        raise ValidationError(u'%s is already active' % (email,))
|
|
|
|
|
2015-12-01 17:11:16 +01:00
|
|
|
def user_email_is_unique(email):
    # type: (Text) -> None
    """Validator: raise ValidationError when any account (active or
    not) is already registered with this email."""
    try:
        get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        return
    raise ValidationError(u'%s is already registered' % (email,))
|
|
|
|
|
2017-01-16 05:35:52 +01:00
|
|
|
def validate_email(user_profile, email):
    # type: (UserProfile, Text) -> Tuple[Optional[str], Optional[str]]
    """Check whether `email` can be invited to `user_profile`'s realm.

    Returns (error, skipped): `error` is a hard validation failure;
    `skipped` means the address already has an account.  Both are None
    when the address may be invited.
    """
    try:
        validators.validate_email(email)
    except ValidationError:
        return _("Invalid address."), None

    if not email_allowed_for_realm(email, user_profile.realm):
        return _("Outside your domain."), None

    try:
        existing_user_profile = get_user_profile_by_email(email)
    except UserProfile.DoesNotExist:
        existing_user_profile = None

    is_dummy = (existing_user_profile is not None and
                existing_user_profile.is_mirror_dummy)
    try:
        if is_dummy:
            # Mirror dummy users to be activated must be inactive
            is_inactive(email)
        else:
            # Other users should not already exist at all.
            user_email_is_unique(email)
    except ValidationError:
        return None, _("Already has an account.")

    return None, None
|
|
|
|
|
2017-02-12 21:21:31 +01:00
|
|
|
def do_invite_users(user_profile, invitee_emails, streams, body=None):
    # type: (UserProfile, SizedTextIterable, Iterable[Stream], Optional[str]) -> Tuple[Optional[str], Dict[str, Union[List[Tuple[Text, str]], bool]]]
    """Validate `invitee_emails` and send invitations for the valid ones.

    `streams` are the initial streams the invitee will be subscribed to;
    `body` is optional custom email text.  Returns (error, error_data):
    error is None on full success; error_data carries per-address
    failures and whether any invitations were actually sent.
    """
    validated_emails = []  # type: List[Text]
    errors = []  # type: List[Tuple[Text, str]]
    skipped = []  # type: List[Tuple[Text, str]]

    ret_error = None  # type: Optional[str]
    ret_error_data = {}  # type: Dict[str, Union[List[Tuple[Text, str]], bool]]

    # Classify each address as valid, hard error, or skipped (already
    # has an account).
    for email in invitee_emails:
        if email == '':
            continue

        email_error, email_skipped = validate_email(user_profile, email)

        if not (email_error or email_skipped):
            validated_emails.append(email)
        elif email_error:
            errors.append((email, email_error))
        elif email_skipped:
            skipped.append((email, email_skipped))

    # Any hard error aborts the whole batch before sending anything.
    if errors:
        ret_error = _("Some emails did not validate, so we didn't send any invitations.")
        ret_error_data = {'errors': errors + skipped, 'sent_invitations': False}
        return ret_error, ret_error_data

    if skipped and len(skipped) == len(invitee_emails):
        # All e-mails were skipped, so we didn't actually invite anyone.
        ret_error = _("We weren't able to invite anyone.")
        ret_error_data = {'errors': skipped, 'sent_invitations': False}
        return ret_error, ret_error_data

    # Now that we are past all the possible errors, we actually create
    # the PreregistrationUser objects and trigger the email invitations.
    for email in validated_emails:
        # The logged in user is the referrer.
        prereg_user = PreregistrationUser(email=email, referred_by=user_profile)

        # We save twice because you cannot associate a ManyToMany field
        # on an unsaved object.
        prereg_user.save()
        prereg_user.streams = streams
        prereg_user.save()

        event = {"email": prereg_user.email, "referrer_email": user_profile.email, "email_body": body}
        # NOTE(review): the lambda closes over the loop variable
        # `prereg_user` (late binding); this is only correct if
        # queue_json_publish invokes the processor synchronously within
        # this iteration — confirm against its implementation.
        queue_json_publish("invites", event,
                           lambda event: do_send_confirmation_email(prereg_user, user_profile, body))

    if skipped:
        # Partial success: some addresses already had accounts.
        ret_error = _("Some of those addresses are already using Zulip, "
                      "so we didn't send them an invitation. We did send "
                      "invitations to everyone else!")
        ret_error_data = {'errors': skipped, 'sent_invitations': True}

    return ret_error, ret_error_data
|
2013-07-26 16:51:02 +02:00
|
|
|
|
|
|
|
def send_referral_event(user_profile):
    # type: (UserProfile) -> None
    """Notify the user's clients of their current referral counts."""
    referrals = dict(granted=user_profile.invites_granted,
                     used=user_profile.invites_used)
    send_event(dict(type="referral", referrals=referrals),
               [user_profile.id])
|
2013-07-26 16:51:02 +02:00
|
|
|
|
|
|
|
def do_refer_friend(user_profile, email):
    # type: (UserProfile, Text) -> None
    """Email the referrals inbox about a friend referral, record it,
    and bump the referrer's used-invite count."""
    subject = "Zulip referral: %s" % (email,)
    content = ('Referrer: "%s" <%s>\n'
               'Realm: %s\n'
               'Referred: %s') % (user_profile.full_name, user_profile.email,
                                  user_profile.realm.string_id, email)
    from_email = '"%s" <%s>' % (user_profile.full_name, 'referrals@zulip.com')
    to_email = '"Zulip Referrals" <zulip+referrals@zulip.com>'
    headers = {'Reply-To': '"%s" <%s>' % (user_profile.full_name, user_profile.email,)}

    message = EmailMessage(subject, content, from_email, [to_email], headers=headers)
    message.send()

    # Persist the referral and bump the referrer's usage counter.
    Referral(user_profile=user_profile, email=email).save()
    user_profile.invites_used += 1
    user_profile.save(update_fields=['invites_used'])

    send_referral_event(user_profile)
|
2013-08-22 19:15:54 +02:00
|
|
|
|
2013-08-22 19:54:35 +02:00
|
|
|
def notify_realm_emoji(realm):
    # type: (Realm) -> None
    """Broadcast the realm's current custom emoji set to its active users."""
    payload = {'type': "realm_emoji", 'op': "update",
               'realm_emoji': realm.get_emoji()}
    send_event(payload, active_user_ids(realm))
|
2013-08-22 19:54:35 +02:00
|
|
|
|
2016-12-20 09:22:00 +01:00
|
|
|
def check_add_realm_emoji(realm, name, img_url, author=None):
    # type: (Realm, Text, Text, Optional[UserProfile]) -> None
    """Validate and create a new realm emoji, then notify clients.

    full_clean() raises ValidationError for an invalid name or URL."""
    new_emoji = RealmEmoji(realm=realm, name=name, img_url=img_url, author=author)
    new_emoji.full_clean()
    new_emoji.save()
    notify_realm_emoji(realm)
|
2013-08-22 19:15:54 +02:00
|
|
|
|
|
|
|
def do_remove_realm_emoji(realm, name):
    # type: (Realm, Text) -> None
    """Delete the named realm emoji and notify clients; raises
    RealmEmoji.DoesNotExist when there is no such emoji."""
    emoji = RealmEmoji.objects.get(realm=realm, name=name)
    emoji.delete()
    notify_realm_emoji(realm)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
2013-09-11 17:24:27 +02:00
|
|
|
def notify_alert_words(user_profile, words):
    # type: (UserProfile, Iterable[Text]) -> None
    """Push the user's current alert-word list to their clients."""
    send_event(dict(type="alert_words", alert_words=words),
               [user_profile.id])
|
2013-09-03 22:41:17 +02:00
|
|
|
|
|
|
|
def do_add_alert_words(user_profile, alert_words):
    # type: (UserProfile, Iterable[Text]) -> None
    """Add alert words for the user and notify their clients with the
    resulting full list."""
    updated_words = add_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, updated_words)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
|
|
|
def do_remove_alert_words(user_profile, alert_words):
    # type: (UserProfile, Iterable[Text]) -> None
    """Remove alert words for the user and notify their clients with
    the resulting full list."""
    remaining_words = remove_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, remaining_words)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
|
|
|
def do_set_alert_words(user_profile, alert_words):
    # type: (UserProfile, List[Text]) -> None
    """Replace the user's alert words wholesale and notify their clients."""
    set_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, alert_words)
|
2013-09-10 00:06:24 +02:00
|
|
|
|
|
|
|
def do_set_muted_topics(user_profile, muted_topics):
    # type: (UserProfile, Union[List[List[Text]], List[Tuple[Text, Text]]]) -> None
    """Replace the user's muted-topic list wholesale and notify clients."""
    user_profile.muted_topics = ujson.dumps(muted_topics)
    user_profile.save(update_fields=['muted_topics'])

    notification = dict(type="muted_topics", muted_topics=muted_topics)
    send_event(notification, [user_profile.id])
|
2017-03-13 22:05:35 +01:00
|
|
|
|
|
|
|
def do_update_muted_topic(user_profile, stream, topic, op):
    # type: (UserProfile, str, str, str) -> None
    """Add or remove a single (stream, topic) mute for the user.

    `op` is 'add' or 'remove'.  Removing a pair that is not currently
    muted raises ValueError (list.remove semantics), matching the
    original behavior.
    """
    muted_topics = ujson.loads(user_profile.muted_topics)
    entry = [stream, topic]
    if op == 'add':
        muted_topics.append(entry)
    elif op == 'remove':
        muted_topics.remove(entry)

    user_profile.muted_topics = ujson.dumps(muted_topics)
    user_profile.save(update_fields=['muted_topics'])

    notification = dict(type="muted_topics", muted_topics=muted_topics)
    send_event(notification, [user_profile.id])
|
2013-10-07 17:35:22 +02:00
|
|
|
|
2014-01-06 23:42:02 +01:00
|
|
|
def notify_realm_filters(realm):
    # type: (Realm) -> None
    """Broadcast the realm's current filter (linkifier) list to every
    active user in the realm."""
    event = dict(type="realm_filters",
                 realm_filters=realm_filters_for_realm(realm.id))
    send_event(event, active_user_ids(realm))
2014-01-27 19:43:55 +01:00
|
|
|
# NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
|
|
|
|
# RegExp syntax. In addition to JS-compatible syntax, the following features are available:
|
|
|
|
# * Named groups will be converted to numbered groups automatically
|
|
|
|
# * Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
|
2014-01-06 23:42:02 +01:00
|
|
|
def do_add_realm_filter(realm, pattern, url_format_string):
    # type: (Realm, Text, Text) -> int
    """Create a realm filter (linkifier), validate it, notify clients,
    and return the new filter's id.

    Raises a ValidationError (via full_clean) if the pattern or the URL
    format string fails model validation.
    """
    realm_filter = RealmFilter(realm=realm,
                               pattern=pattern.strip(),
                               url_format_string=url_format_string.strip())
    realm_filter.full_clean()
    realm_filter.save()
    notify_realm_filters(realm)
    return realm_filter.id
def do_remove_realm_filter(realm, pattern=None, id=None):
    # type: (Realm, Optional[Text], Optional[int]) -> None
    """Delete a realm filter, looked up by pattern when one is given,
    otherwise by primary key, then notify clients.

    Raises RealmFilter.DoesNotExist if no matching filter exists.
    """
    if pattern is None:
        RealmFilter.objects.get(realm=realm, pk=id).delete()
    else:
        RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
    notify_realm_filters(realm)
def get_emails_from_user_ids(user_ids):
    # type: (Sequence[int]) -> Dict[int, Text]
    """Map each of the given user ids to that user's email address.

    We may eventually use memcached to speed this up, but the DB is fast.
    """
    return UserProfile.emails_from_ids(user_ids)
def get_realm_aliases(realm):
    # type: (Realm) -> List[Dict[str, Text]]
    """Return the realm's aliases as a list of dicts with 'domain' and
    'allow_subdomains' keys."""
    alias_values = realm.realmalias_set.values('domain', 'allow_subdomains')
    return list(alias_values)
def do_add_realm_alias(realm, domain, allow_subdomains):
    # type: (Realm, Text, bool) -> RealmAlias
    """Create a RealmAlias for the realm, broadcast the addition to all
    active users, and return the new alias."""
    alias = RealmAlias.objects.create(realm=realm, domain=domain,
                                      allow_subdomains=allow_subdomains)
    alias_payload = dict(domain=alias.domain,
                         allow_subdomains=alias.allow_subdomains)
    event = dict(type="realm_domains", op="add", alias=alias_payload)
    send_event(event, active_user_ids(realm))
    return alias
def do_change_realm_alias(alias, allow_subdomains):
    # type: (RealmAlias, bool) -> None
    """Update the allow_subdomains flag on an existing alias and
    broadcast the change to the alias's realm."""
    alias.allow_subdomains = allow_subdomains
    alias.save(update_fields=['allow_subdomains'])
    alias_payload = dict(domain=alias.domain,
                         allow_subdomains=alias.allow_subdomains)
    event = dict(type="realm_domains", op="change", alias=alias_payload)
    send_event(event, active_user_ids(alias.realm))
def do_remove_realm_alias(alias):
    # type: (RealmAlias) -> None
    """Delete a realm alias and broadcast the removal.

    If this was the last realm alias, we mark the realm as no longer
    restricted to domain, because the feature doesn't do anything if
    there are no domains, and this is probably less confusing than the
    alternative.
    """
    realm = alias.realm
    domain = alias.domain
    alias.delete()
    no_aliases_left = not RealmAlias.objects.filter(realm=realm).exists()
    if no_aliases_left and realm.restricted_to_domain:
        do_set_realm_restricted_to_domain(realm, False)
    event = dict(type="realm_domains", op="remove", domain=domain)
    send_event(event, active_user_ids(realm))
def get_occupied_streams(realm):
    # type: (Realm) -> QuerySet
    # TODO: Make a generic stub for QuerySet
    """ Get streams with subscribers """
    active_subs = Subscription.objects.filter(
        active=True,
        user_profile__realm=realm,
        user_profile__is_active=True).values('recipient_id')
    occupied_recipients = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=active_subs).values('type_id')
    return Stream.objects.filter(id__in=occupied_recipients,
                                 realm=realm, deactivated=False)
def do_get_streams(user_profile, include_public=True, include_subscribed=True,
                   include_all_active=False, include_default=False):
    # type: (UserProfile, bool, bool, bool, bool) -> List[Dict[str, Any]]
    """Return dicts for the streams the user may list, sorted by name.

    include_all_active skips the public/subscribed filtering entirely
    and requires API super-user access.
    """
    if include_all_active and not user_profile.is_api_super_user:
        raise JsonableError(_("User not authorized for this query"))

    # Listing public streams are disabled for Zephyr mirroring realms.
    include_public = include_public and not user_profile.realm.is_zephyr_mirror_realm

    # Start out with all streams in the realm with subscribers
    query = get_occupied_streams(user_profile.realm)

    if not include_all_active:
        user_subs = Subscription.objects.select_related("recipient").filter(
            active=True, user_profile=user_profile,
            recipient__type=Recipient.STREAM)

        if include_subscribed:
            recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
        if include_public:
            invite_only_check = Q(invite_only=False)

        if include_subscribed and include_public:
            query = query.filter(recipient_check | invite_only_check)
        elif include_public:
            query = query.filter(invite_only_check)
        elif include_subscribed:
            query = query.filter(recipient_check)
        else:
            # We're including nothing, so don't bother hitting the DB.
            query = []

    streams = sorted((row.to_dict() for row in query),
                     key=lambda elt: elt["name"])

    if include_default:
        default_stream_ids = set(
            default_stream.id
            for default_stream in get_default_streams_for_realm(user_profile.realm))
        for stream in streams:
            stream['is_default'] = stream["stream_id"] in default_stream_ids

    return streams
def do_claim_attachments(message):
    # type: (Message) -> List[Tuple[Text, bool]]
    """Mark each attachment referenced in the message's content as
    claimed by this message.

    Returns a list of (path_id, is_claimed) tuples for every URL whose
    path id could be parsed; unparseable URLs are skipped.
    """
    attachment_url_list = attachment_url_re.findall(message.content)

    # These values are the same for every attachment URL in the message,
    # so compute them (including the Stream DB lookup) once, outside the
    # loop, instead of once per URL.
    user_profile = message.sender
    is_message_realm_public = False
    if message.recipient.type == Recipient.STREAM:
        is_message_realm_public = Stream.objects.get(id=message.recipient.type_id).is_public()

    results = []
    for url in attachment_url_list:
        path_id = attachment_url_to_path_id(url)
        if path_id is not None:
            is_claimed = claim_attachment(user_profile, path_id, message,
                                          is_message_realm_public)
            results.append((path_id, is_claimed))

    return results
def do_delete_old_unclaimed_attachments(weeks_ago):
    # type: (int) -> None
    """Delete attachments that have gone unclaimed for at least
    `weeks_ago` weeks, removing both the stored file and the DB row."""
    for attachment in get_old_unclaimed_attachments(weeks_ago):
        delete_message_image(attachment.path_id)
        attachment.delete()
def check_attachment_reference_change(prev_content, message):
    # type: (Text, Message) -> None
    """Sync a message's attachment links after an edit: unlink
    attachments whose URLs were removed from the content, and claim any
    newly referenced ones."""
    prev_attachments = set(attachment_url_re.findall(prev_content))
    new_attachments = set(attachment_url_re.findall(message.content))

    removed_urls = prev_attachments - new_attachments
    path_ids = [attachment_url_to_path_id(url) for url in removed_urls]

    attachments_to_update = Attachment.objects.filter(path_id__in=path_ids).select_for_update()
    message.attachment_set.remove(*attachments_to_update)

    if new_attachments - prev_attachments:
        do_claim_attachments(message)
def do_set_email_changes_disabled(realm, email_changes_disabled):
    # type: (Realm, bool) -> None
    """Toggle whether users in the realm may change their own email
    address, then broadcast the new setting to all active users."""
    realm.email_changes_disabled = email_changes_disabled
    realm.save(update_fields=['email_changes_disabled'])
    event = dict(type="realm", op="update",
                 property='email_changes_disabled',
                 value=email_changes_disabled)
    send_event(event, active_user_ids(realm))
def do_set_name_changes_disabled(realm, name_changes_disabled):
    # type: (Realm, bool) -> None
    """Toggle whether users in the realm may change their own full name,
    then broadcast the new setting to all active users."""
    realm.name_changes_disabled = name_changes_disabled
    realm.save(update_fields=['name_changes_disabled'])
    event = dict(type="realm", op="update",
                 property='name_changes_disabled',
                 value=name_changes_disabled)
    send_event(event, active_user_ids(realm))