from typing import (
    AbstractSet, Any, AnyStr, Callable, Dict, Iterable, List, Mapping, MutableMapping,
    Optional, Sequence, Set, Text, Tuple, TypeVar, Union, cast
)
from mypy_extensions import TypedDict

import django.db.utils
from django.contrib.contenttypes.models import ContentType
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core import validators
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat, \
    RealmCount
from zerver.lib.bugdown import (
    BugdownRenderingException,
    version as bugdown_version,
    url_embed_preview_enabled_for_realm
)
from zerver.lib.addressee import (
    Addressee,
    user_profiles_from_unvalidated_emails,
)
from zerver.lib.cache import (
    delete_user_profile_caches,
    to_dict_cache_key_id,
)
from zerver.lib.context_managers import lockfile
from zerver.lib.emoji import emoji_name_to_emoji_code
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.message import (
    access_message,
    MessageDict,
    render_markdown,
)
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.retention import move_message_to_archive
from zerver.lib.send_email import send_email, FromAddress
from zerver.lib.stream_subscription import (
    get_active_subscriptions_for_stream_id,
    get_active_subscriptions_for_stream_ids,
    get_bulk_stream_subscriber_info,
    get_stream_subscriptions_for_user,
    get_stream_subscriptions_for_users,
    num_subscribers_for_stream_id,
)
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.topic_mutes import (
    get_topic_mutes,
    add_topic_mute,
    remove_topic_mute,
)
from zerver.lib.users import bulk_get_users, check_full_name
from zerver.lib.user_groups import create_user_group, access_user_group_by_id
from zerver.models import Realm, RealmEmoji, Stream, UserProfile, UserActivity, \
    RealmDomain, \
    Subscription, Recipient, Message, Attachment, UserMessage, RealmAuditLog, \
    UserHotspot, \
    Client, DefaultStream, DefaultStreamGroup, UserPresence, PushDeviceToken, \
    ScheduledEmail, MAX_SUBJECT_LENGTH, \
    MAX_MESSAGE_LENGTH, get_client, get_stream, get_personal_recipient, get_huddle, \
    get_user_profile_by_id, PreregistrationUser, get_display_recipient, \
    get_realm, bulk_get_recipients, get_stream_recipient, get_stream_recipients, \
    email_allowed_for_realm, email_to_username, display_recipient_cache_key, \
    get_user, get_stream_cache_key, \
    UserActivityInterval, active_user_ids, get_active_streams, \
    realm_filters_for_realm, RealmFilter, \
    get_owned_bot_dicts, stream_name_in_use, \
    get_old_unclaimed_attachments, is_cross_realm_bot_email, \
    Reaction, EmailChangeStatus, CustomProfileField, \
    custom_profile_fields_for_realm, get_huddle_user_ids, \
    CustomProfileFieldValue, validate_attachment_request, get_system_bot, \
    get_display_recipient_by_id, query_for_ids, get_huddle_recipient, \
    UserGroup, UserGroupMembership, get_default_stream_groups

from zerver.lib.alert_words import alert_words_in_realm
from zerver.lib.avatar import avatar_url
from zerver.lib.stream_recipient import StreamRecipientMap
from django.db import transaction, IntegrityError, connection
from django.db.models import F, Q, Max, Sum
from django.db.models.query import QuerySet
from django.core.exceptions import ValidationError
from django.core.mail import EmailMessage
from django.utils.timezone import now as timezone_now

from confirmation.models import Confirmation, create_confirmation_link
from confirmation import settings as confirmation_settings
from six import unichr

from zerver.lib.create_user import random_api_key
from zerver.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
from zerver.lib.queue import queue_json_publish
from zerver.lib.create_user import create_user
from zerver.lib import bugdown
from zerver.lib.cache import cache_with_key, cache_set, \
    user_profile_by_email_cache_key, user_profile_cache_key, \
    cache_set_many, cache_delete, cache_delete_many
from zerver.decorator import statsd_increment
from zerver.lib.utils import log_statsd_event, statsd
from zerver.lib.html_diff import highlight_html_differences
from zerver.lib.alert_words import user_alert_words, add_user_alert_words, \
    remove_user_alert_words, set_user_alert_words
from zerver.lib.notifications import clear_scheduled_emails, \
    clear_scheduled_invitation_emails, enqueue_welcome_emails
from zerver.lib.narrow import check_supported_events_narrow_filter
from zerver.lib.exceptions import JsonableError, ErrorCode
from zerver.lib.sessions import delete_user_sessions
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id, \
    claim_attachment, delete_message_image
from zerver.lib.str_utils import NonBinaryStr, force_str
from zerver.tornado.event_queue import request_event_queue, send_event

import DNS
import ujson
import time
import traceback
import re
import datetime
import os
import platform
import logging
import itertools
from collections import defaultdict
from operator import itemgetter

# This will be used to type annotate parameters in a function if the function
# works on both str and unicode in python 2 but in python 3 it only works on str.
SizedTextIterable = Union[Sequence[Text], AbstractSet[Text]]
STREAM_ASSIGNMENT_COLORS = [
    "#76ce90", "#fae589", "#a6c7e5", "#e79ab5",
    "#bfd56f", "#f4ae55", "#b0a5fd", "#addfe5",
    "#f5ce6e", "#c2726a", "#94c849", "#bd86e5",
    "#ee7e4a", "#a6dcbf", "#95a5fd", "#53a063",
    "#9987e1", "#e4523d", "#c2c2c2", "#4f8de4",
    "#c6a8ad", "#e7cc4d", "#c8bebf", "#a47462"]
# Store an event in the log for re-importing messages
def log_event(event: MutableMapping[str, Any]) -> None:
    if settings.EVENT_LOG_DIR is None:
        return

    if "timestamp" not in event:
        event["timestamp"] = time.time()

    if not os.path.exists(settings.EVENT_LOG_DIR):
        os.mkdir(settings.EVENT_LOG_DIR)

    template = os.path.join(settings.EVENT_LOG_DIR,
                            '%s.' + platform.node() +
                            timezone_now().strftime('.%Y-%m-%d'))

    with lockfile(template % ('lock',)):
        with open(template % ('events',), 'a') as log:
            log.write(ujson.dumps(event) + '\n')
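# A minimal sketch of the file layout log_event produces (host and date are
# hypothetical, not from the source): with EVENT_LOG_DIR = "/var/log/zulip/events"
# on a host named "host1" on 2017-12-07, each event is appended as one JSON line
# to "/var/log/zulip/events/events.host1.2017-12-07", guarded by the matching
# lockfile "/var/log/zulip/events/lock.host1.2017-12-07".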
def can_access_stream_user_ids(stream: Stream) -> Set[int]:
    # return user ids of users who can access the attributes of
    # a stream, such as its name/description
    if stream.is_public():
        return set(active_user_ids(stream.realm_id))
    else:
        return private_stream_user_ids(stream.id)
def private_stream_user_ids(stream_id: int) -> Set[int]:
    # TODO: Find similar queries elsewhere and de-duplicate this code.
    subscriptions = get_active_subscriptions_for_stream_id(stream_id)
    return {sub['user_profile_id'] for sub in subscriptions.values('user_profile_id')}
def bot_owner_user_ids(user_profile: UserProfile) -> Set[int]:
    is_private_bot = (
        user_profile.default_sending_stream and
        user_profile.default_sending_stream.invite_only or
        user_profile.default_events_register_stream and
        user_profile.default_events_register_stream.invite_only)
    if is_private_bot:
        return {user_profile.bot_owner_id, }
    else:
        users = {user.id for user in user_profile.realm.get_admin_users()}
        users.add(user_profile.bot_owner_id)
        return users
def realm_user_count(realm: Realm) -> int:
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()
def get_topic_history_for_stream(user_profile: UserProfile,
                                 recipient: Recipient) -> List[Dict[str, Any]]:
    query = '''
    SELECT
        "zerver_message"."subject" as topic,
        max("zerver_message".id) as max_message_id
    FROM "zerver_message"
    INNER JOIN "zerver_usermessage" ON (
        "zerver_usermessage"."message_id" = "zerver_message"."id"
    )
    WHERE (
        "zerver_usermessage"."user_profile_id" = %s AND
        "zerver_message"."recipient_id" = %s
    )
    GROUP BY (
        "zerver_message"."subject"
    )
    ORDER BY max("zerver_message".id) DESC
    '''
    cursor = connection.cursor()
    cursor.execute(query, [user_profile.id, recipient.id])
    rows = cursor.fetchall()
    cursor.close()

    canonical_topic_names = set()  # type: Set[str]
    history = []
    for (topic_name, max_message_id) in rows:
        canonical_name = topic_name.lower()
        if canonical_name in canonical_topic_names:
            continue

        canonical_topic_names.add(canonical_name)
        history.append(dict(
            name=topic_name,
            max_id=max_message_id))

    return history
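# A sketch of the return shape (values are hypothetical): topics come back
# newest-first and are de-duplicated case-insensitively, keeping the casing of
# the most recent message, e.g.
#     [dict(name='Design', max_id=1500), dict(name='lunch', max_id=1423)]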
def send_signup_message(sender, admin_realm_signup_notifications_stream, user_profile,
                        internal=False, realm=None):
    # type: (Text, Text, UserProfile, bool, Optional[Realm]) -> None
    if internal:
        # When this is done using manage.py vs. the web interface
        internal_blurb = " **INTERNAL SIGNUP** "
    else:
        internal_blurb = " "

    user_count = realm_user_count(user_profile.realm)
    signup_notifications_stream = user_profile.realm.get_signup_notifications_stream()
    # Send notification to realm signup notifications stream if it exists
    # Don't send notification for the first user in a realm
    if signup_notifications_stream is not None and user_count > 1:
        internal_send_message(
            user_profile.realm,
            sender,
            "stream",
            signup_notifications_stream.name,
            "signups",
            "%s (%s) just signed up for Zulip. (total: %i)" % (
                user_profile.full_name, user_profile.email, user_count
            )
        )

    # We also send a notification to the Zulip administrative realm
    admin_realm = get_system_bot(sender).realm
    try:
        # Check whether the stream exists
        get_stream(admin_realm_signup_notifications_stream, admin_realm)
    except Stream.DoesNotExist:
        # If the signups stream hasn't been created in the admin
        # realm, don't auto-create it to send to it; just do nothing.
        return
    internal_send_message(
        admin_realm,
        sender,
        "stream",
        admin_realm_signup_notifications_stream,
        user_profile.realm.display_subdomain,
        "%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
            user_profile.full_name,
            user_profile.email,
            internal_blurb,
            user_count,
        )
    )
def notify_new_user(user_profile: UserProfile, internal: bool=False) -> None:
    if settings.NEW_USER_BOT is not None:
        send_signup_message(settings.NEW_USER_BOT, "signups", user_profile, internal)
    statsd.gauge("users.signups.%s" % (user_profile.realm.string_id), 1, delta=True)

    # We also clear any scheduled invitation emails to prevent them
    # from being sent after the user is created.
    clear_scheduled_invitation_emails(user_profile.email)
def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
    """Give you the last 1000 messages on your public streams, so you have
    something to look at in your home view once you finish the
    tutorial."""
    one_week_ago = timezone_now() - datetime.timedelta(weeks=1)

    stream_ids = [stream.id for stream in streams if not stream.invite_only]
    recipients = get_stream_recipients(stream_ids)
    recent_messages = Message.objects.filter(recipient_id__in=recipients,
                                             pub_date__gt=one_week_ago).order_by("-id")
    message_ids_to_use = list(reversed(recent_messages.values_list('id', flat=True)[0:1000]))
    if len(message_ids_to_use) == 0:
        return

    # Handle the race condition where a message arrives between
    # bulk_add_subscriptions above and the Message query just above
    already_ids = set(UserMessage.objects.filter(message_id__in=message_ids_to_use,
                                                 user_profile=user_profile).values_list("message_id",
                                                                                        flat=True))
    ums_to_create = [UserMessage(user_profile=user_profile, message_id=message_id,
                                 flags=UserMessage.flags.read)
                     for message_id in message_ids_to_use
                     if message_id not in already_ids]

    UserMessage.objects.bulk_create(ums_to_create)
# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
# * Subscribes the user to the newsletter if newsletter_data is specified
def process_new_human_user(user_profile, prereg_user=None, newsletter_data=None,
                           default_stream_groups=[]):
    # type: (UserProfile, Optional[PreregistrationUser], Optional[Dict[str, str]], List[DefaultStreamGroup]) -> None
    mit_beta_user = user_profile.realm.is_zephyr_mirror_realm
    if prereg_user is not None:
        streams = prereg_user.streams.all()
        acting_user = prereg_user.referred_by  # type: Optional[UserProfile]
    else:
        streams = []
        acting_user = None

    # If the user's invitation didn't explicitly list some streams, we
    # add the default streams
    if len(streams) == 0:
        streams = get_default_subs(user_profile)

    for default_stream_group in default_stream_groups:
        default_stream_group_streams = default_stream_group.streams.all()
        for stream in default_stream_group_streams:
            if stream not in streams:
                streams.append(stream)

    bulk_add_subscriptions(streams, [user_profile], acting_user=acting_user)

    add_new_user_history(user_profile, streams)

    # mit_beta_users don't have a referred_by field
    if not mit_beta_user and prereg_user is not None and prereg_user.referred_by is not None \
            and settings.NOTIFICATION_BOT is not None:
        # This is a cross-realm private message.
        internal_send_private_message(
            user_profile.realm,
            get_system_bot(settings.NOTIFICATION_BOT),
            prereg_user.referred_by,
            "%s <`%s`> accepted your invitation to join Zulip!" % (
                user_profile.full_name,
                user_profile.email,
            )
        )
    # Mark any other PreregistrationUsers that are STATUS_ACTIVE as
    # inactive so we can keep track of the PreregistrationUser we
    # actually used for analytics
    if prereg_user is not None:
        PreregistrationUser.objects.filter(email__iexact=user_profile.email).exclude(
            id=prereg_user.id).update(status=0)
    else:
        PreregistrationUser.objects.filter(email__iexact=user_profile.email).update(status=0)

    notify_new_user(user_profile)
    enqueue_welcome_emails(user_profile)

    # We have an import loop here; it's intentional, because we want
    # to keep all the onboarding code in zerver/lib/onboarding.py.
    from zerver.lib.onboarding import send_initial_pms
    send_initial_pms(user_profile)

    if newsletter_data is not None:
        # If the user was created automatically via the API, we may
        # not want to register them for the newsletter
        queue_json_publish(
            "signups",
            {
                'email_address': user_profile.email,
                'user_id': user_profile.id,
                'merge_fields': {
                    'NAME': user_profile.full_name,
                    'REALM_ID': user_profile.realm_id,
                    'OPTIN_IP': newsletter_data["IP"],
                    'OPTIN_TIME': datetime.datetime.isoformat(timezone_now().replace(microsecond=0)),
                },
            },
            lambda event: None)
def notify_created_user(user_profile: UserProfile) -> None:
    event = dict(type="realm_user", op="add",
                 person=dict(email=user_profile.email,
                             user_id=user_profile.id,
                             is_admin=user_profile.is_realm_admin,
                             full_name=user_profile.full_name,
                             avatar_url=avatar_url(user_profile),
                             timezone=user_profile.timezone,
                             is_bot=user_profile.is_bot))
    send_event(event, active_user_ids(user_profile.realm_id))
def notify_created_bot(user_profile: UserProfile) -> None:
    def stream_name(stream: Optional[Stream]) -> Optional[Text]:
        if not stream:
            return None
        return stream.name

    default_sending_stream_name = stream_name(user_profile.default_sending_stream)
    default_events_register_stream_name = stream_name(user_profile.default_events_register_stream)

    bot = dict(email=user_profile.email,
               user_id=user_profile.id,
               full_name=user_profile.full_name,
               bot_type=user_profile.bot_type,
               is_active=user_profile.is_active,
               api_key=user_profile.api_key,
               default_sending_stream=default_sending_stream_name,
               default_events_register_stream=default_events_register_stream_name,
               default_all_public_streams=user_profile.default_all_public_streams,
               avatar_url=avatar_url(user_profile),
               )

    # Set the owner key only when the bot has an owner.
    # The default bots don't have an owner. So don't
    # set the owner key while reactivating them.
    if user_profile.bot_owner is not None:
        bot['owner'] = user_profile.bot_owner.email

    event = dict(type="realm_bot", op="add", bot=bot)
    send_event(event, bot_owner_user_ids(user_profile))
def do_create_user(email, password, realm, full_name, short_name,
                   is_realm_admin=False, bot_type=None, bot_owner=None, tos_version=None,
                   timezone=u"", avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                   default_sending_stream=None, default_events_register_stream=None,
                   default_all_public_streams=None, prereg_user=None,
                   newsletter_data=None, default_stream_groups=[]):
    # type: (Text, Optional[Text], Realm, Text, Text, bool, Optional[int], Optional[UserProfile], Optional[Text], Text, Text, Optional[Stream], Optional[Stream], bool, Optional[PreregistrationUser], Optional[Dict[str, str]], List[DefaultStreamGroup]) -> UserProfile

    user_profile = create_user(email=email, password=password, realm=realm,
                               full_name=full_name, short_name=short_name,
                               is_realm_admin=is_realm_admin,
                               bot_type=bot_type, bot_owner=bot_owner,
                               tos_version=tos_version, timezone=timezone, avatar_source=avatar_source,
                               default_sending_stream=default_sending_stream,
                               default_events_register_stream=default_events_register_stream,
                               default_all_public_streams=default_all_public_streams)

    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_created', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)
    if bot_type:
        notify_created_bot(user_profile)
    else:
        process_new_human_user(user_profile, prereg_user=prereg_user,
                               newsletter_data=newsletter_data,
                               default_stream_groups=default_stream_groups)
    return user_profile
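# A hedged usage sketch (the account details are hypothetical, not from the
# source): a plain human account only needs the positional arguments, e.g.
#     user = do_create_user('iago@example.com', 'initial_password', realm,
#                           'Iago', 'iago')
# which also audit-logs the signup and fires the realm_user/add event.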
def do_activate_user(user_profile: UserProfile) -> None:
    user_profile.is_active = True
    user_profile.is_mirror_dummy = False
    user_profile.set_unusable_password()
    user_profile.date_joined = timezone_now()
    user_profile.tos_version = settings.TOS_VERSION
    user_profile.save(update_fields=["is_active", "date_joined", "password",
                                     "is_mirror_dummy", "tos_version"])

    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_activated', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)
def do_reactivate_user(user_profile: UserProfile, acting_user: Optional[UserProfile]=None) -> None:
    # Unlike do_activate_user, this is meant for re-activating existing users,
    # so it doesn't reset their password, etc.
    user_profile.is_active = True
    user_profile.save(update_fields=["is_active"])

    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_reactivated', event_time=event_time,
                                 acting_user=acting_user)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)

    notify_created_user(user_profile)

    if user_profile.is_bot:
        notify_created_bot(user_profile)
def active_humans_in_realm(realm: Realm) -> Sequence[UserProfile]:
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
def do_set_realm_property(realm: Realm, name: str, value: Any) -> None:
    """Takes in a realm object, the name of an attribute to update, and the
    value to update it to.
    """
    property_type = Realm.property_types[name]
    assert isinstance(value, property_type), (
        'Cannot update %s: %s is not an instance of %s' % (
            name, value, property_type,))

    setattr(realm, name, value)
    realm.save(update_fields=[name])
    event = dict(
        type='realm',
        op='update',
        property=name,
        value=value,
    )
    send_event(event, active_user_ids(realm.id))
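# A hedged usage sketch (assuming 'name' is registered as a Text property in
# Realm.property_types):
#     do_set_realm_property(realm, 'name', u'New Realm Name')
# saves just that field and broadcasts a realm/update event to all active users;
# a value of the wrong type trips the assertion above.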
def do_set_realm_authentication_methods(realm: Realm,
                                        authentication_methods: Dict[str, bool]) -> None:
    for key, value in list(authentication_methods.items()):
        index = getattr(realm.authentication_methods, key).number
        realm.authentication_methods.set_bit(index, int(value))
    realm.save(update_fields=['authentication_methods'])
    event = dict(
        type="realm",
        op="update_dict",
        property='default',
        data=dict(authentication_methods=realm.authentication_methods_dict())
    )
    send_event(event, active_user_ids(realm.id))
def do_set_realm_message_editing(realm: Realm,
                                 allow_message_editing: bool,
                                 message_content_edit_limit_seconds: int) -> None:
    realm.allow_message_editing = allow_message_editing
    realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
    realm.save(update_fields=['allow_message_editing', 'message_content_edit_limit_seconds'])
    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=dict(allow_message_editing=allow_message_editing,
                  message_content_edit_limit_seconds=message_content_edit_limit_seconds),
    )
    send_event(event, active_user_ids(realm.id))
def do_set_realm_notifications_stream(realm: Realm, stream: Stream, stream_id: int) -> None:
    realm.notifications_stream = stream
    realm.save(update_fields=['notifications_stream'])
    event = dict(
        type="realm",
        op="update",
        property="notifications_stream_id",
        value=stream_id
    )
    send_event(event, active_user_ids(realm.id))
def do_set_realm_signup_notifications_stream(realm: Realm, stream: Stream,
                                             stream_id: int) -> None:
    realm.signup_notifications_stream = stream
    realm.save(update_fields=['signup_notifications_stream'])
    event = dict(
        type="realm",
        op="update",
        property="signup_notifications_stream_id",
        value=stream_id
    )
    send_event(event, active_user_ids(realm.id))
def do_deactivate_realm(realm: Realm) -> None:
    """
    Deactivate this realm. Do NOT deactivate the users -- we need to be able to
    tell the difference between users that were intentionally deactivated,
    e.g. by a realm admin, and users who can't currently use Zulip because their
    realm has been deactivated.
    """
    if realm.deactivated:
        return

    realm.deactivated = True
    realm.save(update_fields=["deactivated"])

    for user in active_humans_in_realm(realm):
        # Don't deactivate the users, but do delete their sessions so they get
        # bumped to the login screen, where they'll get a realm deactivation
        # notice when they try to log in.
        delete_user_sessions(user)
        clear_scheduled_emails(user.id)
def do_reactivate_realm(realm: Realm) -> None:
    realm.deactivated = False
    realm.save(update_fields=["deactivated"])
def do_deactivate_user(user_profile: UserProfile,
                       acting_user: Optional[UserProfile]=None,
                       _cascade: bool=True) -> None:
    if not user_profile.is_active:
        return

    user_profile.is_active = False
    user_profile.save(update_fields=["is_active"])

    delete_user_sessions(user_profile)
    clear_scheduled_emails(user_profile.id)

    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 acting_user=acting_user,
                                 event_type='user_deactivated', event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time, increment=-1)

    event = dict(type="realm_user", op="remove",
                 person=dict(email=user_profile.email,
                             user_id=user_profile.id,
                             full_name=user_profile.full_name))
    send_event(event, active_user_ids(user_profile.realm_id))

    if user_profile.is_bot:
        event = dict(type="realm_bot", op="remove",
                     bot=dict(email=user_profile.email,
                              user_id=user_profile.id,
                              full_name=user_profile.full_name))
        send_event(event, bot_owner_user_ids(user_profile))

    if _cascade:
        bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
                                                  bot_owner=user_profile)
        for profile in bot_profiles:
            do_deactivate_user(profile, acting_user=acting_user, _cascade=False)
def do_deactivate_stream(stream: Stream, log: bool=True) -> None:

    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    get_active_subscriptions_for_stream_id(stream.id).update(active=False)

    was_invite_only = stream.invite_only
    stream.deactivated = True
    stream.invite_only = True
    # Preserve as much as possible the original stream name while giving it a
    # special prefix that both indicates that the stream is deactivated and
    # frees up the original name for reuse.
    old_name = stream.name
    new_name = ("!DEACTIVATED:" + old_name)[:Stream.MAX_NAME_LENGTH]
    for i in range(20):
        if stream_name_in_use(new_name, stream.realm_id):
            # This stream has already been deactivated, keep prepending !s until
            # we have a unique stream name or we've hit a rename limit.
            new_name = ("!" + new_name)[:Stream.MAX_NAME_LENGTH]
        else:
            break

    # If we don't have a unique name at this point, this will fail later in the
    # code path.

    stream.name = new_name[:Stream.MAX_NAME_LENGTH]
    stream.save(update_fields=['name', 'deactivated', 'invite_only'])

    # If this is a default stream, remove it, properly sending a
    # notification to browser clients.
    if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
        do_remove_default_stream(stream)

    # Remove the old stream information from remote cache.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    cache_delete(old_cache_key)

    stream_dict = stream.to_dict()
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete",
                 streams=[stream_dict])
    send_event(event, affected_user_ids)
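# A sketch of the rename sequence above (for a hypothetical stream "social"):
# the first deactivation renames it to "!DEACTIVATED:social"; if that name is
# already taken by an earlier deactivation, the loop tries
# "!!DEACTIVATED:social", "!!!DEACTIVATED:social", and so on, each time
# truncating to Stream.MAX_NAME_LENGTH, for at most 20 attempts.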
def do_change_user_email(user_profile: UserProfile, new_email: Text) -> None:
    delete_user_profile_caches([user_profile])

    user_profile.email = new_email
    user_profile.save(update_fields=["email"])

    payload = dict(user_id=user_profile.id,
                   new_email=new_email)
    send_event(dict(type='realm_user', op='update', person=payload),
               active_user_ids(user_profile.realm_id))
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type='user_email_changed',
                                 event_time=event_time)
def do_start_email_change_process(user_profile: UserProfile, new_email: Text) -> None:
    old_email = user_profile.email
    user_profile.email = new_email
    obj = EmailChangeStatus.objects.create(new_email=new_email, old_email=old_email,
                                           user_profile=user_profile, realm=user_profile.realm)

    activation_url = create_confirmation_link(obj, user_profile.realm.host, Confirmation.EMAIL_CHANGE)
    from zerver.context_processors import common_context
    context = common_context(user_profile)
    context.update({
        'old_email': old_email,
        'new_email': new_email,
        'activate_url': activation_url
    })
    send_email('zerver/emails/confirm_new_email', to_email=new_email,
               from_name='Zulip Account Security', from_address=FromAddress.NOREPLY,
               context=context)
def compute_irc_user_fullname(email: NonBinaryStr) -> NonBinaryStr:
    return email.split("@")[0] + " (IRC)"
def compute_jabber_user_fullname(email: NonBinaryStr) -> NonBinaryStr:
    return email.split("@")[0] + " (XMPP)"
def compute_mit_user_fullname(email: NonBinaryStr) -> NonBinaryStr:
    try:
        # Input is either e.g. username@mit.edu or user|CROSSREALM.INVALID@mit.edu
        match_user = re.match(r'^([a-zA-Z0-9_.-]+)(\|.+)?@mit\.edu$', email.lower())
        if match_user and match_user.group(2) is None:
            answer = DNS.dnslookup(
                "%s.passwd.ns.athena.mit.edu" % (match_user.group(1),),
                DNS.Type.TXT)
            hesiod_name = force_str(answer[0][0]).split(':')[4].split(',')[0].strip()
            if hesiod_name != "":
                return hesiod_name
        elif match_user:
            return match_user.group(1).lower() + "@" + match_user.group(2).upper()[1:]
    except DNS.Base.ServerError:
        pass
    except Exception:
        print("Error getting fullname for %s:" % (email,))
        traceback.print_exc()
    return email.lower()
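# A hedged example of the Hesiod parsing above (the record is illustrative, not
# from the source): a passwd-style TXT answer such as
#     "jdoe:*:1234:5678:Jane Doe,,,:/mit/jdoe:/bin/bash"
# is split on ':' to take field 4 ("Jane Doe,,,"), then on ',' to yield the
# full name "Jane Doe".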
@cache_with_key(lambda realm, email, f: user_profile_by_email_cache_key(email),
                timeout=3600*24*7)
def create_mirror_user_if_needed(realm: Realm, email: Text,
                                 email_to_fullname: Callable[[Text], Text]) -> UserProfile:
    try:
        return get_user(email, realm)
    except UserProfile.DoesNotExist:
        try:
            # Forge a user for this person
            return create_user(email, None, realm,
                               email_to_fullname(email), email_to_username(email),
                               active=False, is_mirror_dummy=True)
        except IntegrityError:
            return get_user(email, realm)
def send_welcome_bot_response(message: MutableMapping[str, Any]) -> None:
    welcome_bot = get_system_bot(settings.WELCOME_BOT)
    human_recipient = get_personal_recipient(message['message'].sender.id)
    if Message.objects.filter(sender=welcome_bot, recipient=human_recipient).count() < 2:
        internal_send_private_message(
            message['realm'], welcome_bot, message['message'].sender,
            "Congratulations on your first reply! :tada:\n\n"
            "Feel free to continue using this space to practice your new messaging "
            "skills. Or, try clicking on some of the stream names to your left!")
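# A reading of the count() < 2 guard above (my interpretation, not from the
# source): the bot only replies while it has sent fewer than two messages to
# this user, so assuming the onboarding PM from send_initial_pms is the first,
# the congratulations reply is sent at most once.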
def render_incoming_message(message: Message,
                            content: Text,
                            user_ids: Set[int],
                            realm: Realm,
                            mention_data: Optional[bugdown.MentionData]=None,
                            email_gateway: Optional[bool]=False) -> Text:
    realm_alert_words = alert_words_in_realm(realm)
    try:
        rendered_content = render_markdown(
            message=message,
            content=content,
            realm=realm,
            realm_alert_words=realm_alert_words,
            user_ids=user_ids,
            mention_data=mention_data,
            email_gateway=email_gateway,
        )
    except BugdownRenderingException:
        raise JsonableError(_('Unable to render message'))
    return rendered_content
def get_typing_user_profiles(recipient: Recipient, sender_id: int) -> List[UserProfile]:
    if recipient.type == Recipient.STREAM:
        '''
        We don't support typing indicators for streams because they
        are expensive and initial user feedback was they were too
        distracting.
        '''
        raise ValueError('Typing indicators not supported for streams')

    if recipient.type == Recipient.PERSONAL:
        # The sender and recipient may be the same id, so
        # de-duplicate using a set.
        user_ids = list({recipient.type_id, sender_id})
        assert(len(user_ids) in [1, 2])

    elif recipient.type == Recipient.HUDDLE:
        user_ids = get_huddle_user_ids(recipient)

    else:
        raise ValueError('Bad recipient type')

    users = [get_user_profile_by_id(user_id) for user_id in user_ids]
    return users
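# For example (ids are hypothetical): for a personal Recipient with type_id 7
# and sender_id 7 (someone typing to themselves), the set collapses to a single
# id and this returns one UserProfile; for distinct ids it returns both parties.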
RecipientInfoResult = TypedDict('RecipientInfoResult', {
    'active_user_ids': Set[int],
    'push_notify_user_ids': Set[int],
    'stream_push_user_ids': Set[int],
    'um_eligible_user_ids': Set[int],
    'long_term_idle_user_ids': Set[int],
    'default_bot_user_ids': Set[int],
    'service_bot_tuples': List[Tuple[int, int]],
})
def get_recipient_info(recipient: Recipient,
                       sender_id: int,
                       stream_topic: Optional[StreamTopicTarget],
                       possibly_mentioned_user_ids: Optional[Set[int]]=None) -> RecipientInfoResult:
    if recipient.type == Recipient.STREAM:
        # Anybody calling us w/r/t a stream message needs to supply
        # stream_topic.  We may eventually want to have different versions
        # of this function for different message types.
        assert(stream_topic is not None)

    stream_push_user_ids = set()  # type: Set[int]

    if recipient.type == Recipient.PERSONAL:
        # The sender and recipient may be the same id, so
        # de-duplicate using a set.
        message_to_user_ids = list({recipient.type_id, sender_id})
        assert(len(message_to_user_ids) in [1, 2])

    elif recipient.type == Recipient.STREAM:
        subscription_rows = stream_topic.get_active_subscriptions().values(
            'user_profile_id',
            'push_notifications',
            'in_home_view',
        ).order_by('user_profile_id')

        message_to_user_ids = [
            row['user_profile_id']
            for row in subscription_rows
        ]

        stream_push_user_ids = {
            row['user_profile_id']
            for row in subscription_rows
            # Note: muting a stream overrides stream_push_notify
            if row['push_notifications'] and row['in_home_view']
        } - stream_topic.user_ids_muting_topic()

    elif recipient.type == Recipient.HUDDLE:
        message_to_user_ids = get_huddle_user_ids(recipient)

    else:
        raise ValueError('Bad recipient type')

    message_to_user_id_set = set(message_to_user_ids)

    user_ids = set(message_to_user_id_set)
    if possibly_mentioned_user_ids:
        # Important note: Because we haven't rendered bugdown yet, we
        # don't yet know which of these possibly-mentioned users was
        # actually mentioned in the message (in other words, the
        # mention syntax might have been in a code block or otherwise
        # escaped).  `get_ids_for` will filter these extra user rows
        # for our data structures not related to bots
        user_ids |= possibly_mentioned_user_ids

    if user_ids:
        query = UserProfile.objects.filter(
            is_active=True,
        ).values(
            'id',
            'enable_online_push_notifications',
            'is_bot',
            'bot_type',
            'long_term_idle',
        )

        # query_for_ids is fast, highly optimized code for large queries, and we
        # need this codepath to be fast (it's part of sending messages)
        query = query_for_ids(
            query=query,
            user_ids=sorted(list(user_ids)),
            field='id'
        )
        rows = list(query)
    else:
        # TODO: We should always have at least one user_id as a recipient
        # of any message we send.  Right now the exception to this
        # rule is `notify_new_user`, which, at least in a possibly
        # contrived test scenario, can attempt to send messages
        # to an inactive bot.  When we plug that hole, we can avoid
        # this `else` clause and just `assert(user_ids)`.
        rows = []

    def get_ids_for(f: Callable[[Dict[str, Any]], bool]) -> Set[int]:
        """Only includes users on the explicit message to line"""
        return {
            row['id']
            for row in rows
            if f(row)
        } & message_to_user_id_set

    def is_service_bot(row: Dict[str, Any]) -> bool:
        return row['is_bot'] and (row['bot_type'] in UserProfile.SERVICE_BOT_TYPES)

    active_user_ids = get_ids_for(lambda r: True)
    push_notify_user_ids = get_ids_for(
        lambda r: r['enable_online_push_notifications']
    )

    # Service bots don't get UserMessage rows.
    um_eligible_user_ids = get_ids_for(
        lambda r: not is_service_bot(r)
    )

    long_term_idle_user_ids = get_ids_for(
        lambda r: r['long_term_idle']
    )

    # These two bot data structures need to filter from the full set
    # of users who either are receiving the message or might have been
    # mentioned in it, and so can't use get_ids_for.
    #
    # Further in the do_send_messages code path, once
    # `mentioned_user_ids` has been computed via bugdown, we'll filter
    # these data structures for just those users who are either a
    # direct recipient or were mentioned; for now, we're just making
    # sure we have the data we need for that without extra database
    # queries.
    default_bot_user_ids = set([
        row['id']
        for row in rows
        if row['is_bot'] and row['bot_type'] == UserProfile.DEFAULT_BOT
    ])

    service_bot_tuples = [
        (row['id'], row['bot_type'])
        for row in rows
        if is_service_bot(row)
    ]

    info = dict(
        active_user_ids=active_user_ids,
        push_notify_user_ids=push_notify_user_ids,
        stream_push_user_ids=stream_push_user_ids,
        um_eligible_user_ids=um_eligible_user_ids,
        long_term_idle_user_ids=long_term_idle_user_ids,
        default_bot_user_ids=default_bot_user_ids,
        service_bot_tuples=service_bot_tuples
    )  # type: RecipientInfoResult
    return info
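# A sketch of a possible return value for a two-person PM (ids are
# hypothetical): the get_ids_for-derived sets are already intersected with the
# explicit recipients, while the bot structures still cover all fetched rows:
#     dict(active_user_ids={1, 2}, push_notify_user_ids={2},
#          stream_push_user_ids=set(), um_eligible_user_ids={1, 2},
#          long_term_idle_user_ids=set(), default_bot_user_ids=set(),
#          service_bot_tuples=[])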
2017-10-24 20:08:19 +02:00
|
|
|
def get_service_bot_events(sender, service_bot_tuples, mentioned_user_ids,
|
|
|
|
active_user_ids, recipient_type):
|
2017-10-24 19:25:50 +02:00
|
|
|
# type: (UserProfile, List[Tuple[int, int]], Set[int], Set[int], int) -> Dict[str, List[Dict[str, Any]]]
|
2017-09-26 23:55:15 +02:00
|
|
|
|
|
|
|
event_dict = defaultdict(list) # type: Dict[str, List[Dict[str, Any]]]
|
|
|
|
|
|
|
|
# Avoid infinite loops by preventing messages sent by bots from generating
|
|
|
|
# Service events.
|
|
|
|
if sender.is_bot:
|
|
|
|
return event_dict
|
|
|
|
|
|
|
|
for user_profile_id, bot_type in service_bot_tuples:
|
|
|
|
if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
|
|
|
|
queue_name = 'outgoing_webhooks'
|
|
|
|
elif bot_type == UserProfile.EMBEDDED_BOT:
|
|
|
|
queue_name = 'embedded_bots'
|
|
|
|
else:
|
|
|
|
logging.error(
|
|
|
|
'Unexpected bot_type for Service bot id=%s: %s' %
|
|
|
|
(user_profile_id, bot_type))
|
|
|
|
continue
|
|
|
|
|
2017-10-24 19:25:50 +02:00
|
|
|
is_stream = (recipient_type == Recipient.STREAM)
|
|
|
|
|
2017-10-27 02:11:04 +02:00
|
|
|
# Important note: service_bot_tuples may contain service bots
|
|
|
|
# who were not actually mentioned in the message (e.g. if
|
|
|
|
# mention syntax for that bot appeared in a code block).
|
|
|
|
# Thus, it is important to filter any users who aren't part of
|
|
|
|
# either mentioned_user_ids (the actual mentioned users) or
|
|
|
|
# active_user_ids (the actual recipients).
|
|
|
|
#
|
|
|
|
# So even though this is implied by the logic below, we filter
|
|
|
|
# these not-actually-mentioned users here, to help keep[ this
|
|
|
|
# function future-proof.
|
|
|
|
if user_profile_id not in mentioned_user_ids and user_profile_id not in active_user_ids:
|
|
|
|
continue
|
|
|
|
|
2017-09-26 23:55:15 +02:00
|
|
|
# Mention triggers, primarily for stream messages
|
|
|
|
if user_profile_id in mentioned_user_ids:
|
|
|
|
trigger = 'mention'
|
|
|
|
# PM triggers for personal and huddle messsages
|
2017-10-24 19:25:50 +02:00
|
|
|
elif (not is_stream) and (user_profile_id in active_user_ids):
|
2017-09-26 23:55:15 +02:00
|
|
|
trigger = 'private_message'
|
|
|
|
else:
|
|
|
|
continue
|
|
|
|
|
|
|
|
event_dict[queue_name].append({
|
|
|
|
'trigger': trigger,
|
|
|
|
'user_profile_id': user_profile_id,
|
|
|
|
})
|
|
|
|
|
|
|
|
return event_dict
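
# A minimal usage sketch (not executed; `human_sender` and the ids are
# hypothetical): a stream message sent by a human that mentions an outgoing
# webhook bot with id 20 produces one 'mention' event on the
# 'outgoing_webhooks' queue:
#
#     events = get_service_bot_events(
#         sender=human_sender,  # human_sender.is_bot is False
#         service_bot_tuples=[(20, UserProfile.OUTGOING_WEBHOOK_BOT)],
#         mentioned_user_ids={20},
#         active_user_ids={10},
#         recipient_type=Recipient.STREAM,
#     )
#     # events == {'outgoing_webhooks': [{'trigger': 'mention',
#     #                                   'user_profile_id': 20}]}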

def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, Any]]],
                     email_gateway: Optional[bool]=False) -> List[int]:
    # Filter out messages which didn't pass internal_prep_message properly
    messages = [message for message in messages_maybe_none if message is not None]

    # Filter out zephyr mirror anomalies where the message was already sent
    already_sent_ids = []  # type: List[int]
    new_messages = []  # type: List[MutableMapping[str, Any]]
    for message in messages:
        if isinstance(message['message'], int):
            already_sent_ids.append(message['message'])
        else:
            new_messages.append(message)
    messages = new_messages

    links_for_embed = set()  # type: Set[Text]
    # For consistency, changes to the default values for these gets should also be applied
    # to the default args in do_send_message
    for message in messages:
        message['rendered_content'] = message.get('rendered_content', None)
        message['stream'] = message.get('stream', None)
        message['local_id'] = message.get('local_id', None)
        message['sender_queue_id'] = message.get('sender_queue_id', None)
        message['realm'] = message.get('realm', message['message'].sender.realm)

        mention_data = bugdown.MentionData(
            realm_id=message['realm'].id,
            content=message['message'].content,
        )
        message['mention_data'] = mention_data

        if message['message'].is_stream_message():
            stream_id = message['message'].recipient.type_id
            stream_topic = StreamTopicTarget(
                stream_id=stream_id,
                topic_name=message['message'].topic_name()
            )
        else:
            stream_topic = None

        info = get_recipient_info(
            recipient=message['message'].recipient,
            sender_id=message['message'].sender_id,
            stream_topic=stream_topic,
            possibly_mentioned_user_ids=mention_data.get_user_ids(),
        )

        message['active_user_ids'] = info['active_user_ids']
        message['push_notify_user_ids'] = info['push_notify_user_ids']
        message['stream_push_user_ids'] = info['stream_push_user_ids']
        message['um_eligible_user_ids'] = info['um_eligible_user_ids']
        message['long_term_idle_user_ids'] = info['long_term_idle_user_ids']
        message['default_bot_user_ids'] = info['default_bot_user_ids']
        message['service_bot_tuples'] = info['service_bot_tuples']

        # Render our messages.
        assert message['message'].rendered_content is None

        rendered_content = render_incoming_message(
            message['message'],
            message['message'].content,
            message['active_user_ids'],
            message['realm'],
            mention_data=message['mention_data'],
            email_gateway=email_gateway,
        )
        message['message'].rendered_content = rendered_content
        message['message'].rendered_content_version = bugdown_version
        links_for_embed |= message['message'].links_for_preview

        # Add members of the mentioned user groups into `mentions_user_ids`.
        mention_data = message['mention_data']
        for group_id in message['message'].mentions_user_group_ids:
            members = message['mention_data'].get_group_members(group_id)
            message['message'].mentions_user_ids.update(members)

        '''
        Once we have the actual list of mentioned ids from message
        rendering, we can patch in "default bots" (aka normal bots)
        who were directly mentioned in this message as eligible to
        get UserMessage rows.
        '''
        mentioned_user_ids = message['message'].mentions_user_ids
        default_bot_user_ids = message['default_bot_user_ids']
        mentioned_bot_user_ids = default_bot_user_ids & mentioned_user_ids
        message['um_eligible_user_ids'] |= mentioned_bot_user_ids

        # Update calculated fields of the message
        message['message'].update_calculated_fields()

    # Save the message receipts in the database
    user_message_flags = defaultdict(dict)  # type: Dict[int, Dict[int, List[str]]]
    with transaction.atomic():
        Message.objects.bulk_create([message['message'] for message in messages])
        ums = []  # type: List[UserMessageLite]
        for message in messages:
            # Service bots (outgoing webhook bots and embedded bots) don't store UserMessage rows;
            # they will be processed later.
            mentioned_user_ids = message['message'].mentions_user_ids
            user_messages = create_user_messages(
                message=message['message'],
                um_eligible_user_ids=message['um_eligible_user_ids'],
                long_term_idle_user_ids=message['long_term_idle_user_ids'],
                mentioned_user_ids=mentioned_user_ids,
            )

            for um in user_messages:
                user_message_flags[message['message'].id][um.user_profile_id] = um.flags_list()

            ums.extend(user_messages)

            message['message'].service_queue_events = get_service_bot_events(
                sender=message['message'].sender,
                service_bot_tuples=message['service_bot_tuples'],
                mentioned_user_ids=mentioned_user_ids,
                active_user_ids=message['active_user_ids'],
                recipient_type=message['message'].recipient.type,
            )

        bulk_insert_ums(ums)

        # Claim attachments in message
        for message in messages:
            if Message.content_has_attachment(message['message'].content):
                do_claim_attachments(message['message'])

    for message in messages:
        # Deliver events to the real-time push system, as well as
        # enqueuing any additional processing triggered by the message.
        wide_message_dict = MessageDict.wide_dict(message['message'])

        user_flags = user_message_flags.get(message['message'].id, {})
        sender = message['message'].sender
        message_type = wide_message_dict['type']

        presence_idle_user_ids = get_active_presence_idle_user_ids(
            realm=sender.realm,
            sender_id=sender.id,
            message_type=message_type,
            active_user_ids=message['active_user_ids'],
            user_flags=user_flags,
        )

        event = dict(
            type='message',
            message=message['message'].id,
            message_dict=wide_message_dict,
            presence_idle_user_ids=presence_idle_user_ids,
        )

        '''
        TODO: We may want to limit user_ids to only those users who have
        UserMessage rows, if only for minor performance reasons.

        For now we queue events for all subscribers/sendees of the
        message, since downstream code may still do notifications
        that don't require UserMessage rows.

        Our automated tests have gotten better on this codepath,
        but we may have coverage gaps, so we should be careful
        about changing the next line.
        '''
        user_ids = message['active_user_ids'] | set(user_flags.keys())

        users = [
            dict(
                id=user_id,
                flags=user_flags.get(user_id, []),
                always_push_notify=(user_id in message['push_notify_user_ids']),
                stream_push_notify=(user_id in message['stream_push_user_ids']),
            )
            for user_id in user_ids
        ]

        if message['message'].is_stream_message():
            # Note: This is where authorization for single-stream
            # get_updates happens! We only attach stream data to the
            # notify new_message request if it's a public stream,
            # ensuring that in the tornado server, non-public stream
            # messages are only associated to their subscribed users.
            if message['stream'] is None:
                stream_id = message['message'].recipient.type_id
                message['stream'] = Stream.objects.select_related("realm").get(id=stream_id)
            assert message['stream'] is not None  # assert needed because stubs for django are missing
            if message['stream'].is_public():
                event['realm_id'] = message['stream'].realm_id
                event['stream_name'] = message['stream'].name
            if message['stream'].invite_only:
                event['invite_only'] = True
        if message['local_id'] is not None:
            event['local_id'] = message['local_id']
        if message['sender_queue_id'] is not None:
            event['sender_queue_id'] = message['sender_queue_id']
        send_event(event, users)

        if url_embed_preview_enabled_for_realm(message['message']) and links_for_embed:
            event_data = {
                'message_id': message['message'].id,
                'message_content': message['message'].content,
                'message_realm_id': message['realm'].id,
                'urls': links_for_embed}
            queue_json_publish('embed_links', event_data)

        if (settings.ENABLE_FEEDBACK and settings.FEEDBACK_BOT and
                message['message'].recipient.type == Recipient.PERSONAL):

            feedback_bot_id = get_system_bot(email=settings.FEEDBACK_BOT).id
            if feedback_bot_id in message['active_user_ids']:
                queue_json_publish(
                    'feedback_messages',
                    wide_message_dict,
                    lambda x: None
                )

        if message['message'].recipient.type == Recipient.PERSONAL:
            welcome_bot_id = get_system_bot(settings.WELCOME_BOT).id
            if (welcome_bot_id in message['active_user_ids'] and
                    welcome_bot_id != message['message'].sender_id):
                send_welcome_bot_response(message)

        for queue_name, events in message['message'].service_queue_events.items():
            for event in events:
                queue_json_publish(
                    queue_name,
                    {
                        "message": wide_message_dict,
                        "trigger": event['trigger'],
                        "user_profile_id": event["user_profile_id"],
                    }
                )

    # Note that this does not preserve the order of message ids
    # returned.  In practice, this shouldn't matter, as we only
    # mirror single zephyr messages at a time and don't otherwise
    # intermingle sending zephyr messages with other messages.
    return already_sent_ids + [message['message'].id for message in messages]
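
# Typical call pattern (sketch, mirroring the check_send_* helpers later in
# this file; `sender`, `client`, and `addressee` are assumed to be in scope):
#
#     message = check_message(sender, client, addressee, body)
#     [message_id] = do_send_messages([message])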

class UserMessageLite:
    '''
    The Django ORM is too slow for bulk operations.  This class
    is optimized for the simple use case of inserting a bunch of
    rows into zerver_usermessage.
    '''
    def __init__(self, user_profile_id: int, message_id: int) -> None:
        self.user_profile_id = user_profile_id
        self.message_id = message_id
        self.flags = 0

    def flags_list(self) -> List[str]:
        return UserMessage.flags_list_for_flags(self.flags)
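
# Sketch of how this class is used below (the ids are illustrative):
#
#     um = UserMessageLite(user_profile_id=10, message_id=123)
#     um.flags |= UserMessage.flags.mentioned
#     um.flags_list()  # e.g. ['mentioned']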

def create_user_messages(message: Message,
                         um_eligible_user_ids: Set[int],
                         long_term_idle_user_ids: Set[int],
                         mentioned_user_ids: Set[int]) -> List[UserMessageLite]:
    ums_to_create = []

    for user_profile_id in um_eligible_user_ids:
        um = UserMessageLite(
            user_profile_id=user_profile_id,
            message_id=message.id,
        )
        ums_to_create.append(um)

    # These properties on the Message are set via
    # render_markdown by code in the bugdown inline patterns
    wildcard = message.mentions_wildcard
    ids_with_alert_words = message.user_ids_with_alert_words

    for um in ums_to_create:
        if um.user_profile_id == message.sender.id and \
                message.sent_by_human():
            um.flags |= UserMessage.flags.read
        if wildcard:
            um.flags |= UserMessage.flags.wildcard_mentioned
        if um.user_profile_id in mentioned_user_ids:
            um.flags |= UserMessage.flags.mentioned
        if um.user_profile_id in ids_with_alert_words:
            um.flags |= UserMessage.flags.has_alert_word

    user_messages = []
    for um in ums_to_create:
        if (um.user_profile_id in long_term_idle_user_ids and
                message.is_stream_message() and
                int(um.flags) == 0):
            continue
        user_messages.append(um)

    return user_messages
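
# Worked example (illustrative ids, not from the codebase): for a stream
# message sent by user 10 that mentions user 30, with user 20 long-term idle:
#
#     ums = create_user_messages(
#         message=message,  # assume message.sender.id == 10
#         um_eligible_user_ids={10, 20, 30},
#         long_term_idle_user_ids={20},
#         mentioned_user_ids={30},
#     )
#     # User 10's row gets the 'read' flag, user 30's gets 'mentioned';
#     # user 20's flagless stream row is skipped here, since rows for
#     # long-term idle users can be created lazily later.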

def bulk_insert_ums(ums: List[UserMessageLite]) -> None:
    '''
    Doing bulk inserts this way is much faster than using Django,
    since we don't have any ORM overhead.  Profiling with 1000
    users shows a speedup of 0.436 -> 0.027 seconds, so we're
    talking about a 15x speedup.
    '''
    if not ums:
        return

    vals = ','.join([
        '(%d, %d, %d)' % (um.user_profile_id, um.message_id, um.flags)
        for um in ums
    ])
    query = '''
        INSERT into
            zerver_usermessage (user_profile_id, message_id, flags)
        VALUES
        ''' + vals

    with connection.cursor() as cursor:
        cursor.execute(query)
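
# For two rows, the SQL built above would look like this sketch (values are
# illustrative):
#
#     INSERT into
#         zerver_usermessage (user_profile_id, message_id, flags)
#     VALUES
#     (10, 123, 0),(11, 123, 8)
#
# Since the values are interpolated with %d, only integers can reach the
# query string, which is what makes this string-building approach safe here.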

def notify_reaction_update(user_profile: UserProfile, message: Message,
                           reaction: Reaction, op: Text) -> None:
    user_dict = {'user_id': user_profile.id,
                 'email': user_profile.email,
                 'full_name': user_profile.full_name}

    event = {'type': 'reaction',
             'op': op,
             'user': user_dict,
             'message_id': message.id,
             'emoji_name': reaction.emoji_name,
             'emoji_code': reaction.emoji_code,
             'reaction_type': reaction.reaction_type}  # type: Dict[str, Any]

    # Update the cached message, since a new reaction was added.
    update_to_dict_cache([message])

    # Recipients for message update events, including reactions, are
    # everyone who got the original message.  This means reactions
    # won't live-update in preview narrows, but it's the right
    # performance tradeoff, since otherwise we'd need to send all
    # reactions to public stream messages to every browser for every
    # client in the organization, which doesn't scale.
    #
    # However, to ensure that reactions do live-update for any user
    # who has actually participated in reacting to a message, we add a
    # "historical" UserMessage row for any user who reacts to a message,
    # subscribing them to future notifications.
    ums = UserMessage.objects.filter(message=message.id)
    send_event(event, [um.user_profile_id for um in ums])

def do_add_reaction_legacy(user_profile: UserProfile, message: Message, emoji_name: Text) -> None:
    (emoji_code, reaction_type) = emoji_name_to_emoji_code(user_profile.realm, emoji_name)
    reaction = Reaction(user_profile=user_profile, message=message,
                        emoji_name=emoji_name, emoji_code=emoji_code,
                        reaction_type=reaction_type)
    reaction.save()
    notify_reaction_update(user_profile, message, reaction, "add")

def do_remove_reaction_legacy(user_profile: UserProfile, message: Message, emoji_name: Text) -> None:
    reaction = Reaction.objects.filter(user_profile=user_profile,
                                       message=message,
                                       emoji_name=emoji_name).get()
    reaction.delete()
    notify_reaction_update(user_profile, message, reaction, "remove")

def do_add_reaction(user_profile: UserProfile, message: Message,
                    emoji_name: Text, emoji_code: Text, reaction_type: Text) -> None:
    reaction = Reaction(user_profile=user_profile, message=message,
                        emoji_name=emoji_name, emoji_code=emoji_code,
                        reaction_type=reaction_type)
    reaction.save()
    notify_reaction_update(user_profile, message, reaction, "add")

def do_remove_reaction(user_profile: UserProfile, message: Message,
                       emoji_code: Text, reaction_type: Text) -> None:
    reaction = Reaction.objects.filter(user_profile=user_profile,
                                       message=message,
                                       emoji_code=emoji_code,
                                       reaction_type=reaction_type).get()
    reaction.delete()
    notify_reaction_update(user_profile, message, reaction, "remove")
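
# Sketch of the client-facing event these helpers emit via
# notify_reaction_update (field values are illustrative):
#
#     {'type': 'reaction', 'op': 'add',
#      'user': {'user_id': 10, 'email': 'user@example.com',
#               'full_name': 'Example User'},
#      'message_id': 123,
#      'emoji_name': 'octopus', 'emoji_code': '1f419',
#      'reaction_type': 'unicode_emoji'}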

def do_send_typing_notification(notification: Dict[str, Any]) -> None:
    recipient_user_profiles = get_typing_user_profiles(notification['recipient'],
                                                       notification['sender'].id)
    # Only deliver the notification to active user recipients
    user_ids_to_notify = [profile.id for profile in recipient_user_profiles if profile.is_active]
    sender_dict = {'user_id': notification['sender'].id, 'email': notification['sender'].email}
    # Include a list of recipients in the event body to help identify where the typing is happening
    recipient_dicts = [{'user_id': profile.id, 'email': profile.email}
                       for profile in recipient_user_profiles]
    event = dict(
        type = 'typing',
        op = notification['op'],
        sender = sender_dict,
        recipients = recipient_dicts)

    send_event(event, user_ids_to_notify)

# check_send_typing_notification:
# Checks the typing notification and sends it
def check_send_typing_notification(sender: UserProfile, notification_to: Sequence[Text],
                                   operator: Text) -> None:
    typing_notification = check_typing_notification(sender, notification_to, operator)
    do_send_typing_notification(typing_notification)

# check_typing_notification:
# Returns a typing notification ready for sending with
# do_send_typing_notification on success, or raises a JsonableError on error.
def check_typing_notification(sender: UserProfile, notification_to: Sequence[Text],
                              operator: Text) -> Dict[str, Any]:
    if len(notification_to) == 0:
        raise JsonableError(_('Missing parameter: \'to\' (recipient)'))
    elif operator not in ('start', 'stop'):
        raise JsonableError(_('Invalid \'op\' value (should be start or stop)'))
    else:
        try:
            recipient = recipient_for_emails(notification_to, False,
                                             sender, sender)
        except ValidationError as e:
            assert isinstance(e.messages[0], str)
            raise JsonableError(e.messages[0])
    if recipient.type == Recipient.STREAM:
        raise ValueError('Forbidden recipient type')
    return {'sender': sender, 'recipient': recipient, 'op': operator}
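
# Example flow (sketch; the emails and ids are made up): a client starting to
# type in a private conversation ends up calling
#
#     check_send_typing_notification(sender, ['hamlet@example.com'], 'start')
#
# which validates the recipients and emits a 'typing' event shaped like:
#
#     {'type': 'typing', 'op': 'start',
#      'sender': {'user_id': 10, 'email': 'othello@example.com'},
#      'recipients': [{'user_id': 20, 'email': 'hamlet@example.com'}]}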

def stream_welcome_message(stream: Stream) -> Text:
    content = _('Welcome to #**%s**.') % (stream.name,)

    if stream.description:
        content += '\n\n**' + _('Description') + '**: '
        content += stream.description

    return content
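
# Example output (illustrative stream name and description): for a stream
# named "design" with a description set, the returned source would be:
#
#     Welcome to #**design**.
#
#     **Description**: Discussion of design work.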

def prep_stream_welcome_message(stream: Stream) -> Optional[Dict[str, Any]]:
    realm = stream.realm
    sender = get_system_bot(settings.WELCOME_BOT)
    topic = _('hello')
    content = stream_welcome_message(stream)

    message = internal_prep_stream_message(
        realm=realm,
        sender=sender,
        stream_name=stream.name,
        topic=topic,
        content=content)

    return message

def send_stream_creation_event(stream: Stream, user_ids: List[int]) -> None:
    event = dict(type="stream", op="create",
                 streams=[stream.to_dict()])
    send_event(event, user_ids)

def create_stream_if_needed(realm: Realm,
                            stream_name: Text,
                            invite_only: bool=False,
                            stream_description: Text="") -> Tuple[Stream, bool]:
    (stream, created) = Stream.objects.get_or_create(
        realm=realm,
        name__iexact=stream_name,
        defaults = dict(
            name=stream_name,
            description=stream_description,
            invite_only=invite_only,
            is_in_zephyr_realm=realm.is_zephyr_mirror_realm
        )
    )

    if created:
        Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        if stream.is_public():
            send_stream_creation_event(stream, active_user_ids(stream.realm_id))
    return stream, created
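
# Usage sketch: the lookup is case-insensitive (name__iexact), so asking for
# "Design" when "design" already exists returns the existing stream:
#
#     stream, created = create_stream_if_needed(realm, 'Design')
#     # created is False if a stream named 'design' (any case) already exists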

def create_streams_if_needed(realm: Realm,
                             stream_dicts: List[Mapping[str, Any]]) -> Tuple[List[Stream], List[Stream]]:
    """Note that stream_dict["name"] is assumed to already be stripped of
    whitespace"""
    added_streams = []  # type: List[Stream]
    existing_streams = []  # type: List[Stream]
    for stream_dict in stream_dicts:
        stream, created = create_stream_if_needed(realm,
                                                  stream_dict["name"],
                                                  invite_only=stream_dict.get("invite_only", False),
                                                  stream_description=stream_dict.get("description", ""))

        if created:
            added_streams.append(stream)
        else:
            existing_streams.append(stream)

    return added_streams, existing_streams

def get_recipient_from_user_ids(recipient_profile_ids: Set[int],
                                not_forged_mirror_message: bool,
                                forwarder_user_profile: Optional[UserProfile],
                                sender: UserProfile) -> Recipient:

    # Avoid mutating the passed in set of recipient_profile_ids.
    recipient_profile_ids = set(recipient_profile_ids)

    # If the private message is just between the sender and
    # another person, force it to be a personal internally.

    if not_forged_mirror_message:
        assert forwarder_user_profile is not None
        if forwarder_user_profile.id not in recipient_profile_ids:
            raise ValidationError(_("User not authorized for this query"))

    if (len(recipient_profile_ids) == 2 and sender.id in recipient_profile_ids):
        recipient_profile_ids.remove(sender.id)

    if len(recipient_profile_ids) > 1:
        # Make sure the sender is included in huddle messages
        recipient_profile_ids.add(sender.id)
        return get_huddle_recipient(recipient_profile_ids)
    else:
        return get_personal_recipient(list(recipient_profile_ids)[0])
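
# Worked example (hypothetical ids; `sender` has id 10): a non-mirror PM from
# user 10 to user 20 arrives as {10, 20}; the sender is dropped, leaving one
# id, so a PERSONAL recipient for user 20 is returned.  With three or more
# participants, the sender is re-added and a HUDDLE recipient is returned:
#
#     get_recipient_from_user_ids({10, 20}, False, None, sender)      # personal
#     get_recipient_from_user_ids({10, 20, 30}, False, None, sender)  # huddle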

def validate_recipient_user_profiles(user_profiles: List[UserProfile],
                                     sender: UserProfile) -> Set[int]:
    recipient_profile_ids = set()

    # We exempt cross-realm bots from the check that all the recipients
    # are in the same realm.
    realms = set()
    if not is_cross_realm_bot_email(sender.email):
        realms.add(sender.realm_id)

    for user_profile in user_profiles:
        if (not user_profile.is_active and not user_profile.is_mirror_dummy) or \
                user_profile.realm.deactivated:
            raise ValidationError(_("'%s' is no longer using Zulip.") % (user_profile.email,))
        recipient_profile_ids.add(user_profile.id)
        if not is_cross_realm_bot_email(user_profile.email):
            realms.add(user_profile.realm_id)

    if len(realms) > 1:
        raise ValidationError(_("You can't send private messages outside of your organization."))

    return recipient_profile_ids

def recipient_for_emails(emails, not_forged_mirror_message,
                         forwarder_user_profile, sender):
    # type: (Iterable[Text], bool, Optional[UserProfile], UserProfile) -> Recipient

    user_profiles = user_profiles_from_unvalidated_emails(emails, sender.realm)

    return recipient_for_user_profiles(
        user_profiles=user_profiles,
        not_forged_mirror_message=not_forged_mirror_message,
        forwarder_user_profile=forwarder_user_profile,
        sender=sender
    )

def recipient_for_user_profiles(user_profiles, not_forged_mirror_message,
                                forwarder_user_profile, sender):
    # type: (List[UserProfile], bool, Optional[UserProfile], UserProfile) -> Recipient

    recipient_profile_ids = validate_recipient_user_profiles(user_profiles, sender)

    return get_recipient_from_user_ids(recipient_profile_ids, not_forged_mirror_message,
                                       forwarder_user_profile, sender)

def already_sent_mirrored_message_id(message: Message) -> Optional[int]:
    if message.recipient.type == Recipient.HUDDLE:
        # For huddle messages, we use a 10-second window because the
        # timestamps aren't guaranteed to actually match between two
        # copies of the same message.
        time_window = datetime.timedelta(seconds=10)
    else:
        time_window = datetime.timedelta(seconds=0)

    messages = Message.objects.filter(
        sender=message.sender,
        recipient=message.recipient,
        content=message.content,
        subject=message.subject,
        sending_client=message.sending_client,
        pub_date__gte=message.pub_date - time_window,
        pub_date__lte=message.pub_date + time_window)

    if messages.exists():
        return messages[0].id
    return None

def extract_recipients(s: Union[str, Iterable[Text]]) -> List[Text]:
    # We try to accept multiple incoming formats for recipients.
    # See test_extract_recipients() for examples of what we allow.
    try:
        data = ujson.loads(s)  # type: ignore # This function has a super weird union argument.
    except ValueError:
        data = s

    if isinstance(data, str):
        data = data.split(',')

    if not isinstance(data, list):
        raise ValueError("Invalid data type for recipients")

    recipients = data

    # Strip recipients, and then remove any duplicates and any that
    # are the empty string after being stripped.
    recipients = [recipient.strip() for recipient in recipients]
    return list(set(recipient for recipient in recipients if recipient))
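
# Accepted input formats (sketch; the addresses are made up — see
# test_extract_recipients for the authoritative examples).  Note that the
# final set() pass means the output order is not guaranteed:
#
#     extract_recipients('alice@example.com')             # single address
#     extract_recipients('alice@example.com, bob@example.com')
#     extract_recipients('["alice@example.com", "bob@example.com"]')  # JSON
#     extract_recipients(['alice@example.com', 'alice@example.com'])  # deduped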

def check_send_stream_message(sender: UserProfile, client: Client, stream_name: Text,
                              topic: Text, body: Text) -> int:
    addressee = Addressee.for_stream(stream_name, topic)
    message = check_message(sender, client, addressee, body)

    return do_send_messages([message])[0]

def check_send_private_message(sender: UserProfile, client: Client,
                               receiving_user: UserProfile, body: Text) -> int:
    addressee = Addressee.for_user_profile(receiving_user)
    message = check_message(sender, client, addressee, body)

    return do_send_messages([message])[0]

# check_send_message:
# Returns the id of the sent message.  Has same argspec as check_message.
def check_send_message(sender, client, message_type_name, message_to,
                       topic_name, message_content, realm=None, forged=False,
                       forged_timestamp=None, forwarder_user_profile=None, local_id=None,
                       sender_queue_id=None):
    # type: (UserProfile, Client, Text, Sequence[Text], Optional[Text], Text, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[Text], Optional[Text]) -> int

    addressee = Addressee.legacy_build(
        sender,
        message_type_name,
        message_to,
        topic_name)

    message = check_message(sender, client, addressee,
                            message_content, realm, forged, forged_timestamp,
                            forwarder_user_profile, local_id, sender_queue_id)
    return do_send_messages([message])[0]

def check_stream_name(stream_name: Text) -> None:
    if stream_name.strip() == "":
        raise JsonableError(_("Invalid stream name '%s'" % (stream_name)))
    if len(stream_name) > Stream.MAX_NAME_LENGTH:
        raise JsonableError(_("Stream name too long (limit: %s characters)" % (Stream.MAX_NAME_LENGTH)))
    for i in stream_name:
        if ord(i) == 0:
            raise JsonableError(_("Stream name '%s' contains NULL (0x00) characters." % (stream_name)))

def check_default_stream_group_name(group_name: Text) -> None:
    if group_name.strip() == "":
        raise JsonableError(_("Invalid default stream group name '%s'" % (group_name)))
    if len(group_name) > DefaultStreamGroup.MAX_NAME_LENGTH:
        raise JsonableError(_("Default stream group name too long (limit: %s characters)"
                              % (DefaultStreamGroup.MAX_NAME_LENGTH)))
    for i in group_name:
        if ord(i) == 0:
            raise JsonableError(_("Default stream group name '%s' contains NULL (0x00) characters."
                                  % (group_name)))

def send_pm_if_empty_stream(sender: UserProfile,
                            stream: Optional[Stream],
                            stream_name: Text,
                            realm: Realm) -> None:
    """If a bot sends a message to a stream that doesn't exist or has no
    subscribers, sends a notification to the bot owner (if not a
    cross-realm bot) so that the owner can correct the issue."""
    if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated:
        return

    if not sender.is_bot or sender.bot_owner is None:
        return

    # Don't send these notifications for cross-realm bot messages
    # (e.g. from EMAIL_GATEWAY_BOT) since the owner for
    # EMAIL_GATEWAY_BOT is probably the server administrator, not
    # the owner of the bot who could potentially fix the problem.
    if sender.realm != realm:
        return

    if stream is not None:
        num_subscribers = num_subscribers_for_stream_id(stream.id)
        if num_subscribers > 0:
            return

    # We warn the user once every 5 minutes to avoid a flood of
    # PMs on a misconfigured integration, re-using the
    # UserProfile.last_reminder field, which is not used for bots.
    last_reminder = sender.last_reminder
    waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD)
    if last_reminder and timezone_now() - last_reminder <= waitperiod:
        return

    if stream is None:
        error_msg = "that stream does not yet exist. To create it, "
    else:
        # num_subscribers == 0
        error_msg = "there are no subscribers to that stream. To join it, "

    content = ("Hi there! We thought you'd like to know that your bot **%s** just "
               "tried to send a message to stream `%s`, but %s"
               "click the gear in the left-side stream list." %
               (sender.full_name, stream_name, error_msg))

    internal_send_private_message(realm, get_system_bot(settings.NOTIFICATION_BOT),
                                  sender.bot_owner, content)

    sender.last_reminder = timezone_now()
    sender.save(update_fields=['last_reminder'])

# check_message:
# Returns a message dict ready for sending with do_send_messages on success,
# or raises a JsonableError on error.
def check_message(sender, client, addressee,
                  message_content_raw, realm=None, forged=False,
                  forged_timestamp=None, forwarder_user_profile=None, local_id=None,
                  sender_queue_id=None):
    # type: (UserProfile, Client, Addressee, Text, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[Text], Optional[Text]) -> Dict[str, Any]
    stream = None

    message_content = message_content_raw.rstrip()
    if len(message_content) == 0:
        raise JsonableError(_("Message must not be empty"))
    if '\x00' in message_content:
        raise JsonableError(_("Message must not contain null bytes"))

    message_content = truncate_body(message_content)

    if realm is None:
        realm = sender.realm

    if addressee.is_stream():
        stream_name = addressee.stream_name()
        stream_name = stream_name.strip()
        check_stream_name(stream_name)

        topic_name = addressee.topic()
        if topic_name is None:
            raise JsonableError(_("Missing topic"))
        topic_name = topic_name.strip()
        if topic_name == "":
            raise JsonableError(_("Topic can't be empty"))
        topic_name = truncate_topic(topic_name)

        try:
            stream = get_stream(stream_name, realm)

            send_pm_if_empty_stream(sender, stream, stream_name, realm)

        except Stream.DoesNotExist:
            send_pm_if_empty_stream(sender, None, stream_name, realm)
            raise JsonableError(_("Stream '%(stream_name)s' "
                                  "does not exist") % {'stream_name': escape(stream_name)})
        recipient = get_stream_recipient(stream.id)

        if not stream.invite_only:
            # This is a public stream
            pass
        elif subscribed_to_stream(sender, stream.id):
            # Or it is private, but you are subscribed
            pass
        elif sender.is_api_super_user or (forwarder_user_profile is not None and
                                          forwarder_user_profile.is_api_super_user):
            # Or this request is being done on behalf of a super user
            pass
        elif sender.is_bot and (sender.bot_owner is not None and
                                subscribed_to_stream(sender.bot_owner, stream.id)):
            # Or you're a bot and your owner is subscribed.
            pass
        elif sender.email == settings.WELCOME_BOT:
            # The welcome bot welcomes folks to the stream.
            pass
        elif sender.email == settings.NEW_USER_BOT:
            pass
        else:
            # All other cases are an error.
            raise JsonableError(_("Not authorized to send to stream '%s'") % (stream.name,))

    elif addressee.is_private():
        user_profiles = addressee.user_profiles()

        if user_profiles is None or len(user_profiles) == 0:
            raise JsonableError(_("Message must have recipients"))

        mirror_message = client and client.name in ["zephyr_mirror", "irc_mirror",
                                                    "jabber_mirror", "JabberMirror"]
        not_forged_mirror_message = mirror_message and not forged
        try:
            recipient = recipient_for_user_profiles(user_profiles, not_forged_mirror_message,
                                                    forwarder_user_profile, sender)
        except ValidationError as e:
            assert isinstance(e.messages[0], str)
            raise JsonableError(e.messages[0])
    else:
        raise JsonableError(_("Invalid message type"))

    message = Message()
    message.sender = sender
    message.content = message_content
    message.recipient = recipient
    if addressee.is_stream():
        message.subject = topic_name
    if forged and forged_timestamp is not None:
        # Forged messages come with a timestamp
        message.pub_date = timestamp_to_datetime(forged_timestamp)
    else:
        message.pub_date = timezone_now()
    message.sending_client = client

    # We render messages later in the process.
    assert message.rendered_content is None

    if client.name == "zephyr_mirror":
        id = already_sent_mirrored_message_id(message)
        if id is not None:
            return {'message': id}

    return {'message': message, 'stream': stream, 'local_id': local_id,
            'sender_queue_id': sender_queue_id, 'realm': realm}
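
# The returned dict (sketch) is the unit that do_send_messages consumes:
#
#     {'message': <unsaved Message>, 'stream': <Stream or None>,
#      'local_id': None, 'sender_queue_id': None, 'realm': <Realm>}
#
# In the zephyr_mirror dedup case, 'message' is instead the integer id of
# the already-sent message, which do_send_messages filters out.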

def _internal_prep_message(realm: Realm,
                           sender: UserProfile,
                           addressee: Addressee,
                           content: Text) -> Optional[Dict[str, Any]]:
    """
    Creates a message object and checks it, but doesn't send it or save it to the database.
    The internal function that calls this can therefore batch send a bunch of created
    messages together as one database query.
    Call do_send_messages with a list of the return values of this method.
    """
    # Truncate the content if it is too long
    if len(content) > MAX_MESSAGE_LENGTH:
        content = content[0:3900] + "\n\n[message was too long and has been truncated]"

    if realm is None:
        raise RuntimeError("None is not a valid realm for internal_prep_message!")

    if addressee.is_stream():
        stream, _ = create_stream_if_needed(realm, addressee.stream_name())

    try:
        return check_message(sender, get_client("Internal"), addressee,
                             content, realm=realm)
    except JsonableError as e:
        logging.exception(u"Error queueing internal message by %s: %s" % (sender.email, e))

    return None

def internal_prep_stream_message(realm: Realm, sender: UserProfile,
                                 stream_name: Text, topic: Text,
                                 content: Text) -> Optional[Dict[str, Any]]:
    """
    See _internal_prep_message for details of how this works.
    """
    addressee = Addressee.for_stream(stream_name, topic)

    return _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )

def internal_prep_private_message(realm: Realm,
                                  sender: UserProfile,
                                  recipient_user: UserProfile,
                                  content: Text) -> Optional[Dict[str, Any]]:
    """
    See _internal_prep_message for details of how this works.
    """
    addressee = Addressee.for_user_profile(recipient_user)

    return _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )

def internal_send_message(realm, sender_email, recipient_type_name, recipients,
                          topic_name, content, email_gateway=False):
    # type: (Realm, Text, str, Text, Text, Text, Optional[bool]) -> None
    """internal_send_message should only be used where `sender_email` is a
    system bot."""

    # Verify the user is in fact a system bot
    assert(is_cross_realm_bot_email(sender_email) or sender_email == settings.ERROR_BOT)

    sender = get_system_bot(sender_email)
    parsed_recipients = extract_recipients(recipients)

    addressee = Addressee.legacy_build(
        sender,
        recipient_type_name,
        parsed_recipients,
        topic_name,
        realm=realm)

    msg = _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )
    if msg is None:
        return

    do_send_messages([msg], email_gateway=email_gateway)

def internal_send_private_message(realm: Realm,
                                  sender: UserProfile,
                                  recipient_user: UserProfile,
                                  content: Text) -> None:
    message = internal_prep_private_message(realm, sender, recipient_user, content)
    if message is None:
        return
    do_send_messages([message])

def internal_send_stream_message(realm: Realm, sender: UserProfile, stream_name: str,
                                 topic: str, content: str) -> None:
    message = internal_prep_stream_message(realm, sender, stream_name, topic, content)
    if message is None:
        return
    do_send_messages([message])

def internal_send_huddle_message(realm: Realm, sender: UserProfile, emails: List[str],
                                 content: str) -> None:
    addressee = Addressee.for_private(emails, realm)
    message = _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )
    if message is None:
        return
    do_send_messages([message])

def pick_color(user_profile: UserProfile) -> Text:
    subs = get_stream_subscriptions_for_user(user_profile).filter(active=True)
    return pick_color_helper(user_profile, subs)

def pick_color_helper(user_profile: UserProfile, subs: Iterable[Subscription]) -> Text:
    # These colors are shared with the palette in subs.js.
    used_colors = [sub.color for sub in subs if sub.active]
    available_colors = [s for s in STREAM_ASSIGNMENT_COLORS if s not in used_colors]

    if available_colors:
        return available_colors[0]
    else:
        return STREAM_ASSIGNMENT_COLORS[len(used_colors) % len(STREAM_ASSIGNMENT_COLORS)]
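
# Example (illustrative; the real palette lives in STREAM_ASSIGNMENT_COLORS):
# if the palette were ['a', 'b'] and used_colors == ['a'], the first free
# color 'b' is returned.  Once every color is in use, selection wraps via:
#
#     STREAM_ASSIGNMENT_COLORS[len(used_colors) % len(STREAM_ASSIGNMENT_COLORS)]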

def validate_user_access_to_subscribers(user_profile: Optional[UserProfile],
                                        stream: Stream) -> None:
    """ Validates whether the user can view the subscribers of a stream.  Raises a JsonableError if:
        * The user and the stream are in different realms
        * The realm is MIT and the stream is not invite only.
        * The stream is invite only, requesting_user is passed, and that user
          does not subscribe to the stream.
    """
    validate_user_access_to_subscribers_helper(
        user_profile,
        {"realm_id": stream.realm_id,
         "invite_only": stream.invite_only},
        # We use a lambda here so that we only compute whether the
        # user is subscribed if we have to
        lambda: subscribed_to_stream(cast(UserProfile, user_profile), stream.id))

def validate_user_access_to_subscribers_helper(user_profile: Optional[UserProfile],
                                               stream_dict: Mapping[str, Any],
                                               check_user_subscribed: Callable[[], bool]) -> None:
    """ Helper for validate_user_access_to_subscribers that doesn't require a full stream object.

    * check_user_subscribed reports whether the user is subscribed to the stream.
    """
    if user_profile is None:
        raise ValidationError("Missing user to validate access for")

    if user_profile.realm_id != stream_dict["realm_id"]:
        raise ValidationError("Requesting user not in given realm")

    if user_profile.realm.is_zephyr_mirror_realm and not stream_dict["invite_only"]:
        raise JsonableError(_("You cannot get subscribers for public streams in this realm"))

    if (stream_dict["invite_only"] and not check_user_subscribed()):
        raise JsonableError(_("Unable to retrieve subscribers for invite-only stream"))

def bulk_get_subscriber_user_ids(stream_dicts: Iterable[Mapping[str, Any]],
                                 user_profile: UserProfile,
                                 sub_dict: Mapping[int, bool],
                                 stream_recipient: StreamRecipientMap) -> Dict[int, List[int]]:
    """sub_dict maps stream_id => whether the user is subscribed to that stream."""
    target_stream_dicts = []
    for stream_dict in stream_dicts:
        try:
            validate_user_access_to_subscribers_helper(user_profile, stream_dict,
                                                       lambda: sub_dict[stream_dict["id"]])
        except JsonableError:
            continue
        target_stream_dicts.append(stream_dict)

    stream_ids = [stream['id'] for stream in target_stream_dicts]
    stream_recipient.populate_for_stream_ids(stream_ids)
    recipient_ids = sorted([
        stream_recipient.recipient_id_for(stream_id)
        for stream_id in stream_ids
    ])

    result = dict((stream["id"], []) for stream in stream_dicts)  # type: Dict[int, List[int]]
    if not recipient_ids:
        return result

    '''
    The raw SQL below leads to more than a 2x speedup when tested with
    20k+ total subscribers.  (For large realms with lots of default
    streams, this function deals with LOTS of data, so it is important
    to optimize.)
    '''

    id_list = ', '.join(str(recipient_id) for recipient_id in recipient_ids)

    query = '''
        SELECT
            zerver_subscription.recipient_id,
            zerver_subscription.user_profile_id
        FROM
            zerver_subscription
        INNER JOIN zerver_userprofile ON
            zerver_userprofile.id = zerver_subscription.user_profile_id
        WHERE
            zerver_subscription.recipient_id in (%s) AND
            zerver_subscription.active AND
            zerver_userprofile.is_active
        ORDER BY
            zerver_subscription.recipient_id
        ''' % (id_list,)

    cursor = connection.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()

    recip_to_stream_id = stream_recipient.recipient_to_stream_id_dict()

    '''
    Using groupby/itemgetter here is important for performance, at scale.
    It makes it so that all interpreter overhead is just O(N) in nature.
    '''
    for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
        user_profile_ids = [r[1] for r in recip_rows]
        stream_id = recip_to_stream_id[recip_id]
        result[stream_id] = list(user_profile_ids)

    return result
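
# Sketch of the groupby/itemgetter pattern used above (illustrative; the
# `rows` literal is made up): because the SQL orders by recipient_id,
# itertools.groupby can bucket the rows in a single O(N) pass:
#
#     rows = [(7, 101), (7, 102), (9, 103)]  # (recipient_id, user_profile_id)
#     for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
#         print(recip_id, [r[1] for r in recip_rows])
#     # 7 [101, 102]
#     # 9 [103]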

def get_subscribers_query(stream: Stream, requesting_user: Optional[UserProfile]) -> QuerySet:
    # TODO: Make a generic stub for QuerySet
    """ Build a query to get the subscribers list for a stream.  Raises a JsonableError
    if the requesting user isn't permitted to view the subscribers (see
    validate_user_access_to_subscribers); 'realm' is optional in stream.

    The caller can refine this query with select_related(), values(), etc. depending
    on whether it wants objects or just certain fields
    """
    validate_user_access_to_subscribers(requesting_user, stream)

    # Note that non-active users may still have "active" subscriptions, because we
    # want to be able to easily reactivate them with their old subscriptions.  This
    # is why the query here has to look at the UserProfile.is_active flag.
    subscriptions = get_active_subscriptions_for_stream_id(stream.id).filter(
        user_profile__is_active=True
    )
    return subscriptions
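
# Illustrative refinements of this query (not executed; `stream` and `user`
# are hypothetical): callers pick between full objects and narrow field
# lists, as get_subscribers() and get_subscriber_emails() do below:
#
#     get_subscribers_query(stream, user).select_related()  # Subscription objects
#     get_subscribers_query(stream, user).values('user_profile__email')  # dicts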

def get_subscribers(stream: Stream,
                    requesting_user: Optional[UserProfile]=None) -> List[UserProfile]:
    subscriptions = get_subscribers_query(stream, requesting_user).select_related()
    return [subscription.user_profile for subscription in subscriptions]

def get_subscriber_emails(stream: Stream,
                          requesting_user: Optional[UserProfile]=None) -> List[Text]:
    subscriptions_query = get_subscribers_query(stream, requesting_user)
    subscriptions = subscriptions_query.values('user_profile__email')
    return [subscription['user_profile__email'] for subscription in subscriptions]

def maybe_get_subscriber_emails(stream: Stream, user_profile: UserProfile) -> List[Text]:
    """ Alternate version of get_subscriber_emails that takes a Stream object only
    (not a name), and simply returns an empty list if unable to get a real
    subscriber list (because we're on the MIT realm). """
    try:
        subscribers = get_subscriber_emails(stream, requesting_user=user_profile)
    except JsonableError:
        subscribers = []
    return subscribers

def notify_subscriptions_added(user_profile: UserProfile,
                               sub_pairs: Iterable[Tuple[Subscription, Stream]],
                               stream_user_ids: Callable[[Stream], List[int]],
                               no_log: bool=False) -> None:
    if not no_log:
        log_event({'type': 'subscription_added',
                   'user': user_profile.email,
                   'names': [stream.name for sub, stream in sub_pairs],
                   'realm': user_profile.realm.string_id})

    # Send a notification to the user who subscribed.
    payload = [dict(name=stream.name,
                    stream_id=stream.id,
                    in_home_view=subscription.in_home_view,
                    invite_only=stream.invite_only,
                    color=subscription.color,
                    email_address=encode_email_address(stream),
                    desktop_notifications=subscription.desktop_notifications,
                    audible_notifications=subscription.audible_notifications,
                    push_notifications=subscription.push_notifications,
                    description=stream.description,
                    pin_to_top=subscription.pin_to_top,
                    subscribers=stream_user_ids(stream))
               for (subscription, stream) in sub_pairs]
    event = dict(type="subscription", op="add",
                 subscriptions=payload)
    send_event(event, [user_profile.id])

def get_peer_user_ids_for_stream_change(stream: Stream,
                                        altered_user_ids: Iterable[int],
                                        subscribed_user_ids: Iterable[int]) -> Set[int]:
    '''
    altered_user_ids is the user_ids that we are adding/removing
    subscribed_user_ids is the already-subscribed user_ids

    Based on stream policy, we notify the correct bystanders, while
    not notifying altered_users (who get subscribers via another event)
    '''

    if stream.invite_only:
        # PRIVATE STREAMS
        return set(subscribed_user_ids) - set(altered_user_ids)

    else:
        # PUBLIC STREAMS
        # We now do "peer_add" or "peer_remove" events even for streams
        # users were never subscribed to, in order for the neversubscribed
        # structure to stay up-to-date.
        return set(active_user_ids(stream.realm_id)) - set(altered_user_ids)
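
# Worked sketch (illustrative numbers only): for a private stream with
# subscribed_user_ids={1, 2, 3} and altered_user_ids={3}, only {1, 2} are
# notified; for a public stream in a realm whose active user ids are
# {1, 2, 3, 4}, the same change notifies {1, 2, 4}.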

def get_user_ids_for_streams(streams: Iterable[Stream]) -> Dict[int, List[int]]:
    stream_ids = [stream.id for stream in streams]

    all_subs = get_active_subscriptions_for_stream_ids(stream_ids).filter(
        user_profile__is_active=True,
    ).values(
        'recipient__type_id',
        'user_profile_id',
    ).order_by(
        'recipient__type_id',
    )

    get_stream_id = itemgetter('recipient__type_id')

    all_subscribers_by_stream = defaultdict(list)  # type: Dict[int, List[int]]
    for stream_id, rows in itertools.groupby(all_subs, get_stream_id):
        user_ids = [row['user_profile_id'] for row in rows]
        all_subscribers_by_stream[stream_id] = user_ids

    return all_subscribers_by_stream
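
# Example result shape (hypothetical ids): a defaultdict mapping each stream
# id to its active subscribers' user ids, e.g. {17: [101, 102], 23: [102, 104]};
# looking up an unknown stream id yields [].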

SubT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]

def bulk_add_subscriptions(streams: Iterable[Stream],
                           users: Iterable[UserProfile],
                           from_stream_creation: bool=False,
                           acting_user: Optional[UserProfile]=None) -> SubT:
    users = list(users)

    recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])  # type: Mapping[int, Recipient]
    recipients = [recipient.id for recipient in recipients_map.values()]  # type: List[int]

    stream_map = {}  # type: Dict[int, Stream]
    for stream in streams:
        stream_map[recipients_map[stream.id].id] = stream

    subs_by_user = defaultdict(list)  # type: Dict[int, List[Subscription]]
    all_subs_query = get_stream_subscriptions_for_users(users).select_related('user_profile')
    for sub in all_subs_query:
        subs_by_user[sub.user_profile_id].append(sub)

    already_subscribed = []  # type: List[Tuple[UserProfile, Stream]]
    subs_to_activate = []  # type: List[Tuple[Subscription, Stream]]
    new_subs = []  # type: List[Tuple[UserProfile, int, Stream]]
    for user_profile in users:
        needs_new_sub = set(recipients)  # type: Set[int]
        for sub in subs_by_user[user_profile.id]:
            if sub.recipient_id in needs_new_sub:
                needs_new_sub.remove(sub.recipient_id)
                if sub.active:
                    already_subscribed.append((user_profile, stream_map[sub.recipient_id]))
                else:
                    subs_to_activate.append((sub, stream_map[sub.recipient_id]))
                    # Mark the sub as active, without saving, so that
                    # pick_color will consider this to be an active
                    # subscription when picking colors
                    sub.active = True
        for recipient_id in needs_new_sub:
            new_subs.append((user_profile, recipient_id, stream_map[recipient_id]))

    subs_to_add = []  # type: List[Tuple[Subscription, Stream]]
    for (user_profile, recipient_id, stream) in new_subs:
        color = pick_color_helper(user_profile, subs_by_user[user_profile.id])
        sub_to_add = Subscription(user_profile=user_profile, active=True,
                                  color=color, recipient_id=recipient_id,
                                  desktop_notifications=user_profile.enable_stream_desktop_notifications,
                                  audible_notifications=user_profile.enable_stream_sounds,
                                  push_notifications=user_profile.enable_stream_push_notifications,
                                  )
        subs_by_user[user_profile.id].append(sub_to_add)
        subs_to_add.append((sub_to_add, stream))

    # TODO: XXX: This transaction really needs to be done at the serializable
    # transaction isolation level.
    with transaction.atomic():
        occupied_streams_before = list(get_occupied_streams(user_profile.realm))
        Subscription.objects.bulk_create([sub for (sub, stream) in subs_to_add])
        sub_ids = [sub.id for (sub, stream) in subs_to_activate]
        Subscription.objects.filter(id__in=sub_ids).update(active=True)
        occupied_streams_after = list(get_occupied_streams(user_profile.realm))

    # Log Subscription Activities in RealmAuditLog
    event_time = timezone_now()
    event_last_message_id = Message.objects.aggregate(Max('id'))['id__max']
    all_subscription_logs = []  # type: List[RealmAuditLog]
    for (sub, stream) in subs_to_add:
        all_subscription_logs.append(RealmAuditLog(realm=sub.user_profile.realm,
                                                   acting_user=acting_user,
                                                   modified_user=sub.user_profile,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_type='subscription_created',
                                                   event_time=event_time))
    for (sub, stream) in subs_to_activate:
        all_subscription_logs.append(RealmAuditLog(realm=sub.user_profile.realm,
                                                   acting_user=acting_user,
                                                   modified_user=sub.user_profile,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_type='subscription_activated',
                                                   event_time=event_time))
    # Now since we have all log objects generated we can do a bulk insert
    RealmAuditLog.objects.bulk_create(all_subscription_logs)

    new_occupied_streams = [stream for stream in
                            set(occupied_streams_after) - set(occupied_streams_before)
                            if not stream.invite_only]
    if new_occupied_streams and not from_stream_creation:
        event = dict(type="stream", op="occupy",
                     streams=[stream.to_dict()
                              for stream in new_occupied_streams])
        send_event(event, active_user_ids(user_profile.realm_id))

    # Notify all existing users on streams that users have joined

    # First, get all users subscribed to the streams that we care about
    # We fetch all subscription information upfront, as it's used throughout
    # the following code and we want to minimize DB queries
    all_subscribers_by_stream = get_user_ids_for_streams(streams=streams)

    def fetch_stream_subscriber_user_ids(stream: Stream) -> List[int]:
        if stream.is_in_zephyr_realm and not stream.invite_only:
            return []
        user_ids = all_subscribers_by_stream[stream.id]
        return user_ids

    sub_tuples_by_user = defaultdict(list)  # type: Dict[int, List[Tuple[Subscription, Stream]]]
    new_streams = set()  # type: Set[Tuple[int, int]]
    for (sub, stream) in subs_to_add + subs_to_activate:
        sub_tuples_by_user[sub.user_profile.id].append((sub, stream))
        new_streams.add((sub.user_profile.id, stream.id))

    # We now send several types of events to notify browsers.  The
    # first batch is notifications to users on invite-only streams
    # that the stream exists.
    for stream in streams:
        new_users = [user for user in users if (user.id, stream.id) in new_streams]

        # Users newly added to invite-only streams need a `create`
        # notification, since they didn't have the invite-only stream
        # in their browser yet.
        if not stream.is_public():
            send_stream_creation_event(stream, [user.id for user in new_users])

    # The second batch is events for the users themselves that they
    # were subscribed to the new streams.
    for user_profile in users:
        if len(sub_tuples_by_user[user_profile.id]) == 0:
            continue
        sub_pairs = sub_tuples_by_user[user_profile.id]
        notify_subscriptions_added(user_profile, sub_pairs, fetch_stream_subscriber_user_ids)

    # The third batch is events for other users who are tracking the
    # subscribers lists of streams in their browser; everyone for
    # public streams and only existing subscribers for private streams.
    for stream in streams:
        if stream.is_in_zephyr_realm and not stream.invite_only:
            continue

        new_user_ids = [user.id for user in users if (user.id, stream.id) in new_streams]
        subscribed_user_ids = all_subscribers_by_stream[stream.id]

        peer_user_ids = get_peer_user_ids_for_stream_change(
            stream=stream,
            altered_user_ids=new_user_ids,
            subscribed_user_ids=subscribed_user_ids,
        )

        if peer_user_ids:
            for new_user_id in new_user_ids:
                event = dict(type="subscription", op="peer_add",
                             subscriptions=[stream.name],
                             user_id=new_user_id)
                send_event(event, peer_user_ids)

    return ([(user_profile, stream) for (user_profile, recipient_id, stream) in new_subs] +
            [(sub.user_profile, stream) for (sub, stream) in subs_to_activate],
            already_subscribed)
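
# Illustrative call (not executed; `denmark` and `hamlet` are hypothetical
# fixtures): the first element of the return tuple pairs users with streams
# they were actually (re)subscribed to; the second lists pairs that were
# already subscribed:
#
#     subscribed, already = bulk_add_subscriptions([denmark], [hamlet])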

def notify_subscriptions_removed(user_profile: UserProfile, streams: Iterable[Stream],
                                 no_log: bool=False) -> None:
    if not no_log:
        log_event({'type': 'subscription_removed',
                   'user': user_profile.email,
                   'names': [stream.name for stream in streams],
                   'realm': user_profile.realm.string_id})

    payload = [dict(name=stream.name, stream_id=stream.id) for stream in streams]
    event = dict(type="subscription", op="remove",
                 subscriptions=payload)
    send_event(event, [user_profile.id])

SubAndRemovedT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]

def bulk_remove_subscriptions(users: Iterable[UserProfile],
                              streams: Iterable[Stream],
                              acting_user: Optional[UserProfile]=None) -> SubAndRemovedT:

    users = list(users)
    streams = list(streams)

    stream_dict = {stream.id: stream for stream in streams}

    existing_subs_by_user = get_bulk_stream_subscriber_info(users, stream_dict)

    def get_non_subscribed_tups() -> List[Tuple[UserProfile, Stream]]:
        stream_ids = {stream.id for stream in streams}

        not_subscribed = []  # type: List[Tuple[UserProfile, Stream]]

        for user_profile in users:
            user_sub_stream_info = existing_subs_by_user[user_profile.id]

            subscribed_stream_ids = {
                stream.id
                for (sub, stream) in user_sub_stream_info
            }
            not_subscribed_stream_ids = stream_ids - subscribed_stream_ids

            for stream_id in not_subscribed_stream_ids:
                stream = stream_dict[stream_id]
                not_subscribed.append((user_profile, stream))

        return not_subscribed

    not_subscribed = get_non_subscribed_tups()

    subs_to_deactivate = []  # type: List[Tuple[Subscription, Stream]]
    sub_ids_to_deactivate = []  # type: List[int]

    # This loop just flattens out our data into big lists for
    # bulk operations.
    for tup_list in existing_subs_by_user.values():
        for (sub, stream) in tup_list:
            subs_to_deactivate.append((sub, stream))
            sub_ids_to_deactivate.append(sub.id)

    our_realm = users[0].realm

    # TODO: XXX: This transaction really needs to be done at the serializable
    # transaction isolation level.
    with transaction.atomic():
        occupied_streams_before = list(get_occupied_streams(our_realm))
        Subscription.objects.filter(
            id__in=sub_ids_to_deactivate,
        ).update(active=False)
        occupied_streams_after = list(get_occupied_streams(our_realm))

    # Log Subscription Activities in RealmAuditLog
    event_time = timezone_now()
    event_last_message_id = Message.objects.aggregate(Max('id'))['id__max']
    all_subscription_logs = []  # type: List[RealmAuditLog]
    for (sub, stream) in subs_to_deactivate:
        all_subscription_logs.append(RealmAuditLog(realm=sub.user_profile.realm,
                                                   modified_user=sub.user_profile,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_type='subscription_deactivated',
                                                   event_time=event_time))
    # Now since we have all log objects generated we can do a bulk insert
    RealmAuditLog.objects.bulk_create(all_subscription_logs)

    new_vacant_streams = [stream for stream in
                          set(occupied_streams_before) - set(occupied_streams_after)]
    new_vacant_private_streams = [stream for stream in new_vacant_streams
                                  if stream.invite_only]
    new_vacant_public_streams = [stream for stream in new_vacant_streams
                                 if not stream.invite_only]
    if new_vacant_public_streams:
        event = dict(type="stream", op="vacate",
                     streams=[stream.to_dict()
                              for stream in new_vacant_public_streams])
        send_event(event, active_user_ids(our_realm.id))
    if new_vacant_private_streams:
        # Deactivate any newly-vacant private streams
        for stream in new_vacant_private_streams:
            do_deactivate_stream(stream)

    altered_user_dict = defaultdict(list)  # type: Dict[int, List[UserProfile]]
    streams_by_user = defaultdict(list)  # type: Dict[int, List[Stream]]
    for (sub, stream) in subs_to_deactivate:
        streams_by_user[sub.user_profile_id].append(stream)
        altered_user_dict[stream.id].append(sub.user_profile)

    for user_profile in users:
        if len(streams_by_user[user_profile.id]) == 0:
            continue
        notify_subscriptions_removed(user_profile, streams_by_user[user_profile.id])

        event = {'type': 'mark_stream_messages_as_read',
                 'user_profile_id': user_profile.id,
                 'stream_ids': [stream.id for stream in streams]}
        queue_json_publish("deferred_work", event)

    all_subscribers_by_stream = get_user_ids_for_streams(streams=streams)

    for stream in streams:
        if stream.is_in_zephyr_realm and not stream.invite_only:
            continue

        altered_users = altered_user_dict[stream.id]
        altered_user_ids = [u.id for u in altered_users]

        subscribed_user_ids = all_subscribers_by_stream[stream.id]

        peer_user_ids = get_peer_user_ids_for_stream_change(
            stream=stream,
            altered_user_ids=altered_user_ids,
            subscribed_user_ids=subscribed_user_ids,
        )

        if peer_user_ids:
            for removed_user in altered_users:
                event = dict(type="subscription",
                             op="peer_remove",
                             subscriptions=[stream.name],
                             user_id=removed_user.id)
                send_event(event, peer_user_ids)

    return (
        [(sub.user_profile, stream) for (sub, stream) in subs_to_deactivate],
        not_subscribed,
    )
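
# Illustrative call (not executed; names are hypothetical fixtures): the
# first element pairs users with streams they were unsubscribed from; the
# second lists (user, stream) pairs that were not subscribed to begin with:
#
#     removed, not_subscribed = bulk_remove_subscriptions([hamlet], [denmark])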

def log_subscription_property_change(user_email: Text, stream_name: Text, property: Text,
                                     value: Any) -> None:
    event = {'type': 'subscription_property',
             'property': property,
             'user': user_email,
             'stream_name': stream_name,
             'value': value}
    log_event(event)

def do_change_subscription_property(user_profile, sub, stream,
                                    property_name, value):
    # type: (UserProfile, Subscription, Stream, Text, Any) -> None
    setattr(sub, property_name, value)
    sub.save(update_fields=[property_name])
    log_subscription_property_change(user_profile.email, stream.name,
                                     property_name, value)

    event = dict(type="subscription",
                 op="update",
                 email=user_profile.email,
                 property=property_name,
                 value=value,
                 stream_id=stream.id,
                 name=stream.name)
    send_event(event, [user_profile.id])
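
# Illustrative usage (not executed; `user`, `sub`, and `stream` are
# hypothetical): pinning a stream is just a property update on the
# Subscription row plus a per-user event:
#
#     do_change_subscription_property(user, sub, stream, 'pin_to_top', True)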

def do_change_password(user_profile, password, commit=True,
                       hashed_password=False):
    # type: (UserProfile, Text, bool, bool) -> None
    if hashed_password:
        # This is a hashed password, not the password itself.
        user_profile.password = password
    else:
        user_profile.set_password(password)
    if commit:
        user_profile.save(update_fields=["password"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type='user_change_password',
                                 event_time=event_time)

def do_change_full_name(user_profile: UserProfile, full_name: Text,
                        acting_user: UserProfile) -> None:
    old_name = user_profile.full_name
    user_profile.full_name = full_name
    user_profile.save(update_fields=["full_name"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
                                 modified_user=user_profile, event_type='user_full_name_changed',
                                 event_time=event_time, extra_data=old_name)
    payload = dict(email=user_profile.email,
                   user_id=user_profile.id,
                   full_name=user_profile.full_name)
    send_event(dict(type='realm_user', op='update', person=payload),
               active_user_ids(user_profile.realm_id))
    if user_profile.is_bot:
        send_event(dict(type='realm_bot', op='update', bot=payload),
                   bot_owner_user_ids(user_profile))

def check_change_full_name(user_profile: UserProfile, full_name_raw: Text,
                           acting_user: UserProfile) -> Text:
    """Verifies that the user's proposed full name is valid.  The caller
    is responsible for checking permissions.  Returns the new
    full name, which may differ from what was passed in (because this
    function strips whitespace)."""
    new_full_name = check_full_name(full_name_raw)
    do_change_full_name(user_profile, new_full_name, acting_user)
    return new_full_name

def do_change_bot_owner(user_profile: UserProfile, bot_owner: UserProfile,
                        acting_user: UserProfile) -> None:
    user_profile.bot_owner = bot_owner
    user_profile.save()  # Can't use update_fields because of how the foreign key works.
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
                                 modified_user=user_profile, event_type='bot_owner_changed',
                                 event_time=event_time)
    send_event(dict(type='realm_bot',
                    op='update',
                    bot=dict(email=user_profile.email,
                             user_id=user_profile.id,
                             owner_id=user_profile.bot_owner.id,
                             )),
               bot_owner_user_ids(user_profile))

def do_change_tos_version(user_profile: UserProfile, tos_version: Text) -> None:
    user_profile.tos_version = tos_version
    user_profile.save(update_fields=["tos_version"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type='user_tos_version_changed',
                                 event_time=event_time)

def do_regenerate_api_key(user_profile: UserProfile, acting_user: UserProfile) -> None:
    user_profile.api_key = random_api_key()
    user_profile.save(update_fields=["api_key"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
                                 modified_user=user_profile, event_type='user_api_key_changed',
                                 event_time=event_time)

    if user_profile.is_bot:
        send_event(dict(type='realm_bot',
                        op='update',
                        bot=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 api_key=user_profile.api_key,
                                 )),
                   bot_owner_user_ids(user_profile))

def do_change_avatar_fields(user_profile: UserProfile, avatar_source: Text) -> None:
    user_profile.avatar_source = avatar_source
    user_profile.avatar_version += 1
    user_profile.save(update_fields=["avatar_source", "avatar_version"])
    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type='user_change_avatar_source',
                                 extra_data={'avatar_source': avatar_source},
                                 event_time=event_time)

    if user_profile.is_bot:
        send_event(dict(type='realm_bot',
                        op='update',
                        bot=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 avatar_url=avatar_url(user_profile),
                                 )),
                   bot_owner_user_ids(user_profile))

    payload = dict(
        email=user_profile.email,
        avatar_source=user_profile.avatar_source,
        avatar_url=avatar_url(user_profile),
        avatar_url_medium=avatar_url(user_profile, medium=True),
        user_id=user_profile.id
    )

    send_event(dict(type='realm_user',
                    op='update',
                    person=payload),
               active_user_ids(user_profile.realm_id))

def do_change_icon_source(realm: Realm, icon_source: Text, log: bool=True) -> None:
    realm.icon_source = icon_source
    realm.icon_version += 1
    realm.save(update_fields=["icon_source", "icon_version"])

    if log:
        log_event({'type': 'realm_change_icon',
                   'realm': realm.string_id,
                   'icon_source': icon_source})

    send_event(dict(type='realm',
                    op='update_dict',
                    property="icon",
                    data=dict(icon_source=realm.icon_source,
                              icon_url=realm_icon_url(realm))),
               active_user_ids(realm.id))

def _default_stream_permission_check(user_profile: UserProfile, stream: Optional[Stream]) -> None:
    # Any user can have a None default stream
    if stream is not None:
        if user_profile.is_bot:
            user = user_profile.bot_owner
        else:
            user = user_profile
        if stream.invite_only and (user is None or not subscribed_to_stream(user, stream.id)):
            raise JsonableError(_('Insufficient permission'))

def do_change_default_sending_stream(user_profile: UserProfile, stream: Optional[Stream],
                                     log: bool=True) -> None:
    _default_stream_permission_check(user_profile, stream)

    user_profile.default_sending_stream = stream
    user_profile.save(update_fields=['default_sending_stream'])
    if log:
        log_event({'type': 'user_change_default_sending_stream',
                   'user': user_profile.email,
                   'stream': str(stream)})
    if user_profile.is_bot:
        if stream:
            stream_name = stream.name  # type: Optional[Text]
        else:
            stream_name = None
        send_event(dict(type='realm_bot',
                        op='update',
                        bot=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 default_sending_stream=stream_name,
                                 )),
                   bot_owner_user_ids(user_profile))

def do_change_default_events_register_stream(user_profile: UserProfile,
                                             stream: Optional[Stream],
                                             log: bool=True) -> None:
    _default_stream_permission_check(user_profile, stream)

    user_profile.default_events_register_stream = stream
    user_profile.save(update_fields=['default_events_register_stream'])
    if log:
        log_event({'type': 'user_change_default_events_register_stream',
                   'user': user_profile.email,
                   'stream': str(stream)})
    if user_profile.is_bot:
        if stream:
            stream_name = stream.name  # type: Optional[Text]
        else:
            stream_name = None
        send_event(dict(type='realm_bot',
                        op='update',
                        bot=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 default_events_register_stream=stream_name,
                                 )),
                   bot_owner_user_ids(user_profile))

def do_change_default_all_public_streams(user_profile: UserProfile, value: bool,
                                         log: bool=True) -> None:
    user_profile.default_all_public_streams = value
    user_profile.save(update_fields=['default_all_public_streams'])
    if log:
        log_event({'type': 'user_change_default_all_public_streams',
                   'user': user_profile.email,
                   'value': str(value)})
    if user_profile.is_bot:
        send_event(dict(type='realm_bot',
                        op='update',
                        bot=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 default_all_public_streams=user_profile.default_all_public_streams,
                                 )),
                   bot_owner_user_ids(user_profile))

def do_change_is_admin(user_profile: UserProfile, value: bool,
                       permission: str='administer') -> None:
    if permission == "administer":
        user_profile.is_realm_admin = value
        user_profile.save(update_fields=["is_realm_admin"])
    elif permission == "api_super_user":
        user_profile.is_api_super_user = value
        user_profile.save(update_fields=["is_api_super_user"])
    else:
        raise Exception("Unknown permission")

    if permission == 'administer':
        event = dict(type="realm_user", op="update",
                     person=dict(email=user_profile.email,
                                 user_id=user_profile.id,
                                 is_admin=value))
        send_event(event, active_user_ids(user_profile.realm_id))

def do_change_bot_type(user_profile: UserProfile, value: int) -> None:
    user_profile.bot_type = value
    user_profile.save(update_fields=["bot_type"])

def do_change_stream_invite_only(stream: Stream, invite_only: bool) -> None:
    stream.invite_only = invite_only
    stream.save(update_fields=['invite_only'])

def do_rename_stream(stream: Stream, new_name: Text, log: bool=True) -> Dict[str, Text]:
    old_name = stream.name
    stream.name = new_name
    stream.save(update_fields=["name"])

    if log:
        log_event({'type': 'stream_name_change',
                   'realm': stream.realm.string_id,
                   'new_name': new_name})

    recipient = get_stream_recipient(stream.id)
    messages = Message.objects.filter(recipient=recipient).only("id")

    # Update the display recipient and stream, which are easy single
    # items to set.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
    if old_cache_key != new_cache_key:
        cache_delete(old_cache_key)
        cache_set(new_cache_key, stream)
    cache_set(display_recipient_cache_key(recipient.id), stream.name)

    # Delete cache entries for everything else, which is cheaper and
    # clearer than trying to set them. display_recipient is the out of
    # date field in all cases.
    cache_delete_many(
        to_dict_cache_key_id(message.id) for message in messages)
    new_email = encode_email_address(stream)

    # We will tell our users to essentially
    # update stream.name = new_name where name = old_name
    # and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send one message, but the
    # client code really wants one property update at a time, and
    # updating stream names is a pretty infrequent operation.
    # More importantly, we want to key these updates by id, not name,
    # since id is the immutable primary key, and obviously name is not.
    data_updates = [
        ['email_address', new_email],
        ['name', new_name],
    ]
    for property, value in data_updates:
        event = dict(
            op="update",
            type="stream",
            property=property,
            value=value,
            stream_id=stream.id,
            name=old_name,
        )
        send_event(event, can_access_stream_user_ids(stream))

    # Even though the token doesn't change, the web client needs to update the
    # email forwarding address to display the correctly-escaped new name.
    return {"email_address": new_email}

def do_change_stream_description(stream: Stream, new_description: Text) -> None:
    stream.description = new_description
    stream.save(update_fields=['description'])

    event = dict(
        type='stream',
        op='update',
        property='description',
        name=stream.name,
        stream_id=stream.id,
        value=new_description,
    )
    send_event(event, can_access_stream_user_ids(stream))

def do_create_realm(string_id, name, restricted_to_domain=None,
                    invite_required=None, org_type=None):
    # type: (Text, Text, Optional[bool], Optional[bool], Optional[int]) -> Realm
    existing_realm = get_realm(string_id)
    if existing_realm is not None:
        raise AssertionError("Realm %s already exists!" % (string_id,))

    kwargs = {}  # type: Dict[str, Any]
    if restricted_to_domain is not None:
        kwargs['restricted_to_domain'] = restricted_to_domain
    if invite_required is not None:
        kwargs['invite_required'] = invite_required
    if org_type is not None:
        kwargs['org_type'] = org_type
    realm = Realm(string_id=string_id, name=name, **kwargs)
    realm.save()

    # Create stream once Realm object has been saved
    notifications_stream, _ = create_stream_if_needed(realm, Realm.DEFAULT_NOTIFICATION_STREAM_NAME)
    realm.notifications_stream = notifications_stream

    signup_notifications_stream, _ = create_stream_if_needed(
        realm, Realm.INITIAL_PRIVATE_STREAM_NAME, invite_only=True,
        stream_description="A private stream for core team members.")
    realm.signup_notifications_stream = signup_notifications_stream

    realm.save(update_fields=['notifications_stream', 'signup_notifications_stream'])

    # Log the event
    log_event({"type": "realm_created",
               "string_id": string_id,
               "restricted_to_domain": restricted_to_domain,
               "invite_required": invite_required,
               "org_type": org_type})

    # Send a notification to the admin realm (if configured)
    if settings.NEW_USER_BOT is not None:
        signup_message = "Signups enabled"
        admin_realm = get_system_bot(settings.NEW_USER_BOT).realm
        internal_send_message(admin_realm, settings.NEW_USER_BOT, "stream",
                              "signups", realm.display_subdomain, signup_message)
    return realm

def do_change_notification_settings(user_profile: UserProfile, name: str, value: bool,
                                    log: bool=True) -> None:
    """Takes in a UserProfile object, the name of a global notification
    preference to update, and the value to update to
    """

    notification_setting_type = UserProfile.notification_setting_types[name]
    assert isinstance(value, notification_setting_type), (
        'Cannot update %s: %s is not an instance of %s' % (
            name, value, notification_setting_type,))

    setattr(user_profile, name, value)

    # Disabling digest emails should clear a user's email queue
    if name == 'enable_digest_emails' and not value:
        clear_scheduled_emails(user_profile.id, ScheduledEmail.DIGEST)

    user_profile.save(update_fields=[name])
    event = {'type': 'update_global_notifications',
             'user': user_profile.email,
             'notification_name': name,
             'setting': value}
    if log:
        log_event(event)
    send_event(event, [user_profile.id])
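
# Illustrative usage (not executed; `user` is hypothetical): setting any
# boolean preference named in UserProfile.notification_setting_types, e.g.
#
#     do_change_notification_settings(user, 'enable_digest_emails', False)
#
# which also clears any queued digest emails per the check above.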

def do_change_autoscroll_forever(user_profile: UserProfile, autoscroll_forever: bool,
                                 log: bool=True) -> None:
    user_profile.autoscroll_forever = autoscroll_forever
    user_profile.save(update_fields=["autoscroll_forever"])

    if log:
        log_event({'type': 'autoscroll_forever',
                   'user': user_profile.email,
                   'autoscroll_forever': autoscroll_forever})

def do_change_enter_sends(user_profile: UserProfile, enter_sends: bool) -> None:
    user_profile.enter_sends = enter_sends
    user_profile.save(update_fields=["enter_sends"])

def do_change_default_desktop_notifications(user_profile: UserProfile,
                                            default_desktop_notifications: bool) -> None:
    user_profile.default_desktop_notifications = default_desktop_notifications
    user_profile.save(update_fields=["default_desktop_notifications"])

def do_set_user_display_setting(user_profile: UserProfile,
                                setting_name: str,
                                setting_value: Union[bool, Text]) -> None:
    property_type = UserProfile.property_types[setting_name]
    assert isinstance(setting_value, property_type)
    setattr(user_profile, setting_name, setting_value)
    user_profile.save(update_fields=[setting_name])
    event = {'type': 'update_display_settings',
             'user': user_profile.email,
             'setting_name': setting_name,
             'setting': setting_value}
    send_event(event, [user_profile.id])

    # Updates to the timezone display setting are sent to all users
    if setting_name == "timezone":
        payload = dict(email=user_profile.email,
                       user_id=user_profile.id,
                       timezone=user_profile.timezone)
        send_event(dict(type='realm_user', op='update', person=payload),
                   active_user_ids(user_profile.realm_id))
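
# Illustrative usage (not executed; `user` is hypothetical): display settings
# are validated against UserProfile.property_types, so the value's type must
# match the declared one; the timezone setting additionally fans out to the
# whole realm, per the branch above:
#
#     do_set_user_display_setting(user, 'timezone', 'America/New_York')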

def lookup_default_stream_groups(default_stream_group_names: List[str],
                                 realm: Realm) -> List[DefaultStreamGroup]:
    default_stream_groups = []
    for group_name in default_stream_group_names:
        try:
            default_stream_group = DefaultStreamGroup.objects.get(
                name=group_name, realm=realm)
        except DefaultStreamGroup.DoesNotExist:
            # Interpolate outside of _() so the format string itself is translatable.
            raise JsonableError(_('Invalid default stream group %s') % (group_name,))
        default_stream_groups.append(default_stream_group)
    return default_stream_groups

def set_default_streams(realm: Realm, stream_dict: Dict[Text, Dict[Text, Any]]) -> None:
    DefaultStream.objects.filter(realm=realm).delete()
    stream_names = []
    for name, options in stream_dict.items():
        stream_names.append(name)
        stream, _ = create_stream_if_needed(realm,
                                            name,
                                            invite_only=options.get("invite_only", False),
                                            stream_description=options.get("description", ''))
        DefaultStream.objects.create(stream=stream, realm=realm)

    # Always include the realm's default notifications stream, if it exists
    if realm.notifications_stream is not None:
        DefaultStream.objects.get_or_create(stream=realm.notifications_stream, realm=realm)

    log_event({'type': 'default_streams',
               'realm': realm.string_id,
               'streams': stream_names})

def notify_default_streams(realm_id: int) -> None:
    event = dict(
        type="default_streams",
        default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm_id))
    )
    send_event(event, active_user_ids(realm_id))

def notify_default_stream_groups(realm: Realm) -> None:
    event = dict(
        type="default_stream_groups",
        default_stream_groups=default_stream_groups_to_dicts_sorted(get_default_stream_groups(realm))
    )
    send_event(event, active_user_ids(realm.id))

def do_add_default_stream(stream: Stream) -> None:
    realm_id = stream.realm_id
    stream_id = stream.id
    if not DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).exists():
        DefaultStream.objects.create(realm_id=realm_id, stream_id=stream_id)
        notify_default_streams(realm_id)

def do_remove_default_stream(stream: Stream) -> None:
    realm_id = stream.realm_id
    stream_id = stream.id
    DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).delete()
    notify_default_streams(realm_id)

def do_create_default_stream_group(realm: Realm, group_name: Text,
                                   description: Text, streams: List[Stream]) -> None:
    default_streams = get_default_streams_for_realm(realm.id)
    for stream in streams:
        if stream in default_streams:
            raise JsonableError(_(
                "'%(stream_name)s' is a default stream and cannot be added to '%(group_name)s'")
                % {'stream_name': stream.name, 'group_name': group_name})

    check_default_stream_group_name(group_name)
    (group, created) = DefaultStreamGroup.objects.get_or_create(
        name=group_name, realm=realm, description=description)
    if not created:
        raise JsonableError(_("Default stream group '%(group_name)s' already exists")
                            % {'group_name': group_name})

    group.streams = streams
    group.save()
    notify_default_stream_groups(realm)

def do_add_streams_to_default_stream_group(realm: Realm, group: DefaultStreamGroup,
                                           streams: List[Stream]) -> None:
    default_streams = get_default_streams_for_realm(realm.id)
    for stream in streams:
        if stream in default_streams:
            raise JsonableError(_(
                "'%(stream_name)s' is a default stream and cannot be added to '%(group.name)s'")
                % {'stream_name': stream.name, 'group.name': group.name})
        if stream in group.streams.all():
            raise JsonableError(_(
                "Stream '%(stream_name)s' is already present in default stream group '%(group.name)s'")
                % {'stream_name': stream.name, 'group.name': group.name})
        group.streams.add(stream)

    group.save()
    notify_default_stream_groups(realm)

def do_remove_streams_from_default_stream_group(realm: Realm, group: DefaultStreamGroup,
                                                streams: List[Stream]) -> None:
    for stream in streams:
        if stream not in group.streams.all():
            raise JsonableError(_(
                "Stream '%(stream_name)s' is not present in default stream group '%(group.name)s'")
                % {'stream_name': stream.name, 'group.name': group.name})
        group.streams.remove(stream)

    group.save()
    notify_default_stream_groups(realm)

def do_change_default_stream_group_name(realm: Realm, group: DefaultStreamGroup,
                                        new_group_name: Text) -> None:
    if group.name == new_group_name:
        raise JsonableError(_("This default stream group is already named '%s'") % (new_group_name,))

    if DefaultStreamGroup.objects.filter(name=new_group_name, realm=realm).exists():
        raise JsonableError(_("Default stream group '%s' already exists") % (new_group_name,))

    group.name = new_group_name
    group.save()
    notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-15 19:57:52 +01:00
|
|
|
def do_change_default_stream_group_description(realm: Realm, group: DefaultStreamGroup,
|
|
|
|
new_description: Text) -> None:
|
2017-11-14 20:51:34 +01:00
|
|
|
group.description = new_description
|
|
|
|
group.save()
|
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
def do_remove_default_stream_group(realm: Realm, group: DefaultStreamGroup) -> None:
|
|
|
|
group.delete()
|
2017-11-01 18:20:34 +01:00
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_default_streams_for_realm(realm_id: int) -> List[Stream]:
|
2013-04-05 17:04:50 +02:00
|
|
|
return [default.stream for default in
|
2017-11-15 19:57:52 +01:00
|
|
|
DefaultStream.objects.select_related("stream", "stream__realm").filter(
|
|
|
|
realm_id=realm_id)]
|
2014-01-27 18:02:41 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_default_subs(user_profile: UserProfile) -> List[Stream]:
|
2014-01-27 18:02:41 +01:00
|
|
|
# Right now default streams are realm-wide. This wrapper gives us flexibility
|
|
|
|
# to some day further customize how we set up default streams for new users.
|
2017-09-17 00:34:13 +02:00
|
|
|
return get_default_streams_for_realm(user_profile.realm_id)
|
2013-01-11 23:36:41 +01:00
|
|
|
|
2016-05-20 22:08:42 +02:00
|
|
|
# returns default streams in json serializable format
|
2017-11-05 11:15:10 +01:00
|
|
|
def streams_to_dicts_sorted(streams: List[Stream]) -> List[Dict[str, Any]]:
|
2016-05-20 22:08:42 +02:00
|
|
|
return sorted([stream.to_dict() for stream in streams], key=lambda elt: elt["name"])
|
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
def default_stream_groups_to_dicts_sorted(groups: List[DefaultStreamGroup]) -> List[Dict[str, Any]]:
|
|
|
|
return sorted([group.to_dict() for group in groups], key=lambda elt: elt["name"])
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_activity_interval(user_profile: UserProfile,
|
|
|
|
log_time: datetime.datetime) -> None:
|
2017-04-15 07:20:16 +02:00
|
|
|
effective_end = log_time + UserActivityInterval.MIN_INTERVAL_LENGTH
|
2013-09-27 22:22:52 +02:00
|
|
|
# This code isn't perfect, because with various races we might end
|
|
|
|
# up creating two overlapping intervals, but that shouldn't happen
|
|
|
|
# often, and can be corrected for in post-processing
|
2013-09-04 00:00:44 +02:00
|
|
|
try:
|
2013-10-08 21:19:56 +02:00
|
|
|
last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
|
|
|
|
# There are two ways our intervals could overlap:
|
|
|
|
# (1) The start of the new interval could be inside the old interval
|
|
|
|
# (2) The end of the new interval could be inside the old interval
|
|
|
|
# In either case, we just extend the old interval to include the new interval.
|
|
|
|
if ((last.start <= log_time <= last.end) or
|
2016-12-03 18:19:09 +01:00
|
|
|
(last.start <= effective_end <= last.end)):
|
2013-09-27 22:22:52 +02:00
|
|
|
last.end = max(last.end, effective_end)
|
|
|
|
last.start = min(last.start, log_time)
|
|
|
|
last.save(update_fields=["start", "end"])
|
2013-09-04 00:00:44 +02:00
|
|
|
return
|
|
|
|
except IndexError:
|
|
|
|
pass
|
|
|
|
|
2013-10-08 21:19:56 +02:00
|
|
|
# Otherwise, the intervals don't overlap, so we should make a new one
|
2013-09-04 00:00:44 +02:00
|
|
|
UserActivityInterval.objects.create(user_profile=user_profile, start=log_time,
|
|
|
|
end=effective_end)
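#
# Illustrative sketch (editor's addition, not part of the original code):
# how the overlap test above merges a new interval into the last one.
# Times are hypothetical and MIN_INTERVAL_LENGTH is assumed to be 15
# minutes purely for this example's arithmetic:
#
#     last:            10:00 .............. 10:15
#     log_time:                10:10
#     effective_end:           10:10 + 15min = 10:25
#
# log_time falls inside [last.start, last.end], so rather than creating a
# second row we stretch the existing one:
#
#     last.start = min(10:00, 10:10) = 10:00
#     last.end   = max(10:15, 10:25) = 10:25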
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_activity')
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_activity(user_profile: UserProfile,
|
|
|
|
client: Client,
|
|
|
|
query: Text,
|
|
|
|
log_time: datetime.datetime) -> None:
|
2013-11-01 19:02:11 +01:00
|
|
|
(activity, created) = UserActivity.objects.get_or_create(
|
|
|
|
user_profile=user_profile,
|
|
|
|
client=client,
|
|
|
|
query=query,
|
|
|
|
defaults={'last_visit': log_time, 'count': 0})
|
|
|
|
|
2013-01-11 23:36:41 +01:00
|
|
|
activity.count += 1
|
2013-01-11 21:16:42 +01:00
|
|
|
activity.last_visit = log_time
|
2013-03-21 21:29:28 +01:00
|
|
|
activity.save(update_fields=["last_visit", "count"])
|
2013-01-11 21:16:42 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def send_presence_changed(user_profile: UserProfile, presence: UserPresence) -> None:
|
2013-04-05 00:13:03 +02:00
|
|
|
presence_dict = presence.to_dict()
|
2014-01-24 23:24:44 +01:00
|
|
|
event = dict(type="presence", email=user_profile.email,
|
|
|
|
server_timestamp=time.time(),
|
2017-04-25 11:50:30 +02:00
|
|
|
presence={presence_dict['client']: presence_dict})
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(user_profile.realm_id))
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def consolidate_client(client: Client) -> Client:
|
2014-02-13 14:28:08 +01:00
|
|
|
# The web app reports a client as 'website'
|
|
|
|
# The desktop app reports a client as ZulipDesktop
|
|
|
|
# due to it setting a custom user agent. We want both
|
|
|
|
# to count as web users
|
|
|
|
|
|
|
|
# Alias ZulipDesktop to website
|
|
|
|
if client.name in ['ZulipDesktop']:
|
|
|
|
return get_client('website')
|
|
|
|
else:
|
|
|
|
return client
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_presence')
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_presence(user_profile: UserProfile,
|
|
|
|
client: Client,
|
|
|
|
log_time: datetime.datetime,
|
|
|
|
status: int) -> None:
|
2014-02-13 14:28:08 +01:00
|
|
|
client = consolidate_client(client)
|
2013-11-01 19:02:11 +01:00
|
|
|
(presence, created) = UserPresence.objects.get_or_create(
|
|
|
|
user_profile=user_profile,
|
|
|
|
client=client,
|
|
|
|
defaults={'timestamp': log_time,
|
|
|
|
'status': status})
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2013-08-20 20:57:26 +02:00
|
|
|
stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
|
2013-04-05 00:13:03 +02:00
|
|
|
was_idle = presence.status == UserPresence.IDLE
|
|
|
|
became_online = (status == UserPresence.ACTIVE) and (stale_status or was_idle)
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2014-03-17 18:35:43 +01:00
|
|
|
# If an object was created, it has already been saved.
|
|
|
|
#
|
|
|
|
# We suppress changes from ACTIVE to IDLE before stale_status is reached;
|
|
|
|
# this protects us from the user having two clients open: one active, the
|
|
|
|
# other idle. Without this check, we would constantly toggle their status
|
|
|
|
# between the two states.
|
|
|
|
if (not created and stale_status) or was_idle or status == presence.status:
|
2013-06-24 19:10:25 +02:00
|
|
|
# The following block attempts to only update the "status"
|
|
|
|
# field in the event that it actually changed. This is
|
|
|
|
# important to avoid flushing the UserPresence cache when the
|
|
|
|
# data it would return to a client hasn't actually changed
|
|
|
|
# (see the UserPresence post_save hook for details).
|
2013-06-24 19:05:41 +02:00
|
|
|
presence.timestamp = log_time
|
2013-06-24 19:10:25 +02:00
|
|
|
update_fields = ["timestamp"]
|
|
|
|
if presence.status != status:
|
|
|
|
presence.status = status
|
|
|
|
update_fields.append("status")
|
|
|
|
presence.save(update_fields=update_fields)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2016-07-27 01:45:29 +02:00
|
|
|
if not user_profile.realm.is_zephyr_mirror_realm and (created or became_online):
|
2013-04-03 22:00:02 +02:00
|
|
|
# Push event to all users in the realm so they see the new user
|
|
|
|
# appear in the presence list immediately, or the newly online
|
2013-09-15 20:49:04 +02:00
|
|
|
# user without delay. Note that we won't send an update here for a
|
|
|
|
# timestamp update, because we rely on the browser to ping us every 50
|
|
|
|
# seconds for realm-wide status updates, and those updates should have
|
|
|
|
# recent timestamps, which means the browser won't think active users
|
|
|
|
# have gone idle. If we were more aggressive in this function about
|
|
|
|
# sending timestamp updates, we could eliminate the ping responses, but
|
|
|
|
# that's not a high priority for now, considering that most of our non-MIT
|
|
|
|
# realms are pretty small.
|
2013-04-03 22:00:02 +02:00
|
|
|
send_presence_changed(user_profile, presence)
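#
# Worked scenario for the guard above (editor's sketch; timings are
# hypothetical): a user has the web app ACTIVE on a laptop and an IDLE
# mobile client pinging in the background. Without the stale_status check,
# every mobile ping would flip the stored status to IDLE and the next
# laptop ping would flip it back, emitting a presence event each time.
# With the check, an ACTIVE -> IDLE transition is only persisted once the
# existing row is older than the 70-second staleness window
# (timedelta(minutes=1, seconds=10)), so the flapping collapses.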
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def update_user_activity_interval(user_profile: UserProfile, log_time: datetime.datetime) -> None:
|
2016-11-28 23:29:01 +01:00
|
|
|
event = {'user_profile_id': user_profile.id,
|
|
|
|
'time': datetime_to_timestamp(log_time)}
|
2017-11-24 13:18:46 +01:00
|
|
|
queue_json_publish("user_activity_interval", event)
|
2013-09-06 21:52:12 +02:00
|
|
|
|
|
|
|
def update_user_presence(user_profile, client, log_time, status,
|
|
|
|
new_user_input):
|
2016-05-25 06:55:14 +02:00
|
|
|
# type: (UserProfile, Client, datetime.datetime, int, bool) -> None
|
2016-11-28 23:29:01 +01:00
|
|
|
event = {'user_profile_id': user_profile.id,
|
|
|
|
'status': status,
|
|
|
|
'time': datetime_to_timestamp(log_time),
|
|
|
|
'client': client.name}
|
2013-03-25 20:37:00 +01:00
|
|
|
|
2017-11-24 13:18:46 +01:00
|
|
|
queue_json_publish("user_presence", event)
|
2013-03-25 20:37:00 +01:00
|
|
|
|
2013-09-10 17:32:40 +02:00
|
|
|
if new_user_input:
|
2013-09-06 21:52:12 +02:00
|
|
|
update_user_activity_interval(user_profile, log_time)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_pointer(user_profile: UserProfile, pointer: int, update_flags: bool=False) -> None:
|
2014-01-24 23:50:24 +01:00
|
|
|
prev_pointer = user_profile.pointer
|
|
|
|
user_profile.pointer = pointer
|
|
|
|
user_profile.save(update_fields=["pointer"])
|
|
|
|
|
|
|
|
if update_flags:
|
|
|
|
# Until we handle the new read counts in the Android app
|
|
|
|
# natively, this is a shim that will mark as read any messages
|
|
|
|
# up until the pointer move
|
|
|
|
UserMessage.objects.filter(user_profile=user_profile,
|
|
|
|
message__id__gt=prev_pointer,
|
|
|
|
message__id__lte=pointer,
|
|
|
|
flags=~UserMessage.flags.read) \
|
|
|
|
.update(flags=F('flags').bitor(UserMessage.flags.read))
|
|
|
|
|
2014-01-24 23:52:04 +01:00
|
|
|
event = dict(type='pointer', pointer=pointer)
|
|
|
|
send_event(event, [user_profile.id])
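#
# Note on the flags query above (editor's addition): UserMessage.flags is
# a bitfield, so filtering on ~UserMessage.flags.read selects rows whose
# read bit is unset, and F('flags').bitor(...) sets that bit in a single
# SQL UPDATE, with no per-row Python round trip. The same pattern in plain
# Python, with a hypothetical bit layout:
#
#     READ = 1 << 0             # assumed bit position, illustration only
#     flags = 0b0100            # an unread message's flags
#     flags |= READ             # mark read -> 0b0101
#     bool(flags & READ)        # -> True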
|
2014-01-24 23:50:24 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_mark_all_as_read(user_profile: UserProfile) -> int:
|
2017-08-04 20:26:38 +02:00
|
|
|
log_statsd_event('bankruptcy')
|
|
|
|
|
|
|
|
msgs = UserMessage.objects.filter(
|
|
|
|
user_profile=user_profile
|
|
|
|
).extra(
|
|
|
|
where=[UserMessage.where_unread()]
|
|
|
|
)
|
|
|
|
|
|
|
|
count = msgs.update(
|
|
|
|
flags=F('flags').bitor(UserMessage.flags.read)
|
|
|
|
)
|
|
|
|
|
|
|
|
event = dict(
|
|
|
|
type='update_message_flags',
|
|
|
|
operation='add',
|
|
|
|
flag='read',
|
|
|
|
messages=[], # we don't send messages, since the client reloads anyway
|
|
|
|
all=True
|
|
|
|
)
|
|
|
|
send_event(event, [user_profile.id])
|
|
|
|
|
|
|
|
statsd.incr("mark_all_as_read", count)
|
|
|
|
return count
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_mark_stream_messages_as_read(user_profile: UserProfile,
|
|
|
|
stream: Optional[Stream],
|
|
|
|
topic_name: Optional[Text]=None) -> int:
|
2017-08-06 15:00:08 +02:00
|
|
|
log_statsd_event('mark_stream_as_read')
|
|
|
|
|
|
|
|
msgs = UserMessage.objects.filter(
|
|
|
|
user_profile=user_profile
|
|
|
|
)
|
|
|
|
|
2017-10-28 20:26:11 +02:00
|
|
|
assert stream is not None  # narrows Optional[Stream]; every caller passes a stream
recipient = get_stream_recipient(stream.id)
|
2017-08-06 15:00:08 +02:00
|
|
|
msgs = msgs.filter(message__recipient=recipient)
|
|
|
|
|
|
|
|
if topic_name:
|
|
|
|
msgs = msgs.filter(message__subject__iexact=topic_name)
|
|
|
|
|
|
|
|
msgs = msgs.extra(
|
|
|
|
where=[UserMessage.where_unread()]
|
|
|
|
)
|
|
|
|
|
|
|
|
message_ids = list(msgs.values_list('message__id', flat=True))
|
|
|
|
|
|
|
|
count = msgs.update(
|
|
|
|
flags=F('flags').bitor(UserMessage.flags.read)
|
|
|
|
)
|
|
|
|
|
|
|
|
event = dict(
|
|
|
|
type='update_message_flags',
|
|
|
|
operation='add',
|
|
|
|
flag='read',
|
|
|
|
messages=message_ids,
|
|
|
|
all=False,
|
|
|
|
)
|
|
|
|
send_event(event, [user_profile.id])
|
|
|
|
|
|
|
|
statsd.incr("mark_stream_as_read", count)
|
|
|
|
return count
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_message_flags(user_profile: UserProfile,
|
|
|
|
operation: Text,
|
|
|
|
flag: Text,
|
|
|
|
messages: Optional[Sequence[int]]) -> int:
|
2013-06-25 20:22:40 +02:00
|
|
|
flagattr = getattr(UserMessage.flags, flag)
|
2013-03-25 20:37:00 +01:00
|
|
|
|
2017-08-06 15:00:08 +02:00
|
|
|
assert messages is not None
|
|
|
|
msgs = UserMessage.objects.filter(user_profile=user_profile,
|
|
|
|
message__id__in=messages)
|
|
|
|
# Hack to let you star any message
|
|
|
|
if msgs.count() == 0:
|
|
|
|
if len(messages) != 1:
|
|
|
|
raise JsonableError(_("Invalid message(s)"))
|
|
|
|
if flag != "starred":
|
|
|
|
raise JsonableError(_("Invalid message(s)"))
|
|
|
|
# Validate that the user could have read the relevant message
|
|
|
|
message = access_message(user_profile, messages[0])[0]
|
|
|
|
|
|
|
|
# OK, this is a message that you legitimately have access
|
|
|
|
# to via narrowing to the stream it is on, even though you
|
|
|
|
# didn't actually receive it. So we create a historical,
|
|
|
|
# read UserMessage message row for you to star.
|
|
|
|
UserMessage.objects.create(user_profile=user_profile,
|
|
|
|
message=message,
|
|
|
|
flags=UserMessage.flags.historical | UserMessage.flags.read)
|
2013-04-16 22:58:21 +02:00
|
|
|
|
2013-06-25 20:22:40 +02:00
|
|
|
if operation == 'add':
|
2013-06-25 20:26:50 +02:00
|
|
|
count = msgs.update(flags=F('flags').bitor(flagattr))
|
2013-06-25 20:22:40 +02:00
|
|
|
elif operation == 'remove':
|
2013-06-25 20:26:50 +02:00
|
|
|
count = msgs.update(flags=F('flags').bitand(~flagattr))
|
2017-08-25 09:34:56 +02:00
|
|
|
else:
|
|
|
|
raise AssertionError("Invalid message flags operation")
|
2013-03-25 20:37:00 +01:00
|
|
|
|
2013-08-05 23:56:09 +02:00
|
|
|
event = {'type': 'update_message_flags',
|
|
|
|
'operation': operation,
|
|
|
|
'flag': flag,
|
|
|
|
'messages': messages,
|
2017-08-04 21:02:59 +02:00
|
|
|
'all': False}
|
2014-01-24 23:24:44 +01:00
|
|
|
send_event(event, [user_profile.id])
|
2013-08-05 23:56:09 +02:00
|
|
|
|
2013-06-25 20:26:50 +02:00
|
|
|
statsd.incr("flags.%s.%s" % (flag, operation), count)
|
2016-07-13 03:16:42 +02:00
|
|
|
return count
|
2013-02-11 21:47:45 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def subscribed_to_stream(user_profile: UserProfile, stream_id: int) -> bool:
|
2013-02-04 23:41:49 +01:00
|
|
|
try:
|
|
|
|
if Subscription.objects.get(user_profile=user_profile,
|
|
|
|
active=True,
|
|
|
|
recipient__type=Recipient.STREAM,
|
2017-09-17 21:05:00 +02:00
|
|
|
recipient__type_id=stream_id):
|
2013-02-04 23:41:49 +01:00
|
|
|
return True
|
|
|
|
return False
|
|
|
|
except Subscription.DoesNotExist:
|
|
|
|
return False
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def truncate_content(content: Text, max_length: int, truncation_message: Text) -> Text:
|
2013-11-22 18:33:22 +01:00
|
|
|
if len(content) > max_length:
|
|
|
|
content = content[:max_length - len(truncation_message)] + truncation_message
|
|
|
|
return content
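#
# Example (editor's addition): truncation keeps the result at exactly
# max_length by sacrificing the tail of the content:
#
#     truncate_content("hello world", 8, "...")
#     # -> content[:8 - 3] + "..." == "hello..." (length 8)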
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def truncate_body(body: Text) -> Text:
|
2013-11-22 18:33:22 +01:00
|
|
|
return truncate_content(body, MAX_MESSAGE_LENGTH, "...")
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def truncate_topic(topic: Text) -> Text:
|
2013-11-22 18:33:22 +01:00
|
|
|
return truncate_content(topic, MAX_SUBJECT_LENGTH, "...")
|
|
|
|
|
2017-09-27 15:06:03 +02:00
|
|
|
MessageUpdateUserInfoResult = TypedDict('MessageUpdateUserInfoResult', {
|
|
|
|
'message_user_ids': Set[int],
|
2017-09-27 16:47:13 +02:00
|
|
|
'mention_user_ids': Set[int],
|
2017-09-27 15:06:03 +02:00
|
|
|
})
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_user_info_for_message_updates(message_id: int) -> MessageUpdateUserInfoResult:
|
2017-09-27 15:06:03 +02:00
|
|
|
|
|
|
|
# We exclude UserMessage.flags.historical rows since those
|
|
|
|
# users did not receive the message originally, and thus
|
|
|
|
# probably are not relevant for reprocessed alert_words,
|
|
|
|
# mentions and similar rendering features. This may be a
|
|
|
|
# decision we change in the future.
|
|
|
|
query = UserMessage.objects.filter(
|
|
|
|
message=message_id,
|
|
|
|
flags=~UserMessage.flags.historical
|
2017-09-27 16:47:13 +02:00
|
|
|
).values('user_profile_id', 'flags')
|
|
|
|
rows = list(query)
|
|
|
|
|
|
|
|
message_user_ids = {
|
|
|
|
row['user_profile_id']
|
|
|
|
for row in rows
|
|
|
|
}
|
2017-09-27 15:06:03 +02:00
|
|
|
|
2017-09-27 16:47:13 +02:00
|
|
|
mask = UserMessage.flags.mentioned | UserMessage.flags.wildcard_mentioned
|
|
|
|
|
|
|
|
mention_user_ids = {
|
|
|
|
row['user_profile_id']
|
|
|
|
for row in rows
|
|
|
|
if int(row['flags']) & mask
|
|
|
|
}
|
2017-09-27 15:06:03 +02:00
|
|
|
|
|
|
|
return dict(
|
|
|
|
message_user_ids=message_user_ids,
|
2017-09-27 16:47:13 +02:00
|
|
|
mention_user_ids=mention_user_ids,
|
2017-09-27 15:06:03 +02:00
|
|
|
)
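#
# Sketch of the mention mask above (editor's addition; bit values are
# hypothetical): with mentioned = 0b01 and wildcard_mentioned = 0b10 the
# mask is 0b11, so a row survives the int(row['flags']) & mask test if
# either mention bit is set:
#
#     0b0101 & 0b11   # -> 0b01, truthy: user was mentioned
#     0b0100 & 0b11   # -> 0, falsy: neither mention bit set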
|
2014-01-08 19:42:45 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def update_user_message_flags(message: Message, ums: Iterable[UserMessage]) -> None:
|
2014-01-07 19:40:02 +01:00
|
|
|
wildcard = message.mentions_wildcard
|
|
|
|
mentioned_ids = message.mentions_user_ids
|
2014-01-08 19:42:45 +01:00
|
|
|
ids_with_alert_words = message.user_ids_with_alert_words
|
2017-05-17 20:39:57 +02:00
|
|
|
changed_ums = set() # type: Set[UserMessage]
|
2014-01-08 19:42:45 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def update_flag(um: UserMessage, should_set: bool, flag: int) -> None:
|
2014-01-08 19:42:45 +01:00
|
|
|
if should_set:
|
|
|
|
if not (um.flags & flag):
|
|
|
|
um.flags |= flag
|
|
|
|
changed_ums.add(um)
|
|
|
|
else:
|
|
|
|
if (um.flags & flag):
|
|
|
|
um.flags &= ~flag
|
|
|
|
changed_ums.add(um)
|
|
|
|
|
|
|
|
for um in ums:
|
|
|
|
has_alert_word = um.user_profile_id in ids_with_alert_words
|
|
|
|
update_flag(um, has_alert_word, UserMessage.flags.has_alert_word)
|
|
|
|
|
2014-01-07 19:40:02 +01:00
|
|
|
mentioned = um.user_profile_id in mentioned_ids
|
|
|
|
update_flag(um, mentioned, UserMessage.flags.mentioned)
|
|
|
|
|
|
|
|
update_flag(um, wildcard, UserMessage.flags.wildcard_mentioned)
|
|
|
|
|
2014-01-08 19:42:45 +01:00
|
|
|
for um in changed_ums:
|
|
|
|
um.save(update_fields=['flags'])
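#
# Design note (editor's addition): update_flag only records rows whose bit
# actually changes, so a no-op edit (same mentions, same alert words) never
# issues an UPDATE per recipient. A minimal trace with a hypothetical flag
# value of 0b10:
#
#     um.flags = 0b10
#     update_flag(um, True, 0b10)    # bit already set -> um not recorded
#     update_flag(um, False, 0b10)   # bit cleared -> um added to changed_ums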
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def update_to_dict_cache(changed_messages: List[Message]) -> List[int]:
|
2016-12-08 02:26:16 +01:00
|
|
|
"""Updates the message as stored in the to_dict cache (for serving
|
|
|
|
messages)."""
|
|
|
|
items_for_remote_cache = {}
|
|
|
|
message_ids = []
|
|
|
|
for changed_message in changed_messages:
|
|
|
|
message_ids.append(changed_message.id)
|
2017-10-20 20:29:49 +02:00
|
|
|
key = to_dict_cache_key_id(changed_message.id)
|
|
|
|
value = MessageDict.to_dict_uncached(changed_message)
|
|
|
|
items_for_remote_cache[key] = (value,)
|
|
|
|
|
2016-12-08 02:26:16 +01:00
|
|
|
cache_set_many(items_for_remote_cache)
|
|
|
|
return message_ids
|
|
|
|
|
2016-10-27 12:06:44 +02:00
|
|
|
# We use transaction.atomic to support select_for_update in the attachment codepath.
|
|
|
|
@transaction.atomic
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_embedded_data(user_profile: UserProfile,
|
|
|
|
message: Message,
|
|
|
|
content: Optional[Text],
|
|
|
|
rendered_content: Optional[Text]) -> None:
|
2016-10-27 12:06:44 +02:00
|
|
|
event = {
|
|
|
|
'type': 'update_message',
|
|
|
|
'sender': user_profile.email,
|
|
|
|
'message_id': message.id} # type: Dict[str, Any]
|
|
|
|
changed_messages = [message]
|
|
|
|
|
|
|
|
ums = UserMessage.objects.filter(message=message.id)
|
|
|
|
|
|
|
|
if content is not None:
|
|
|
|
update_user_message_flags(message, ums)
|
|
|
|
message.content = content
|
|
|
|
message.rendered_content = rendered_content
|
|
|
|
message.rendered_content_version = bugdown_version
|
|
|
|
event["content"] = content
|
|
|
|
event["rendered_content"] = rendered_content
|
|
|
|
|
|
|
|
message.save(update_fields=["content", "rendered_content",
"rendered_content_version"])
|
|
|
|
|
|
|
|
event['message_ids'] = update_to_dict_cache(changed_messages)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def user_info(um: UserMessage) -> Dict[str, Any]:
|
2016-10-27 12:06:44 +02:00
|
|
|
return {
|
|
|
|
'id': um.user_profile_id,
|
|
|
|
'flags': um.flags_list()
|
|
|
|
}
|
|
|
|
send_event(event, list(map(user_info, ums)))
|
|
|
|
|
2016-07-22 23:45:24 +02:00
|
|
|
# We use transaction.atomic to support select_for_update in the attachment codepath.
|
|
|
|
@transaction.atomic
|
2017-10-27 14:46:01 +02:00
|
|
|
def do_update_message(user_profile, message, topic_name, propagate_mode,
|
2017-10-03 16:25:12 +02:00
|
|
|
content, rendered_content,
|
|
|
|
prior_mention_user_ids, mention_user_ids):
|
|
|
|
# type: (UserProfile, Message, Optional[Text], str, Optional[Text], Optional[Text], Set[int], Set[int]) -> int
|
2013-05-14 21:18:11 +02:00
|
|
|
event = {'type': 'update_message',
|
2017-02-20 00:23:42 +01:00
|
|
|
# TODO: We probably want to remove the 'sender' field
|
|
|
|
# after confirming it isn't used by any consumers.
|
2013-05-14 21:18:11 +02:00
|
|
|
'sender': user_profile.email,
|
2017-02-20 00:23:42 +01:00
|
|
|
'user_id': user_profile.id,
|
2017-05-17 20:39:57 +02:00
|
|
|
'message_id': message.id} # type: Dict[str, Any]
|
2017-02-20 00:23:42 +01:00
|
|
|
edit_history_event = {
|
|
|
|
'user_id': user_profile.id,
|
2017-05-17 20:39:57 +02:00
|
|
|
} # type: Dict[str, Any]
|
2013-09-03 22:07:59 +02:00
|
|
|
changed_messages = [message]
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2017-10-28 21:53:47 +02:00
|
|
|
if message.is_stream_message():
|
2017-10-03 16:25:12 +02:00
|
|
|
stream_id = message.recipient.type_id
|
|
|
|
event['stream_name'] = Stream.objects.get(id=stream_id).name
|
|
|
|
|
2013-06-06 19:51:35 +02:00
|
|
|
# Set first_rendered_content to be the oldest version of the
|
|
|
|
# rendered content recorded; which is the current version if the
|
|
|
|
# content hasn't been edited before. Note that because one could
|
2017-10-27 14:46:01 +02:00
|
|
|
# have edited just the topic_name, not every edit history event
|
2013-06-06 19:51:35 +02:00
|
|
|
# contains a prev_rendered_content element.
|
|
|
|
first_rendered_content = message.rendered_content
|
2013-05-31 21:06:05 +02:00
|
|
|
if message.edit_history is not None:
|
2013-06-18 23:55:55 +02:00
|
|
|
edit_history = ujson.loads(message.edit_history)
|
2013-06-06 19:51:35 +02:00
|
|
|
for old_edit_history_event in edit_history:
|
|
|
|
if 'prev_rendered_content' in old_edit_history_event:
|
|
|
|
first_rendered_content = old_edit_history_event['prev_rendered_content']
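#
# For reference (editor's addition), edit_history is a JSON list ordered
# newest-first (see the insert(0, ...) below); each entry may carry any of
# the keys this function sets, e.g. (values hypothetical):
#
#     [{"user_id": 7, "timestamp": 1510000000,
#       "prev_content": "old text",
#       "prev_rendered_content": "<p>old text</p>",
#       "prev_rendered_content_version": 1,
#       "prev_subject": "old topic"}]
#
# Only content edits carry prev_rendered_content, which is why the loop
# above keeps overwriting first_rendered_content: the last hit is the
# oldest entry in the list.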
|
2013-05-31 21:06:05 +02:00
|
|
|
|
2016-06-21 21:34:41 +02:00
|
|
|
ums = UserMessage.objects.filter(message=message.id)
|
2014-01-08 19:37:15 +01:00
|
|
|
|
2013-05-14 21:18:11 +02:00
|
|
|
if content is not None:
|
2014-01-08 19:42:45 +01:00
|
|
|
update_user_message_flags(message, ums)
|
|
|
|
|
2013-07-23 21:37:49 +02:00
|
|
|
# We are turning off diff highlighting everywhere until ticket #1532 is addressed.
|
|
|
|
if False:
|
2013-06-06 16:40:07 +02:00
|
|
|
# Don't highlight message edit diffs on prod
|
|
|
|
rendered_content = highlight_html_differences(first_rendered_content, rendered_content)
|
2013-05-31 21:06:05 +02:00
|
|
|
|
2017-07-16 11:00:44 +02:00
|
|
|
# One could imagine checking realm.allow_edit_history here and
|
|
|
|
# modifying the events based on that setting, but doing so
|
|
|
|
# doesn't really make sense. We need to send the edit event
|
|
|
|
# to clients regardless, and a client already had access to
|
|
|
|
# the original/pre-edit content of the message anyway. That
|
|
|
|
# setting must be enforced on the client side, and making a
|
|
|
|
# change here simply complicates the logic for clients parsing
|
|
|
|
# edit history events.
|
2013-05-14 21:18:11 +02:00
|
|
|
event['orig_content'] = message.content
|
|
|
|
event['orig_rendered_content'] = message.rendered_content
|
2013-05-23 22:31:14 +02:00
|
|
|
edit_history_event["prev_content"] = message.content
|
|
|
|
edit_history_event["prev_rendered_content"] = message.rendered_content
|
|
|
|
edit_history_event["prev_rendered_content_version"] = message.rendered_content_version
|
2013-05-14 21:18:11 +02:00
|
|
|
message.content = content
|
2016-10-04 16:49:16 +02:00
|
|
|
message.rendered_content = rendered_content
|
|
|
|
message.rendered_content_version = bugdown_version
|
2013-05-14 21:18:11 +02:00
|
|
|
event["content"] = content
|
|
|
|
event["rendered_content"] = rendered_content
|
2017-02-20 00:23:08 +01:00
|
|
|
event['prev_rendered_content_version'] = message.rendered_content_version
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2016-07-07 09:47:15 +02:00
|
|
|
prev_content = edit_history_event['prev_content']
|
|
|
|
if Message.content_has_attachment(prev_content) or Message.content_has_attachment(message.content):
|
|
|
|
check_attachment_reference_change(prev_content, message)
|
|
|
|
|
2017-10-28 21:53:47 +02:00
|
|
|
if message.is_stream_message():
|
2017-10-27 14:46:01 +02:00
|
|
|
if topic_name is not None:
|
|
|
|
new_topic_name = topic_name
|
2017-10-24 00:07:03 +02:00
|
|
|
else:
|
|
|
|
new_topic_name = message.topic_name()
|
|
|
|
|
|
|
|
stream_topic = StreamTopicTarget(
|
|
|
|
stream_id=stream_id,
|
|
|
|
topic_name=new_topic_name,
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
stream_topic = None
|
|
|
|
|
2017-10-03 16:25:12 +02:00
|
|
|
# TODO: We may want a slightly leaner version of this function for updates.
|
2017-10-24 00:07:03 +02:00
|
|
|
info = get_recipient_info(
|
|
|
|
recipient=message.recipient,
|
|
|
|
sender_id=message.sender_id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
|
|
|
|
2017-10-03 16:25:12 +02:00
|
|
|
event['push_notify_user_ids'] = list(info['push_notify_user_ids'])
|
|
|
|
event['stream_push_user_ids'] = list(info['stream_push_user_ids'])
|
|
|
|
event['prior_mention_user_ids'] = list(prior_mention_user_ids)
|
|
|
|
event['mention_user_ids'] = list(mention_user_ids)
|
2017-10-07 17:59:19 +02:00
|
|
|
event['presence_idle_user_ids'] = filter_presence_idle_user_ids(info['active_user_ids'])
|
2017-10-03 16:25:12 +02:00
|
|
|
|
2017-10-27 14:46:01 +02:00
|
|
|
if topic_name is not None:
|
|
|
|
orig_topic_name = message.topic_name()
|
|
|
|
topic_name = truncate_topic(topic_name)
|
|
|
|
event["orig_subject"] = orig_topic_name
|
2014-03-11 21:33:50 +01:00
|
|
|
event["propagate_mode"] = propagate_mode
|
2017-10-27 14:46:01 +02:00
|
|
|
message.subject = topic_name
|
2014-03-11 14:40:22 +01:00
|
|
|
event["stream_id"] = message.recipient.type_id
|
2017-10-27 14:46:01 +02:00
|
|
|
event["subject"] = topic_name
|
|
|
|
event['subject_links'] = bugdown.subject_links(message.sender.realm_id, topic_name)
|
|
|
|
edit_history_event["prev_subject"] = orig_topic_name
|
2013-09-03 22:07:59 +02:00
|
|
|
|
2013-09-13 18:12:29 +02:00
|
|
|
if propagate_mode in ["change_later", "change_all"]:
|
2017-10-27 14:46:01 +02:00
|
|
|
propagate_query = Q(recipient=message.recipient, subject=orig_topic_name)
|
2013-09-13 18:12:29 +02:00
|
|
|
# We only change messages up to 2 days in the past, to avoid hammering our
|
|
|
|
# DB by changing an unbounded amount of messages
|
|
|
|
if propagate_mode == 'change_all':
|
2017-04-15 04:03:56 +02:00
|
|
|
before_bound = timezone_now() - datetime.timedelta(days=2)
|
2013-09-13 18:12:29 +02:00
|
|
|
|
2016-11-30 14:17:35 +01:00
|
|
|
propagate_query = (propagate_query & ~Q(id=message.id) &
|
2017-04-15 04:03:56 +02:00
|
|
|
Q(pub_date__range=(before_bound, timezone_now())))
|
2013-09-13 18:12:29 +02:00
|
|
|
if propagate_mode == 'change_later':
|
|
|
|
propagate_query = propagate_query & Q(id__gt=message.id)
|
|
|
|
|
2016-11-09 13:44:29 +01:00
|
|
|
messages = Message.objects.filter(propagate_query).select_related()
|
2013-09-03 22:07:59 +02:00
|
|
|
|
|
|
|
# Evaluate the query before running the update
|
|
|
|
messages_list = list(messages)
|
2017-10-27 14:46:01 +02:00
|
|
|
messages.update(subject=topic_name)
|
2013-09-03 22:07:59 +02:00
|
|
|
|
|
|
|
for m in messages_list:
|
|
|
|
# The cached ORM object is not changed by messages.update()
|
2016-03-31 03:39:51 +02:00
|
|
|
# and the remote cache update requires the new value
|
2017-10-27 14:46:01 +02:00
|
|
|
m.subject = topic_name
|
2013-09-03 22:07:59 +02:00
|
|
|
|
|
|
|
changed_messages += messages_list
|
2013-05-21 17:48:46 +02:00
|
|
|
|
2017-04-15 04:03:56 +02:00
|
|
|
message.last_edit_time = timezone_now()
|
2017-05-24 23:49:19 +02:00
|
|
|
assert message.last_edit_time is not None # assert needed because stubs for django are missing
|
2013-05-21 17:48:46 +02:00
|
|
|
event['edit_timestamp'] = datetime_to_timestamp(message.last_edit_time)
|
|
|
|
edit_history_event['timestamp'] = event['edit_timestamp']
|
|
|
|
if message.edit_history is not None:
|
|
|
|
edit_history.insert(0, edit_history_event)
|
|
|
|
else:
|
|
|
|
edit_history = [edit_history_event]
|
2013-06-18 23:55:55 +02:00
|
|
|
message.edit_history = ujson.dumps(edit_history)
|
2013-05-14 21:18:11 +02:00
|
|
|
|
|
|
|
message.save(update_fields=["subject", "content", "rendered_content",
|
2013-05-21 17:48:46 +02:00
|
|
|
"rendered_content_version", "last_edit_time",
|
|
|
|
"edit_history"])
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2016-12-08 02:26:16 +01:00
|
|
|
event['message_ids'] = update_to_dict_cache(changed_messages)
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def user_info(um: UserMessage) -> Dict[str, Any]:
|
2014-01-08 19:37:15 +01:00
|
|
|
return {
|
|
|
|
'id': um.user_profile_id,
|
|
|
|
'flags': um.flags_list()
|
|
|
|
}
|
2015-11-01 17:14:53 +01:00
|
|
|
send_event(event, list(map(user_info, ums)))
|
2017-01-24 02:07:12 +01:00
|
|
|
return len(changed_messages)
|
2013-05-14 21:18:11 +02:00
|
|
|
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_delete_message(user_profile: UserProfile, message: Message) -> None:
|
2017-05-14 21:14:26 +02:00
|
|
|
event = {
|
|
|
|
'type': 'delete_message',
|
|
|
|
'sender': user_profile.email,
|
|
|
|
'message_id': message.id} # type: Dict[str, Any]
|
|
|
|
ums = [{'id': um.user_profile_id} for um in
|
|
|
|
UserMessage.objects.filter(message=message.id)]
|
|
|
|
move_message_to_archive(message.id)
|
|
|
|
send_event(event, ums)
|
|
|
|
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def encode_email_address(stream: Stream) -> Text:
|
2013-10-02 19:46:40 +02:00
|
|
|
return encode_email_address_helper(stream.name, stream.email_token)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def encode_email_address_helper(name: Text, email_token: Text) -> Text:
|
2013-11-12 16:19:38 +01:00
|
|
|
# Some deployments may not use the email gateway
|
|
|
|
if settings.EMAIL_GATEWAY_PATTERN == '':
|
|
|
|
return ''
|
|
|
|
|
2013-08-12 22:12:43 +02:00
|
|
|
# Given the fact that we have almost no restrictions on stream names and
|
|
|
|
# that what characters are allowed in e-mail addresses is complicated and
|
|
|
|
# dependent on context in the address, we opt for a very simple scheme:
|
|
|
|
#
|
|
|
|
# Only encode the stream name (leave the + and token alone). Encode
|
|
|
|
# everything that isn't alphanumeric plus _ as the percent-prefixed integer
|
|
|
|
# ordinal of that character, padded with zeroes to the maximum number of
|
|
|
|
# bytes of a UTF-8 encoded Unicode character.
|
2013-10-02 19:46:40 +02:00
|
|
|
encoded_name = re.sub("\W", lambda x: "%" + str(ord(x.group(0))).zfill(4), name)
|
2013-10-08 21:02:47 +02:00
|
|
|
encoded_token = "%s+%s" % (encoded_name, email_token)
|
|
|
|
return settings.EMAIL_GATEWAY_PATTERN % (encoded_token,)
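#
# Worked example (editor's addition; the pattern and token are
# hypothetical): with EMAIL_GATEWAY_PATTERN = "%s@streams.example.com",
# a stream named "denmark & co" with email_token "abc123" encodes as:
#
#     re.sub(r"\W", ...) maps " " -> "%0032" and "&" -> "%0038", giving
#     "denmark%0032%0038%0032co", and the final address is
#     "denmark%0032%0038%0032co+abc123@streams.example.com"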
|
2013-08-12 22:12:43 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_email_gateway_message_string_from_address(address: Text) -> Optional[Text]:
|
2013-10-08 21:02:47 +02:00
|
|
|
pattern_parts = [re.escape(part) for part in settings.EMAIL_GATEWAY_PATTERN.split('%s')]
|
2016-07-19 07:04:14 +02:00
|
|
|
if settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK:
|
2014-02-04 19:07:29 +01:00
|
|
|
# Accept mails delivered to any Zulip server
|
2016-07-19 07:04:14 +02:00
|
|
|
pattern_parts[-1] = settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK
|
2013-10-08 21:02:47 +02:00
|
|
|
match_email_re = re.compile("(.*?)".join(pattern_parts))
|
2015-10-14 17:11:50 +02:00
|
|
|
match = match_email_re.match(address)
|
2013-10-08 21:02:47 +02:00
|
|
|
|
|
|
|
if not match:
|
|
|
|
return None
|
|
|
|
|
2015-10-14 17:11:50 +02:00
|
|
|
msg_string = match.group(1)
|
|
|
|
|
|
|
|
return msg_string
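#
# Sketch of the regex construction above (editor's addition; pattern is
# hypothetical): EMAIL_GATEWAY_PATTERN = "%s@streams.example.com" splits
# on '%s' into ['', '@streams.example.com']; each part is re.escape()d
# and the parts are joined with "(.*?)", yielding
#
#     (.*?)@streams\.example\.com
#
# so match.group(1) recovers whatever stood in for %s.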
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def decode_email_address(email: Text) -> Optional[Tuple[Text, Text]]:
|
2015-10-14 17:11:50 +02:00
|
|
|
# Perform the reverse of encode_email_address. Returns a tuple of (stream_name, email_token)
|
|
|
|
msg_string = get_email_gateway_message_string_from_address(email)
|
|
|
|
|
2017-05-24 23:48:45 +02:00
|
|
|
if msg_string is None:
|
|
|
|
return None
|
|
|
|
elif '.' in msg_string:
|
2014-01-13 23:20:55 +01:00
|
|
|
# Workaround for Google Groups and other programs that don't accept emails
|
|
|
|
# that have + signs in them (see Trac #2102)
|
2015-10-14 17:11:50 +02:00
|
|
|
encoded_stream_name, token = msg_string.split('.')
|
2014-01-13 23:20:55 +01:00
|
|
|
else:
|
2015-10-14 17:11:50 +02:00
|
|
|
encoded_stream_name, token = msg_string.split('+')
|
2014-01-13 23:20:55 +01:00
|
|
|
stream_name = re.sub("%\d{4}", lambda x: unichr(int(x.group(0)[1:])), encoded_stream_name)
|
|
|
|
return stream_name, token
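#
# Round trip of the encoding example above (editor's addition; values
# hypothetical):
#
#     decode_email_address("denmark%0032%0038%0032co+abc123@streams.example.com")
#     # msg_string -> "denmark%0032%0038%0032co+abc123"
#     # split on '+' -> ("denmark%0032%0038%0032co", "abc123")
#     # re.sub(r"%\d{4}", ...) -> ("denmark & co", "abc123")
#
# The '.' branch handles mail systems (e.g. Google Groups) that rewrite
# the '+' to a '.'.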
|
2013-08-12 22:12:43 +02:00
|
|
|
|
2013-10-02 18:45:10 +02:00
|
|
|
# In general, it's better to avoid using .values() because it makes
|
|
|
|
# the code pretty ugly, but in this case, it has significant
|
|
|
|
# performance impact for loading / for users with large numbers of
|
|
|
|
# subscriptions, so it's worth optimizing.
|
2017-11-05 11:15:10 +01:00
|
|
|
SubHelperT = Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]]
|
|
|
|
def gather_subscriptions_helper(user_profile: UserProfile,
|
|
|
|
include_subscribers: bool=True) -> SubHelperT:
|
2017-10-29 17:11:11 +01:00
|
|
|
sub_dicts = get_stream_subscriptions_for_user(user_profile).values(
|
2017-09-13 20:00:36 +02:00
|
|
|
"recipient_id", "in_home_view", "color", "desktop_notifications",
|
|
|
|
"audible_notifications", "push_notifications", "active", "pin_to_top"
|
|
|
|
).order_by("recipient_id")
|
|
|
|
|
|
|
|
sub_dicts = list(sub_dicts)
|
|
|
|
sub_recipient_ids = [
|
|
|
|
sub['recipient_id']
|
|
|
|
for sub in sub_dicts
|
|
|
|
]
|
|
|
|
stream_recipient = StreamRecipientMap()
|
|
|
|
stream_recipient.populate_for_recipient_ids(sub_recipient_ids)
|
|
|
|
|
|
|
|
stream_ids = set() # type: Set[int]
|
|
|
|
for sub in sub_dicts:
|
|
|
|
sub['stream_id'] = stream_recipient.stream_id_for(sub['recipient_id'])
|
|
|
|
stream_ids.add(sub['stream_id'])
|
2013-01-28 23:06:35 +01:00
|
|
|
|
2016-07-12 23:57:16 +02:00
|
|
|
all_streams = get_active_streams(user_profile.realm).select_related(
|
2016-12-03 18:07:49 +01:00
|
|
|
"realm").values("id", "name", "invite_only", "realm_id",
|
2017-03-13 23:17:41 +01:00
|
|
|
"email_token", "description")
|
2013-02-12 20:42:59 +01:00
|
|
|
|
2016-07-12 23:57:16 +02:00
|
|
|
stream_dicts = [stream for stream in all_streams if stream['id'] in stream_ids]
|
2013-02-12 20:42:59 +01:00
|
|
|
stream_hash = {}
|
2013-10-02 19:46:40 +02:00
|
|
|
for stream in stream_dicts:
|
|
|
|
stream_hash[stream["id"]] = stream
|
2013-02-12 20:42:59 +01:00
|
|
|
|
2016-07-12 23:57:16 +02:00
|
|
|
all_streams_id = [stream["id"] for stream in all_streams]
|
|
|
|
|
2013-06-12 21:15:32 +02:00
|
|
|
subscribed = []
|
|
|
|
unsubscribed = []
|
2016-07-12 23:57:16 +02:00
|
|
|
never_subscribed = []
|
2013-06-12 21:15:32 +02:00
|
|
|
|
2014-01-24 23:30:53 +01:00
|
|
|
# Deactivated streams aren't in stream_hash.
|
2017-09-13 20:00:36 +02:00
|
|
|
streams = [stream_hash[sub["stream_id"]] for sub in sub_dicts
|
|
|
|
if sub["stream_id"] in stream_hash]
|
|
|
|
streams_subscribed_map = dict((sub["stream_id"], sub["active"]) for sub in sub_dicts)
|
2013-09-30 22:09:43 +02:00
|
|
|
|
2016-07-12 23:57:16 +02:00
|
|
|
# Add never subscribed streams to streams_subscribed_map
|
|
|
|
streams_subscribed_map.update({stream['id']: False for stream in all_streams if stream not in streams})
|
|
|
|
|
2017-02-20 07:52:37 +01:00
|
|
|
if include_subscribers:
|
2017-09-13 20:00:36 +02:00
|
|
|
subscriber_map = bulk_get_subscriber_user_ids(
|
|
|
|
all_streams,
|
|
|
|
user_profile,
|
|
|
|
streams_subscribed_map,
|
|
|
|
stream_recipient
|
|
|
|
) # type: Mapping[int, Optional[List[int]]]
|
2017-02-20 07:52:37 +01:00
|
|
|
else:
|
|
|
|
# If we're not including subscribers, always return None,
|
|
|
|
# which the below code needs to check for anyway.
|
|
|
|
subscriber_map = defaultdict(lambda: None)
|
2016-07-12 23:57:16 +02:00
|
|
|
|
|
|
|
sub_unsub_stream_ids = set()
|
2013-10-02 18:45:10 +02:00
|
|
|
for sub in sub_dicts:
|
2017-09-13 20:00:36 +02:00
|
|
|
sub_unsub_stream_ids.add(sub["stream_id"])
|
|
|
|
stream = stream_hash.get(sub["stream_id"])
|
2014-01-24 23:30:53 +01:00
|
|
|
if not stream:
|
|
|
|
# This stream has been deactivated, don't include it.
|
|
|
|
continue
|
|
|
|
|
2017-03-19 23:29:29 +01:00
|
|
|
subscribers = subscriber_map[stream["id"]] # type: Optional[List[int]]
|
2013-09-07 02:20:51 +02:00
|
|
|
|
|
|
|
# Important: don't show the subscribers if the stream is invite only
|
|
|
|
# and this user isn't on it anymore.
|
2013-10-02 19:46:40 +02:00
|
|
|
if stream["invite_only"] and not sub["active"]:
|
2013-09-07 02:20:51 +02:00
|
|
|
subscribers = None
|
|
|
|
|
2013-10-02 19:46:40 +02:00
|
|
|
stream_dict = {'name': stream["name"],
|
2013-10-02 18:45:10 +02:00
|
|
|
'in_home_view': sub["in_home_view"],
|
2013-10-02 19:46:40 +02:00
|
|
|
'invite_only': stream["invite_only"],
|
2013-10-02 18:45:10 +02:00
|
|
|
'color': sub["color"],
|
2014-02-05 23:21:02 +01:00
|
|
|
'desktop_notifications': sub["desktop_notifications"],
|
|
|
|
'audible_notifications': sub["audible_notifications"],
|
2017-08-17 16:55:32 +02:00
|
|
|
'push_notifications': sub["push_notifications"],
|
2016-07-01 07:26:09 +02:00
|
|
|
'pin_to_top': sub["pin_to_top"],
|
2014-02-05 19:58:41 +01:00
|
|
|
'stream_id': stream["id"],
|
2014-01-24 20:40:06 +01:00
|
|
|
'description': stream["description"],
|
2013-10-02 19:46:40 +02:00
|
|
|
'email_address': encode_email_address_helper(stream["name"], stream["email_token"])}
|
2013-09-07 02:20:51 +02:00
|
|
|
if subscribers is not None:
|
2013-09-30 21:53:49 +02:00
|
|
|
stream_dict['subscribers'] = subscribers
|
2013-10-02 18:45:10 +02:00
|
|
|
if sub["active"]:
|
2013-09-30 21:53:49 +02:00
|
|
|
subscribed.append(stream_dict)
|
2013-06-12 21:15:32 +02:00
|
|
|
else:
|
2013-09-30 21:53:49 +02:00
|
|
|
unsubscribed.append(stream_dict)
|
2013-01-28 23:06:35 +01:00
|
|
|
|
2016-07-12 23:57:16 +02:00
|
|
|
all_streams_id_set = set(all_streams_id)
|
2016-10-23 06:04:28 +02:00
|
|
|
# Listing public streams is disabled for Zephyr mirroring realms.
|
|
|
|
if user_profile.realm.is_zephyr_mirror_realm:
|
2017-05-17 20:39:57 +02:00
|
|
|
never_subscribed_stream_ids = set() # type: Set[int]
|
2016-10-23 06:04:28 +02:00
|
|
|
else:
|
|
|
|
never_subscribed_stream_ids = all_streams_id_set - sub_unsub_stream_ids
|
2016-07-12 23:57:16 +02:00
|
|
|
never_subscribed_streams = [ns_stream_dict for ns_stream_dict in all_streams
|
|
|
|
if ns_stream_dict['id'] in never_subscribed_stream_ids]
|
|
|
|
|
|
|
|
for stream in never_subscribed_streams:
|
2017-08-22 16:18:35 +02:00
|
|
|
is_public = (not stream['invite_only'])
|
|
|
|
if is_public or user_profile.is_realm_admin:
|
2016-07-12 23:57:16 +02:00
|
|
|
stream_dict = {'name': stream['name'],
|
|
|
|
'invite_only': stream['invite_only'],
|
|
|
|
'stream_id': stream['id'],
|
|
|
|
'description': stream['description']}
|
2017-08-22 16:18:35 +02:00
|
|
|
if is_public:
|
|
|
|
subscribers = subscriber_map[stream["id"]]
|
|
|
|
if subscribers is not None:
|
|
|
|
stream_dict['subscribers'] = subscribers
|
2016-07-12 23:57:16 +02:00
|
|
|
never_subscribed.append(stream_dict)
|
|
|
|
|
2016-10-30 20:15:43 +01:00
|
|
|
return (sorted(subscribed, key=lambda x: x['name']),
|
|
|
|
sorted(unsubscribed, key=lambda x: x['name']),
|
|
|
|
sorted(never_subscribed, key=lambda x: x['name']))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def gather_subscriptions(user_profile: UserProfile) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
|
2016-10-30 20:15:43 +01:00
|
|
|
subscribed, unsubscribed, never_subscribed = gather_subscriptions_helper(user_profile)
|
2013-10-20 21:35:58 +02:00
|
|
|
user_ids = set()
|
2016-10-23 06:29:56 +02:00
|
|
|
for subs in [subscribed, unsubscribed, never_subscribed]:
|
2013-10-20 21:35:58 +02:00
|
|
|
for sub in subs:
|
|
|
|
if 'subscribers' in sub:
|
|
|
|
for subscriber in sub['subscribers']:
|
|
|
|
user_ids.add(subscriber)
|
|
|
|
email_dict = get_emails_from_user_ids(list(user_ids))
|
|
|
|
|
|
|
|
for subs in [subscribed, unsubscribed]:
|
|
|
|
for sub in subs:
|
|
|
|
if 'subscribers' in sub:
|
2017-09-13 23:46:58 +02:00
|
|
|
sub['subscribers'] = sorted([email_dict[user_id] for user_id in sub['subscribers']])
|
2013-10-20 21:35:58 +02:00
|
|
|
|
|
|
|
return (subscribed, unsubscribed)
|
2013-03-28 18:07:03 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_active_presence_idle_user_ids(realm: Realm,
|
|
|
|
sender_id: int,
|
|
|
|
message_type: str,
|
|
|
|
active_user_ids: Set[int],
|
|
|
|
user_flags: Dict[int, List[str]]) -> List[int]:
|
2017-10-02 16:35:35 +02:00
|
|
|
'''
|
|
|
|
Given a list of active_user_ids, we build up a subset
|
|
|
|
of those users who fit these criteria:
|
|
|
|
|
|
|
|
* They are likely to need notifications (either due
|
|
|
|
to mentions or being PM'ed).
|
|
|
|
* They are no longer "present" according to the
|
|
|
|
UserPresence table.
|
|
|
|
'''
|
|
|
|
|
2017-09-05 20:50:25 +02:00
|
|
|
if realm.presence_disabled:
|
|
|
|
return []
|
|
|
|
|
|
|
|
is_pm = message_type == 'private'
|
|
|
|
|
|
|
|
user_ids = set()
|
2017-09-09 04:14:28 +02:00
|
|
|
for user_id in active_user_ids:
|
|
|
|
flags = user_flags.get(user_id, []) # type: Iterable[str]
|
2017-09-05 20:50:25 +02:00
|
|
|
mentioned = 'mentioned' in flags
|
2017-09-14 14:30:16 +02:00
|
|
|
private_message = is_pm and user_id != sender_id
|
|
|
|
if mentioned or private_message:
|
2017-09-09 04:14:28 +02:00
|
|
|
user_ids.add(user_id)
|
2017-09-05 20:50:25 +02:00
|
|
|
|
2017-10-07 17:59:19 +02:00
|
|
|
return filter_presence_idle_user_ids(user_ids)
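#
# Example (editor's addition; ids and flags hypothetical): for a private
# message from user 1 with active_user_ids = {1, 2, 3} and
# user_flags = {2: ['mentioned']}, users 2 and 3 qualify (2 via the
# mention, 3 via is_pm while not being the sender); user 1 is skipped as
# the sender. The result is then narrowed to whichever of {2, 3} lacks a
# fresh UserPresence row.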
|
2017-09-27 15:50:38 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
|
2017-09-05 20:50:25 +02:00
|
|
|
if not user_ids:
|
|
|
|
return []
|
|
|
|
|
|
|
|
# 140 seconds is consistent with presence.js:OFFLINE_THRESHOLD_SECS
|
|
|
|
recent = timezone_now() - datetime.timedelta(seconds=140)
|
|
|
|
rows = UserPresence.objects.filter(
|
|
|
|
user_profile_id__in=user_ids,
|
|
|
|
timestamp__gte=recent
|
|
|
|
).distinct('user_profile_id').values('user_profile_id')
|
|
|
|
active_user_ids = {row['user_profile_id'] for row in rows}
|
|
|
|
idle_user_ids = user_ids - active_user_ids
|
|
|
|
return sorted(list(idle_user_ids))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_status_dict(requesting_user_profile: UserProfile) -> Dict[Text, Dict[Text, Dict[str, Any]]]:
|
2016-07-27 02:09:10 +02:00
|
|
|
if requesting_user_profile.realm.presence_disabled:
|
2016-07-27 01:45:29 +02:00
|
|
|
# Return an empty dict if presence is disabled in this realm
|
2013-09-13 23:33:11 +02:00
|
|
|
return defaultdict(dict)
|
2013-04-05 00:13:03 +02:00
|
|
|
|
2013-09-14 23:59:03 +02:00
|
|
|
return UserPresence.get_status_dict_by_realm(requesting_user_profile.realm_id)
|
2013-04-05 00:13:03 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_cross_realm_dicts() -> List[Dict[str, Any]]:
|
2017-12-07 21:15:34 +01:00
|
|
|
users = bulk_get_users(list(settings.CROSS_REALM_BOT_EMAILS), None,
|
2017-11-16 02:28:50 +01:00
|
|
|
base_query=UserProfile.objects.filter(
|
2017-11-27 23:46:07 +01:00
|
|
|
realm__string_id=settings.SYSTEM_BOT_REALM)).values()
|
2016-12-02 00:08:34 +01:00
|
|
|
return [{'email': user.email,
|
|
|
|
'user_id': user.id,
|
|
|
|
'is_admin': user.is_realm_admin,
|
|
|
|
'is_bot': user.is_bot,
|
|
|
|
'full_name': user.full_name}
|
2017-11-27 23:35:50 +01:00
|
|
|
for user in users
|
2017-11-27 23:41:05 +01:00
|
|
|
# Important: We filter here, in addition to in
|
|
|
|
# `base_query`, because of how bulk_get_users shares its
|
|
|
|
# cache with other UserProfile caches.
|
2017-11-27 23:46:07 +01:00
|
|
|
if user.realm.string_id == settings.SYSTEM_BOT_REALM]
|
2016-11-02 23:48:47 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_send_confirmation_email(invitee: PreregistrationUser,
|
2017-12-06 22:31:11 +01:00
|
|
|
referrer: UserProfile) -> None:
|
2013-04-08 18:27:07 +02:00
|
|
|
"""
|
|
|
|
Send the confirmation/welcome e-mail to an invited user.
|
|
|
|
"""
|
2017-07-08 04:38:13 +02:00
|
|
|
activation_url = create_confirmation_link(invitee, referrer.realm.host, Confirmation.INVITATION)
|
2017-12-06 22:31:11 +01:00
|
|
|
context = {'referrer': referrer, 'activate_url': activation_url,
|
2017-07-14 03:38:12 +02:00
|
|
|
'referrer_realm_name': referrer.realm.name}
|
2017-07-09 23:44:12 +02:00
|
|
|
from_name = u"%s (via Zulip)" % (referrer.full_name,)
|
2017-07-11 05:01:32 +02:00
|
|
|
send_email('zerver/emails/invitation', to_email=invitee.email, from_name=from_name,
|
2017-07-09 23:44:12 +02:00
|
|
|
from_address=FromAddress.NOREPLY, context=context)
|
2013-05-03 20:24:55 +02:00
|
|
|
|
2017-11-22 20:05:53 +01:00
|
|
|
def email_not_system_bot(email: Text) -> None:
|
2017-12-07 21:15:34 +01:00
|
|
|
if is_cross_realm_bot_email(email):
|
2017-11-22 20:05:53 +01:00
|
|
|
raise ValidationError('%s is an email address reserved for system bots' % (email,))
|
2013-07-08 17:57:04 +02:00
|
|
|
|
2017-11-22 20:22:11 +01:00
|
|
|
def validate_email_for_realm(target_realm: Realm, email: Text) -> None:
|
2017-08-25 07:05:27 +02:00
|
|
|
try:
|
2017-11-22 20:22:11 +01:00
|
|
|
# Registering with a system bot's email is not allowed...
|
|
|
|
email_not_system_bot(email)
|
|
|
|
except ValidationError:
|
|
|
|
# ... unless this is the first user with that email. This
|
|
|
|
# should be impossible in production, because these users are
|
|
|
|
# created by initialize_voyager_db, but it happens in a test's
|
|
|
|
# setup. (This would be a good wrinkle to clean up.)
|
|
|
|
if UserProfile.objects.filter(email__iexact=email).exists():
|
|
|
|
raise
|
|
|
|
|
|
|
|
try:
|
|
|
|
existing_user_profile = get_user(email, target_realm)
|
2017-08-25 07:05:27 +02:00
|
|
|
except UserProfile.DoesNotExist:
|
2017-11-22 20:22:11 +01:00
|
|
|
return
|
2017-08-25 07:05:27 +02:00
|
|
|
|
2017-11-22 20:22:11 +01:00
|
|
|
if existing_user_profile.is_mirror_dummy:
|
2017-08-25 07:05:27 +02:00
|
|
|
# Mirror dummy users to be activated must be inactive
|
|
|
|
if existing_user_profile.is_active:
|
2017-11-27 00:58:56 +01:00
|
|
|
raise AssertionError("Mirror dummy user is already active!")
|
2017-11-22 20:22:11 +01:00
|
|
|
else:
|
2017-08-25 07:05:27 +02:00
|
|
|
# Other users should not already exist at all.
|
2017-11-03 03:12:25 +01:00
|
|
|
raise ValidationError('%s already has an account' % (email,))
|
2017-08-25 07:05:27 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def validate_email(user_profile: UserProfile, email: Text) -> Tuple[Optional[str], Optional[str]]:
|
2017-01-16 05:35:52 +01:00
|
|
|
try:
|
|
|
|
validators.validate_email(email)
|
|
|
|
except ValidationError:
|
|
|
|
return _("Invalid address."), None
|
|
|
|
|
|
|
|
if not email_allowed_for_realm(email, user_profile.realm):
|
|
|
|
return _("Outside your domain."), None
|
|
|
|
|
|
|
|
try:
|
2017-08-25 07:05:27 +02:00
|
|
|
validate_email_for_realm(user_profile.realm, email)
|
2017-01-16 05:35:52 +01:00
|
|
|
except ValidationError:
|
|
|
|
return None, _("Already has an account.")
|
|
|
|
|
|
|
|
return None, None
|
|
|
|
|
2017-07-25 02:02:30 +02:00
|
|
|
class InvitationError(JsonableError):
|
|
|
|
code = ErrorCode.INVITATION_FAILED
|
|
|
|
data_fields = ['errors', 'sent_invitations']
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def __init__(self, msg: Text, errors: List[Tuple[Text, str]], sent_invitations: bool) -> None:
|
2017-07-25 02:02:30 +02:00
|
|
|
self._msg = msg # type: Text
|
|
|
|
self.errors = errors # type: List[Tuple[Text, str]]
|
|
|
|
self.sent_invitations = sent_invitations # type: bool
|
|
|
|
|
2017-12-05 21:39:45 +01:00
|
|
|
def estimate_recent_invites(realm: Realm, *, days: int) -> int:
|
|
|
|
'''An upper bound on the number of invites sent in the last `days` days'''
|
|
|
|
recent_invites = RealmCount.objects.filter(
|
|
|
|
realm=realm,
|
|
|
|
property='invites_sent::day',
|
|
|
|
end_time__gte=timezone_now() - datetime.timedelta(days=days)
|
|
|
|
).aggregate(Sum('value'))['value__sum']
|
|
|
|
if recent_invites is None:
|
|
|
|
return 0
|
|
|
|
return recent_invites
|
|
|
|
|
2017-12-05 07:59:27 +01:00
|
|
|
def check_invite_limit(user: UserProfile, num_invitees: int) -> None:
|
2017-12-05 21:39:45 +01:00
|
|
|
# Discourage using invitation emails as a vector for carrying spam
|
2017-11-30 01:53:09 +01:00
|
|
|
if settings.OPEN_REALM_CREATION:
|
2017-12-05 21:39:45 +01:00
|
|
|
recent_invites = estimate_recent_invites(user.realm, days=1)
|
|
|
|
if num_invitees + recent_invites > user.realm.max_invites:
|
2017-11-30 01:53:09 +01:00
|
|
|
raise InvitationError(
|
2017-12-05 08:03:09 +01:00
|
|
|
_("You do not have enough remaining invites. "
|
|
|
|
"Please contact %s to have your limit raised. "
|
2017-11-30 01:53:09 +01:00
|
|
|
"No invitations were sent." % (settings.ZULIP_ADMINISTRATOR)),
|
|
|
|
[], sent_invitations=False)
|
|
|
|
|
2017-12-05 07:59:27 +01:00
|
|
|
def do_invite_users(user_profile: UserProfile,
|
|
|
|
invitee_emails: SizedTextIterable,
|
|
|
|
streams: Iterable[Stream],
|
2017-12-06 22:31:11 +01:00
|
|
|
invite_as_admin: Optional[bool]=False) -> None:
|
2017-12-05 07:59:27 +01:00
|
|
|
|
|
|
|
check_invite_limit(user_profile, len(invitee_emails))
|
2017-12-07 05:29:41 +01:00
|
|
|
|
2017-05-17 20:39:57 +02:00
|
|
|
validated_emails = [] # type: List[Text]
|
|
|
|
errors = [] # type: List[Tuple[Text, str]]
|
|
|
|
skipped = [] # type: List[Tuple[Text, str]]
|
2013-07-08 17:57:04 +02:00
|
|
|
for email in invitee_emails:
|
|
|
|
if email == '':
|
|
|
|
continue
|
2017-01-16 05:35:52 +01:00
|
|
|
email_error, email_skipped = validate_email(user_profile, email)
|
|
|
|
if not (email_error or email_skipped):
|
|
|
|
validated_emails.append(email)
|
|
|
|
elif email_error:
|
|
|
|
errors.append((email, email_error))
|
|
|
|
elif email_skipped:
|
|
|
|
skipped.append((email, email_skipped))
|
2013-07-08 17:57:04 +02:00
|
|
|
|
|
|
|
if errors:
|
2017-07-25 02:02:30 +02:00
|
|
|
raise InvitationError(
|
|
|
|
_("Some emails did not validate, so we didn't send any invitations."),
|
|
|
|
errors + skipped, sent_invitations=False)
|
2013-07-08 17:57:04 +02:00
|
|
|
|
|
|
|
if skipped and len(skipped) == len(invitee_emails):
|
|
|
|
# All e-mails were skipped, so we didn't actually invite anyone.
|
2017-07-25 02:02:30 +02:00
|
|
|
raise InvitationError(_("We weren't able to invite anyone."),
|
|
|
|
skipped, sent_invitations=False)
|
2013-07-08 17:57:04 +02:00
|
|
|
|
2017-12-05 20:21:25 +01:00
|
|
|
# We do this here rather than in the invite queue processor since this
|
|
|
|
# is used for rate limiting invitations, rather than keeping track of
|
|
|
|
# when exactly invitations were sent
|
|
|
|
do_increment_logging_stat(user_profile.realm, COUNT_STATS['invites_sent::day'],
|
|
|
|
None, timezone_now(), increment=len(validated_emails))
|
|
|
|
|
2016-12-28 02:06:19 +01:00
|
|
|
# Now that we are past all the possible errors, we actually create
|
|
|
|
# the PreregistrationUser objects and trigger the email invitations.
|
|
|
|
for email in validated_emails:
|
|
|
|
# The logged-in user is the referrer.
|
2017-10-15 18:34:47 +02:00
|
|
|
prereg_user = PreregistrationUser(email=email, referred_by=user_profile,
|
2017-11-08 22:02:59 +01:00
|
|
|
invited_as_admin=invite_as_admin,
|
|
|
|
realm=user_profile.realm)
|
2016-12-28 02:06:19 +01:00
|
|
|
prereg_user.save()
|
2017-09-17 18:49:23 +02:00
|
|
|
stream_ids = [stream.id for stream in streams]
|
|
|
|
prereg_user.streams.set(stream_ids)
|
2016-12-28 02:06:19 +01:00
|
|
|
|
2017-12-06 22:31:11 +01:00
|
|
|
event = {"prereg_id": prereg_user.id, "referrer_id": user_profile.id}
|
2017-11-24 13:18:46 +01:00
|
|
|
queue_json_publish("invites", event)
|
2013-07-08 17:57:04 +02:00
|
|
|
|
|
|
|
if skipped:
|
2017-07-25 02:02:30 +02:00
|
|
|
raise InvitationError(_("Some of those addresses are already using Zulip, "
|
|
|
|
"so we didn't send them an invitation. We did send "
|
|
|
|
"invitations to everyone else!"),
|
|
|
|
skipped, sent_invitations=True)
|
2013-07-26 16:51:02 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_get_user_invites(user_profile: UserProfile) -> List[Dict[str, Any]]:
|
2017-10-21 03:15:12 +02:00
|
|
|
days_to_activate = getattr(settings, 'ACCOUNT_ACTIVATION_DAYS', 7)
|
|
|
|
active_value = getattr(confirmation_settings, 'STATUS_ACTIVE', 1)
|
|
|
|
|
|
|
|
lowest_datetime = timezone_now() - datetime.timedelta(days=days_to_activate)
|
|
|
|
prereg_users = PreregistrationUser.objects.exclude(status=active_value).filter(
|
|
|
|
invited_at__gte=lowest_datetime,
|
|
|
|
referred_by__realm=user_profile.realm)
|
|
|
|
|
|
|
|
invites = []
|
|
|
|
|
|
|
|
for invitee in prereg_users:
|
|
|
|
invites.append(dict(email=invitee.email,
|
|
|
|
ref=invitee.referred_by.email,
|
|
|
|
invited=invitee.invited_at.strftime("%Y-%m-%d %H:%M:%S"),
|
|
|
|
id=invitee.id))
|
|
|
|
|
|
|
|
return invites
|
|
|
|
|
2017-12-05 20:01:55 +01:00
|
|
|
def do_revoke_user_invite(prereg_user: PreregistrationUser) -> None:
|
2017-10-21 03:15:12 +02:00
|
|
|
email = prereg_user.email
|
|
|
|
|
|
|
|
# Delete both the confirmation objects and the prereg_user object.
|
|
|
|
# TODO: Probably we actually want to set the confirmation objects
|
|
|
|
# to a "revoked" status so that we can give the user a better
|
|
|
|
# error message.
|
|
|
|
content_type = ContentType.objects.get_for_model(PreregistrationUser)
|
|
|
|
Confirmation.objects.filter(content_type=content_type,
|
|
|
|
object_id=prereg_user.id).delete()
|
|
|
|
prereg_user.delete()
|
|
|
|
clear_scheduled_invitation_emails(email)
|
|
|
|
|
2017-12-05 20:01:55 +01:00
|
|
|
def do_resend_user_invite_email(prereg_user: PreregistrationUser) -> str:
|
2017-12-05 08:03:09 +01:00
|
|
|
check_invite_limit(prereg_user.referred_by, 1)
|
|
|
|
|
2017-10-21 03:15:12 +02:00
|
|
|
prereg_user.invited_at = timezone_now()
|
|
|
|
prereg_user.save()
|
|
|
|
|
2017-12-05 20:21:25 +01:00
|
|
|
do_increment_logging_stat(prereg_user.realm, COUNT_STATS['invites_sent::day'],
|
|
|
|
None, prereg_user.invited_at)
|
|
|
|
|
2017-10-21 03:15:12 +02:00
|
|
|
clear_scheduled_invitation_emails(prereg_user.email)
|
2017-12-05 07:51:25 +01:00
|
|
|
# We don't store the custom email body, so just set it to None
|
2017-12-05 09:01:41 +01:00
|
|
|
event = {"prereg_id": prereg_user.id, "referrer_id": prereg_user.referred_by.id, "email_body": None}
|
2017-12-05 07:51:25 +01:00
|
|
|
queue_json_publish("invites", event)
|
2017-10-21 03:15:12 +02:00
|
|
|
|
|
|
|
return prereg_user.invited_at.strftime("%Y-%m-%d %H:%M:%S")
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def notify_realm_emoji(realm: Realm) -> None:
|
2014-01-24 23:24:44 +01:00
|
|
|
event = dict(type="realm_emoji", op="update",
|
|
|
|
realm_emoji=realm.get_emoji())
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm.id))
|
2013-08-22 19:54:35 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def check_add_realm_emoji(realm: Realm,
|
|
|
|
name: Text,
|
|
|
|
file_name: Text,
|
|
|
|
author: Optional[UserProfile]=None) -> None:
|
2017-03-13 05:45:50 +01:00
|
|
|
emoji = RealmEmoji(realm=realm, name=name, file_name=file_name, author=author)
|
2016-02-12 21:08:56 +01:00
|
|
|
emoji.full_clean()
|
|
|
|
emoji.save()
|
2013-08-22 19:54:35 +02:00
|
|
|
notify_realm_emoji(realm)
|
2013-08-22 19:15:54 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_realm_emoji(realm: Realm, name: Text) -> None:
|
2017-05-22 17:08:37 +02:00
|
|
|
emoji = RealmEmoji.objects.get(realm=realm, name=name)
|
|
|
|
emoji.deactivated = True
|
|
|
|
emoji.save(update_fields=['deactivated'])
|
2013-08-22 19:54:35 +02:00
|
|
|
notify_realm_emoji(realm)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def notify_alert_words(user_profile: UserProfile, words: Iterable[Text]) -> None:
|
2014-01-24 23:24:44 +01:00
|
|
|
event = dict(type="alert_words", alert_words=words)
|
|
|
|
send_event(event, [user_profile.id])
|
2013-09-03 22:41:17 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_add_alert_words(user_profile: UserProfile, alert_words: Iterable[Text]) -> None:
|
2013-09-11 17:24:27 +02:00
|
|
|
words = add_user_alert_words(user_profile, alert_words)
|
|
|
|
notify_alert_words(user_profile, words)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_alert_words(user_profile: UserProfile, alert_words: Iterable[Text]) -> None:
|
2013-09-11 17:24:27 +02:00
|
|
|
words = remove_user_alert_words(user_profile, alert_words)
|
|
|
|
notify_alert_words(user_profile, words)
|
2013-09-03 22:41:17 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_set_alert_words(user_profile: UserProfile, alert_words: List[Text]) -> None:
|
2013-09-03 22:41:17 +02:00
|
|
|
set_user_alert_words(user_profile, alert_words)
|
2013-09-11 17:24:27 +02:00
|
|
|
notify_alert_words(user_profile, alert_words)
|
2013-09-10 00:06:24 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_mute_topic(user_profile: UserProfile, stream: Stream, recipient: Recipient, topic: str) -> None:
|
2017-08-30 02:19:34 +02:00
|
|
|
add_topic_mute(user_profile, stream.id, recipient.id, topic)
|
|
|
|
event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
|
|
|
|
send_event(event, [user_profile.id])
|
2017-08-24 17:58:40 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_unmute_topic(user_profile: UserProfile, stream: Stream, topic: str) -> None:
|
2017-08-30 02:19:34 +02:00
|
|
|
remove_topic_mute(user_profile, stream.id, topic)
|
2017-08-24 17:58:40 +02:00
|
|
|
event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
|
2017-03-13 22:05:35 +01:00
|
|
|
send_event(event, [user_profile.id])
|
2013-10-07 17:35:22 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_mark_hotspot_as_read(user: UserProfile, hotspot: str) -> None:
|
2017-01-24 01:48:35 +01:00
|
|
|
UserHotspot.objects.get_or_create(user=user, hotspot=hotspot)
|
|
|
|
event = dict(type="hotspots", hotspots=get_next_hotspots(user))
|
|
|
|
send_event(event, [user.id])
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def notify_realm_filters(realm: Realm) -> None:
|
2016-12-31 03:08:43 +01:00
|
|
|
realm_filters = realm_filters_for_realm(realm.id)
|
2014-01-24 23:24:44 +01:00
|
|
|
event = dict(type="realm_filters", realm_filters=realm_filters)
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm.id))
|
2014-01-06 23:42:02 +01:00
|
|
|
|
2014-01-27 19:43:55 +01:00
|
|
|
# NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
|
|
|
|
# RegExp syntax. In addition to JS-compatible syntax, the following features are available:
|
|
|
|
# * Named groups will be converted to numbered groups automatically
|
|
|
|
# * Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_add_realm_filter(realm: Realm, pattern: Text, url_format_string: Text) -> int:
|
2016-02-13 19:17:15 +01:00
|
|
|
pattern = pattern.strip()
|
|
|
|
url_format_string = url_format_string.strip()
|
|
|
|
realm_filter = RealmFilter(
|
|
|
|
realm=realm, pattern=pattern,
|
|
|
|
url_format_string=url_format_string)
|
|
|
|
realm_filter.full_clean()
|
|
|
|
realm_filter.save()
|
2014-01-06 23:42:02 +01:00
|
|
|
notify_realm_filters(realm)
|
|
|
|
|
2016-02-13 19:17:15 +01:00
|
|
|
return realm_filter.id
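To make the NOTE above do_add_realm_filter concrete, here is a hypothetical realm filter using a Python named group (the pattern and URL are invented for illustration; the web client would translate the named group to a numbered one):

    # '#1234' in a message becomes a link to an issue tracker.
    do_add_realm_filter(realm,
                        pattern=r'#(?P<id>[0-9]+)',
                        url_format_string='https://example.com/issues/%(id)s')
    # On the JavaScript side this becomes roughly /#([0-9]+)/, with the first
    # captured group substituted into the URL, since classic JS RegExp syntax
    # has no named groups.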
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_realm_filter(realm: Realm, pattern: Optional[Text]=None,
|
|
|
|
id: Optional[int]=None) -> None:
|
2016-02-13 19:17:15 +01:00
|
|
|
if pattern is not None:
|
|
|
|
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
|
|
|
|
else:
|
|
|
|
RealmFilter.objects.get(realm=realm, pk=id).delete()
|
2014-01-06 23:42:02 +01:00
|
|
|
notify_realm_filters(realm)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_emails_from_user_ids(user_ids: Sequence[int]) -> Dict[int, Text]:
|
2013-10-20 21:10:03 +02:00
|
|
|
# We may eventually use memcached to speed this up, but the DB is fast.
|
|
|
|
return UserProfile.emails_from_ids(user_ids)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_add_realm_domain(realm: Realm, domain: Text, allow_subdomains: bool) -> RealmDomain:
|
2017-03-31 20:10:29 +02:00
|
|
|
realm_domain = RealmDomain.objects.create(realm=realm, domain=domain,
|
|
|
|
allow_subdomains=allow_subdomains)
|
2016-12-26 19:19:02 +01:00
|
|
|
event = dict(type="realm_domains", op="add",
|
2017-03-31 20:10:29 +02:00
|
|
|
realm_domain=dict(domain=realm_domain.domain,
|
|
|
|
allow_subdomains=realm_domain.allow_subdomains))
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm.id))
|
2017-03-31 20:10:29 +02:00
|
|
|
return realm_domain
|
2016-12-26 19:19:02 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_realm_domain(realm_domain: RealmDomain, allow_subdomains: bool) -> None:
|
2017-03-31 20:10:29 +02:00
|
|
|
realm_domain.allow_subdomains = allow_subdomains
|
|
|
|
realm_domain.save(update_fields=['allow_subdomains'])
|
2017-01-21 08:19:03 +01:00
|
|
|
event = dict(type="realm_domains", op="change",
|
2017-03-31 20:10:29 +02:00
|
|
|
realm_domain=dict(domain=realm_domain.domain,
|
|
|
|
allow_subdomains=realm_domain.allow_subdomains))
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm_domain.realm_id))
|
2017-01-21 08:19:03 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_realm_domain(realm_domain: RealmDomain) -> None:
|
2017-03-31 20:10:29 +02:00
|
|
|
realm = realm_domain.realm
|
|
|
|
domain = realm_domain.domain
|
|
|
|
realm_domain.delete()
|
2017-03-31 16:20:07 +02:00
|
|
|
if RealmDomain.objects.filter(realm=realm).count() == 0 and realm.restricted_to_domain:
|
2017-03-31 20:10:29 +02:00
|
|
|
# If this was the last realm domain, we mark the realm as no
|
2017-01-26 10:52:56 +01:00
|
|
|
# longer restricted to domain, because the feature doesn't do
|
|
|
|
# anything if there are no domains, and this is probably less
|
|
|
|
# confusing than the alternative.
|
2017-03-21 18:08:40 +01:00
|
|
|
do_set_realm_property(realm, 'restricted_to_domain', False)
|
2017-01-21 09:09:27 +01:00
|
|
|
event = dict(type="realm_domains", op="remove", domain=domain)
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm.id))
|
2014-03-02 06:29:15 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_occupied_streams(realm: Realm) -> QuerySet:
|
2016-05-25 06:55:14 +02:00
|
|
|
# TODO: Make a generic stub for QuerySet
|
2014-03-02 06:46:54 +01:00
|
|
|
""" Get streams with subscribers """
|
2014-03-06 23:34:44 +01:00
|
|
|
subs_filter = Subscription.objects.filter(active=True, user_profile__realm=realm,
|
|
|
|
user_profile__is_active=True).values('recipient_id')
|
2014-03-02 06:46:54 +01:00
|
|
|
stream_ids = Recipient.objects.filter(
|
|
|
|
type=Recipient.STREAM, id__in=subs_filter).values('type_id')
|
|
|
|
|
|
|
|
return Stream.objects.filter(id__in=stream_ids, realm=realm, deactivated=False)
|
|
|
|
|
2014-03-02 06:29:15 +01:00
|
|
|
def do_get_streams(user_profile: UserProfile, include_public: bool=True, include_subscribed: bool=True,
|
2016-05-20 22:08:42 +02:00
|
|
|
include_all_active: bool=False, include_default: bool=False) -> List[Dict[str, Any]]:
|
|
|
|
|
2016-02-08 03:59:38 +01:00
|
|
|
if include_all_active and not user_profile.is_api_super_user:
|
2016-05-25 15:02:02 +02:00
|
|
|
raise JsonableError(_("User not authorized for this query"))
|
2014-03-02 06:29:15 +01:00
|
|
|
|
2016-07-27 01:45:29 +02:00
|
|
|
# Listing public streams is disabled for Zephyr mirroring realms.
|
|
|
|
include_public = include_public and not user_profile.realm.is_zephyr_mirror_realm
|
2014-03-02 06:46:54 +01:00
|
|
|
# Start out with all streams in the realm with subscribers
|
|
|
|
query = get_occupied_streams(user_profile.realm)
|
2014-03-02 06:29:15 +01:00
|
|
|
|
|
|
|
if not include_all_active:
|
2017-10-29 17:11:11 +01:00
|
|
|
user_subs = get_stream_subscriptions_for_user(user_profile).filter(
|
|
|
|
active=True,
|
|
|
|
).select_related('recipient')
|
2014-03-02 06:29:15 +01:00
|
|
|
|
|
|
|
if include_subscribed:
|
|
|
|
recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
|
|
|
|
if include_public:
|
|
|
|
invite_only_check = Q(invite_only=False)
|
|
|
|
|
|
|
|
if include_subscribed and include_public:
|
|
|
|
query = query.filter(recipient_check | invite_only_check)
|
|
|
|
elif include_public:
|
|
|
|
query = query.filter(invite_only_check)
|
|
|
|
elif include_subscribed:
|
|
|
|
query = query.filter(recipient_check)
|
|
|
|
else:
|
|
|
|
# We're including nothing, so don't bother hitting the DB.
|
|
|
|
query = []
|
|
|
|
|
2016-06-04 00:08:20 +02:00
|
|
|
streams = [row.to_dict() for row in query]
|
2014-03-02 06:29:15 +01:00
|
|
|
streams.sort(key=lambda elt: elt["name"])
|
2016-05-20 22:08:42 +02:00
|
|
|
if include_default:
|
|
|
|
is_default = {}
|
2017-09-17 00:34:13 +02:00
|
|
|
default_streams = get_default_streams_for_realm(user_profile.realm_id)
|
2016-05-20 22:08:42 +02:00
|
|
|
for default_stream in default_streams:
|
|
|
|
is_default[default_stream.id] = True
|
|
|
|
for stream in streams:
|
|
|
|
stream['is_default'] = is_default.get(stream["stream_id"], False)
|
2014-03-02 06:29:15 +01:00
|
|
|
|
|
|
|
return streams
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_claim_attachments(message: Message) -> None:
|
2016-07-24 21:52:41 +02:00
|
|
|
attachment_url_list = attachment_url_re.findall(message.content)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
|
|
|
for url in attachment_url_list:
|
2016-07-11 03:07:37 +02:00
|
|
|
path_id = attachment_url_to_path_id(url)
|
2016-07-24 21:52:41 +02:00
|
|
|
user_profile = message.sender
|
2016-06-17 18:49:36 +02:00
|
|
|
is_message_realm_public = False
|
2017-10-28 21:53:47 +02:00
|
|
|
if message.is_stream_message():
|
2016-07-24 21:52:41 +02:00
|
|
|
is_message_realm_public = Stream.objects.get(id=message.recipient.type_id).is_public()
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-04-14 00:59:59 +02:00
|
|
|
if not validate_attachment_request(user_profile, path_id):
|
2017-04-14 01:15:46 +02:00
|
|
|
# Technically, there are 2 cases here:
|
|
|
|
# * The user put something in their message that has the form
|
|
|
|
# of an upload, but doesn't correspond to a file that actually
|
|
|
|
# exists. validate_attachment_request will return None.
|
|
|
|
# * The user is trying to send a link to a file they don't have permission to
|
|
|
|
# access themselves. validate_attachment_request will return False.
|
|
|
|
#
|
|
|
|
# Either case is unusual and suggests a UI bug that got
|
|
|
|
# the user into this situation, so we log a warning in these cases.
|
|
|
|
logging.warning("User %s tried to share upload %s in message %s, but lacks permission" % (
|
|
|
|
user_profile.id, path_id, message.id))
|
2017-04-14 00:59:59 +02:00
|
|
|
continue
|
|
|
|
|
2017-04-14 01:03:49 +02:00
|
|
|
claim_attachment(user_profile, path_id, message, is_message_realm_public)
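A small sketch of the None/False distinction the comment above describes (illustrative; in the loop above both falsy values are logged and skipped):

    result = validate_attachment_request(user_profile, path_id)
    if result is None:
        pass    # no Attachment record exists for this path_id
    elif result is False:
        pass    # the file exists, but this user may not access it
    else:
        pass    # True: safe to claim the attachment for this message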
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_delete_old_unclaimed_attachments(weeks_ago: int) -> None:
|
2016-03-24 20:24:01 +01:00
|
|
|
old_unclaimed_attachments = get_old_unclaimed_attachments(weeks_ago)
|
|
|
|
|
|
|
|
for attachment in old_unclaimed_attachments:
|
|
|
|
delete_message_image(attachment.path_id)
|
|
|
|
attachment.delete()
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def check_attachment_reference_change(prev_content: Text, message: Message) -> None:
|
2016-07-07 09:47:15 +02:00
|
|
|
new_content = message.content
|
|
|
|
prev_attachments = set(attachment_url_re.findall(prev_content))
|
|
|
|
new_attachments = set(attachment_url_re.findall(new_content))
|
|
|
|
|
|
|
|
to_remove = list(prev_attachments - new_attachments)
|
|
|
|
path_ids = []
|
|
|
|
for url in to_remove:
|
2016-07-11 03:07:37 +02:00
|
|
|
path_id = attachment_url_to_path_id(url)
|
2016-07-07 09:47:15 +02:00
|
|
|
path_ids.append(path_id)
|
|
|
|
|
|
|
|
attachments_to_update = Attachment.objects.filter(path_id__in=path_ids).select_for_update()
|
2016-07-24 22:03:22 +02:00
|
|
|
message.attachment_set.remove(*attachments_to_update)
|
2016-07-07 09:47:15 +02:00
|
|
|
|
|
|
|
to_add = list(new_attachments - prev_attachments)
|
2016-07-24 22:03:22 +02:00
|
|
|
if to_add:
|
2016-07-07 09:47:15 +02:00
|
|
|
do_claim_attachments(message)
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def notify_realm_custom_profile_fields(realm: Realm) -> None:
|
2017-03-17 10:07:22 +01:00
|
|
|
fields = custom_profile_fields_for_realm(realm.id)
|
|
|
|
event = dict(type="custom_profile_fields",
|
|
|
|
fields=[f.as_dict() for f in fields])
|
2017-09-16 21:26:54 +02:00
|
|
|
send_event(event, active_user_ids(realm.id))
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def try_add_realm_custom_profile_field(realm: Realm, name: Text, field_type: int) -> CustomProfileField:
|
2017-03-17 10:07:22 +01:00
|
|
|
field = CustomProfileField(realm=realm, name=name, field_type=field_type)
|
|
|
|
field.save()
|
|
|
|
notify_realm_custom_profile_fields(realm)
|
|
|
|
return field
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_realm_custom_profile_field(realm: Realm, field: CustomProfileField) -> None:
|
2017-03-17 10:07:22 +01:00
|
|
|
"""
|
|
|
|
Deleting a field will also delete the user profile data
|
|
|
|
associated with it in the CustomProfileFieldValue model.
|
|
|
|
"""
|
|
|
|
field.delete()
|
|
|
|
notify_realm_custom_profile_fields(realm)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def try_update_realm_custom_profile_field(realm: Realm, field: CustomProfileField,
|
|
|
|
name: Text) -> None:
|
2017-03-17 10:07:22 +01:00
|
|
|
field.name = name
|
|
|
|
field.save(update_fields=['name'])
|
|
|
|
notify_realm_custom_profile_fields(realm)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_custom_profile_data(user_profile: UserProfile,
|
|
|
|
data: List[Dict[str, Union[int, Text]]]) -> None:
|
2017-03-17 10:07:22 +01:00
|
|
|
with transaction.atomic():
|
|
|
|
update_or_create = CustomProfileFieldValue.objects.update_or_create
|
|
|
|
for field in data:
|
|
|
|
update_or_create(user_profile=user_profile,
|
|
|
|
field_id=field['id'],
|
|
|
|
defaults={'value': field['value']})
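The expected shape of `data`, inferred from the loop above (the field ids and values here are hypothetical):

    do_update_user_custom_profile_data(user_profile, [
        {'id': 1, 'value': 'vim'},            # e.g. a "favorite editor" field
        {'id': 2, 'value': 'Kernel hacker'},  # e.g. a "biography" field
    ])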
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2017-11-14 07:31:31 +01:00
|
|
|
def do_send_create_user_group_event(user_group: UserGroup, members: List[UserProfile]) -> None:
|
|
|
|
event = dict(type="user_group",
|
|
|
|
op="add",
|
|
|
|
group=dict(name=user_group.name,
|
|
|
|
members=[member.id for member in members],
|
|
|
|
description=user_group.description,
|
|
|
|
id=user_group.id,
|
|
|
|
),
|
|
|
|
)
|
|
|
|
send_event(event, active_user_ids(user_group.realm_id))
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def check_add_user_group(realm: Realm, name: Text, initial_members: List[UserProfile],
|
|
|
|
description: Text) -> None:
|
2017-11-01 10:04:16 +01:00
|
|
|
try:
|
2017-11-14 07:31:31 +01:00
|
|
|
user_group = create_user_group(name, initial_members, realm, description=description)
|
|
|
|
do_send_create_user_group_event(user_group, initial_members)
|
2017-11-01 10:04:16 +01:00
|
|
|
except django.db.utils.IntegrityError:
|
|
|
|
raise JsonableError(_("User group '%s' already exists." % (name,)))
|
2017-11-02 07:53:08 +01:00
|
|
|
|
2017-11-14 08:00:18 +01:00
|
|
|
def do_send_user_group_update_event(user_group: UserGroup, data: Dict[str, Any]) -> None:
|
|
|
|
event = dict(type="user_group", op='update', group_id=user_group.id, data=data)
|
|
|
|
send_event(event, active_user_ids(user_group.realm_id))
|
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def do_update_user_group_name(user_group: UserGroup, name: Text) -> None:
|
2017-11-02 07:53:08 +01:00
|
|
|
user_group.name = name
|
|
|
|
user_group.save(update_fields=['name'])
|
2017-11-14 08:00:18 +01:00
|
|
|
do_send_user_group_update_event(user_group, dict(name=name))
|
2017-11-02 07:53:08 +01:00
|
|
|
|
2017-11-27 05:27:04 +01:00
|
|
|
def do_update_user_group_description(user_group: UserGroup, description: Text) -> None:
|
2017-11-02 07:53:08 +01:00
|
|
|
user_group.description = description
|
|
|
|
user_group.save(update_fields=['description'])
|
2017-11-14 08:00:53 +01:00
|
|
|
do_send_user_group_update_event(user_group, dict(description=description))
|
2017-11-02 08:53:30 +01:00
|
|
|
|
2017-11-14 08:01:39 +01:00
|
|
|
def do_send_user_group_members_update_event(event_name: Text,
|
|
|
|
user_group: UserGroup,
|
|
|
|
user_ids: List[int]) -> None:
|
|
|
|
event = dict(type="user_group",
|
|
|
|
op=event_name,
|
|
|
|
group_id=user_group.id,
|
|
|
|
user_ids=user_ids)
|
|
|
|
send_event(event, active_user_ids(user_group.realm_id))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def bulk_add_members_to_user_group(user_group: UserGroup,
|
|
|
|
user_profiles: List[UserProfile]) -> None:
|
2017-11-14 08:40:53 +01:00
|
|
|
memberships = [UserGroupMembership(user_group_id=user_group.id,
|
2017-11-02 08:53:30 +01:00
|
|
|
user_profile=user_profile)
|
|
|
|
for user_profile in user_profiles]
|
|
|
|
UserGroupMembership.objects.bulk_create(memberships)
|
|
|
|
|
2017-11-14 08:01:39 +01:00
|
|
|
user_ids = [up.id for up in user_profiles]
|
|
|
|
do_send_user_group_members_update_event('add_members', user_group, user_ids)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def remove_members_from_user_group(user_group: UserGroup,
|
|
|
|
user_profiles: List[UserProfile]) -> None:
|
2017-11-02 08:53:30 +01:00
|
|
|
UserGroupMembership.objects.filter(
|
2017-11-14 08:35:28 +01:00
|
|
|
user_group_id=user_group.id,
|
2017-11-02 08:53:30 +01:00
|
|
|
user_profile__in=user_profiles).delete()
|
2017-11-14 08:01:50 +01:00
|
|
|
|
|
|
|
user_ids = [up.id for up in user_profiles]
|
|
|
|
do_send_user_group_members_update_event('remove_members', user_group, user_ids)
|
2017-11-15 08:09:06 +01:00
|
|
|
|
2017-11-15 08:09:49 +01:00
|
|
|
def do_send_delete_user_group_event(user_group_id: int, realm_id: int) -> None:
|
|
|
|
event = dict(type="user_group",
|
|
|
|
op="remove",
|
|
|
|
group_id=user_group_id)
|
|
|
|
send_event(event, active_user_ids(realm_id))
|
|
|
|
|
2017-11-15 08:09:06 +01:00
|
|
|
def check_delete_user_group(user_group_id: int, realm: Realm) -> None:
|
|
|
|
user_group = access_user_group_by_id(user_group_id, realm)
|
|
|
|
user_group.delete()
|
2017-11-15 08:09:49 +01:00
|
|
|
do_send_delete_user_group_event(user_group_id, realm.id)
|