from typing import (
    AbstractSet, Any, Callable, Dict, Iterable, List, Mapping, MutableMapping,
    Optional, Sequence, Set, Tuple, Union, cast
)

from typing_extensions import TypedDict

import django.db.utils
from django.db.models import Count
from django.contrib.contenttypes.models import ContentType
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core import validators
from django.core.files import File
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat, \
    RealmCount

from zerver.lib.bugdown import (
    version as bugdown_version,
    url_embed_preview_enabled,
    convert as bugdown_convert,
)
from zerver.lib.addressee import Addressee
from zerver.lib.bot_config import (
    ConfigError,
    get_bot_config,
    get_bot_configs,
    set_bot_config,
)
from zerver.lib.cache import (
    bot_dict_fields,
    delete_user_profile_caches,
    to_dict_cache_key_id,
    user_profile_by_api_key_cache_key,
)
from zerver.lib.context_managers import lockfile
from zerver.lib.email_mirror_helpers import encode_email_address, encode_email_address_helper
from zerver.lib.emoji import emoji_name_to_emoji_code, get_emoji_file_name
from zerver.lib.exceptions import StreamDoesNotExistError, \
    StreamWithIDDoesNotExistError
from zerver.lib.export import get_realm_exports_serialized
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.message import (
    access_message,
    MessageDict,
    render_markdown,
    update_first_visible_message_id,
)
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import realm_logo_url
from zerver.lib.retention import move_messages_to_archive
from zerver.lib.send_email import send_email, FromAddress, send_email_to_admins, \
    clear_scheduled_emails, clear_scheduled_invitation_emails
from zerver.lib.storage import static_path
from zerver.lib.stream_subscription import (
    get_active_subscriptions_for_stream_id,
    get_active_subscriptions_for_stream_ids,
    get_bulk_stream_subscriber_info,
    get_stream_subscriptions_for_user,
    get_stream_subscriptions_for_users,
    num_subscribers_for_stream_id,
)
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.topic import (
    filter_by_exact_message_topic,
    filter_by_topic_name_via_message,
    save_message_for_edit_use_case,
    update_messages_for_topic_edit,
    ORIG_TOPIC,
    LEGACY_PREV_TOPIC,
    TOPIC_LINKS,
    TOPIC_NAME,
)
from zerver.lib.topic_mutes import (
    get_topic_mutes,
    add_topic_mute,
    remove_topic_mute,
)
from zerver.lib.users import (
    bulk_get_users,
    check_bot_name_available,
    check_full_name,
    get_api_key,
)
from zerver.lib.user_status import (
    update_user_status,
)
from zerver.lib.user_groups import create_user_group, access_user_group_by_id

from zerver.models import Realm, RealmEmoji, Stream, UserProfile, UserActivity, \
    RealmDomain, Service, SubMessage, \
    Subscription, Recipient, Message, Attachment, UserMessage, RealmAuditLog, \
    UserHotspot, MultiuseInvite, ScheduledMessage, UserStatus, \
    Client, DefaultStream, DefaultStreamGroup, UserPresence, \
    ScheduledEmail, MAX_TOPIC_NAME_LENGTH, \
    MAX_MESSAGE_LENGTH, get_client, get_stream, get_personal_recipient, \
    get_user_profile_by_id, PreregistrationUser, \
    bulk_get_recipients, get_stream_recipient, get_stream_recipients, \
    email_allowed_for_realm, email_to_username, display_recipient_cache_key, \
    get_user_by_delivery_email, get_stream_cache_key, active_non_guest_user_ids, \
    UserActivityInterval, active_user_ids, get_active_streams, \
    realm_filters_for_realm, RealmFilter, stream_name_in_use, \
    get_old_unclaimed_attachments, is_cross_realm_bot_email, \
    Reaction, EmailChangeStatus, CustomProfileField, \
    custom_profile_fields_for_realm, get_huddle_user_ids, \
    CustomProfileFieldValue, validate_attachment_request, get_system_bot, \
    query_for_ids, get_huddle_recipient, \
    UserGroup, UserGroupMembership, get_default_stream_groups, \
    get_bot_services, get_bot_dicts_in_realm, DomainNotAllowedForRealmError, \
    DisposableEmailError, EmailContainsPlusError, \
    get_user_including_cross_realm, get_user_by_id_in_realm_including_cross_realm, \
    get_stream_by_id_in_realm

from zerver.lib.alert_words import get_alert_word_automaton
from zerver.lib.avatar import avatar_url, avatar_url_from_dict
from zerver.lib.stream_recipient import StreamRecipientMap
from zerver.lib.validator import check_widget_content
from zerver.lib.widget import do_widget_post_save_actions

from django.db import transaction, IntegrityError, connection
from django.db.models import F, Q, Max, Sum
from django.db.models.query import QuerySet
from django.core.exceptions import ValidationError
from django.utils.timezone import now as timezone_now

from confirmation.models import Confirmation, create_confirmation_link, generate_key, \
    confirmation_url
from confirmation import settings as confirmation_settings

from zerver.lib.bulk_create import bulk_create_users
from zerver.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
from zerver.lib.queue import queue_json_publish
from zerver.lib.utils import generate_api_key
from zerver.lib.create_user import create_user, get_display_email_address
from zerver.lib import bugdown
from zerver.lib.cache import cache_with_key, cache_set, \
    user_profile_by_email_cache_key, \
    cache_set_many, cache_delete, cache_delete_many
from zerver.decorator import statsd_increment
from zerver.lib.utils import log_statsd_event, statsd
from zerver.lib.i18n import get_language_name
from zerver.lib.alert_words import add_user_alert_words, \
    remove_user_alert_words, set_user_alert_words
from zerver.lib.email_notifications import enqueue_welcome_emails
from zerver.lib.exceptions import JsonableError, ErrorCode, BugdownRenderingException
from zerver.lib.sessions import delete_user_sessions
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id, \
    claim_attachment, delete_message_image, upload_emoji_image, delete_avatar_image
from zerver.lib.video_calls import request_zoom_video_call_url
from zerver.tornado.event_queue import send_event
from zerver.lib.types import ProfileFieldData

from analytics.models import StreamCount

if settings.BILLING_ENABLED:
    from corporate.lib.stripe import update_license_ledger_if_needed

import ujson
import time
import datetime
import os
import platform
import logging
import itertools
from collections import defaultdict
from operator import itemgetter

# Type alias for parameters that accept any sized, iterable collection of
# strings (historically, for functions that accepted both str and unicode
# on Python 2 but only str on Python 3).
SizedTextIterable = Union[Sequence[str], AbstractSet[str]]
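
# Palette from which newly subscribed streams are assigned colors.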
STREAM_ASSIGNMENT_COLORS = [
    "#76ce90", "#fae589", "#a6c7e5", "#e79ab5",
    "#bfd56f", "#f4ae55", "#b0a5fd", "#addfe5",
    "#f5ce6e", "#c2726a", "#94c849", "#bd86e5",
    "#ee7e4a", "#a6dcbf", "#95a5fd", "#53a063",
    "#9987e1", "#e4523d", "#c2c2c2", "#4f8de4",
    "#c6a8ad", "#e7cc4d", "#c8bebf", "#a47462"]

# Store an event in the log for re-importing messages
def log_event(event: MutableMapping[str, Any]) -> None:
    if settings.EVENT_LOG_DIR is None:
        return

    if "timestamp" not in event:
        event["timestamp"] = time.time()

    if not os.path.exists(settings.EVENT_LOG_DIR):
        os.mkdir(settings.EVENT_LOG_DIR)

    template = os.path.join(settings.EVENT_LOG_DIR,
                            '%s.' + platform.node() +
                            timezone_now().strftime('.%Y-%m-%d'))
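    # e.g. template % ('events',) -> "<EVENT_LOG_DIR>/events.<hostname>.<YYYY-MM-DD>"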

    with lockfile(template % ('lock',)):
        with open(template % ('events',), 'a') as log:
            log.write(ujson.dumps(event) + '\n')

def can_access_stream_user_ids(stream: Stream) -> Set[int]:
    # Return the user ids of users who can access a stream's
    # attributes, such as its name and description.
    if stream.is_public():
        # For a public stream, this is everyone in the realm
        # except unsubscribed guest users.
        return public_stream_user_ids(stream)
    else:
        # For a private stream, it's subscribers plus realm admins.
        return private_stream_user_ids(
            stream.id) | {user.id for user in stream.realm.get_admin_users_and_bots()}

def private_stream_user_ids(stream_id: int) -> Set[int]:
    # TODO: Find similar queries elsewhere and de-duplicate this code.
    subscriptions = get_active_subscriptions_for_stream_id(stream_id)
    return {sub['user_profile_id'] for sub in subscriptions.values('user_profile_id')}

def public_stream_user_ids(stream: Stream) -> Set[int]:
    guest_subscriptions = get_active_subscriptions_for_stream_id(
        stream.id).filter(user_profile__is_guest=True)
    guest_subscriptions = {sub['user_profile_id'] for sub in guest_subscriptions.values('user_profile_id')}
    return set(active_non_guest_user_ids(stream.realm_id)) | guest_subscriptions

def bot_owner_user_ids(user_profile: UserProfile) -> Set[int]:
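    # A bot is "private" if either of its default streams is invite-only;
    # a private bot is announced only to its owner, while other bots are
    # also announced to the realm's human administrators.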
    is_private_bot = (
        (user_profile.default_sending_stream and
         user_profile.default_sending_stream.invite_only) or
        (user_profile.default_events_register_stream and
         user_profile.default_events_register_stream.invite_only))
    if is_private_bot:
        return {user_profile.bot_owner_id, }
    else:
        users = {user.id for user in user_profile.realm.get_human_admin_users()}
        users.add(user_profile.bot_owner_id)
        return users

def realm_user_count(realm: Realm) -> int:
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()

def send_signup_message(sender: UserProfile, admin_realm_signup_notifications_stream: str,
                        user_profile: UserProfile, internal: bool=False,
                        realm: Optional[Realm]=None) -> None:
    if internal:
        # TODO: This should be whether this is done using manage.py
        # vs. the web interface. But recent refactorings mean that
        # the internal flag isn't passed properly to this function.
        internal_blurb = " **INTERNAL SIGNUP** "
    else:
        internal_blurb = " "

    user_count = realm_user_count(user_profile.realm)
    signup_notifications_stream = user_profile.realm.get_signup_notifications_stream()
    # Send a notification to the realm's signup notifications stream, if it
    # exists, but not for the first user in a realm.
    if signup_notifications_stream is not None and user_count > 1:
        internal_send_message(
            user_profile.realm,
            sender,
            "stream",
            signup_notifications_stream.name,
            "signups",
            "@_**%s|%s** just signed up for Zulip. (total: %i)" % (
                user_profile.full_name, user_profile.id, user_count
            )
        )

    # We also send a notification to the Zulip administrative realm
    admin_realm = get_system_bot(sender).realm
    try:
        # Check whether the stream exists
        get_stream(admin_realm_signup_notifications_stream, admin_realm)
    except Stream.DoesNotExist:
        # If the signups stream hasn't been created in the admin
        # realm, don't auto-create it to send to it; just do nothing.
        return
    internal_send_message(
        admin_realm,
        sender,
        "stream",
        admin_realm_signup_notifications_stream,
        user_profile.realm.display_subdomain,
        "%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
            user_profile.full_name,
            user_profile.email,
            internal_blurb,
            user_count,
        )
    )

def notify_invites_changed(user_profile: UserProfile) -> None:
    event = dict(type="invites_changed")
    admin_ids = [user.id for user in
                 user_profile.realm.get_admin_users_and_bots()]
    send_event(user_profile.realm, event, admin_ids)

def notify_new_user(user_profile: UserProfile, internal: bool=False) -> None:
    send_signup_message(settings.NOTIFICATION_BOT, "signups", user_profile, internal)

def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
    """Give the user the last 1000 messages on their public streams, so
    they have something to look at in their home view once they finish the
    tutorial."""
    one_week_ago = timezone_now() - datetime.timedelta(weeks=1)

    stream_ids = [stream.id for stream in streams if not stream.invite_only]
    recipients = get_stream_recipients(stream_ids)
    recent_messages = Message.objects.filter(recipient_id__in=recipients,
                                             pub_date__gt=one_week_ago).order_by("-id")
    message_ids_to_use = list(reversed(recent_messages.values_list('id', flat=True)[0:1000]))
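    # The query above is newest-first; reversing it leaves the ids in
    # ascending (chronological) order.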
    if len(message_ids_to_use) == 0:
        return

    # Handle the race condition where a message arrives between
    # bulk_add_subscriptions above and the Message query just above
    already_ids = set(UserMessage.objects.filter(message_id__in=message_ids_to_use,
                                                 user_profile=user_profile).values_list("message_id",
                                                                                        flat=True))
    ums_to_create = [UserMessage(user_profile=user_profile, message_id=message_id,
                                 flags=UserMessage.flags.read)
                     for message_id in message_ids_to_use
                     if message_id not in already_ids]

    UserMessage.objects.bulk_create(ums_to_create)

# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
# * Subscribes the user to the newsletter if newsletter_data is specified
def process_new_human_user(user_profile: UserProfile,
                           prereg_user: Optional[PreregistrationUser]=None,
                           newsletter_data: Optional[Dict[str, str]]=None,
                           default_stream_groups: List[DefaultStreamGroup]=[],
                           realm_creation: bool=False) -> None:
    mit_beta_user = user_profile.realm.is_zephyr_mirror_realm
    if prereg_user is not None:
        streams = prereg_user.streams.all()
        acting_user = prereg_user.referred_by  # type: Optional[UserProfile]
    else:
        streams = []
        acting_user = None

    # If the user's invitation didn't explicitly list some streams, we
    # add the default streams
    if len(streams) == 0:
        streams = get_default_subs(user_profile)

    for default_stream_group in default_stream_groups:
        default_stream_group_streams = default_stream_group.streams.all()
        for stream in default_stream_group_streams:
            if stream not in streams:
                streams.append(stream)

    bulk_add_subscriptions(streams, [user_profile], acting_user=acting_user)

    add_new_user_history(user_profile, streams)

    # mit_beta_users don't have a referred_by field
    if not mit_beta_user and prereg_user is not None and prereg_user.referred_by is not None:
        # This is a cross-realm private message.
        internal_send_private_message(
            user_profile.realm,
            get_system_bot(settings.NOTIFICATION_BOT),
            prereg_user.referred_by,
            "%s <`%s`> accepted your invitation to join Zulip!" % (
                user_profile.full_name,
                user_profile.email,
            )
        )
    # Mark any other PreregistrationUsers that are STATUS_ACTIVE as
    # inactive so we can keep track of the PreregistrationUser we
    # actually used for analytics
    if prereg_user is not None:
        PreregistrationUser.objects.filter(email__iexact=user_profile.delivery_email).exclude(
            id=prereg_user.id).update(status=0)
        if prereg_user.referred_by is not None:
            notify_invites_changed(user_profile)
    else:
        PreregistrationUser.objects.filter(email__iexact=user_profile.delivery_email).update(status=0)

    notify_new_user(user_profile)
    # Clear any scheduled invitation emails to prevent them
    # from being sent after the user is created.
    clear_scheduled_invitation_emails(user_profile.delivery_email)
    if user_profile.realm.send_welcome_emails:
        enqueue_welcome_emails(user_profile, realm_creation)

    # We have an import loop here; it's intentional, because we want
    # to keep all the onboarding code in zerver/lib/onboarding.py.
    from zerver.lib.onboarding import send_initial_pms
    send_initial_pms(user_profile)

    if newsletter_data is not None:
        # If the user was created automatically via the API, we may
        # not want to register them for the newsletter
        queue_json_publish(
            "signups",
            {
                'email_address': user_profile.email,
                'user_id': user_profile.id,
                'merge_fields': {
                    'NAME': user_profile.full_name,
                    'REALM_ID': user_profile.realm_id,
                    'OPTIN_IP': newsletter_data["IP"],
                    'OPTIN_TIME': datetime.datetime.isoformat(timezone_now().replace(microsecond=0)),
                },
            },
            lambda event: None)

def notify_created_user(user_profile: UserProfile) -> None:
    person = dict(email=user_profile.email,
                  user_id=user_profile.id,
                  is_admin=user_profile.is_realm_admin,
                  full_name=user_profile.full_name,
                  avatar_url=avatar_url(user_profile),
                  timezone=user_profile.timezone,
                  date_joined=user_profile.date_joined.isoformat(),
                  is_guest=user_profile.is_guest,
                  is_bot=user_profile.is_bot)  # type: Dict[str, Any]
    if user_profile.is_bot and user_profile.bot_owner_id is not None:
        person["bot_owner_id"] = user_profile.bot_owner_id
    event = dict(type="realm_user", op="add", person=person)  # type: Dict[str, Any]
    if not user_profile.is_bot:
        event["person"]["profile_data"] = {}
    send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))

def created_bot_event(user_profile: UserProfile) -> Dict[str, Any]:
    def stream_name(stream: Optional[Stream]) -> Optional[str]:
        if not stream:
            return None
        return stream.name

    default_sending_stream_name = stream_name(user_profile.default_sending_stream)
    default_events_register_stream_name = stream_name(user_profile.default_events_register_stream)

    bot = dict(email=user_profile.email,
               user_id=user_profile.id,
               full_name=user_profile.full_name,
               bot_type=user_profile.bot_type,
               is_active=user_profile.is_active,
               api_key=get_api_key(user_profile),
               default_sending_stream=default_sending_stream_name,
               default_events_register_stream=default_events_register_stream_name,
               default_all_public_streams=user_profile.default_all_public_streams,
               avatar_url=avatar_url(user_profile),
               services=get_service_dicts_for_bot(user_profile.id),
               )

    # Set the owner key only when the bot has an owner.
    # The default bots don't have an owner. So don't
    # set the owner key while reactivating them.
    if user_profile.bot_owner is not None:
        bot['owner'] = user_profile.bot_owner.email

    return dict(type="realm_bot", op="add", bot=bot)

def notify_created_bot(user_profile: UserProfile) -> None:
    event = created_bot_event(user_profile)
    send_event(user_profile.realm, event, bot_owner_user_ids(user_profile))

def create_users(realm: Realm, name_list: Iterable[Tuple[str, str]], bot_type: Optional[int]=None) -> None:
    user_set = set()
    for full_name, email in name_list:
        short_name = email_to_username(email)
        user_set.add((email, full_name, short_name, True))
    bulk_create_users(realm, user_set, bot_type)

def do_create_user(email: str, password: Optional[str], realm: Realm, full_name: str,
                   short_name: str, bot_type: Optional[int]=None,
                   is_realm_admin: bool=False, is_guest: bool=False,
                   bot_owner: Optional[UserProfile]=None, tos_version: Optional[str]=None,
                   timezone: str="", avatar_source: str=UserProfile.AVATAR_FROM_GRAVATAR,
                   default_sending_stream: Optional[Stream]=None,
                   default_events_register_stream: Optional[Stream]=None,
                   default_all_public_streams: Optional[bool]=None,
                   prereg_user: Optional[PreregistrationUser]=None,
                   newsletter_data: Optional[Dict[str, str]]=None,
                   default_stream_groups: List[DefaultStreamGroup]=[],
                   source_profile: Optional[UserProfile]=None,
                   realm_creation: bool=False) -> UserProfile:
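    # Creates the UserProfile, records a USER_CREATED audit-log row and
    # analytics count, and then sends the appropriate notification events
    # (plus the human-only onboarding in process_new_human_user).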

    user_profile = create_user(email=email, password=password, realm=realm,
                               full_name=full_name, short_name=short_name,
                               is_realm_admin=is_realm_admin, is_guest=is_guest,
                               bot_type=bot_type, bot_owner=bot_owner,
                               tos_version=tos_version, timezone=timezone, avatar_source=avatar_source,
                               default_sending_stream=default_sending_stream,
                               default_events_register_stream=default_events_register_stream,
                               default_all_public_streams=default_all_public_streams,
                               source_profile=source_profile)

    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type=RealmAuditLog.USER_CREATED, event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)
    if settings.BILLING_ENABLED:
        update_license_ledger_if_needed(user_profile.realm, event_time)

    notify_created_user(user_profile)
    if bot_type:
        notify_created_bot(user_profile)
    else:
        process_new_human_user(user_profile, prereg_user=prereg_user,
                               newsletter_data=newsletter_data,
                               default_stream_groups=default_stream_groups,
                               realm_creation=realm_creation)
    return user_profile

def do_activate_user(user_profile: UserProfile) -> None:
    user_profile.is_active = True
    user_profile.is_mirror_dummy = False
    user_profile.set_unusable_password()
    user_profile.date_joined = timezone_now()
    user_profile.tos_version = settings.TOS_VERSION
    user_profile.save(update_fields=["is_active", "date_joined", "password",
                                     "is_mirror_dummy", "tos_version"])

    event_time = user_profile.date_joined
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type=RealmAuditLog.USER_ACTIVATED, event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)
    if settings.BILLING_ENABLED:
        update_license_ledger_if_needed(user_profile.realm, event_time)

    notify_created_user(user_profile)

def do_reactivate_user(user_profile: UserProfile, acting_user: Optional[UserProfile]=None) -> None:
    # Unlike do_activate_user, this is meant for re-activating existing users,
    # so it doesn't reset their password, etc.
    user_profile.is_active = True
    user_profile.save(update_fields=["is_active"])

    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 event_type=RealmAuditLog.USER_REACTIVATED, event_time=event_time,
                                 acting_user=acting_user)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time)
    if settings.BILLING_ENABLED:
        update_license_ledger_if_needed(user_profile.realm, event_time)

    notify_created_user(user_profile)

    if user_profile.is_bot:
        notify_created_bot(user_profile)

def active_humans_in_realm(realm: Realm) -> Sequence[UserProfile]:
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)

def do_set_realm_property(realm: Realm, name: str, value: Any) -> None:
    """Takes in a realm object, the name of an attribute to update, and the
    value to update it to.
    """
    property_type = Realm.property_types[name]
    assert isinstance(value, property_type), (
        'Cannot update %s: %s is not an instance of %s' % (
            name, value, property_type,))

    setattr(realm, name, value)
    realm.save(update_fields=[name])

    if name == 'zoom_api_secret':
        # Send '' as the value through the API for the API secret
        value = ''
    event = dict(
        type='realm',
        op='update',
        property=name,
        value=value,
    )
    send_event(realm, event, active_user_ids(realm.id))

    if name == "email_address_visibility":
        for user_profile in UserProfile.objects.filter(realm=realm, is_bot=False):
            # TODO: This does linear queries in the number of users
            # and thus is potentially very slow. Probably not super
            # important since this is a feature few folks will toggle,
            # but as a policy matter, we don't do linear queries
            # ~anywhere in Zulip.
            old_email = user_profile.email
            user_profile.email = get_display_email_address(user_profile, realm)
            user_profile.save(update_fields=["email"])

            # TODO: Design a bulk event for this or force-reload all clients
            if user_profile.email != old_email:
                send_user_email_update_event(user_profile)

def do_set_realm_authentication_methods(realm: Realm,
                                        authentication_methods: Dict[str, bool]) -> None:
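    # realm.authentication_methods is a bitfield; each supported auth
    # backend name (e.g. "Email", "Google") maps to one bit, which we set
    # or clear from the supplied name -> enabled mapping.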
    for key, value in list(authentication_methods.items()):
        index = getattr(realm.authentication_methods, key).number
        realm.authentication_methods.set_bit(index, int(value))
    realm.save(update_fields=['authentication_methods'])
    event = dict(
        type="realm",
        op="update_dict",
        property='default',
        data=dict(authentication_methods=realm.authentication_methods_dict())
    )
    send_event(realm, event, active_user_ids(realm.id))

def do_set_realm_message_editing(realm: Realm,
                                 allow_message_editing: bool,
                                 message_content_edit_limit_seconds: int,
                                 allow_community_topic_editing: bool) -> None:
    realm.allow_message_editing = allow_message_editing
    realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
    realm.allow_community_topic_editing = allow_community_topic_editing
    realm.save(update_fields=['allow_message_editing',
                              'allow_community_topic_editing',
                              'message_content_edit_limit_seconds',
                              ]
               )
    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=dict(allow_message_editing=allow_message_editing,
                  message_content_edit_limit_seconds=message_content_edit_limit_seconds,
                  allow_community_topic_editing=allow_community_topic_editing),
    )
    send_event(realm, event, active_user_ids(realm.id))

def do_set_realm_message_deleting(realm: Realm,
                                  message_content_delete_limit_seconds: int) -> None:
    realm.message_content_delete_limit_seconds = message_content_delete_limit_seconds
    realm.save(update_fields=['message_content_delete_limit_seconds'])
    event = dict(
        type="realm",
        op="update_dict",
        property="default",
        data=dict(message_content_delete_limit_seconds=message_content_delete_limit_seconds),
    )
    send_event(realm, event, active_user_ids(realm.id))

def do_set_realm_notifications_stream(realm: Realm, stream: Stream, stream_id: int) -> None:
    realm.notifications_stream = stream
    realm.save(update_fields=['notifications_stream'])
    event = dict(
        type="realm",
        op="update",
        property="notifications_stream_id",
        value=stream_id
    )
    send_event(realm, event, active_user_ids(realm.id))

def do_set_realm_signup_notifications_stream(realm: Realm, stream: Stream,
                                             stream_id: int) -> None:
    realm.signup_notifications_stream = stream
    realm.save(update_fields=['signup_notifications_stream'])
    event = dict(
        type="realm",
        op="update",
        property="signup_notifications_stream_id",
        value=stream_id
    )
    send_event(realm, event, active_user_ids(realm.id))

def do_deactivate_realm(realm: Realm, acting_user: Optional[UserProfile]=None) -> None:
    """
    Deactivate this realm. Do NOT deactivate the users -- we need to be able to
    tell the difference between users that were intentionally deactivated,
    e.g. by a realm admin, and users who can't currently use Zulip because their
    realm has been deactivated.
    """
    if realm.deactivated:
        return

    realm.deactivated = True
    realm.save(update_fields=["deactivated"])

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, event_time=event_time,
        acting_user=acting_user)

    ScheduledEmail.objects.filter(realm=realm).delete()
    for user in active_humans_in_realm(realm):
        # Don't deactivate the users, but do delete their sessions so they get
        # bumped to the login screen, where they'll get a realm deactivation
        # notice when they try to log in.
        delete_user_sessions(user)

    event = dict(type="realm", op="deactivated",
                 realm_id=realm.id)
    send_event(realm, event, active_user_ids(realm.id))

def do_reactivate_realm(realm: Realm) -> None:
    realm.deactivated = False
    realm.save(update_fields=["deactivated"])

    event_time = timezone_now()
    RealmAuditLog.objects.create(
        realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED, event_time=event_time)

def do_change_realm_subdomain(realm: Realm, new_subdomain: str) -> None:
    realm.string_id = new_subdomain
    realm.save(update_fields=["string_id"])

def do_scrub_realm(realm: Realm) -> None:
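    # Scrubbing deletes every user's messages and avatar, replaces names
    # and emails with random placeholders, and removes custom profile
    # fields and attachments, while keeping the user rows themselves.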
    users = UserProfile.objects.filter(realm=realm)
    for user in users:
        do_delete_messages_by_sender(user)
        do_delete_avatar_image(user)
        user.full_name = "Scrubbed {}".format(generate_key()[:15])
        scrubbed_email = "scrubbed-{}@{}".format(generate_key()[:15], realm.host)
        user.email = scrubbed_email
        user.delivery_email = scrubbed_email
        user.save(update_fields=["full_name", "email", "delivery_email"])

    do_remove_realm_custom_profile_fields(realm)
    Attachment.objects.filter(realm=realm).delete()

    RealmAuditLog.objects.create(realm=realm, event_time=timezone_now(),
                                 event_type=RealmAuditLog.REALM_SCRUBBED)

def do_deactivate_user(user_profile: UserProfile,
                       acting_user: Optional[UserProfile]=None,
                       _cascade: bool=True) -> None:
    if not user_profile.is_active:
        return

    if user_profile.realm.is_zephyr_mirror_realm:  # nocoverage
        # For zephyr mirror users, we need to make them a mirror dummy
        # again; otherwise, other users won't get the correct behavior
        # when trying to send messages to this person inside Zulip.
        #
        # Ideally, we need to also ensure their zephyr mirroring bot
        # isn't running, but that's a separate issue.
        user_profile.is_mirror_dummy = True
    user_profile.is_active = False
    user_profile.save(update_fields=["is_active"])

    delete_user_sessions(user_profile)
    clear_scheduled_emails([user_profile.id])

    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
                                 acting_user=acting_user,
                                 event_type=RealmAuditLog.USER_DEACTIVATED, event_time=event_time)
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
                              user_profile.is_bot, event_time, increment=-1)
    if settings.BILLING_ENABLED:
        update_license_ledger_if_needed(user_profile.realm, event_time)

    event = dict(type="realm_user", op="remove",
                 person=dict(email=user_profile.email,
                             user_id=user_profile.id,
                             full_name=user_profile.full_name))
    send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))

    if user_profile.is_bot:
        event = dict(type="realm_bot", op="remove",
                     bot=dict(email=user_profile.email,
                              user_id=user_profile.id,
                              full_name=user_profile.full_name))
        send_event(user_profile.realm, event, bot_owner_user_ids(user_profile))

    if _cascade:
        bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
                                                  bot_owner=user_profile)
        for profile in bot_profiles:
            do_deactivate_user(profile, acting_user=acting_user, _cascade=False)

def do_deactivate_stream(stream: Stream, log: bool=True) -> None:

    # Get the affected user ids *before* we deactivate everybody.
    affected_user_ids = can_access_stream_user_ids(stream)

    get_active_subscriptions_for_stream_id(stream.id).update(active=False)

    was_invite_only = stream.invite_only
    stream.deactivated = True
    stream.invite_only = True
    # Preserve as much as possible the original stream name while giving it a
    # special prefix that both indicates that the stream is deactivated and
    # frees up the original name for reuse.
    old_name = stream.name
    new_name = ("!DEACTIVATED:" + old_name)[:Stream.MAX_NAME_LENGTH]
    for i in range(20):
        if stream_name_in_use(new_name, stream.realm_id):
            # This stream has already been deactivated, so keep prepending
            # !s until we have a unique stream name or we've hit the rename
            # limit.
            new_name = ("!" + new_name)[:Stream.MAX_NAME_LENGTH]
        else:
            break

    # If we don't have a unique name at this point, this will fail later in the
    # code path.

    stream.name = new_name[:Stream.MAX_NAME_LENGTH]
    stream.save(update_fields=['name', 'deactivated', 'invite_only'])

    # If this is a default stream, remove it, properly sending a
    # notification to browser clients.
    if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
        do_remove_default_stream(stream)

    # Remove the old stream information from remote cache.
    old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
    cache_delete(old_cache_key)

    stream_dict = stream.to_dict()
    stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
    event = dict(type="stream", op="delete",
                 streams=[stream_dict])
    send_event(stream.realm, event, affected_user_ids)

def send_user_email_update_event(user_profile: UserProfile) -> None:
    payload = dict(user_id=user_profile.id,
                   new_email=user_profile.email)
    send_event(user_profile.realm,
               dict(type='realm_user', op='update', person=payload),
               active_user_ids(user_profile.realm_id))

def do_change_user_delivery_email(user_profile: UserProfile, new_email: str) -> None:
    delete_user_profile_caches([user_profile])

    user_profile.delivery_email = new_email
    if user_profile.realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        user_profile.email = new_email
        user_profile.save(update_fields=["email", "delivery_email"])
    else:
        user_profile.save(update_fields=["delivery_email"])

    # We notify just the target user (and eventually org admins) about
    # their new delivery email, since that field is private.
    payload = dict(user_id=user_profile.id,
                   delivery_email=new_email)
    event = dict(type='realm_user', op='update', person=payload)
    send_event(user_profile.realm, event, [user_profile.id])

    if user_profile.realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # Additionally, if we're also changing the publicly visible
        # email, we send a new_email event as well.
        send_user_email_update_event(user_profile)

    event_time = timezone_now()
    RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
                                 modified_user=user_profile, event_type=RealmAuditLog.USER_EMAIL_CHANGED,
                                 event_time=event_time)

def do_start_email_change_process(user_profile: UserProfile, new_email: str) -> None:
    old_email = user_profile.delivery_email
    obj = EmailChangeStatus.objects.create(new_email=new_email, old_email=old_email,
                                           user_profile=user_profile, realm=user_profile.realm)

    activation_url = create_confirmation_link(obj, user_profile.realm.host, Confirmation.EMAIL_CHANGE)
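    # The change only takes effect once this confirmation link, emailed to
    # the new address below, has been visited.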
    from zerver.context_processors import common_context
    context = common_context(user_profile)
    context.update({
        'old_email': old_email,
        'new_email': new_email,
        'activate_url': activation_url
    })
    send_email('zerver/emails/confirm_new_email', to_emails=[new_email],
               from_name='Zulip Account Security', from_address=FromAddress.tokenized_no_reply_address(),
               language=user_profile.default_language, context=context)

def compute_irc_user_fullname(email: str) -> str:
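    # e.g. "alice@irc.example.com" -> "alice (IRC)"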
    return email.split("@")[0] + " (IRC)"

def compute_jabber_user_fullname(email: str) -> str:
    return email.split("@")[0] + " (XMPP)"
@cache_with_key(lambda realm, email, f: user_profile_by_email_cache_key(email),
                timeout=3600*24*7)
|
2018-05-11 02:01:29 +02:00
|
|
|
def create_mirror_user_if_needed(realm: Realm, email: str,
|
|
|
|
email_to_fullname: Callable[[str], str]) -> UserProfile:
|
2013-01-10 22:01:33 +01:00
|
|
|
try:
|
2018-12-07 00:05:57 +01:00
|
|
|
return get_user_by_delivery_email(email, realm)
|
2013-01-10 22:01:33 +01:00
|
|
|
except UserProfile.DoesNotExist:
|
|
|
|
try:
|
|
|
|
# Forge a user for this person
|
2018-08-21 16:48:33 +02:00
|
|
|
return create_user(
|
|
|
|
email=email,
|
|
|
|
password=None,
|
|
|
|
realm=realm,
|
|
|
|
full_name=email_to_fullname(email),
|
|
|
|
short_name=email_to_username(email),
|
|
|
|
active=False,
|
|
|
|
is_mirror_dummy=True,
|
|
|
|
)
|
2013-01-10 22:01:33 +01:00
|
|
|
except IntegrityError:
|
2018-12-07 00:05:57 +01:00
|
|
|
return get_user_by_delivery_email(email, realm)
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def send_welcome_bot_response(message: MutableMapping[str, Any]) -> None:
    welcome_bot = get_system_bot(settings.WELCOME_BOT)
    human_recipient = get_personal_recipient(message['message'].sender.id)
    if Message.objects.filter(sender=welcome_bot, recipient=human_recipient).count() < 2:
        internal_send_private_message(
            message['realm'], welcome_bot, message['message'].sender,
            "Congratulations on your first reply! :tada:\n\n"
            "Feel free to continue using this space to practice your new messaging "
            "skills. Or, try clicking on some of the stream names to your left!")

def render_incoming_message(message: Message,
                            content: str,
                            user_ids: Set[int],
                            realm: Realm,
                            mention_data: Optional[bugdown.MentionData]=None,
                            email_gateway: Optional[bool]=False) -> str:
    realm_alert_words_automaton = get_alert_word_automaton(realm)
    try:
        rendered_content = render_markdown(
            message=message,
            content=content,
            realm=realm,
            realm_alert_words_automaton=realm_alert_words_automaton,
            user_ids=user_ids,
            mention_data=mention_data,
            email_gateway=email_gateway,
        )
    except BugdownRenderingException:
        raise JsonableError(_('Unable to render message'))
    return rendered_content

def get_typing_user_profiles(recipient: Recipient, sender_id: int) -> List[UserProfile]:
    if recipient.type == Recipient.STREAM:
        '''
        We don't support typing indicators for streams because they
        are expensive and initial user feedback was they were too
        distracting.
        '''
        raise ValueError('Typing indicators not supported for streams')

    if recipient.type == Recipient.PERSONAL:
        # The sender and recipient may be the same id, so
        # de-duplicate using a set.
        user_ids = list({recipient.type_id, sender_id})
        assert(len(user_ids) in [1, 2])

    elif recipient.type == Recipient.HUDDLE:
        user_ids = get_huddle_user_ids(recipient)

    else:
        raise ValueError('Bad recipient type')

    users = [get_user_profile_by_id(user_id) for user_id in user_ids]
    return users

RecipientInfoResult = TypedDict('RecipientInfoResult', {
    'active_user_ids': Set[int],
    'push_notify_user_ids': Set[int],
    'stream_push_user_ids': Set[int],
    'stream_email_user_ids': Set[int],
    'um_eligible_user_ids': Set[int],
    'long_term_idle_user_ids': Set[int],
    'default_bot_user_ids': Set[int],
    'service_bot_tuples': List[Tuple[int, int]],
})

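# A sketch of a populated result (hypothetical ids): each field holds a
# set of user ids, except service_bot_tuples, which pairs a bot's id with
# its bot_type, e.g. [(17, UserProfile.OUTGOING_WEBHOOK_BOT)].
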
def get_recipient_info(recipient: Recipient,
                       sender_id: int,
                       stream_topic: Optional[StreamTopicTarget],
                       possibly_mentioned_user_ids: Optional[Set[int]]=None) -> RecipientInfoResult:
    stream_push_user_ids = set()  # type: Set[int]
    stream_email_user_ids = set()  # type: Set[int]

    if recipient.type == Recipient.PERSONAL:
        # The sender and recipient may be the same id, so
        # de-duplicate using a set.
        message_to_user_ids = list({recipient.type_id, sender_id})
        assert(len(message_to_user_ids) in [1, 2])

    elif recipient.type == Recipient.STREAM:
        # Anybody calling us w/r/t a stream message needs to supply
        # stream_topic.  We may eventually want to have different versions
        # of this function for different message types.
        assert(stream_topic is not None)

        subscription_rows = stream_topic.get_active_subscriptions().annotate(
            user_profile_email_notifications=F('user_profile__enable_stream_email_notifications'),
            user_profile_push_notifications=F('user_profile__enable_stream_push_notifications'),
        ).values(
            'user_profile_id',
            'push_notifications',
            'email_notifications',
            'user_profile_email_notifications',
            'user_profile_push_notifications',
            'is_muted',
        ).order_by('user_profile_id')

        message_to_user_ids = [
            row['user_profile_id']
            for row in subscription_rows
        ]

        def should_send(setting: str, row: Dict[str, Any]) -> bool:
            # The UserProfile stream notification settings are defaults,
            # which the per-subscription stream-level settings override
            # when they are not null.
            if row['is_muted']:
                return False
            if row[setting] is not None:
                return row[setting]
            return row['user_profile_' + setting]

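        # For example (hypothetical row): push_notifications=None with
        # user_profile_push_notifications=True falls through to the
        # user-level default and sends; an explicit stream-level False
        # would override it, and is_muted=True always wins.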
        user_ids_muting_topic = stream_topic.user_ids_muting_topic()

        stream_push_user_ids = {
            row['user_profile_id']
            for row in subscription_rows
            # Note: muting a stream overrides stream_push_notify
            if should_send('push_notifications', row)
        } - user_ids_muting_topic

        stream_email_user_ids = {
            row['user_profile_id']
            for row in subscription_rows
            # Note: muting a stream overrides stream_email_notify
            if should_send('email_notifications', row)
        } - user_ids_muting_topic

    elif recipient.type == Recipient.HUDDLE:
        message_to_user_ids = get_huddle_user_ids(recipient)

    else:
        raise ValueError('Bad recipient type')

    message_to_user_id_set = set(message_to_user_ids)

    user_ids = set(message_to_user_id_set)
    if possibly_mentioned_user_ids:
        # Important note: Because we haven't rendered bugdown yet, we
        # don't yet know which of these possibly-mentioned users was
        # actually mentioned in the message (in other words, the
        # mention syntax might have been in a code block or otherwise
        # escaped).  `get_ids_for` will filter these extra user rows
        # for our data structures not related to bots.
        user_ids |= possibly_mentioned_user_ids

    if user_ids:
        query = UserProfile.objects.filter(
            is_active=True,
        ).values(
            'id',
            'enable_online_push_notifications',
            'is_bot',
            'bot_type',
            'long_term_idle',
        )

        # query_for_ids is highly optimized for large queries, and we
        # need this codepath to be fast (it's part of sending messages).
        query = query_for_ids(
            query=query,
            user_ids=sorted(list(user_ids)),
            field='id'
        )
        rows = list(query)
    else:
        # TODO: We should always have at least one user_id as a recipient
        # of any message we send.  Right now the exception to this
        # rule is `notify_new_user`, which, at least in a possibly
        # contrived test scenario, can attempt to send messages
        # to an inactive bot.  When we plug that hole, we can avoid
        # this `else` clause and just `assert(user_ids)`.
        rows = []

    def get_ids_for(f: Callable[[Dict[str, Any]], bool]) -> Set[int]:
        """Only includes users on the explicit message to line"""
        return {
            row['id']
            for row in rows
            if f(row)
        } & message_to_user_id_set

    def is_service_bot(row: Dict[str, Any]) -> bool:
        return row['is_bot'] and (row['bot_type'] in UserProfile.SERVICE_BOT_TYPES)

    active_user_ids = get_ids_for(lambda r: True)
    push_notify_user_ids = get_ids_for(
        lambda r: r['enable_online_push_notifications']
    )

    # Service bots don't get UserMessage rows.
    um_eligible_user_ids = get_ids_for(
        lambda r: not is_service_bot(r)
    )

    long_term_idle_user_ids = get_ids_for(
        lambda r: r['long_term_idle']
    )

    # These two bot data structures need to filter from the full set
    # of users who either are receiving the message or might have been
    # mentioned in it, and so can't use get_ids_for.
    #
    # Further in the do_send_messages code path, once
    # `mentioned_user_ids` has been computed via bugdown, we'll filter
    # these data structures for just those users who are either a
    # direct recipient or were mentioned; for now, we're just making
    # sure we have the data we need for that without extra database
    # queries.
    default_bot_user_ids = set([
        row['id']
        for row in rows
        if row['is_bot'] and row['bot_type'] == UserProfile.DEFAULT_BOT
    ])

    service_bot_tuples = [
        (row['id'], row['bot_type'])
        for row in rows
        if is_service_bot(row)
    ]

    info = dict(
        active_user_ids=active_user_ids,
        push_notify_user_ids=push_notify_user_ids,
        stream_push_user_ids=stream_push_user_ids,
        stream_email_user_ids=stream_email_user_ids,
        um_eligible_user_ids=um_eligible_user_ids,
        long_term_idle_user_ids=long_term_idle_user_ids,
        default_bot_user_ids=default_bot_user_ids,
        service_bot_tuples=service_bot_tuples
    )  # type: RecipientInfoResult
    return info

def get_service_bot_events(sender: UserProfile, service_bot_tuples: List[Tuple[int, int]],
                           mentioned_user_ids: Set[int], active_user_ids: Set[int],
                           recipient_type: int) -> Dict[str, List[Dict[str, Any]]]:

    event_dict = defaultdict(list)  # type: Dict[str, List[Dict[str, Any]]]

    # Avoid infinite loops by preventing messages sent by bots from generating
    # Service events.
    if sender.is_bot:
        return event_dict

    def maybe_add_event(user_profile_id: int, bot_type: int) -> None:
        if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
            queue_name = 'outgoing_webhooks'
        elif bot_type == UserProfile.EMBEDDED_BOT:
            queue_name = 'embedded_bots'
        else:
            logging.error(
                'Unexpected bot_type for Service bot id=%s: %s' %
                (user_profile_id, bot_type))
            return

        is_stream = (recipient_type == Recipient.STREAM)

        # Important note: service_bot_tuples may contain service bots
        # who were not actually mentioned in the message (e.g. if
        # mention syntax for that bot appeared in a code block).
        # Thus, it is important to filter out any users who aren't part
        # of either mentioned_user_ids (the actual mentioned users) or
        # active_user_ids (the actual recipients).
        #
        # So even though this is implied by the logic below, we filter
        # these not-actually-mentioned users here, to help keep this
        # function future-proof.
        if user_profile_id not in mentioned_user_ids and user_profile_id not in active_user_ids:
            return

        # Mention triggers, for stream messages
        if is_stream and user_profile_id in mentioned_user_ids:
            trigger = 'mention'
        # PM triggers, for personal and huddle messages
        elif (not is_stream) and (user_profile_id in active_user_ids):
            trigger = 'private_message'
        else:
            return

        event_dict[queue_name].append({
            'trigger': trigger,
            'user_profile_id': user_profile_id,
        })

    for user_profile_id, bot_type in service_bot_tuples:
        maybe_add_event(
            user_profile_id=user_profile_id,
            bot_type=bot_type,
        )

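    # At this point event_dict maps queue names to lists of per-bot
    # events, e.g. (hypothetical id):
    #     {'outgoing_webhooks': [{'trigger': 'mention', 'user_profile_id': 42}]}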
    return event_dict

def do_schedule_messages(messages: Sequence[Mapping[str, Any]]) -> List[int]:
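    # Each mapping is assumed (per the reads below) to carry a prepared
    # 'message', plus 'stream', 'realm', 'deliver_at', and a
    # 'delivery_type' of either 'send_later' or 'remind'.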
    scheduled_messages = []  # type: List[ScheduledMessage]

    for message in messages:
        scheduled_message = ScheduledMessage()
        scheduled_message.sender = message['message'].sender
        scheduled_message.recipient = message['message'].recipient
        topic_name = message['message'].topic_name()
        scheduled_message.set_topic_name(topic_name=topic_name)
        scheduled_message.content = message['message'].content
        scheduled_message.sending_client = message['message'].sending_client
        scheduled_message.stream = message['stream']
        scheduled_message.realm = message['realm']
        scheduled_message.scheduled_timestamp = message['deliver_at']
        if message['delivery_type'] == 'send_later':
            scheduled_message.delivery_type = ScheduledMessage.SEND_LATER
        elif message['delivery_type'] == 'remind':
            scheduled_message.delivery_type = ScheduledMessage.REMIND

        scheduled_messages.append(scheduled_message)

    ScheduledMessage.objects.bulk_create(scheduled_messages)
    return [scheduled_message.id for scheduled_message in scheduled_messages]

def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, Any]]],
                     email_gateway: Optional[bool]=False,
                     mark_as_read: List[int]=[]) -> List[int]:
    """See
    https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
    for high-level documentation on this subsystem.
    """

    # Filter out messages which didn't pass internal_prep_message properly
    messages = [message for message in messages_maybe_none if message is not None]

    # Filter out zephyr mirror anomalies where the message was already sent
    already_sent_ids = []  # type: List[int]
    new_messages = []  # type: List[MutableMapping[str, Any]]
    for message in messages:
        if isinstance(message['message'], int):
            already_sent_ids.append(message['message'])
        else:
            new_messages.append(message)
    messages = new_messages

    links_for_embed = set()  # type: Set[str]
    # For consistency, changes to the default values for these gets should also be applied
    # to the default args in do_send_message
    for message in messages:
        message['rendered_content'] = message.get('rendered_content', None)
        message['stream'] = message.get('stream', None)
        message['local_id'] = message.get('local_id', None)
        message['sender_queue_id'] = message.get('sender_queue_id', None)
        message['realm'] = message.get('realm', message['message'].sender.realm)

        mention_data = bugdown.MentionData(
            realm_id=message['realm'].id,
            content=message['message'].content,
        )
        message['mention_data'] = mention_data

        if message['message'].is_stream_message():
            stream_id = message['message'].recipient.type_id
            stream_topic = StreamTopicTarget(
                stream_id=stream_id,
                topic_name=message['message'].topic_name()
            )  # type: Optional[StreamTopicTarget]
        else:
            stream_topic = None

        info = get_recipient_info(
            recipient=message['message'].recipient,
            sender_id=message['message'].sender_id,
            stream_topic=stream_topic,
            possibly_mentioned_user_ids=mention_data.get_user_ids(),
        )

        message['active_user_ids'] = info['active_user_ids']
        message['push_notify_user_ids'] = info['push_notify_user_ids']
        message['stream_push_user_ids'] = info['stream_push_user_ids']
        message['stream_email_user_ids'] = info['stream_email_user_ids']
        message['um_eligible_user_ids'] = info['um_eligible_user_ids']
        message['long_term_idle_user_ids'] = info['long_term_idle_user_ids']
        message['default_bot_user_ids'] = info['default_bot_user_ids']
        message['service_bot_tuples'] = info['service_bot_tuples']

        # Render our messages.
        assert message['message'].rendered_content is None

        rendered_content = render_incoming_message(
            message['message'],
            message['message'].content,
            message['active_user_ids'],
            message['realm'],
            mention_data=message['mention_data'],
            email_gateway=email_gateway,
        )
        message['message'].rendered_content = rendered_content
        message['message'].rendered_content_version = bugdown_version
        links_for_embed |= message['message'].links_for_preview

        # Add members of the mentioned user groups into `mentions_user_ids`.
        mention_data = message['mention_data']
        for group_id in message['message'].mentions_user_group_ids:
            members = mention_data.get_group_members(group_id)
            message['message'].mentions_user_ids.update(members)

        '''
        Once we have the actual list of mentioned ids from message
        rendering, we can patch in "default bots" (aka normal bots)
        who were directly mentioned in this message as eligible to
        get UserMessage rows.
        '''
        mentioned_user_ids = message['message'].mentions_user_ids
        default_bot_user_ids = message['default_bot_user_ids']
        mentioned_bot_user_ids = default_bot_user_ids & mentioned_user_ids
        message['um_eligible_user_ids'] |= mentioned_bot_user_ids

        # Update calculated fields of the message
        message['message'].update_calculated_fields()

    # Save the message receipts in the database
    user_message_flags = defaultdict(dict)  # type: Dict[int, Dict[int, List[str]]]
    with transaction.atomic():
        Message.objects.bulk_create([message['message'] for message in messages])
        ums = []  # type: List[UserMessageLite]
        for message in messages:
            # Service bots (outgoing webhook bots and embedded bots) don't store UserMessage rows;
            # they will be processed later.
            mentioned_user_ids = message['message'].mentions_user_ids
            user_messages = create_user_messages(
                message=message['message'],
                um_eligible_user_ids=message['um_eligible_user_ids'],
                long_term_idle_user_ids=message['long_term_idle_user_ids'],
                stream_push_user_ids=message['stream_push_user_ids'],
                stream_email_user_ids=message['stream_email_user_ids'],
                mentioned_user_ids=mentioned_user_ids,
                mark_as_read=mark_as_read
            )

            for um in user_messages:
                user_message_flags[message['message'].id][um.user_profile_id] = um.flags_list()

            ums.extend(user_messages)

            message['message'].service_queue_events = get_service_bot_events(
                sender=message['message'].sender,
                service_bot_tuples=message['service_bot_tuples'],
                mentioned_user_ids=mentioned_user_ids,
                active_user_ids=message['active_user_ids'],
                recipient_type=message['message'].recipient.type,
            )

        bulk_insert_ums(ums)

        # Claim attachments in message
        for message in messages:
            if Message.content_has_attachment(message['message'].content):
                do_claim_attachments(message['message'])

        for message in messages:
            do_widget_post_save_actions(message)

    for message in messages:
        # Deliver events to the real-time push system, as well as
        # enqueuing any additional processing triggered by the message.
        wide_message_dict = MessageDict.wide_dict(message['message'])

        user_flags = user_message_flags.get(message['message'].id, {})
        sender = message['message'].sender
        message_type = wide_message_dict['type']

        presence_idle_user_ids = get_active_presence_idle_user_ids(
            realm=sender.realm,
            sender_id=sender.id,
            message_type=message_type,
            active_user_ids=message['active_user_ids'],
            user_flags=user_flags,
        )

        event = dict(
            type='message',
            message=message['message'].id,
            message_dict=wide_message_dict,
            presence_idle_user_ids=presence_idle_user_ids,
        )

        '''
        TODO:  We may want to limit user_ids to only those users who have
        UserMessage rows, if only for minor performance reasons.

        For now we queue events for all subscribers/sendees of the
        message, since downstream code may still do notifications
        that don't require UserMessage rows.

        Our automated tests have gotten better on this codepath,
        but we may have coverage gaps, so we should be careful
        about changing the next line.
        '''
        user_ids = message['active_user_ids'] | set(user_flags.keys())

        users = [
            dict(
                id=user_id,
                flags=user_flags.get(user_id, []),
                always_push_notify=(user_id in message['push_notify_user_ids']),
                stream_push_notify=(user_id in message['stream_push_user_ids']),
                stream_email_notify=(user_id in message['stream_email_user_ids']),
            )
            for user_id in user_ids
        ]

        if message['message'].is_stream_message():
            # Note: This is where authorization for single-stream
            # get_updates happens! We only attach stream data to the
            # notify new_message request if it's a public stream,
            # ensuring that in the tornado server, non-public stream
            # messages are only associated to their subscribed users.
            if message['stream'] is None:
                stream_id = message['message'].recipient.type_id
                message['stream'] = Stream.objects.select_related("realm").get(id=stream_id)
            assert message['stream'] is not None  # assert needed because stubs for django are missing
            if message['stream'].is_public():
                event['realm_id'] = message['stream'].realm_id
                event['stream_name'] = message['stream'].name
            if message['stream'].invite_only:
                event['invite_only'] = True
            if message['stream'].first_message_id is None:
                message['stream'].first_message_id = message['message'].id
                message['stream'].save(update_fields=["first_message_id"])
        if message['local_id'] is not None:
            event['local_id'] = message['local_id']
        if message['sender_queue_id'] is not None:
            event['sender_queue_id'] = message['sender_queue_id']
        send_event(message['realm'], event, users)

        if url_embed_preview_enabled(message['message']) and links_for_embed:
            event_data = {
                'message_id': message['message'].id,
                'message_content': message['message'].content,
                'message_realm_id': message['realm'].id,
                'urls': links_for_embed}
            queue_json_publish('embed_links', event_data)

        if (settings.ENABLE_FEEDBACK and settings.FEEDBACK_BOT and
                message['message'].recipient.type == Recipient.PERSONAL):

            feedback_bot_id = get_system_bot(email=settings.FEEDBACK_BOT).id
            if feedback_bot_id in message['active_user_ids']:
                queue_json_publish(
                    'feedback_messages',
                    wide_message_dict,
                )

        if message['message'].recipient.type == Recipient.PERSONAL:
            welcome_bot_id = get_system_bot(settings.WELCOME_BOT).id
            if (welcome_bot_id in message['active_user_ids'] and
                    welcome_bot_id != message['message'].sender_id):
                send_welcome_bot_response(message)

        for queue_name, events in message['message'].service_queue_events.items():
            for event in events:
                queue_json_publish(
                    queue_name,
                    {
                        "message": wide_message_dict,
                        "trigger": event['trigger'],
                        "user_profile_id": event["user_profile_id"],
                    }
                )

    # Note that this does not preserve the order of message ids
    # returned.  In practice, this shouldn't matter, as we only
    # mirror single zephyr messages at a time and don't otherwise
    # intermingle sending zephyr messages with other messages.
    return already_sent_ids + [message['message'].id for message in messages]

class UserMessageLite:
    '''
    The Django ORM is too slow for bulk operations.  This class
    is optimized for the simple use case of inserting a bunch of
    rows into zerver_usermessage.
    '''
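    # A minimal sketch of the intended flow (hypothetical ids; see
    # bulk_insert_ums below):
    #     ums = [UserMessageLite(user_profile_id=1, message_id=2, flags=0)]
    #     bulk_insert_ums(ums)
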
    def __init__(self, user_profile_id: int, message_id: int, flags: int) -> None:
        self.user_profile_id = user_profile_id
        self.message_id = message_id
        self.flags = flags

    def flags_list(self) -> List[str]:
        return UserMessage.flags_list_for_flags(self.flags)

def create_user_messages(message: Message,
                         um_eligible_user_ids: Set[int],
                         long_term_idle_user_ids: Set[int],
                         stream_push_user_ids: Set[int],
                         stream_email_user_ids: Set[int],
                         mentioned_user_ids: Set[int],
                         mark_as_read: List[int]=[]) -> List[UserMessageLite]:
    ums_to_create = []
    for user_profile_id in um_eligible_user_ids:
        um = UserMessageLite(
            user_profile_id=user_profile_id,
            message_id=message.id,
            flags=0,
        )
        ums_to_create.append(um)

    # These properties on the Message are set via
    # render_markdown by code in the bugdown inline patterns.
    wildcard = message.mentions_wildcard
    ids_with_alert_words = message.user_ids_with_alert_words

    for um in ums_to_create:
        if (um.user_profile_id == message.sender.id and
                message.sent_by_human()) or \
                um.user_profile_id in mark_as_read:
            um.flags |= UserMessage.flags.read
        if wildcard:
            um.flags |= UserMessage.flags.wildcard_mentioned
        if um.user_profile_id in mentioned_user_ids:
            um.flags |= UserMessage.flags.mentioned
        if um.user_profile_id in ids_with_alert_words:
            um.flags |= UserMessage.flags.has_alert_word
        if message.recipient.type in [Recipient.HUDDLE, Recipient.PERSONAL]:
            um.flags |= UserMessage.flags.is_private

    # For long_term_idle (aka soft-deactivated) users, we are allowed
    # to optimize by lazily not creating UserMessage rows that would
    # have the default 0 flag set (since the soft-reactivation logic
    # knows how to create those when the user comes back).  We need to
    # create the UserMessage rows for these long_term_idle users
    # non-lazily in a few cases:
    #
    # * There are nonzero flags (e.g. the user was mentioned), since
    #   that case is rare and this saves a lot of complexity in
    #   soft-reactivation.
    #
    # * If the user is going to be notified (e.g. they get push/email
    #   notifications for every message on a stream), since in that
    #   case the notifications code will call `access_message` on the
    #   message to re-verify permissions, and for private streams,
    #   will get an error if the UserMessage row doesn't exist yet.
    #
    # See https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation
    # for details on this system.
|
|
|
user_messages = []
|
|
|
|
for um in ums_to_create:
|
|
|
|
if (um.user_profile_id in long_term_idle_user_ids and
|
2018-09-11 07:45:32 +02:00
|
|
|
um.user_profile_id not in stream_push_user_ids and
|
|
|
|
um.user_profile_id not in stream_email_user_ids and
|
2017-10-28 21:53:47 +02:00
|
|
|
message.is_stream_message() and
|
2017-09-10 20:36:08 +02:00
|
|
|
int(um.flags) == 0):
|
|
|
|
continue
|
|
|
|
user_messages.append(um)
|
|
|
|
|
|
|
|
return user_messages
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def bulk_insert_ums(ums: List[UserMessageLite]) -> None:
    '''
    Doing bulk inserts this way is much faster than using Django,
    since we don't have any ORM overhead.  Profiling with 1000
    users shows a speedup of 0.436 -> 0.027 seconds, so we're
    talking about a 15x speedup.
    '''
    if not ums:
        return

    vals = ','.join([
        '(%d, %d, %d)' % (um.user_profile_id, um.message_id, um.flags)
        for um in ums
    ])
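    # vals is a single VALUES payload such as '(1, 2, 0),(3, 2, 1)'
    # (hypothetical ids), so all rows land in one INSERT statement;
    # the %d formatting constrains the interpolated values to integers.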
    query = '''
        INSERT into
            zerver_usermessage (user_profile_id, message_id, flags)
        VALUES
    ''' + vals

    with connection.cursor() as cursor:
        cursor.execute(query)

def do_add_submessage(realm: Realm,
                      sender_id: int,
                      message_id: int,
                      msg_type: str,
                      content: str,
                      ) -> None:
    submessage = SubMessage(
        sender_id=sender_id,
        message_id=message_id,
        msg_type=msg_type,
        content=content,
    )
    submessage.save()

    event = dict(
        type="submessage",
        msg_type=msg_type,
        message_id=message_id,
        submessage_id=submessage.id,
        sender_id=sender_id,
        content=content,
    )
    ums = UserMessage.objects.filter(message_id=message_id)
    target_user_ids = [um.user_profile_id for um in ums]

    send_event(realm, event, target_user_ids)

def notify_reaction_update(user_profile: UserProfile, message: Message,
                           reaction: Reaction, op: str) -> None:
    user_dict = {'user_id': user_profile.id,
                 'email': user_profile.email,
                 'full_name': user_profile.full_name}

    event = {'type': 'reaction',
             'op': op,
             'user': user_dict,
             'message_id': message.id,
             'emoji_name': reaction.emoji_name,
             'emoji_code': reaction.emoji_code,
             'reaction_type': reaction.reaction_type}  # type: Dict[str, Any]

    # Update the cached message since a new reaction was added.
    update_to_dict_cache([message])

    # Recipients for message update events, including reactions, are
    # everyone who got the original message.  This means reactions
    # won't live-update in preview narrows, but it's the right
    # performance tradeoff, since otherwise we'd need to send all
    # reactions to public stream messages to every browser for every
    # client in the organization, which doesn't scale.
    #
    # However, to ensure that reactions do live-update for any user
    # who has actually participated in reacting to a message, we add a
    # "historical" UserMessage row for any user who reacts to a message,
    # subscribing them to future notifications.
    ums = UserMessage.objects.filter(message=message.id)
    send_event(user_profile.realm, event, [um.user_profile_id for um in ums])

def do_add_reaction_legacy(user_profile: UserProfile, message: Message, emoji_name: str) -> None:
    (emoji_code, reaction_type) = emoji_name_to_emoji_code(user_profile.realm, emoji_name)
    reaction = Reaction(user_profile=user_profile, message=message,
                        emoji_name=emoji_name, emoji_code=emoji_code,
                        reaction_type=reaction_type)
    try:
        reaction.save()
    except django.db.utils.IntegrityError:  # nocoverage
        # This can happen when a race results in the check in views
        # code not catching an attempt to double-add a reaction, or
        # perhaps if the emoji_name/emoji_code mapping is busted.
        raise JsonableError(_("Reaction already exists."))

    notify_reaction_update(user_profile, message, reaction, "add")

def do_remove_reaction_legacy(user_profile: UserProfile, message: Message, emoji_name: str) -> None:
    reaction = Reaction.objects.filter(user_profile=user_profile,
                                       message=message,
                                       emoji_name=emoji_name).get()
    reaction.delete()
    notify_reaction_update(user_profile, message, reaction, "remove")

def do_add_reaction(user_profile: UserProfile, message: Message,
                    emoji_name: str, emoji_code: str, reaction_type: str) -> None:
    reaction = Reaction(user_profile=user_profile, message=message,
                        emoji_name=emoji_name, emoji_code=emoji_code,
                        reaction_type=reaction_type)
    try:
        reaction.save()
    except django.db.utils.IntegrityError:  # nocoverage
        # This can happen when a race results in the check in views
        # code not catching an attempt to double-add a reaction, or
        # perhaps if the emoji_name/emoji_code mapping is busted.
        raise JsonableError(_("Reaction already exists."))

    notify_reaction_update(user_profile, message, reaction, "add")

def do_remove_reaction(user_profile: UserProfile, message: Message,
                       emoji_code: str, reaction_type: str) -> None:
    reaction = Reaction.objects.filter(user_profile=user_profile,
                                       message=message,
                                       emoji_code=emoji_code,
                                       reaction_type=reaction_type).get()
    reaction.delete()
    notify_reaction_update(user_profile, message, reaction, "remove")

def do_send_typing_notification(realm: Realm, notification: Dict[str, Any]) -> None:
    recipient_user_profiles = get_typing_user_profiles(notification['recipient'],
                                                       notification['sender'].id)
    # Only deliver the notification to active user recipients
    user_ids_to_notify = [profile.id for profile in recipient_user_profiles if profile.is_active]
    sender_dict = {'user_id': notification['sender'].id, 'email': notification['sender'].email}
    # Include a list of recipients in the event body to help identify where the typing is happening
    recipient_dicts = [{'user_id': profile.id, 'email': profile.email}
                       for profile in recipient_user_profiles]
    event = dict(
        type='typing',
        op=notification['op'],
        sender=sender_dict,
        recipients=recipient_dicts)

    send_event(realm, event, user_ids_to_notify)

# check_send_typing_notification:
# Checks the typing notification and sends it
def check_send_typing_notification(sender: UserProfile, notification_to: Union[Sequence[str], Sequence[int]],
                                   operator: str) -> None:
    typing_notification = check_typing_notification(sender, notification_to, operator)
    do_send_typing_notification(sender.realm, typing_notification)

# check_typing_notification:
# Returns a typing notification ready for sending with
# do_send_typing_notification on success; raises JsonableError on error.
def check_typing_notification(sender: UserProfile,
                              notification_to: Union[Sequence[str], Sequence[int]],
                              operator: str) -> Dict[str, Any]:
    if len(notification_to) == 0:
        raise JsonableError(_('Missing parameter: \'to\' (recipient)'))
    elif operator not in ('start', 'stop'):
        raise JsonableError(_('Invalid \'op\' value (should be start or stop)'))

    try:
        if isinstance(notification_to[0], str):
            emails = cast(Sequence[str], notification_to)
            recipient = recipient_for_emails(emails, False, sender, sender)
        elif isinstance(notification_to[0], int):
            user_ids = cast(Sequence[int], notification_to)
            recipient = recipient_for_user_ids(user_ids, sender)
    except ValidationError as e:
        assert isinstance(e.messages[0], str)
        raise JsonableError(e.messages[0])
    assert recipient.type != Recipient.STREAM
    return {'sender': sender, 'recipient': recipient, 'op': operator}

def send_stream_creation_event(stream: Stream, user_ids: List[int]) -> None:
    event = dict(type="stream", op="create",
                 streams=[stream.to_dict()])
    send_event(stream.realm, event, user_ids)

def get_default_value_for_history_public_to_subscribers(
        realm: Realm,
        invite_only: bool,
        history_public_to_subscribers: Optional[bool]
) -> bool:
    if invite_only:
        if history_public_to_subscribers is None:
            # A private stream's history is non-public by default
            history_public_to_subscribers = False
    else:
        # If we later decide to support public streams without
        # history, we can remove this code path.
        history_public_to_subscribers = True

    if realm.is_zephyr_mirror_realm:
        # In the Zephyr mirroring model, history is unconditionally
        # not public to subscribers, even for public streams.
        history_public_to_subscribers = False

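    # Net effect (a sketch): private streams default to non-public
    # history unless the caller passed an explicit value, public streams
    # always get public history, and zephyr-mirror realms force it off.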
    return history_public_to_subscribers

def render_stream_description(text: str) -> str:
    return bugdown_convert(text, no_previews=True)

def create_stream_if_needed(realm: Realm,
                            stream_name: str,
                            *,
                            invite_only: bool=False,
                            is_announcement_only: bool=False,
                            history_public_to_subscribers: Optional[bool]=None,
                            stream_description: str="") -> Tuple[Stream, bool]:

    history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
        realm, invite_only, history_public_to_subscribers)

    (stream, created) = Stream.objects.get_or_create(
        realm=realm,
        name__iexact=stream_name,
        defaults=dict(
            name=stream_name,
            description=stream_description,
            invite_only=invite_only,
            is_announcement_only=is_announcement_only,
            history_public_to_subscribers=history_public_to_subscribers,
            is_in_zephyr_realm=realm.is_zephyr_mirror_realm
        )
    )

    if created:
        stream.rendered_description = render_stream_description(stream_description)
        stream.save(update_fields=["rendered_description"])
        Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        if stream.is_public():
            send_stream_creation_event(stream, active_non_guest_user_ids(stream.realm_id))
        else:
            realm_admin_ids = [user.id for user in
                               stream.realm.get_admin_users_and_bots()]
            send_stream_creation_event(stream, realm_admin_ids)
    return stream, created

def ensure_stream(realm: Realm,
                  stream_name: str,
                  invite_only: bool=False,
                  stream_description: str="") -> Stream:
    return create_stream_if_needed(realm, stream_name,
                                   invite_only=invite_only,
                                   stream_description=stream_description)[0]

def create_streams_if_needed(realm: Realm,
                             stream_dicts: List[Mapping[str, Any]]) -> Tuple[List[Stream], List[Stream]]:
    """Note that stream_dict["name"] is assumed to already be stripped of
    whitespace"""
    added_streams = []  # type: List[Stream]
    existing_streams = []  # type: List[Stream]
    for stream_dict in stream_dicts:
        stream, created = create_stream_if_needed(
            realm,
            stream_dict["name"],
            invite_only=stream_dict.get("invite_only", False),
            is_announcement_only=stream_dict.get("is_announcement_only", False),
            history_public_to_subscribers=stream_dict.get("history_public_to_subscribers"),
            stream_description=stream_dict.get("description", "")
        )

        if created:
            added_streams.append(stream)
        else:
            existing_streams.append(stream)

    return added_streams, existing_streams

def get_recipient_from_user_ids(recipient_profile_ids: Set[int],
                                forwarded_mirror_message: bool,
                                forwarder_user_profile: Optional[UserProfile],
                                sender: UserProfile) -> Recipient:

    # Avoid mutating the passed in set of recipient_profile_ids.
    recipient_profile_ids = set(recipient_profile_ids)

    # If the private message is just between the sender and
    # another person, force it to be a personal internally.

    if forwarded_mirror_message:
        # In our mirroring integrations with some third-party
        # protocols, bots subscribed to the third-party protocol
        # forward to Zulip messages that they received in the
        # third-party service.  The permissions model for that
        # forwarding is that users can only submit to Zulip private
        # messages they personally received, and here we do the check
        # for whether forwarder_user_profile is among the private
        # message recipients of the message.
        assert forwarder_user_profile is not None
        if forwarder_user_profile.id not in recipient_profile_ids:
            raise ValidationError(_("User not authorized for this query"))

    if (len(recipient_profile_ids) == 2 and sender.id in recipient_profile_ids):
        recipient_profile_ids.remove(sender.id)

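    # e.g. (hypothetical ids): {5, 9} with sender 5 collapses to {9} and
    # becomes a personal recipient; three or more ids keep the sender and
    # become a huddle.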
    if len(recipient_profile_ids) > 1:
        # Make sure the sender is included in huddle messages
        recipient_profile_ids.add(sender.id)
        return get_huddle_recipient(recipient_profile_ids)
    else:
        return get_personal_recipient(list(recipient_profile_ids)[0])

def validate_recipient_user_profiles(user_profiles: List[UserProfile],
                                     sender: UserProfile,
                                     allow_deactivated: bool=False) -> Set[int]:
    recipient_profile_ids = set()

    # We exempt cross-realm bots from the check that all the recipients
    # are in the same realm.
    realms = set()
    if not is_cross_realm_bot_email(sender.email):
        realms.add(sender.realm_id)

    for user_profile in user_profiles:
        if (not user_profile.is_active and not user_profile.is_mirror_dummy and
                not allow_deactivated) or user_profile.realm.deactivated:
            raise ValidationError(_("'%s' is no longer using Zulip.") % (user_profile.email,))
        recipient_profile_ids.add(user_profile.id)
        if not is_cross_realm_bot_email(user_profile.email):
            realms.add(user_profile.realm_id)

    if len(realms) > 1:
        raise ValidationError(_("You can't send private messages outside of your organization."))

    return recipient_profile_ids

def recipient_for_emails(emails: Iterable[str], forwarded_mirror_message: bool,
                         forwarder_user_profile: Optional[UserProfile],
                         sender: UserProfile) -> Recipient:

    # This helper should only be used for searches.
    # Other features are moving toward supporting ids.
    user_profiles = []  # type: List[UserProfile]
    for email in emails:
        try:
            user_profile = get_user_including_cross_realm(email, sender.realm)
        except UserProfile.DoesNotExist:
            raise ValidationError(_("Invalid email '%s'") % (email,))
        user_profiles.append(user_profile)

    return recipient_for_user_profiles(
        user_profiles=user_profiles,
        forwarded_mirror_message=forwarded_mirror_message,
        forwarder_user_profile=forwarder_user_profile,
        sender=sender
    )

def recipient_for_user_ids(user_ids: Iterable[int], sender: UserProfile) -> Recipient:
    user_profiles = []  # type: List[UserProfile]
    for user_id in user_ids:
        try:
            user_profile = get_user_by_id_in_realm_including_cross_realm(
                user_id, sender.realm)
        except UserProfile.DoesNotExist:
            raise ValidationError(_("Invalid user ID {}").format(user_id))
        user_profiles.append(user_profile)

    return recipient_for_user_profiles(
        user_profiles=user_profiles,
        forwarded_mirror_message=False,
        forwarder_user_profile=None,
        sender=sender
    )

def recipient_for_user_profiles(user_profiles: List[UserProfile], forwarded_mirror_message: bool,
                                forwarder_user_profile: Optional[UserProfile],
                                sender: UserProfile, allow_deactivated: bool=False) -> Recipient:

    recipient_profile_ids = validate_recipient_user_profiles(user_profiles, sender,
                                                             allow_deactivated=allow_deactivated)

    return get_recipient_from_user_ids(recipient_profile_ids, forwarded_mirror_message,
                                       forwarder_user_profile, sender)

def already_sent_mirrored_message_id(message: Message) -> Optional[int]:
    if message.recipient.type == Recipient.HUDDLE:
        # For huddle messages, we use a 10-second window because the
        # timestamps aren't guaranteed to actually match between two
        # copies of the same message.
        time_window = datetime.timedelta(seconds=10)
    else:
        time_window = datetime.timedelta(seconds=0)

    query = Message.objects.filter(
        sender=message.sender,
        recipient=message.recipient,
        content=message.content,
        sending_client=message.sending_client,
        pub_date__gte=message.pub_date - time_window,
        pub_date__lte=message.pub_date + time_window)

    messages = filter_by_exact_message_topic(
        query=query,
        message=message,
    )

    if messages.exists():
        return messages[0].id
    return None

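# A worked example of the dedup window above (timestamps invented): if two
# zephyr mirrors relay the same huddle message with pub_dates 4 seconds
# apart, the second copy's query matches the first (within the 10-second
# window), so the existing id is returned and no duplicate row is created.
# For non-huddle messages the window is 0 seconds, i.e. an exact match.
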
def extract_recipients(
        s: Union[str, Iterable[str], Iterable[int]]
) -> Union[List[str], List[int]]:
    # We try to accept multiple incoming formats for recipients.
    # See test_extract_recipients() for examples of what we allow.

    if isinstance(s, str):
        try:
            data = ujson.loads(s)
        except (ValueError, TypeError):
            data = s
    else:
        data = s

    if isinstance(data, str):
        data = data.split(',')

    if not isinstance(data, list):
        raise ValueError("Invalid data type for recipients")

    if not data:
        # We don't complain about empty message recipients here
        return data

    if isinstance(data[0], str):
        recipients = extract_emails(data)  # type: Union[List[str], List[int]]

    if isinstance(data[0], int):
        recipients = extract_user_ids(data)

    # Remove any duplicates.
    return list(set(recipients))  # type: ignore # mypy gets confused about what's passed to set()

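# An illustrative sketch of the formats extract_recipients() accepts
# (addresses and IDs invented).  Note that the trailing list(set(...))
# means the order of the returned, de-duplicated list is unspecified.
#
#   extract_recipients('alice@example.com, bob@example.com')
#       -> ['alice@example.com', 'bob@example.com']
#   extract_recipients('["alice@example.com", "bob@example.com"]')
#       -> ['alice@example.com', 'bob@example.com']
#   extract_recipients('[11, 12, 11]')
#       -> [11, 12]  (duplicates removed)
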
def extract_user_ids(user_ids: Iterable[int]) -> List[int]:
    recipients = []
    for user_id in user_ids:
        if not isinstance(user_id, int):
            raise TypeError("Recipient lists may contain emails or user IDs, but not both.")

        recipients.append(user_id)

    return recipients

def extract_emails(emails: Iterable[str]) -> List[str]:
    recipients = []
    for email in emails:
        if not isinstance(email, str):
            raise TypeError("Recipient lists may contain emails or user IDs, but not both.")

        email = email.strip()
        if email:
            recipients.append(email)

    return recipients

def check_send_stream_message(sender: UserProfile, client: Client, stream_name: str,
                              topic: str, body: str) -> int:
    addressee = Addressee.for_stream_name(stream_name, topic)
    message = check_message(sender, client, addressee, body)

    return do_send_messages([message])[0]

def check_send_private_message(sender: UserProfile, client: Client,
                               receiving_user: UserProfile, body: str) -> int:
    addressee = Addressee.for_user_profile(receiving_user)
    message = check_message(sender, client, addressee, body)

    return do_send_messages([message])[0]

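# A hedged usage sketch for the two helpers above (the stream name and the
# receiving profile are invented; get_system_bot/get_client are the same
# lookups used elsewhere in this module):
#
#   sender = get_system_bot(settings.NOTIFICATION_BOT)
#   client = get_client("Internal")
#   check_send_stream_message(sender, client, "general", "greetings", "Hello!")
#   check_send_private_message(sender, client, some_user_profile, "Hi!")
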
# check_send_message:
# Returns the id of the sent message.  Has same argspec as check_message.
def check_send_message(sender: UserProfile, client: Client, message_type_name: str,
                       message_to: Union[Sequence[int], Sequence[str]],
                       topic_name: Optional[str],
                       message_content: str, realm: Optional[Realm]=None,
                       forged: bool=False, forged_timestamp: Optional[float]=None,
                       forwarder_user_profile: Optional[UserProfile]=None,
                       local_id: Optional[str]=None,
                       sender_queue_id: Optional[str]=None,
                       widget_content: Optional[str]=None) -> int:

    addressee = Addressee.legacy_build(
        sender,
        message_type_name,
        message_to,
        topic_name)

    message = check_message(sender, client, addressee,
                            message_content, realm, forged, forged_timestamp,
                            forwarder_user_profile, local_id, sender_queue_id,
                            widget_content)
    return do_send_messages([message])[0]

def check_schedule_message(sender: UserProfile, client: Client,
                           message_type_name: str,
                           message_to: Union[Sequence[str], Sequence[int]],
                           topic_name: Optional[str], message_content: str,
                           delivery_type: str, deliver_at: datetime.datetime,
                           realm: Optional[Realm]=None,
                           forwarder_user_profile: Optional[UserProfile]=None
                           ) -> int:
    addressee = Addressee.legacy_build(
        sender,
        message_type_name,
        message_to,
        topic_name)

    message = check_message(sender, client, addressee,
                            message_content, realm=realm,
                            forwarder_user_profile=forwarder_user_profile)
    message['deliver_at'] = deliver_at
    message['delivery_type'] = delivery_type

    recipient = message['message'].recipient
    if (delivery_type == 'remind' and (recipient.type != Recipient.STREAM and
                                       recipient.type_id != sender.id)):
        raise JsonableError(_("Reminders can only be set for streams."))

    return do_schedule_messages([message])[0]

def check_stream_name(stream_name: str) -> None:
    if stream_name.strip() == "":
        raise JsonableError(_("Invalid stream name '%s'") % (stream_name,))
    if len(stream_name) > Stream.MAX_NAME_LENGTH:
        raise JsonableError(_("Stream name too long (limit: %s characters).") % (Stream.MAX_NAME_LENGTH,))
    for i in stream_name:
        if ord(i) == 0:
            raise JsonableError(_("Stream name '%s' contains NULL (0x00) characters.") % (stream_name,))

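# Illustrative behavior (inputs invented): check_stream_name("  ") raises
# JsonableError, as does a name longer than Stream.MAX_NAME_LENGTH or one
# containing "\x00"; a normal name like "general" passes silently.
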
def check_default_stream_group_name(group_name: str) -> None:
    if group_name.strip() == "":
        raise JsonableError(_("Invalid default stream group name '%s'") % (group_name,))
    if len(group_name) > DefaultStreamGroup.MAX_NAME_LENGTH:
        raise JsonableError(_("Default stream group name too long (limit: %s characters)")
                            % (DefaultStreamGroup.MAX_NAME_LENGTH,))
    for i in group_name:
        if ord(i) == 0:
            raise JsonableError(_("Default stream group name '%s' contains NULL (0x00) characters.")
                                % (group_name,))

def send_rate_limited_pm_notification_to_bot_owner(sender: UserProfile,
                                                   realm: Realm,
                                                   content: str) -> None:
    """
    Sends a PM error notification to a bot's owner if one hasn't already
    been sent in the last 5 minutes.
    """
    if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated:
        return

    if not sender.is_bot or sender.bot_owner is None:
        return

    # Don't send these notifications for cross-realm bot messages
    # (e.g. from EMAIL_GATEWAY_BOT) since the owner for
    # EMAIL_GATEWAY_BOT is probably the server administrator, not
    # the owner of the bot who could potentially fix the problem.
    if sender.realm != realm:
        return

    # We warn the user once every 5 minutes to avoid a flood of
    # PMs on a misconfigured integration, re-using the
    # UserProfile.last_reminder field, which is not used for bots.
    last_reminder = sender.last_reminder
    waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD)
    if last_reminder and timezone_now() - last_reminder <= waitperiod:
        return

    internal_send_private_message(realm, get_system_bot(settings.NOTIFICATION_BOT),
                                  sender.bot_owner, content)

    sender.last_reminder = timezone_now()
    sender.save(update_fields=['last_reminder'])

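# A worked example of the rate limit above (times invented): with
# BOT_OWNER_STREAM_ALERT_WAITPERIOD = 5, a bot that errors again 3 minutes
# after its last notification is silently skipped; once more than 5
# minutes have elapsed, the owner is PMed again and last_reminder is reset.
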
def send_pm_if_empty_stream(stream: Optional[Stream],
                            realm: Realm,
                            sender: UserProfile,
                            stream_name: Optional[str]=None,
                            stream_id: Optional[int]=None) -> None:
    """If a bot sends a message to a stream that doesn't exist or has no
    subscribers, sends a notification to the bot owner (if not a
    cross-realm bot) so that the owner can correct the issue."""
    if not sender.is_bot or sender.bot_owner is None:
        return

    arg_dict = {
        "bot_identity": sender.delivery_email,
        "stream_id": stream_id,
        "stream_name": stream_name,
    }
    if stream is None:
        if stream_id is not None:
            content = _("Your bot `%(bot_identity)s` tried to send a message to stream ID "
                        "%(stream_id)s, but there is no stream with that ID.") % arg_dict
        else:
            assert(stream_name is not None)
            content = _("Your bot `%(bot_identity)s` tried to send a message to stream "
                        "#**%(stream_name)s**, but that stream does not exist. "
                        "Click [here](#streams/new) to create it.") % arg_dict
    else:
        if num_subscribers_for_stream_id(stream.id) > 0:
            return
        content = _("Your bot `%(bot_identity)s` tried to send a message to "
                    "stream #**%(stream_name)s**. The stream exists but "
                    "does not have any subscribers.") % arg_dict

    send_rate_limited_pm_notification_to_bot_owner(sender, realm, content)

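# A note on the %-interpolation above: %-formatting against a dict only
# consumes the named keys a given template mentions, so the shared
# arg_dict can safely carry stream_id/stream_name even when one of them
# is None.  For example (values invented):
#   "`%(bot_identity)s` -> #%(stream_name)s" % arg_dict
# simply ignores the unused "stream_id" key.
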
def validate_sender_can_write_to_stream(sender: UserProfile,
                                        stream: Stream,
                                        forwarder_user_profile: Optional[UserProfile]) -> None:
    # Our caller is responsible for making sure that `stream` actually
    # matches the realm of the sender.

    if stream.is_announcement_only:
        if sender.is_realm_admin or is_cross_realm_bot_email(sender.email):
            pass
        elif sender.is_bot and (sender.bot_owner is not None and
                                sender.bot_owner.is_realm_admin):
            pass
        else:
            raise JsonableError(_("Only organization administrators can send to this stream."))

    if not (stream.invite_only or sender.is_guest):
        # This is a public stream and sender is not a guest user
        return

    if subscribed_to_stream(sender, stream.id):
        # It is private, but you are subscribed
        return

    if sender.is_api_super_user:
        return

    if (forwarder_user_profile is not None and forwarder_user_profile.is_api_super_user):
        return

    if sender.is_bot and (sender.bot_owner is not None and
                          subscribed_to_stream(sender.bot_owner, stream.id)):
        # Bots can send to any stream their owner can.
        return

    if sender.email == settings.WELCOME_BOT:
        # The welcome bot welcomes folks to the stream.
        return

    if sender.email == settings.NOTIFICATION_BOT:
        return

    # All other cases are an error.
    raise JsonableError(_("Not authorized to send to stream '%s'") % (stream.name,))

def validate_stream_name_with_pm_notification(stream_name: str, realm: Realm,
                                              sender: UserProfile) -> Stream:
    stream_name = stream_name.strip()
    check_stream_name(stream_name)

    try:
        stream = get_stream(stream_name, realm)
        send_pm_if_empty_stream(stream, realm, sender)
    except Stream.DoesNotExist:
        send_pm_if_empty_stream(None, realm, sender, stream_name=stream_name)
        raise StreamDoesNotExistError(escape(stream_name))

    return stream

def validate_stream_id_with_pm_notification(stream_id: int, realm: Realm,
                                            sender: UserProfile) -> Stream:
    try:
        stream = get_stream_by_id_in_realm(stream_id, realm)
        send_pm_if_empty_stream(stream, realm, sender)
    except Stream.DoesNotExist:
        send_pm_if_empty_stream(None, realm, sender, stream_id=stream_id)
        raise StreamWithIDDoesNotExistError(stream_id)

    return stream

# check_message:
# Returns a message dict ready for sending with do_send_messages on
# success; raises JsonableError on error.
def check_message(sender: UserProfile, client: Client, addressee: Addressee,
                  message_content_raw: str, realm: Optional[Realm]=None, forged: bool=False,
                  forged_timestamp: Optional[float]=None,
                  forwarder_user_profile: Optional[UserProfile]=None,
                  local_id: Optional[str]=None,
                  sender_queue_id: Optional[str]=None,
                  widget_content: Optional[str]=None) -> Dict[str, Any]:
    """See
    https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
    for high-level documentation on this subsystem.
    """
    stream = None

    message_content = message_content_raw.rstrip()
    if len(message_content) == 0:
        raise JsonableError(_("Message must not be empty"))
    if '\x00' in message_content:
        raise JsonableError(_("Message must not contain null bytes"))

    message_content = truncate_body(message_content)

    if realm is None:
        realm = sender.realm

    if addressee.is_stream():
        topic_name = addressee.topic()
        topic_name = truncate_topic(topic_name)

        stream_name = addressee.stream_name()
        stream_id = addressee.stream_id()

        if stream_name is not None:
            stream = validate_stream_name_with_pm_notification(stream_name, realm, sender)
        elif stream_id is not None:
            stream = validate_stream_id_with_pm_notification(stream_id, realm, sender)
        else:
            stream = addressee.stream()
        assert stream is not None

        recipient = get_stream_recipient(stream.id)

        # This will raise JsonableError if there are problems.
        validate_sender_can_write_to_stream(
            sender=sender,
            stream=stream,
            forwarder_user_profile=forwarder_user_profile
        )

    elif addressee.is_private():
        user_profiles = addressee.user_profiles()
        mirror_message = client and client.name in ["zephyr_mirror", "irc_mirror",
                                                    "jabber_mirror", "JabberMirror"]

        # API Super-users who set the `forged` flag are allowed to
        # forge messages sent by any user, so we disable the
        # `forwarded_mirror_message` security check in that case.
        forwarded_mirror_message = mirror_message and not forged
        try:
            recipient = recipient_for_user_profiles(user_profiles,
                                                    forwarded_mirror_message,
                                                    forwarder_user_profile, sender)
        except ValidationError as e:
            assert isinstance(e.messages[0], str)
            raise JsonableError(e.messages[0])
    else:
        # This is defensive code--Addressee already validates
        # the message type.
        raise AssertionError("Invalid message type")

    message = Message()
    message.sender = sender
    message.content = message_content
    message.recipient = recipient
    if addressee.is_stream():
        message.set_topic_name(topic_name)
    if forged and forged_timestamp is not None:
        # Forged messages come with a timestamp
        message.pub_date = timestamp_to_datetime(forged_timestamp)
    else:
        message.pub_date = timezone_now()
    message.sending_client = client

    # We render messages later in the process.
    assert message.rendered_content is None

    if client.name == "zephyr_mirror":
        id = already_sent_mirrored_message_id(message)
        if id is not None:
            return {'message': id}

    if widget_content is not None:
        try:
            widget_content = ujson.loads(widget_content)
        except Exception:
            raise JsonableError(_('Widgets: API programmer sent invalid JSON content'))

        error_msg = check_widget_content(widget_content)
        if error_msg:
            raise JsonableError(_('Widgets: %s') % (error_msg,))

    return {'message': message, 'stream': stream, 'local_id': local_id,
            'sender_queue_id': sender_queue_id, 'realm': realm,
            'widget_content': widget_content}

def _internal_prep_message(realm: Realm,
                           sender: UserProfile,
                           addressee: Addressee,
                           content: str) -> Optional[Dict[str, Any]]:
    """
    Creates a message object and checks it, but doesn't send it or save it to the database.

    The internal function that calls this can therefore batch send a bunch of created
    messages together as one database query.
    Call do_send_messages with a list of the return values of this method.
    """
    # Truncate the content if it is too long.
    if len(content) > MAX_MESSAGE_LENGTH:
        content = content[0:3900] + "\n\n[message was too long and has been truncated]"

    if realm is None:
        raise RuntimeError("None is not a valid realm for internal_prep_message!")

    # If we have a stream name, and the stream doesn't exist, we
    # create it here (though this code path should probably be removed
    # eventually, moving that responsibility to the caller).  If
    # addressee.stream_name() is None (i.e. we're sending to a stream
    # by ID), we skip this, as the stream object must already exist.
    if addressee.is_stream():
        stream_name = addressee.stream_name()
        if stream_name is not None:
            ensure_stream(realm, stream_name)

    try:
        return check_message(sender, get_client("Internal"), addressee,
                             content, realm=realm)
    except JsonableError as e:
        logging.exception("Error queueing internal message by %s: %s" % (sender.email, e))

    return None

def internal_prep_stream_message(
        realm: Realm, sender: UserProfile,
        stream: Stream, topic: str, content: str
) -> Optional[Dict[str, Any]]:
    """
    See _internal_prep_message for details of how this works.
    """
    addressee = Addressee.for_stream(stream, topic)

    return _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )

def internal_prep_stream_message_by_name(
        realm: Realm, sender: UserProfile,
        stream_name: str, topic: str, content: str
) -> Optional[Dict[str, Any]]:
    """
    See _internal_prep_message for details of how this works.
    """
    addressee = Addressee.for_stream_name(stream_name, topic)

    return _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )

def internal_prep_private_message(realm: Realm,
                                  sender: UserProfile,
                                  recipient_user: UserProfile,
                                  content: str) -> Optional[Dict[str, Any]]:
    """
    See _internal_prep_message for details of how this works.
    """
    addressee = Addressee.for_user_profile(recipient_user)

    return _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )

def internal_send_message(realm: Realm, sender_email: str, recipient_type_name: str,
                          recipients: str, topic_name: str, content: str,
                          email_gateway: Optional[bool]=False) -> Optional[int]:
    """internal_send_message should only be used where `sender_email` is a
    system bot."""

    # Verify the user is in fact a system bot
    assert(is_cross_realm_bot_email(sender_email) or sender_email == settings.ERROR_BOT)

    sender = get_system_bot(sender_email)
    parsed_recipients = extract_recipients(recipients)

    addressee = Addressee.legacy_build(
        sender,
        recipient_type_name,
        parsed_recipients,
        topic_name,
        realm=realm)

    msg = _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )
    if msg is None:
        return None

    message_ids = do_send_messages([msg], email_gateway=email_gateway)
    return message_ids[0]

def internal_send_private_message(realm: Realm,
                                  sender: UserProfile,
                                  recipient_user: UserProfile,
                                  content: str) -> Optional[int]:
    message = internal_prep_private_message(realm, sender, recipient_user, content)
    if message is None:
        return None
    message_ids = do_send_messages([message])
    return message_ids[0]

def internal_send_stream_message(
        realm: Realm, sender: UserProfile,
        stream: Stream, topic: str, content: str
) -> Optional[int]:
    message = internal_prep_stream_message(
        realm, sender, stream,
        topic, content
    )

    if message is None:
        return None
    message_ids = do_send_messages([message])
    return message_ids[0]

def internal_send_stream_message_by_name(
        realm: Realm, sender: UserProfile,
        stream_name: str, topic: str, content: str
) -> Optional[int]:
    message = internal_prep_stream_message_by_name(
        realm, sender, stream_name,
        topic, content
    )

    if message is None:
        return None
    message_ids = do_send_messages([message])
    return message_ids[0]

def internal_send_huddle_message(realm: Realm, sender: UserProfile, emails: List[str],
                                 content: str) -> Optional[int]:
    addressee = Addressee.for_private(emails, realm)
    message = _internal_prep_message(
        realm=realm,
        sender=sender,
        addressee=addressee,
        content=content,
    )
    if message is None:
        return None
    message_ids = do_send_messages([message])
    return message_ids[0]

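# A hedged usage sketch for the internal_send_* family above (the realm,
# stream, and content are invented).  Each helper returns the new
# message's id, or None if _internal_prep_message rejected the content.
#
#   notification_bot = get_system_bot(settings.NOTIFICATION_BOT)
#   message_id = internal_send_stream_message(
#       realm, notification_bot, stream, "exports", "Your export is ready.")
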
def pick_color(user_profile: UserProfile, subs: Iterable[Subscription]) -> str:
    # These colors are shared with the palette in subs.js.
    used_colors = [sub.color for sub in subs if sub.active]
    available_colors = [s for s in STREAM_ASSIGNMENT_COLORS if s not in used_colors]

    if available_colors:
        return available_colors[0]
    else:
        return STREAM_ASSIGNMENT_COLORS[len(used_colors) % len(STREAM_ASSIGNMENT_COLORS)]

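# A worked example of the wrap-around above (counts invented): if the
# palette has, say, 24 colors and a user already has 30 active
# subscriptions, available_colors is empty and we return
# STREAM_ASSIGNMENT_COLORS[30 % 24], reusing the palette cyclically.
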
def validate_user_access_to_subscribers(user_profile: Optional[UserProfile],
                                        stream: Stream) -> None:
    """ Validates whether the user can view the subscribers of a stream.  Raises a JsonableError if:
        * The user and the stream are in different realms
        * The realm is MIT and the stream is not invite only.
        * The stream is invite only, requesting_user is passed, and that user
          does not subscribe to the stream.
    """
    validate_user_access_to_subscribers_helper(
        user_profile,
        {"realm_id": stream.realm_id,
         "invite_only": stream.invite_only},
        # We use a lambda here so that we only compute whether the
        # user is subscribed if we have to
        lambda: subscribed_to_stream(cast(UserProfile, user_profile), stream.id))

def validate_user_access_to_subscribers_helper(user_profile: Optional[UserProfile],
                                               stream_dict: Mapping[str, Any],
                                               check_user_subscribed: Callable[[], bool]) -> None:
    """Helper for validate_user_access_to_subscribers that doesn't require
    a full stream object.  This function is a bit hard to read,
    because it is carefully optimized for performance in the two code
    paths we call it from:

    * In `bulk_get_subscriber_user_ids`, we already know whether the
      user was subscribed via `sub_dict`, and so we want to avoid a
      database query at all (especially since it calls this in a loop);
    * In `validate_user_access_to_subscribers`, we want to only check
      if the user is subscribed when we absolutely have to, since it
      costs a database query.

    The `check_user_subscribed` argument is a function that reports
    whether the user is subscribed to the stream.

    Note also that we raise a ValidationError in cases where the
    caller is doing the wrong thing (maybe these should be
    AssertionErrors), and JsonableError for 400 type errors.
    """
    if user_profile is None:
        raise ValidationError("Missing user to validate access for")

    if user_profile.realm_id != stream_dict["realm_id"]:
        raise ValidationError("Requesting user not in given realm")

    # Guest users can access subscribed public stream's subscribers
    if user_profile.is_guest:
        if check_user_subscribed():
            return
        # We could put an AssertionError here, since we don't have
        # any code paths that would allow a guest user to access other
        # streams in the first place.

    if not user_profile.can_access_public_streams() and not stream_dict["invite_only"]:
        raise JsonableError(_("Subscriber data is not available for this stream"))

    # Organization administrators can view subscribers for all streams.
    if user_profile.is_realm_admin:
        return

    if (stream_dict["invite_only"] and not check_user_subscribed()):
        raise JsonableError(_("Unable to retrieve subscribers for private stream"))

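# A minimal sketch of the deferred check described in the docstring above
# (the dict values and stream_id are invented): the lambda is only
# evaluated on the branches that actually need it (the guest check and
# the private-stream check), so the cheap realm/admin checks can settle
# the question without a database query.
#
#   validate_user_access_to_subscribers_helper(
#       user_profile,
#       {"realm_id": user_profile.realm_id, "invite_only": True},
#       lambda: subscribed_to_stream(user_profile, stream_id))
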
def bulk_get_subscriber_user_ids(stream_dicts: Iterable[Mapping[str, Any]],
                                 user_profile: UserProfile,
                                 sub_dict: Mapping[int, bool],
                                 stream_recipient: StreamRecipientMap) -> Dict[int, List[int]]:
    """sub_dict maps stream_id => whether the user is subscribed to that stream."""
    target_stream_dicts = []
    for stream_dict in stream_dicts:
        try:
            validate_user_access_to_subscribers_helper(user_profile, stream_dict,
                                                       lambda: sub_dict[stream_dict["id"]])
        except JsonableError:
            continue
        target_stream_dicts.append(stream_dict)

    stream_ids = [stream['id'] for stream in target_stream_dicts]
    stream_recipient.populate_for_stream_ids(stream_ids)
    recipient_ids = sorted([
        stream_recipient.recipient_id_for(stream_id)
        for stream_id in stream_ids
    ])

    result = dict((stream["id"], []) for stream in stream_dicts)  # type: Dict[int, List[int]]
    if not recipient_ids:
        return result

    '''
    The raw SQL below leads to more than a 2x speedup when tested with
    20k+ total subscribers.  (For large realms with lots of default
    streams, this function deals with LOTS of data, so it is important
    to optimize.)
    '''

    id_list = ', '.join(str(recipient_id) for recipient_id in recipient_ids)

    query = '''
        SELECT
            zerver_subscription.recipient_id,
            zerver_subscription.user_profile_id
        FROM
            zerver_subscription
        INNER JOIN zerver_userprofile ON
            zerver_userprofile.id = zerver_subscription.user_profile_id
        WHERE
            zerver_subscription.recipient_id in (%s) AND
            zerver_subscription.active AND
            zerver_userprofile.is_active
        ORDER BY
            zerver_subscription.recipient_id
        ''' % (id_list,)

    cursor = connection.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    cursor.close()

    recip_to_stream_id = stream_recipient.recipient_to_stream_id_dict()

    '''
    Using groupby/itemgetter here is important for performance, at scale.
    It makes it so that all interpreter overhead is just O(N) in nature.
    '''
    for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
        user_profile_ids = [r[1] for r in recip_rows]
        stream_id = recip_to_stream_id[recip_id]
        result[stream_id] = list(user_profile_ids)

    return result

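# A minimal, self-contained sketch (invented IDs, stdlib only) of the
# groupby/itemgetter pattern used above.  The input rows must already be
# sorted by the grouping key -- here the ORDER BY in the SQL guarantees
# that -- or groupby() would emit the same key multiple times.
def _sketch_group_rows_by_recipient() -> Dict[int, List[int]]:
    # (recipient_id, user_profile_id) pairs, sorted by recipient_id.
    rows = [(7, 101), (7, 102), (9, 103)]
    grouped = {}  # type: Dict[int, List[int]]
    for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
        grouped[recip_id] = [r[1] for r in recip_rows]
    return grouped  # {7: [101, 102], 9: [103]}
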
def get_subscribers_query(stream: Stream, requesting_user: Optional[UserProfile]) -> QuerySet:
    # TODO: Make a generic stub for QuerySet
    """Build a query to get the subscribers list for a stream, raising a
    JsonableError if the requesting user isn't allowed to see them (see
    validate_user_access_to_subscribers).

    The caller can refine this query with select_related(), values(), etc. depending
    on whether it wants objects or just certain fields.
    """
    validate_user_access_to_subscribers(requesting_user, stream)

    # Note that non-active users may still have "active" subscriptions, because we
    # want to be able to easily reactivate them with their old subscriptions.  This
    # is why the query here has to look at the UserProfile.is_active flag.
    subscriptions = get_active_subscriptions_for_stream_id(stream.id).filter(
        user_profile__is_active=True
    )
    return subscriptions

def get_subscriber_emails(stream: Stream,
                          requesting_user: Optional[UserProfile]=None) -> List[str]:
    subscriptions_query = get_subscribers_query(stream, requesting_user)
    subscriptions = subscriptions_query.values('user_profile__email')
    return [subscription['user_profile__email'] for subscription in subscriptions]

def notify_subscriptions_added(user_profile: UserProfile,
                               sub_pairs: Iterable[Tuple[Subscription, Stream]],
                               stream_user_ids: Callable[[Stream], List[int]],
                               recent_traffic: Dict[int, int],
                               no_log: bool=False) -> None:
    if not no_log:
        log_event({'type': 'subscription_added',
                   'user': user_profile.email,
                   'names': [stream.name for sub, stream in sub_pairs],
                   'realm': user_profile.realm.string_id})

    # Send a notification to the user who subscribed.
    payload = [dict(name=stream.name,
                    stream_id=stream.id,
                    in_home_view=not subscription.is_muted,
                    is_muted=subscription.is_muted,
                    invite_only=stream.invite_only,
                    is_web_public=stream.is_web_public,
                    is_announcement_only=stream.is_announcement_only,
                    color=subscription.color,
                    email_address=encode_email_address(stream),
                    desktop_notifications=subscription.desktop_notifications,
                    audible_notifications=subscription.audible_notifications,
                    push_notifications=subscription.push_notifications,
                    email_notifications=subscription.email_notifications,
                    description=stream.description,
                    rendered_description=stream.rendered_description,
                    pin_to_top=subscription.pin_to_top,
                    is_old_stream=is_old_stream(stream.date_created),
                    first_message_id=stream.first_message_id,
                    stream_weekly_traffic=get_average_weekly_stream_traffic(
                        stream.id, stream.date_created, recent_traffic),
                    subscribers=stream_user_ids(stream),
                    history_public_to_subscribers=stream.history_public_to_subscribers)
               for (subscription, stream) in sub_pairs]
    event = dict(type="subscription", op="add",
                 subscriptions=payload)
    send_event(user_profile.realm, event, [user_profile.id])

def get_peer_user_ids_for_stream_change(stream: Stream,
                                        altered_user_ids: Iterable[int],
                                        subscribed_user_ids: Iterable[int]) -> Set[int]:
    '''
    altered_user_ids is the user_ids that we are adding/removing
    subscribed_user_ids is the already-subscribed user_ids

    Based on stream policy, we notify the correct bystanders, while
    not notifying altered_users (who get subscribers via another event)
    '''

    if stream.invite_only:
        # PRIVATE STREAMS
        # Realm admins can access all private stream subscribers.  Send them an
        # event even if they aren't subscribed to stream.
        realm_admin_ids = [user.id for user in stream.realm.get_admin_users_and_bots()]
        user_ids_to_notify = []
        user_ids_to_notify.extend(realm_admin_ids)
        user_ids_to_notify.extend(subscribed_user_ids)
        return set(user_ids_to_notify) - set(altered_user_ids)

    else:
        # PUBLIC STREAMS
        # We now do "peer_add" or "peer_remove" events even for streams
        # users were never subscribed to, in order for the neversubscribed
        # structure to stay up-to-date.
        return set(active_non_guest_user_ids(stream.realm_id)) - set(altered_user_ids)

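# A worked example with invented IDs: for a private stream with realm
# admins {1, 2}, existing subscribers {2, 3, 4}, and altered users {4, 5},
# the peers to notify are ({1, 2} | {2, 3, 4}) - {4, 5} = {1, 2, 3}.
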
def get_user_ids_for_streams(streams: Iterable[Stream]) -> Dict[int, List[int]]:
    stream_ids = [stream.id for stream in streams]

    all_subs = get_active_subscriptions_for_stream_ids(stream_ids).filter(
        user_profile__is_active=True,
    ).values(
        'recipient__type_id',
        'user_profile_id',
    ).order_by(
        'recipient__type_id',
    )

    get_stream_id = itemgetter('recipient__type_id')

    all_subscribers_by_stream = defaultdict(list)  # type: Dict[int, List[int]]
    for stream_id, rows in itertools.groupby(all_subs, get_stream_id):
        user_ids = [row['user_profile_id'] for row in rows]
        all_subscribers_by_stream[stream_id] = user_ids

    return all_subscribers_by_stream

def get_last_message_id() -> int:
    # We generally use this function to populate RealmAuditLog, and
    # the max id here is actually systemwide, not per-realm.  I
    # assume there's some advantage in not filtering by realm.
    last_id = Message.objects.aggregate(Max('id'))['id__max']
    if last_id is None:
        # During initial realm creation, there might be 0 messages in
        # the database; in that case, the `aggregate` query returns
        # None.  Since we want an int for "beginning of time", use -1.
        last_id = -1
    return last_id

SubT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
def bulk_add_subscriptions(streams: Iterable[Stream],
                           users: Iterable[UserProfile],
                           color_map: Optional[Dict[str, str]]=None,
                           from_stream_creation: bool=False,
                           acting_user: Optional[UserProfile]=None) -> SubT:
    users = list(users)

    recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams])  # type: Mapping[int, Recipient]
    recipients = [recipient.id for recipient in recipients_map.values()]  # type: List[int]

    stream_map = {}  # type: Dict[int, Stream]
    for stream in streams:
        stream_map[recipients_map[stream.id].id] = stream

    subs_by_user = defaultdict(list)  # type: Dict[int, List[Subscription]]
    all_subs_query = get_stream_subscriptions_for_users(users).select_related('user_profile')
    for sub in all_subs_query:
        subs_by_user[sub.user_profile_id].append(sub)

    realm = users[0].realm

    already_subscribed = []  # type: List[Tuple[UserProfile, Stream]]
    subs_to_activate = []  # type: List[Tuple[Subscription, Stream]]
    new_subs = []  # type: List[Tuple[UserProfile, int, Stream]]
    for user_profile in users:
        needs_new_sub = set(recipients)  # type: Set[int]
        for sub in subs_by_user[user_profile.id]:
            if sub.recipient_id in needs_new_sub:
                needs_new_sub.remove(sub.recipient_id)
                if sub.active:
                    already_subscribed.append((user_profile, stream_map[sub.recipient_id]))
                else:
                    subs_to_activate.append((sub, stream_map[sub.recipient_id]))
                    # Mark the sub as active, without saving, so that
                    # pick_color will consider this to be an active
                    # subscription when picking colors
                    sub.active = True
        for recipient_id in needs_new_sub:
            new_subs.append((user_profile, recipient_id, stream_map[recipient_id]))

    subs_to_add = []  # type: List[Tuple[Subscription, Stream]]
    for (user_profile, recipient_id, stream) in new_subs:
        if color_map is not None and stream.name in color_map:
            color = color_map[stream.name]
        else:
            color = pick_color(user_profile, subs_by_user[user_profile.id])

        sub_to_add = Subscription(user_profile=user_profile, active=True,
                                  color=color, recipient_id=recipient_id)
        subs_by_user[user_profile.id].append(sub_to_add)
        subs_to_add.append((sub_to_add, stream))

    # TODO: XXX: This transaction really needs to be done at the serializable
    # transaction isolation level.
    with transaction.atomic():
        occupied_streams_before = list(get_occupied_streams(realm))
        Subscription.objects.bulk_create([sub for (sub, stream) in subs_to_add])
        sub_ids = [sub.id for (sub, stream) in subs_to_activate]
        Subscription.objects.filter(id__in=sub_ids).update(active=True)
        occupied_streams_after = list(get_occupied_streams(realm))

    # Log Subscription Activities in RealmAuditLog
    event_time = timezone_now()
    event_last_message_id = get_last_message_id()

    all_subscription_logs = []  # type: List[RealmAuditLog]
    for (sub, stream) in subs_to_add:
        all_subscription_logs.append(RealmAuditLog(realm=realm,
                                                   acting_user=acting_user,
                                                   modified_user=sub.user_profile,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
                                                   event_time=event_time))
    for (sub, stream) in subs_to_activate:
        all_subscription_logs.append(RealmAuditLog(realm=realm,
                                                   acting_user=acting_user,
                                                   modified_user=sub.user_profile,
                                                   modified_stream=stream,
                                                   event_last_message_id=event_last_message_id,
                                                   event_type=RealmAuditLog.SUBSCRIPTION_ACTIVATED,
                                                   event_time=event_time))
    # Now since we have all log objects generated we can do a bulk insert
    RealmAuditLog.objects.bulk_create(all_subscription_logs)

    new_occupied_streams = [stream for stream in
                            set(occupied_streams_after) - set(occupied_streams_before)
                            if not stream.invite_only]
    if new_occupied_streams and not from_stream_creation:
        event = dict(type="stream", op="occupy",
                     streams=[stream.to_dict()
                              for stream in new_occupied_streams])
        send_event(realm, event, active_user_ids(realm.id))

    # Notify all existing users on streams that users have joined

    # First, get all users subscribed to the streams that we care about
    # We fetch all subscription information upfront, as it's used throughout
    # the following code and we want to minimize DB queries
    all_subscribers_by_stream = get_user_ids_for_streams(streams=streams)

    def fetch_stream_subscriber_user_ids(stream: Stream) -> List[int]:
        if stream.is_in_zephyr_realm and not stream.invite_only:
            return []
        user_ids = all_subscribers_by_stream[stream.id]
        return user_ids

    sub_tuples_by_user = defaultdict(list)  # type: Dict[int, List[Tuple[Subscription, Stream]]]
    new_streams = set()  # type: Set[Tuple[int, int]]
    for (sub, stream) in subs_to_add + subs_to_activate:
        sub_tuples_by_user[sub.user_profile.id].append((sub, stream))
        new_streams.add((sub.user_profile.id, stream.id))

    # We now send several types of events to notify browsers.  The
    # first batch is notifications to users on invite-only streams
    # that the stream exists.
    for stream in streams:
        if not stream.is_public():
            # Users newly added to invite-only streams need a `create`
            # notification, since they need the stream to exist before
            # they get the "subscribe" notification.
            # Realm admins already have all created private streams.
            realm_admin_ids = [user.id for user in realm.get_admin_users_and_bots()]
            new_users_ids = [user.id for user in users if (user.id, stream.id) in new_streams and
                             user.id not in realm_admin_ids]
            send_stream_creation_event(stream, new_users_ids)

    stream_ids = {stream.id for stream in streams}
    recent_traffic = get_streams_traffic(stream_ids=stream_ids)
    # The second batch is events for the users themselves that they
    # were subscribed to the new streams.
    for user_profile in users:
        if len(sub_tuples_by_user[user_profile.id]) == 0:
            continue
        sub_pairs = sub_tuples_by_user[user_profile.id]
        notify_subscriptions_added(user_profile, sub_pairs, fetch_stream_subscriber_user_ids,
                                   recent_traffic)

    # The third batch is events for other users who are tracking the
    # subscribers lists of streams in their browser; everyone for
    # public streams and only existing subscribers for private streams.
    for stream in streams:
        if stream.is_in_zephyr_realm and not stream.invite_only:
            continue

        new_user_ids = [user.id for user in users if (user.id, stream.id) in new_streams]
        subscribed_user_ids = all_subscribers_by_stream[stream.id]

        peer_user_ids = get_peer_user_ids_for_stream_change(
            stream=stream,
            altered_user_ids=new_user_ids,
            subscribed_user_ids=subscribed_user_ids,
        )

        if peer_user_ids:
            for new_user_id in new_user_ids:
                event = dict(type="subscription", op="peer_add",
                             subscriptions=[stream.name],
                             user_id=new_user_id)
                send_event(realm, event, peer_user_ids)

    return ([(user_profile, stream) for (user_profile, recipient_id, stream) in new_subs] +
            [(sub.user_profile, stream) for (sub, stream) in subs_to_activate],
            already_subscribed)

def get_available_notification_sounds() -> List[str]:
    notification_sounds_path = static_path('audio/notification_sounds')
    available_notification_sounds = []

    for file_name in os.listdir(notification_sounds_path):
        root, ext = os.path.splitext(file_name)
        if '.' in root:  # nocoverage
            # Exclude e.g. zulip.abcd1234.ogg (generated by production hash-naming)
            # to avoid spurious duplicates.
            continue
        if ext == '.ogg':
            available_notification_sounds.append(root)

    return available_notification_sounds

def notify_subscriptions_removed(user_profile: UserProfile, streams: Iterable[Stream],
                                 no_log: bool=False) -> None:
    if not no_log:
        log_event({'type': 'subscription_removed',
                   'user': user_profile.email,
                   'names': [stream.name for stream in streams],
                   'realm': user_profile.realm.string_id})

    payload = [dict(name=stream.name, stream_id=stream.id) for stream in streams]
    event = dict(type="subscription", op="remove",
                 subscriptions=payload)
    send_event(user_profile.realm, event, [user_profile.id])

2017-11-05 11:15:10 +01:00
|
|
|
SubAndRemovedT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
|
|
|
|
def bulk_remove_subscriptions(users: Iterable[UserProfile],
|
|
|
|
streams: Iterable[Stream],
|
2018-03-14 00:13:21 +01:00
|
|
|
acting_client: Client,
|
2017-11-05 11:15:10 +01:00
|
|
|
acting_user: Optional[UserProfile]=None) -> SubAndRemovedT:
|
2016-10-20 20:12:39 +02:00
|
|
|
|
2017-10-30 14:34:02 +01:00
|
|
|
users = list(users)
|
2017-10-29 20:19:57 +01:00
|
|
|
streams = list(streams)
|
2017-10-30 14:34:02 +01:00
|
|
|
|
2017-10-29 20:19:57 +01:00
|
|
|
stream_dict = {stream.id: stream for stream in streams}
|
2013-06-28 17:16:55 +02:00
|
|
|
|
2017-10-29 20:19:57 +01:00
|
|
|
existing_subs_by_user = get_bulk_stream_subscriber_info(users, stream_dict)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_non_subscribed_tups() -> List[Tuple[UserProfile, Stream]]:
|
2017-10-29 20:19:57 +01:00
|
|
|
stream_ids = {stream.id for stream in streams}
|
|
|
|
|
|
|
|
not_subscribed = [] # type: List[Tuple[UserProfile, Stream]]
|
|
|
|
|
|
|
|
for user_profile in users:
|
|
|
|
user_sub_stream_info = existing_subs_by_user[user_profile.id]
|
|
|
|
|
|
|
|
subscribed_stream_ids = {
|
|
|
|
stream.id
|
|
|
|
for (sub, stream) in user_sub_stream_info
|
|
|
|
}
|
|
|
|
not_subscribed_stream_ids = stream_ids - subscribed_stream_ids
|
|
|
|
|
|
|
|
for stream_id in not_subscribed_stream_ids:
|
|
|
|
stream = stream_dict[stream_id]
|
|
|
|
not_subscribed.append((user_profile, stream))
|
|
|
|
|
|
|
|
return not_subscribed
|
|
|
|
|
|
|
|
not_subscribed = get_non_subscribed_tups()
|
2013-06-28 17:16:55 +02:00
|
|
|
|
2017-05-17 20:39:57 +02:00
|
|
|
subs_to_deactivate = [] # type: List[Tuple[Subscription, Stream]]
|
2017-10-29 20:19:57 +01:00
|
|
|
sub_ids_to_deactivate = [] # type: List[int]
|
|
|
|
|
|
|
|
# This loop just flattens out our data into big lists for
|
|
|
|
# bulk operations.
|
|
|
|
for tup_list in existing_subs_by_user.values():
|
|
|
|
for (sub, stream) in tup_list:
|
|
|
|
subs_to_deactivate.append((sub, stream))
|
|
|
|
sub_ids_to_deactivate.append(sub.id)
|
2013-06-28 17:16:55 +02:00
|
|
|
|
2017-10-30 14:34:02 +01:00
|
|
|
our_realm = users[0].realm
|
|
|
|
|
2014-03-02 06:46:54 +01:00
|
|
|
# TODO: XXX: This transaction really needs to be done at the serializable
|
|
|
|
# transaction isolation level.
|
|
|
|
with transaction.atomic():
|
2017-10-30 14:34:02 +01:00
|
|
|
occupied_streams_before = list(get_occupied_streams(our_realm))
|
2017-10-29 20:19:57 +01:00
|
|
|
Subscription.objects.filter(
|
|
|
|
id__in=sub_ids_to_deactivate,
|
|
|
|
).update(active=False)
|
2017-10-30 14:34:02 +01:00
|
|
|
occupied_streams_after = list(get_occupied_streams(our_realm))
|
2014-03-02 06:46:54 +01:00
|
|
|
|
2017-07-12 17:28:27 +02:00
|
|
|
# Log Subscription Activities in RealmAuditLog
|
|
|
|
event_time = timezone_now()
|
2018-08-15 14:56:43 +02:00
|
|
|
event_last_message_id = get_last_message_id()
|
2017-07-12 17:28:27 +02:00
|
|
|
all_subscription_logs = [] # type: List[RealmAuditLog]
|
|
|
|
for (sub, stream) in subs_to_deactivate:
|
|
|
|
all_subscription_logs.append(RealmAuditLog(realm=sub.user_profile.realm,
|
|
|
|
modified_user=sub.user_profile,
|
|
|
|
modified_stream=stream,
|
|
|
|
event_last_message_id=event_last_message_id,
|
2018-07-10 06:56:58 +02:00
|
|
|
event_type=RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
|
2017-07-12 17:28:27 +02:00
|
|
|
event_time=event_time))
|
|
|
|
# Now that all the log objects have been generated, we can do a bulk insert.
|
|
|
|
RealmAuditLog.objects.bulk_create(all_subscription_logs)
|
|
|
|
|
2017-05-17 20:39:57 +02:00
|
|
|
altered_user_dict = defaultdict(list) # type: Dict[int, List[UserProfile]]
|
|
|
|
streams_by_user = defaultdict(list) # type: Dict[int, List[Stream]]
|
2013-06-28 17:16:55 +02:00
|
|
|
for (sub, stream) in subs_to_deactivate:
|
2013-06-28 17:49:51 +02:00
|
|
|
streams_by_user[sub.user_profile_id].append(stream)
|
2016-10-20 20:12:39 +02:00
|
|
|
altered_user_dict[stream.id].append(sub.user_profile)
|
2013-06-28 17:49:51 +02:00
|
|
|
|
|
|
|
for user_profile in users:
|
|
|
|
if len(streams_by_user[user_profile.id]) == 0:
|
|
|
|
continue
|
|
|
|
notify_subscriptions_removed(user_profile, streams_by_user[user_profile.id])
|
2013-06-28 17:16:55 +02:00
|
|
|
|
2017-11-13 21:24:51 +01:00
|
|
|
event = {'type': 'mark_stream_messages_as_read',
|
2018-03-14 00:09:11 +01:00
|
|
|
'client_id': acting_client.id,
|
2017-11-13 21:24:51 +01:00
|
|
|
'user_profile_id': user_profile.id,
|
|
|
|
'stream_ids': [stream.id for stream in streams]}
|
2017-11-24 13:18:46 +01:00
|
|
|
queue_json_publish("deferred_work", event)
|
2017-11-13 21:24:51 +01:00
|
|
|
|
2017-10-08 21:33:53 +02:00
|
|
|
all_subscribers_by_stream = get_user_ids_for_streams(streams=streams)
|
2016-10-20 20:12:39 +02:00
|
|
|
|
2018-08-21 19:20:54 +02:00
|
|
|
def send_peer_remove_event(stream: Stream) -> None:
|
2017-10-08 21:16:51 +02:00
|
|
|
if stream.is_in_zephyr_realm and not stream.invite_only:
|
2018-08-21 19:20:54 +02:00
|
|
|
return
|
2016-10-20 18:15:59 +02:00
|
|
|
|
2016-10-20 20:12:39 +02:00
|
|
|
altered_users = altered_user_dict[stream.id]
|
2017-10-06 16:52:18 +02:00
|
|
|
altered_user_ids = [u.id for u in altered_users]
|
|
|
|
|
2017-10-08 21:33:53 +02:00
|
|
|
subscribed_user_ids = all_subscribers_by_stream[stream.id]
|
2016-10-20 18:15:59 +02:00
|
|
|
|
2016-10-20 20:12:39 +02:00
|
|
|
peer_user_ids = get_peer_user_ids_for_stream_change(
|
|
|
|
stream=stream,
|
2017-10-06 16:52:18 +02:00
|
|
|
altered_user_ids=altered_user_ids,
|
|
|
|
subscribed_user_ids=subscribed_user_ids,
|
2016-10-20 20:12:39 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
if peer_user_ids:
|
|
|
|
for removed_user in altered_users:
|
|
|
|
event = dict(type="subscription",
|
|
|
|
op="peer_remove",
|
|
|
|
subscriptions=[stream.name],
|
2016-11-08 15:04:18 +01:00
|
|
|
user_id=removed_user.id)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(our_realm, event, peer_user_ids)
|
2016-10-20 18:15:59 +02:00
|
|
|
|
2018-08-21 19:20:54 +02:00
|
|
|
for stream in streams:
|
|
|
|
send_peer_remove_event(stream=stream)
|
|
|
|
|
2018-08-07 15:26:04 +02:00
|
|
|
new_vacant_streams = list(set(occupied_streams_before) - set(occupied_streams_after))
|
|
|
|
new_vacant_private_streams = [stream for stream in new_vacant_streams
|
|
|
|
if stream.invite_only]
|
|
|
|
new_vacant_public_streams = [stream for stream in new_vacant_streams
|
|
|
|
if not stream.invite_only]
|
|
|
|
if new_vacant_public_streams:
|
|
|
|
event = dict(type="stream", op="vacate",
|
|
|
|
streams=[stream.to_dict()
|
|
|
|
for stream in new_vacant_public_streams])
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(our_realm, event, active_user_ids(our_realm.id))
|
2018-08-07 15:26:04 +02:00
|
|
|
if new_vacant_private_streams:
|
|
|
|
# Deactivate any newly-vacant private streams
|
|
|
|
for stream in new_vacant_private_streams:
|
|
|
|
do_deactivate_stream(stream)
|
|
|
|
|
2017-10-29 20:19:57 +01:00
|
|
|
return (
|
|
|
|
[(sub.user_profile, stream) for (sub, stream) in subs_to_deactivate],
|
|
|
|
not_subscribed,
|
|
|
|
)
|
2013-06-28 17:16:55 +02:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def log_subscription_property_change(user_email: str, stream_name: str, property: str,
|
2017-11-05 11:15:10 +01:00
|
|
|
value: Any) -> None:
|
2013-01-10 22:01:33 +01:00
|
|
|
event = {'type': 'subscription_property',
|
|
|
|
'property': property,
|
2013-04-08 18:01:01 +02:00
|
|
|
'user': user_email,
|
|
|
|
'stream_name': stream_name,
|
|
|
|
'value': value}
|
2013-01-10 22:01:33 +01:00
|
|
|
log_event(event)
|
|
|
|
|
2017-12-29 12:51:56 +01:00
|
|
|
def do_change_subscription_property(user_profile: UserProfile, sub: Subscription,
|
2018-05-11 02:01:29 +02:00
|
|
|
stream: Stream, property_name: str, value: Any
|
2017-12-29 12:51:56 +01:00
|
|
|
) -> None:
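"""Updates one property on a user's subscription and notifies that
user via a "subscription"/"update" event; translates between the
is_muted name used in the database and the legacy in_home_view name
still used in the events API."""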
|
2018-08-02 23:46:05 +02:00
|
|
|
database_property_name = property_name
|
|
|
|
event_property_name = property_name
|
|
|
|
database_value = value
|
|
|
|
event_value = value
|
|
|
|
|
|
|
|
# For this property, is_muted is used in the database, but
|
|
|
|
# in_home_view in the API, since we haven't migrated the events
|
|
|
|
# API to the new name yet.
|
|
|
|
if property_name == "in_home_view":
|
|
|
|
database_property_name = "is_muted"
|
|
|
|
database_value = not value
|
|
|
|
if property_name == "is_muted":
|
|
|
|
event_property_name = "in_home_view"
|
|
|
|
event_value = not value
|
|
|
|
|
|
|
|
setattr(sub, database_property_name, database_value)
|
|
|
|
sub.save(update_fields=[database_property_name])
|
2017-03-05 01:30:48 +01:00
|
|
|
log_subscription_property_change(user_profile.email, stream.name,
|
2018-08-02 23:46:05 +02:00
|
|
|
database_property_name, database_value)
|
2014-02-06 21:21:21 +01:00
|
|
|
event = dict(type="subscription",
|
2014-01-24 23:24:44 +01:00
|
|
|
op="update",
|
|
|
|
email=user_profile.email,
|
2018-08-02 23:46:05 +02:00
|
|
|
property=event_property_name,
|
|
|
|
value=event_value,
|
2017-03-05 01:30:48 +01:00
|
|
|
stream_id=stream.id,
|
|
|
|
name=stream.name)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, [user_profile.id])
|
2013-07-16 22:21:41 +02:00
|
|
|
|
2018-05-15 22:05:02 +02:00
|
|
|
def do_change_password(user_profile: UserProfile, password: str, commit: bool=True) -> None:
|
|
|
|
user_profile.set_password(password)
|
2013-01-10 22:01:33 +01:00
|
|
|
if commit:
|
2013-03-08 19:53:00 +01:00
|
|
|
user_profile.save(update_fields=["password"])
|
2017-04-15 04:03:56 +02:00
|
|
|
event_time = timezone_now()
|
2017-03-14 06:07:14 +01:00
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
|
2018-07-22 18:31:13 +02:00
|
|
|
modified_user=user_profile, event_type=RealmAuditLog.USER_PASSWORD_CHANGED,
|
2017-03-14 06:07:14 +01:00
|
|
|
event_time=event_time)
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_change_full_name(user_profile: UserProfile, full_name: str,
|
2017-12-23 21:01:40 +01:00
|
|
|
acting_user: Optional[UserProfile]) -> None:
|
2017-08-17 01:17:54 +02:00
|
|
|
old_name = user_profile.full_name
|
2013-01-10 22:01:33 +01:00
|
|
|
user_profile.full_name = full_name
|
2013-03-21 21:29:28 +01:00
|
|
|
user_profile.save(update_fields=["full_name"])
|
2017-04-07 07:28:28 +02:00
|
|
|
event_time = timezone_now()
|
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
|
2018-07-10 06:18:54 +02:00
|
|
|
modified_user=user_profile, event_type=RealmAuditLog.USER_FULL_NAME_CHANGED,
|
2017-08-17 01:17:54 +02:00
|
|
|
event_time=event_time, extra_data=old_name)
|
2014-02-26 19:55:29 +01:00
|
|
|
payload = dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
2014-02-26 19:55:29 +01:00
|
|
|
full_name=user_profile.full_name)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_user', op='update', person=payload),
|
2017-09-16 21:26:54 +02:00
|
|
|
active_user_ids(user_profile.realm_id))
|
2014-02-26 19:55:29 +01:00
|
|
|
if user_profile.is_bot:
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot', op='update', bot=payload),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2014-02-26 19:55:29 +01:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def check_change_full_name(user_profile: UserProfile, full_name_raw: str,
|
|
|
|
acting_user: UserProfile) -> str:
|
2017-11-16 02:29:53 +01:00
|
|
|
"""Verifies that the user's proposed full name is valid. The caller
|
|
|
|
is responsible for checking permissions. Returns the new
|
|
|
|
full name, which may differ from what was passed in (because this
|
|
|
|
function strips whitespace)."""
|
|
|
|
new_full_name = check_full_name(full_name_raw)
|
|
|
|
do_change_full_name(user_profile, new_full_name, acting_user)
|
|
|
|
return new_full_name
|
|
|
|
|
2018-09-27 19:25:18 +02:00
|
|
|
def check_change_bot_full_name(user_profile: UserProfile, full_name_raw: str,
|
|
|
|
acting_user: UserProfile) -> None:
|
|
|
|
new_full_name = check_full_name(full_name_raw)
|
|
|
|
|
|
|
|
if new_full_name == user_profile.full_name:
|
|
|
|
# Our web app will try to patch full_name even if the user didn't
|
|
|
|
# modify the name in the form. We just silently ignore those
|
|
|
|
# situations.
|
|
|
|
return
|
|
|
|
|
|
|
|
check_bot_name_available(
|
|
|
|
realm_id=user_profile.realm_id,
|
|
|
|
full_name=new_full_name,
|
|
|
|
)
|
|
|
|
do_change_full_name(user_profile, new_full_name, acting_user)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_bot_owner(user_profile: UserProfile, bot_owner: UserProfile,
|
|
|
|
acting_user: UserProfile) -> None:
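"""Transfers a bot to a new owner, recording the change in
RealmAuditLog and sending realm_bot and realm_user events to the
affected users."""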
|
2018-03-06 22:32:03 +01:00
|
|
|
previous_owner = user_profile.bot_owner
|
2017-02-24 06:36:54 +01:00
|
|
|
user_profile.bot_owner = bot_owner
|
2017-11-20 19:52:10 +01:00
|
|
|
user_profile.save() # Can't use update_fields because of how the foreign key works.
|
2017-04-15 04:03:56 +02:00
|
|
|
event_time = timezone_now()
|
2017-03-31 17:27:08 +02:00
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
|
2018-07-22 18:31:13 +02:00
|
|
|
modified_user=user_profile, event_type=RealmAuditLog.USER_BOT_OWNER_CHANGED,
|
2017-03-31 17:27:08 +02:00
|
|
|
event_time=event_time)
|
2018-03-06 22:32:03 +01:00
|
|
|
|
|
|
|
update_users = bot_owner_user_ids(user_profile)
|
|
|
|
|
|
|
|
# For admins, an update event is sent instead of delete/add
|
|
|
|
# events, since an admin's bot_data already contains all the
|
|
|
|
# bots, and none of them should be removed and re-added.
|
|
|
|
|
|
|
|
# Delete the bot from previous owner's bot data.
|
|
|
|
if previous_owner and not previous_owner.is_realm_admin:
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2018-03-06 22:32:03 +01:00
|
|
|
op="delete",
|
|
|
|
bot=dict(email=user_profile.email,
|
|
|
|
user_id=user_profile.id,
|
|
|
|
)),
|
|
|
|
{previous_owner.id, })
|
|
|
|
# Do not send update event for previous bot owner.
|
|
|
|
update_users = update_users - {previous_owner.id, }
|
|
|
|
|
|
|
|
# Notify the new owner that the bot has been added.
|
|
|
|
if not bot_owner.is_realm_admin:
|
|
|
|
add_event = created_bot_event(user_profile)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, add_event, {bot_owner.id, })
|
2018-03-06 22:32:03 +01:00
|
|
|
# Do not send update event for bot_owner.
|
|
|
|
update_users = update_users - {bot_owner.id, }
|
|
|
|
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2017-02-24 06:36:54 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
|
|
|
user_id=user_profile.id,
|
|
|
|
owner_id=user_profile.bot_owner.id,
|
|
|
|
)),
|
2018-03-06 22:32:03 +01:00
|
|
|
update_users)
|
2017-02-24 06:36:54 +01:00
|
|
|
|
2019-05-21 18:47:30 +02:00
|
|
|
# Since `bot_owner_id` is included in the user profile dict, we need
|
|
|
|
# to update the users dict with the new bot owner id.
|
|
|
|
event = dict(
|
|
|
|
type="realm_user",
|
|
|
|
op="update",
|
|
|
|
person=dict(
|
|
|
|
user_id=user_profile.id,
|
|
|
|
bot_owner_id=user_profile.bot_owner.id,
|
|
|
|
),
|
|
|
|
) # type: Dict[str, Any]
|
|
|
|
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
|
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_change_tos_version(user_profile: UserProfile, tos_version: str) -> None:
|
2016-08-10 03:05:26 +02:00
|
|
|
user_profile.tos_version = tos_version
|
|
|
|
user_profile.save(update_fields=["tos_version"])
|
2017-04-07 07:29:29 +02:00
|
|
|
event_time = timezone_now()
|
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=user_profile,
|
2018-07-10 06:24:03 +02:00
|
|
|
modified_user=user_profile,
|
|
|
|
event_type=RealmAuditLog.USER_TOS_VERSION_CHANGED,
|
2017-04-07 07:29:29 +02:00
|
|
|
event_time=event_time)
|
2016-08-10 03:05:26 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_regenerate_api_key(user_profile: UserProfile, acting_user: UserProfile) -> None:
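"""Generates a new API key for the user, flushes the old key from the
cache, records the change in RealmAuditLog, and, for bots, notifies
the bot's owners."""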
|
2019-01-05 02:25:06 +01:00
|
|
|
old_api_key = user_profile.api_key
|
2018-08-01 11:18:37 +02:00
|
|
|
user_profile.api_key = generate_api_key()
|
2014-02-26 20:02:43 +01:00
|
|
|
user_profile.save(update_fields=["api_key"])
|
2019-01-05 02:25:06 +01:00
|
|
|
|
|
|
|
# We need to explicitly delete the old API key from our caches,
|
|
|
|
# because the on-save handler for flushing the UserProfile object
|
|
|
|
# in zerver/lib/cache.py only has access to the new API key.
|
|
|
|
cache_delete(user_profile_by_api_key_cache_key(old_api_key))
|
|
|
|
|
2017-04-06 12:27:58 +02:00
|
|
|
event_time = timezone_now()
|
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, acting_user=acting_user,
|
2018-07-10 06:29:08 +02:00
|
|
|
modified_user=user_profile, event_type=RealmAuditLog.USER_API_KEY_CHANGED,
|
2017-04-06 12:27:58 +02:00
|
|
|
event_time=event_time)
|
2013-07-16 21:32:33 +02:00
|
|
|
|
2014-02-26 20:17:19 +01:00
|
|
|
if user_profile.is_bot:
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2014-02-26 20:17:19 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
|
|
|
api_key=user_profile.api_key,
|
|
|
|
)),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2014-02-26 20:17:19 +01:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_change_avatar_fields(user_profile: UserProfile, avatar_source: str) -> None:
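"""Updates the user's avatar source, bumping avatar_version, recording
the change in RealmAuditLog, and notifying the realm (plus the bot's
owners, if the user is a bot)."""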
|
2014-02-26 20:54:59 +01:00
|
|
|
user_profile.avatar_source = avatar_source
|
2017-01-28 19:05:20 +01:00
|
|
|
user_profile.avatar_version += 1
|
|
|
|
user_profile.save(update_fields=["avatar_source", "avatar_version"])
|
2017-04-15 04:03:56 +02:00
|
|
|
event_time = timezone_now()
|
2017-03-23 22:02:35 +01:00
|
|
|
RealmAuditLog.objects.create(realm=user_profile.realm, modified_user=user_profile,
|
2018-07-22 18:31:13 +02:00
|
|
|
event_type=RealmAuditLog.USER_AVATAR_SOURCE_CHANGED,
|
2017-03-23 22:02:35 +01:00
|
|
|
extra_data={'avatar_source': avatar_source},
|
|
|
|
event_time=event_time)
|
2014-02-26 20:54:59 +01:00
|
|
|
|
2014-02-26 21:05:10 +01:00
|
|
|
if user_profile.is_bot:
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2014-02-26 21:05:10 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
|
|
|
avatar_url=avatar_url(user_profile),
|
|
|
|
)),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2016-08-19 00:28:28 +02:00
|
|
|
|
2017-02-21 17:55:32 +01:00
|
|
|
payload = dict(
|
|
|
|
email=user_profile.email,
|
2017-04-27 00:04:11 +02:00
|
|
|
avatar_source=user_profile.avatar_source,
|
2017-02-21 17:55:32 +01:00
|
|
|
avatar_url=avatar_url(user_profile),
|
2017-04-27 00:04:11 +02:00
|
|
|
avatar_url_medium=avatar_url(user_profile, medium=True),
|
2017-02-21 17:55:32 +01:00
|
|
|
user_id=user_profile.id
|
|
|
|
)
|
|
|
|
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_user',
|
2017-02-21 17:55:32 +01:00
|
|
|
op='update',
|
|
|
|
person=payload),
|
2017-09-16 21:26:54 +02:00
|
|
|
active_user_ids(user_profile.realm_id))
|
2014-02-26 21:05:10 +01:00
|
|
|
|
2018-09-07 17:44:40 +02:00
|
|
|
def do_delete_avatar_image(user: UserProfile) -> None:
|
|
|
|
do_change_avatar_fields(user, UserProfile.AVATAR_FROM_GRAVATAR)
|
|
|
|
delete_avatar_image(user)
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_change_icon_source(realm: Realm, icon_source: str, log: bool=True) -> None:
|
2017-02-21 03:41:20 +01:00
|
|
|
realm.icon_source = icon_source
|
|
|
|
realm.icon_version += 1
|
|
|
|
realm.save(update_fields=["icon_source", "icon_version"])
|
|
|
|
|
|
|
|
if log:
|
|
|
|
log_event({'type': 'realm_change_icon',
|
2017-03-13 07:06:54 +01:00
|
|
|
'realm': realm.string_id,
|
2017-02-21 03:41:20 +01:00
|
|
|
'icon_source': icon_source})
|
|
|
|
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(realm,
|
|
|
|
dict(type='realm',
|
2017-02-26 20:35:23 +01:00
|
|
|
op='update_dict',
|
|
|
|
property="icon",
|
|
|
|
data=dict(icon_source=realm.icon_source,
|
|
|
|
icon_url=realm_icon_url(realm))),
|
2017-09-16 21:26:54 +02:00
|
|
|
active_user_ids(realm.id))
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
def do_change_logo_source(realm: Realm, logo_source: str, night: bool) -> None:
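"""Updates either the day or the night realm logo, depending on the
night flag, recording the change in RealmAuditLog and notifying
active users."""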
|
|
|
|
if not night:
|
|
|
|
realm.logo_source = logo_source
|
|
|
|
realm.logo_version += 1
|
|
|
|
realm.save(update_fields=["logo_source", "logo_version"])
|
|
|
|
|
|
|
|
else:
|
|
|
|
realm.night_logo_source = logo_source
|
|
|
|
realm.night_logo_version += 1
|
|
|
|
realm.save(update_fields=["night_logo_source", "night_logo_version"])
|
2018-08-16 01:26:55 +02:00
|
|
|
|
|
|
|
RealmAuditLog.objects.create(event_type=RealmAuditLog.REALM_LOGO_CHANGED,
|
|
|
|
realm=realm, event_time=timezone_now())
|
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
if not night:
|
|
|
|
send_event(realm,
|
|
|
|
dict(type='realm',
|
|
|
|
op='update_dict',
|
|
|
|
property="logo",
|
|
|
|
data=dict(logo_source=realm.logo_source,
|
|
|
|
logo_url=realm_logo_url(realm, night))),
|
|
|
|
active_user_ids(realm.id))
|
|
|
|
|
|
|
|
else:
|
|
|
|
send_event(realm,
|
|
|
|
dict(type='realm',
|
|
|
|
op='update_dict',
|
|
|
|
property="night_logo",
|
|
|
|
data=dict(night_logo_source=realm.night_logo_source,
|
|
|
|
night_logo_url=realm_logo_url(realm, night))),
|
|
|
|
active_user_ids(realm.id))
|
|
|
|
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2018-12-13 07:54:43 +01:00
|
|
|
def do_change_plan_type(realm: Realm, plan_type: int) -> None:
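"""Switches the realm to a new plan type, adjusting its invite,
message-visibility, and upload-quota limits to match, and notifies
active users of the change."""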
|
2018-08-09 21:38:22 +02:00
|
|
|
old_value = realm.plan_type
|
|
|
|
realm.plan_type = plan_type
|
|
|
|
realm.save(update_fields=['plan_type'])
|
|
|
|
RealmAuditLog.objects.create(event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED,
|
2018-12-13 07:54:43 +01:00
|
|
|
realm=realm, event_time=timezone_now(),
|
2018-08-09 21:38:22 +02:00
|
|
|
extra_data={'old_value': old_value, 'new_value': plan_type})
|
|
|
|
|
2018-10-24 06:09:01 +02:00
|
|
|
if plan_type == Realm.STANDARD:
|
|
|
|
realm.max_invites = Realm.INVITES_STANDARD_REALM_DAILY_MAX
|
2018-10-17 10:50:59 +02:00
|
|
|
realm.message_visibility_limit = None
|
2019-01-14 11:22:59 +01:00
|
|
|
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_STANDARD
|
2018-10-24 06:09:01 +02:00
|
|
|
elif plan_type == Realm.STANDARD_FREE:
|
|
|
|
realm.max_invites = Realm.INVITES_STANDARD_REALM_DAILY_MAX
|
2018-10-17 10:50:59 +02:00
|
|
|
realm.message_visibility_limit = None
|
2019-01-14 11:22:59 +01:00
|
|
|
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_STANDARD
|
2018-08-31 20:09:36 +02:00
|
|
|
elif plan_type == Realm.LIMITED:
|
|
|
|
realm.max_invites = settings.INVITES_DEFAULT_REALM_DAILY_MAX
|
2018-10-17 10:50:59 +02:00
|
|
|
realm.message_visibility_limit = Realm.MESSAGE_VISIBILITY_LIMITED
|
2019-01-14 11:22:59 +01:00
|
|
|
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_LIMITED
|
2018-10-25 07:54:37 +02:00
|
|
|
|
|
|
|
update_first_visible_message_id(realm)
|
|
|
|
|
2019-01-14 11:22:59 +01:00
|
|
|
realm.save(update_fields=['_max_invites', 'message_visibility_limit', 'upload_quota_gb'])
|
2018-08-22 13:36:37 +02:00
|
|
|
|
2019-06-11 12:43:08 +02:00
|
|
|
event = {'type': 'realm', 'op': 'update', 'property': 'plan_type', 'value': plan_type,
|
|
|
|
'extra_data': {'upload_quota': realm.upload_quota_bytes()}}
|
|
|
|
send_event(realm, event, active_user_ids(realm.id))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_default_sending_stream(user_profile: UserProfile, stream: Optional[Stream],
|
|
|
|
log: bool=True) -> None:
|
2014-02-13 19:39:54 +01:00
|
|
|
user_profile.default_sending_stream = stream
|
|
|
|
user_profile.save(update_fields=['default_sending_stream'])
|
|
|
|
if log:
|
|
|
|
log_event({'type': 'user_change_default_sending_stream',
|
|
|
|
'user': user_profile.email,
|
|
|
|
'stream': str(stream)})
|
2014-02-26 21:23:18 +01:00
|
|
|
if user_profile.is_bot:
|
|
|
|
if stream:
|
2018-05-11 02:01:29 +02:00
|
|
|
stream_name = stream.name # type: Optional[str]
|
2014-02-26 21:23:18 +01:00
|
|
|
else:
|
|
|
|
stream_name = None
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2014-02-26 21:23:18 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
|
|
|
default_sending_stream=stream_name,
|
|
|
|
)),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_default_events_register_stream(user_profile: UserProfile,
|
|
|
|
stream: Optional[Stream],
|
|
|
|
log: bool=True) -> None:
|
2014-02-13 19:39:54 +01:00
|
|
|
user_profile.default_events_register_stream = stream
|
|
|
|
user_profile.save(update_fields=['default_events_register_stream'])
|
|
|
|
if log:
|
|
|
|
log_event({'type': 'user_change_default_events_register_stream',
|
|
|
|
'user': user_profile.email,
|
|
|
|
'stream': str(stream)})
|
2014-02-26 21:34:12 +01:00
|
|
|
if user_profile.is_bot:
|
|
|
|
if stream:
|
2018-05-11 02:01:29 +02:00
|
|
|
stream_name = stream.name # type: Optional[str]
|
2014-02-26 21:34:12 +01:00
|
|
|
else:
|
|
|
|
stream_name = None
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2014-02-26 21:34:12 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
|
|
|
default_events_register_stream=stream_name,
|
|
|
|
)),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_default_all_public_streams(user_profile: UserProfile, value: bool,
|
|
|
|
log: bool=True) -> None:
|
2014-02-13 19:39:54 +01:00
|
|
|
user_profile.default_all_public_streams = value
|
|
|
|
user_profile.save(update_fields=['default_all_public_streams'])
|
|
|
|
if log:
|
|
|
|
log_event({'type': 'user_change_default_all_public_streams',
|
|
|
|
'user': user_profile.email,
|
|
|
|
'value': str(value)})
|
2014-02-26 21:15:31 +01:00
|
|
|
if user_profile.is_bot:
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_bot',
|
2014-02-26 21:15:31 +01:00
|
|
|
op='update',
|
|
|
|
bot=dict(email=user_profile.email,
|
2016-10-26 05:18:50 +02:00
|
|
|
user_id=user_profile.id,
|
|
|
|
default_all_public_streams=user_profile.default_all_public_streams,
|
|
|
|
)),
|
2017-10-07 17:53:14 +02:00
|
|
|
bot_owner_user_ids(user_profile))
|
2014-02-13 19:39:54 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_is_admin(user_profile: UserProfile, value: bool,
|
|
|
|
permission: str='administer') -> None:
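"""Grants or revokes the 'administer' or 'api_super_user' permission
for the user; changes to the admin permission are broadcast to the
realm's active users."""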
|
2016-02-08 03:59:38 +01:00
|
|
|
if permission == "administer":
|
|
|
|
user_profile.is_realm_admin = value
|
|
|
|
user_profile.save(update_fields=["is_realm_admin"])
|
|
|
|
elif permission == "api_super_user":
|
|
|
|
user_profile.is_api_super_user = value
|
|
|
|
user_profile.save(update_fields=["is_api_super_user"])
|
2014-01-21 19:27:22 +01:00
|
|
|
else:
|
2018-05-15 19:45:31 +02:00
|
|
|
raise AssertionError("Invalid admin permission")
|
2014-01-21 19:27:22 +01:00
|
|
|
|
2015-09-20 19:32:01 +02:00
|
|
|
if permission == 'administer':
|
|
|
|
event = dict(type="realm_user", op="update",
|
|
|
|
person=dict(email=user_profile.email,
|
2017-01-21 15:27:56 +01:00
|
|
|
user_id=user_profile.id,
|
2016-02-08 03:59:38 +01:00
|
|
|
is_admin=value))
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
|
2014-01-21 19:27:22 +01:00
|
|
|
|
2018-10-19 12:29:46 +02:00
|
|
|
def do_change_is_guest(user_profile: UserProfile, value: bool) -> None:
|
|
|
|
user_profile.is_guest = value
|
|
|
|
user_profile.save(update_fields=["is_guest"])
|
|
|
|
event = dict(type="realm_user", op="update",
|
|
|
|
person=dict(email=user_profile.email,
|
|
|
|
user_id=user_profile.id,
|
|
|
|
is_guest=value))
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
|
2018-10-19 12:29:46 +02:00
|
|
|
|
|
|
|
|
2018-04-27 01:00:26 +02:00
|
|
|
def do_change_stream_invite_only(stream: Stream, invite_only: bool,
|
|
|
|
history_public_to_subscribers: Optional[bool]=None) -> None:
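"""Updates a stream's privacy settings (invite_only and
history_public_to_subscribers) and notifies the users who can access
the stream."""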
|
2018-05-03 00:07:08 +02:00
|
|
|
history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
|
|
|
|
stream.realm,
|
|
|
|
invite_only,
|
|
|
|
history_public_to_subscribers
|
|
|
|
)
|
2017-01-30 03:52:55 +01:00
|
|
|
stream.invite_only = invite_only
|
2018-04-27 01:00:26 +02:00
|
|
|
stream.history_public_to_subscribers = history_public_to_subscribers
|
|
|
|
stream.save(update_fields=['invite_only', 'history_public_to_subscribers'])
|
2019-05-02 19:43:27 +02:00
|
|
|
event = dict(
|
|
|
|
op="update",
|
|
|
|
type="stream",
|
|
|
|
property="invite_only",
|
|
|
|
value=invite_only,
|
|
|
|
history_public_to_subscribers=history_public_to_subscribers,
|
|
|
|
stream_id=stream.id,
|
|
|
|
name=stream.name,
|
|
|
|
)
|
|
|
|
send_event(stream.realm, event, can_access_stream_user_ids(stream))
|
2014-01-02 23:20:33 +01:00
|
|
|
|
2018-04-27 10:05:14 +02:00
|
|
|
def do_change_stream_web_public(stream: Stream, is_web_public: bool) -> None:
|
|
|
|
stream.is_web_public = is_web_public
|
|
|
|
stream.save(update_fields=['is_web_public'])
|
|
|
|
|
2018-05-12 07:25:42 +02:00
|
|
|
def do_change_stream_announcement_only(stream: Stream, is_announcement_only: bool) -> None:
|
|
|
|
stream.is_announcement_only = is_announcement_only
|
|
|
|
stream.save(update_fields=['is_announcement_only'])
|
2019-05-02 19:43:27 +02:00
|
|
|
event = dict(
|
|
|
|
op="update",
|
|
|
|
type="stream",
|
|
|
|
property="is_announcement_only",
|
|
|
|
value=is_announcement_only,
|
|
|
|
stream_id=stream.id,
|
|
|
|
name=stream.name,
|
|
|
|
)
|
|
|
|
send_event(stream.realm, event, can_access_stream_user_ids(stream))
|
2018-05-12 07:25:42 +02:00
|
|
|
|
2019-01-05 12:47:38 +01:00
|
|
|
def do_rename_stream(stream: Stream,
|
|
|
|
new_name: str,
|
|
|
|
user_profile: UserProfile,
|
|
|
|
log: bool=True) -> Dict[str, str]:
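"""Renames a stream, updating the affected caches, notifying clients
of the new name and email address, and announcing the rename in the
stream itself.

Returns a dict containing the stream's new email address."""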
|
2013-08-22 17:45:23 +02:00
|
|
|
old_name = stream.name
|
|
|
|
stream.name = new_name
|
|
|
|
stream.save(update_fields=["name"])
|
|
|
|
|
|
|
|
if log:
|
|
|
|
log_event({'type': 'stream_name_change',
|
2017-03-13 07:06:54 +01:00
|
|
|
'realm': stream.realm.string_id,
|
2013-08-22 17:45:23 +02:00
|
|
|
'new_name': new_name})
|
|
|
|
|
2017-10-28 20:26:11 +02:00
|
|
|
recipient = get_stream_recipient(stream.id)
|
2013-08-22 17:45:23 +02:00
|
|
|
messages = Message.objects.filter(recipient=recipient).only("id")
|
|
|
|
|
|
|
|
# Update the display recipient and stream, which are easy single
|
|
|
|
# items to set.
|
2017-09-17 22:26:43 +02:00
|
|
|
old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
|
|
|
|
new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
|
2013-08-28 18:00:19 +02:00
|
|
|
if old_cache_key != new_cache_key:
|
|
|
|
cache_delete(old_cache_key)
|
|
|
|
cache_set(new_cache_key, stream)
|
2013-08-22 17:45:23 +02:00
|
|
|
cache_set(display_recipient_cache_key(recipient.id), stream.name)
|
|
|
|
|
|
|
|
# Delete cache entries for everything else, which is cheaper and
|
|
|
|
# clearer than trying to set them. display_recipient is the out of
|
|
|
|
# date field in all cases.
|
|
|
|
cache_delete_many(
|
2017-10-20 20:29:49 +02:00
|
|
|
to_dict_cache_key_id(message.id) for message in messages)
|
2014-02-02 15:30:33 +01:00
|
|
|
new_email = encode_email_address(stream)
|
2013-08-22 17:45:23 +02:00
|
|
|
|
2014-01-22 23:25:03 +01:00
|
|
|
# We will tell our users to essentially
|
|
|
|
# update stream.name = new_name where name = old_name
|
2014-02-02 15:30:33 +01:00
|
|
|
# and update stream.email = new_email where name = old_name.
|
|
|
|
# We could optimize this by trying to send one message, but the
|
|
|
|
# client code really wants one property update at a time, and
|
|
|
|
# updating stream names is a pretty infrequent operation.
|
|
|
|
# More importantly, we want to key these updates by id, not name,
|
|
|
|
# since id is the immutable primary key, and obviously name is not.
|
|
|
|
data_updates = [
|
|
|
|
['email_address', new_email],
|
|
|
|
['name', new_name],
|
|
|
|
]
|
|
|
|
for property, value in data_updates:
|
|
|
|
event = dict(
|
|
|
|
op="update",
|
|
|
|
type="stream",
|
|
|
|
property=property,
|
|
|
|
value=value,
|
2017-03-05 01:50:25 +01:00
|
|
|
stream_id=stream.id,
|
|
|
|
name=old_name,
|
2014-02-02 15:30:33 +01:00
|
|
|
)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(stream.realm, event, can_access_stream_user_ids(stream))
|
2019-01-05 12:47:38 +01:00
|
|
|
sender = get_system_bot(settings.NOTIFICATION_BOT)
|
|
|
|
internal_send_stream_message(
|
|
|
|
stream.realm,
|
|
|
|
sender,
|
2019-02-09 03:01:35 +01:00
|
|
|
stream,
|
2019-07-19 20:23:05 +02:00
|
|
|
Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
|
2019-07-19 20:13:21 +02:00
|
|
|
_('@_**%(user_name)s|%(user_id)d** renamed stream **%(old_stream_name)s** to '
|
|
|
|
'**%(new_stream_name)s**.') % {
|
|
|
|
'user_name': user_profile.full_name,
|
|
|
|
'user_id': user_profile.id,
|
|
|
|
'old_stream_name': old_name,
|
|
|
|
'new_stream_name': new_name}
|
2019-01-05 12:47:38 +01:00
|
|
|
)
|
2013-09-10 11:46:18 +02:00
|
|
|
# Even though the token doesn't change, the web client needs to update the
|
|
|
|
# email forwarding address to display the correctly-escaped new name.
|
2014-02-02 15:30:33 +01:00
|
|
|
return {"email_address": new_email}
|
2013-09-10 11:46:18 +02:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_change_stream_description(stream: Stream, new_description: str) -> None:
|
2014-01-22 20:20:10 +01:00
|
|
|
stream.description = new_description
|
2019-03-01 09:10:40 +01:00
|
|
|
stream.rendered_description = render_stream_description(new_description)
|
2019-01-11 13:48:22 +01:00
|
|
|
stream.save(update_fields=['description', 'rendered_description'])
|
2014-01-22 20:20:10 +01:00
|
|
|
|
2017-03-05 01:50:25 +01:00
|
|
|
event = dict(
|
|
|
|
type='stream',
|
|
|
|
op='update',
|
|
|
|
property='description',
|
|
|
|
name=stream.name,
|
|
|
|
stream_id=stream.id,
|
|
|
|
value=new_description,
|
2019-01-11 13:48:22 +01:00
|
|
|
rendered_description=stream.rendered_description
|
2017-03-05 01:50:25 +01:00
|
|
|
)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(stream.realm, event, can_access_stream_user_ids(stream))
|
2014-01-22 20:20:10 +01:00
|
|
|
|
2018-07-27 23:26:29 +02:00
|
|
|
def do_create_realm(string_id: str, name: str,
|
|
|
|
emails_restricted_to_domains: Optional[bool]=None) -> Realm:
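"""Creates a realm with the given string_id and name, along with its
initial notifications and signup notifications streams, and announces
the signup in the admin realm."""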
|
2019-05-04 04:47:44 +02:00
|
|
|
if Realm.objects.filter(string_id=string_id).exists():
|
2017-08-24 04:52:34 +02:00
|
|
|
raise AssertionError("Realm %s already exists!" % (string_id,))
|
|
|
|
|
|
|
|
kwargs = {} # type: Dict[str, Any]
|
2018-07-27 23:26:29 +02:00
|
|
|
if emails_restricted_to_domains is not None:
|
|
|
|
kwargs['emails_restricted_to_domains'] = emails_restricted_to_domains
|
2017-08-24 04:52:34 +02:00
|
|
|
realm = Realm(string_id=string_id, name=name, **kwargs)
|
|
|
|
realm.save()
|
|
|
|
|
|
|
|
# Create stream once Realm object has been saved
|
2019-02-24 08:14:13 +01:00
|
|
|
notifications_stream = ensure_stream(
|
|
|
|
realm, Realm.DEFAULT_NOTIFICATION_STREAM_NAME,
|
|
|
|
stream_description="Everyone is added to this stream by default. Welcome! :octopus:")
|
2017-08-24 04:52:34 +02:00
|
|
|
realm.notifications_stream = notifications_stream
|
2017-11-15 23:44:31 +01:00
|
|
|
|
2019-02-24 04:40:44 +01:00
|
|
|
# With the current initial streams situation, the only public
|
|
|
|
# stream is the notifications_stream.
|
|
|
|
DefaultStream.objects.create(stream=notifications_stream, realm=realm)
|
|
|
|
|
2018-03-21 22:05:21 +01:00
|
|
|
signup_notifications_stream = ensure_stream(
|
2017-10-04 02:07:44 +02:00
|
|
|
realm, Realm.INITIAL_PRIVATE_STREAM_NAME, invite_only=True,
|
2017-11-15 23:44:31 +01:00
|
|
|
stream_description="A private stream for core team members.")
|
|
|
|
realm.signup_notifications_stream = signup_notifications_stream
|
|
|
|
|
|
|
|
realm.save(update_fields=['notifications_stream', 'signup_notifications_stream'])
|
2017-08-24 04:52:34 +02:00
|
|
|
|
2018-12-13 08:19:29 +01:00
|
|
|
if settings.BILLING_ENABLED:
|
|
|
|
do_change_plan_type(realm, Realm.LIMITED)
|
|
|
|
|
2017-08-24 04:52:34 +02:00
|
|
|
# Log the event
|
|
|
|
log_event({"type": "realm_created",
|
|
|
|
"string_id": string_id,
|
2018-07-27 23:26:29 +02:00
|
|
|
"emails_restricted_to_domains": emails_restricted_to_domains})
|
2017-08-24 04:52:34 +02:00
|
|
|
|
2019-07-18 01:28:54 +02:00
|
|
|
# Send a notification to the admin realm
|
|
|
|
signup_message = "Signups enabled"
|
|
|
|
admin_realm = get_system_bot(settings.NOTIFICATION_BOT).realm
|
|
|
|
internal_send_message(admin_realm, settings.NOTIFICATION_BOT, "stream",
|
|
|
|
"signups", realm.display_subdomain, signup_message)
|
2017-08-24 04:52:34 +02:00
|
|
|
return realm
|
2013-01-10 22:01:33 +01:00
|
|
|
|
2019-06-29 22:00:44 +02:00
|
|
|
def do_change_notification_settings(user_profile: UserProfile, name: str,
|
|
|
|
value: Union[bool, int, str], log: bool=True) -> None:
|
2017-05-23 03:19:21 +02:00
|
|
|
"""Takes in a UserProfile object, the name of a global notification
|
|
|
|
preference to update, and the value to update to.
|
|
|
|
"""
|
2013-10-16 17:24:52 +02:00
|
|
|
|
2017-05-23 03:19:21 +02:00
|
|
|
notification_setting_type = UserProfile.notification_setting_types[name]
|
|
|
|
assert isinstance(value, notification_setting_type), (
|
|
|
|
'Cannot update %s: %s is not an instance of %s' % (
|
|
|
|
name, value, notification_setting_type,))
|
2016-09-19 22:55:18 +02:00
|
|
|
|
2017-05-23 03:19:21 +02:00
|
|
|
setattr(user_profile, name, value)
|
2013-12-02 01:39:10 +01:00
|
|
|
|
2017-05-23 03:19:21 +02:00
|
|
|
# Disabling digest emails should clear a user's email queue
|
2017-05-24 01:21:02 +02:00
|
|
|
if name == 'enable_digest_emails' and not value:
|
2019-01-04 01:50:21 +01:00
|
|
|
clear_scheduled_emails([user_profile.id], ScheduledEmail.DIGEST)
|
2013-12-02 01:39:10 +01:00
|
|
|
|
2017-05-23 03:19:21 +02:00
|
|
|
user_profile.save(update_fields=[name])
|
2014-02-13 23:48:03 +01:00
|
|
|
event = {'type': 'update_global_notifications',
|
|
|
|
'user': user_profile.email,
|
2017-05-23 03:19:21 +02:00
|
|
|
'notification_name': name,
|
|
|
|
'setting': value}
|
2013-12-02 01:39:10 +01:00
|
|
|
if log:
|
2014-02-13 23:48:03 +01:00
|
|
|
log_event(event)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, [user_profile.id])
|
2013-12-02 01:39:10 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_change_enter_sends(user_profile: UserProfile, enter_sends: bool) -> None:
|
2013-02-27 23:18:38 +01:00
|
|
|
user_profile.enter_sends = enter_sends
|
2013-03-21 21:29:28 +01:00
|
|
|
user_profile.save(update_fields=["enter_sends"])
|
2013-02-27 23:18:38 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_set_user_display_setting(user_profile: UserProfile,
|
|
|
|
setting_name: str,
|
2019-03-17 14:48:51 +01:00
|
|
|
setting_value: Union[bool, str, int]) -> None:
|
2017-04-07 00:05:55 +02:00
|
|
|
property_type = UserProfile.property_types[setting_name]
|
|
|
|
assert isinstance(setting_value, property_type)
|
|
|
|
setattr(user_profile, setting_name, setting_value)
|
|
|
|
user_profile.save(update_fields=[setting_name])
|
2017-03-14 10:53:09 +01:00
|
|
|
event = {'type': 'update_display_settings',
|
|
|
|
'user': user_profile.email,
|
2017-04-07 00:05:55 +02:00
|
|
|
'setting_name': setting_name,
|
2017-03-14 10:53:09 +01:00
|
|
|
'setting': setting_value}
|
2018-03-05 03:46:07 +01:00
|
|
|
if setting_name == "default_language":
|
|
|
|
assert isinstance(setting_value, str)
|
|
|
|
event['language_name'] = get_language_name(setting_value)
|
|
|
|
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, [user_profile.id])
|
2017-03-14 10:53:09 +01:00
|
|
|
|
2017-04-02 20:57:27 +02:00
|
|
|
# Updates to the timezone display setting are sent to all users
|
|
|
|
if setting_name == "timezone":
|
|
|
|
payload = dict(email=user_profile.email,
|
|
|
|
user_id=user_profile.id,
|
|
|
|
timezone=user_profile.timezone)
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm,
|
|
|
|
dict(type='realm_user', op='update', person=payload),
|
2017-09-16 21:26:54 +02:00
|
|
|
active_user_ids(user_profile.realm_id))
|
2017-04-02 20:57:27 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def lookup_default_stream_groups(default_stream_group_names: List[str],
|
|
|
|
realm: Realm) -> List[DefaultStreamGroup]:
|
2017-10-26 20:31:43 +02:00
|
|
|
default_stream_groups = []
|
|
|
|
for group_name in default_stream_group_names:
|
|
|
|
try:
|
|
|
|
default_stream_group = DefaultStreamGroup.objects.get(
|
|
|
|
name=group_name, realm=realm)
|
|
|
|
except DefaultStreamGroup.DoesNotExist:
|
2019-04-20 03:49:03 +02:00
|
|
|
raise JsonableError(_('Invalid default stream group %s') % (group_name,))
|
2017-10-26 20:31:43 +02:00
|
|
|
default_stream_groups.append(default_stream_group)
|
|
|
|
return default_stream_groups
|
|
|
|
|
2018-11-02 23:33:54 +01:00
|
|
|
def notify_default_streams(realm: Realm) -> None:
|
2016-05-20 22:08:42 +02:00
|
|
|
event = dict(
|
|
|
|
type="default_streams",
|
2018-11-02 23:33:54 +01:00
|
|
|
default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm.id))
|
2016-05-20 22:08:42 +02:00
|
|
|
)
|
2019-03-01 01:26:57 +01:00
|
|
|
send_event(realm, event, active_non_guest_user_ids(realm.id))
|
2016-05-20 22:08:42 +02:00
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
def notify_default_stream_groups(realm: Realm) -> None:
|
|
|
|
event = dict(
|
|
|
|
type="default_stream_groups",
|
|
|
|
default_stream_groups=default_stream_groups_to_dicts_sorted(get_default_stream_groups(realm))
|
|
|
|
)
|
2019-03-01 01:26:57 +01:00
|
|
|
send_event(realm, event, active_non_guest_user_ids(realm.id))
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_add_default_stream(stream: Stream) -> None:
|
2017-09-17 00:34:13 +02:00
|
|
|
realm_id = stream.realm_id
|
|
|
|
stream_id = stream.id
|
|
|
|
if not DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).exists():
|
|
|
|
DefaultStream.objects.create(realm_id=realm_id, stream_id=stream_id)
|
2018-11-02 23:33:54 +01:00
|
|
|
notify_default_streams(stream.realm)
|
2014-01-27 19:39:54 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_remove_default_stream(stream: Stream) -> None:
|
2017-09-17 00:34:13 +02:00
|
|
|
realm_id = stream.realm_id
|
|
|
|
stream_id = stream.id
|
|
|
|
DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).delete()
|
2018-11-02 23:33:54 +01:00
|
|
|
notify_default_streams(stream.realm)
|
2014-01-27 20:02:20 +01:00
|
|
|
|
2018-05-11 02:01:29 +02:00
|
|
|
def do_create_default_stream_group(realm: Realm, group_name: str,
|
|
|
|
description: str, streams: List[Stream]) -> None:
|
2017-11-01 18:20:34 +01:00
|
|
|
default_streams = get_default_streams_for_realm(realm.id)
|
|
|
|
for stream in streams:
|
|
|
|
if stream in default_streams:
|
2017-11-16 01:05:08 +01:00
|
|
|
raise JsonableError(_(
|
|
|
|
"'%(stream_name)s' is a default stream and cannot be added to '%(group_name)s'")
|
|
|
|
% {'stream_name': stream.name, 'group_name': group_name})
|
2017-11-01 18:20:34 +01:00
|
|
|
|
|
|
|
check_default_stream_group_name(group_name)
|
2017-11-15 19:57:52 +01:00
|
|
|
(group, created) = DefaultStreamGroup.objects.get_or_create(
|
|
|
|
name=group_name, realm=realm, description=description)
|
2017-11-01 18:20:34 +01:00
|
|
|
if not created:
|
2017-11-16 01:05:08 +01:00
|
|
|
raise JsonableError(_("Default stream group '%(group_name)s' already exists")
|
|
|
|
% {'group_name': group_name})
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2018-01-31 08:22:07 +01:00
|
|
|
group.streams.set(streams)
|
2017-11-01 18:20:34 +01:00
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-15 19:57:52 +01:00
|
|
|
def do_add_streams_to_default_stream_group(realm: Realm, group: DefaultStreamGroup,
|
|
|
|
streams: List[Stream]) -> None:
|
2017-11-01 18:20:34 +01:00
|
|
|
default_streams = get_default_streams_for_realm(realm.id)
|
|
|
|
for stream in streams:
|
|
|
|
if stream in default_streams:
|
2017-11-16 01:05:08 +01:00
|
|
|
raise JsonableError(_(
|
2018-03-04 22:59:07 +01:00
|
|
|
"'%(stream_name)s' is a default stream and cannot be added to '%(group_name)s'")
|
|
|
|
% {'stream_name': stream.name, 'group_name': group.name})
|
2017-11-01 18:20:34 +01:00
|
|
|
if stream in group.streams.all():
|
2017-11-16 01:05:08 +01:00
|
|
|
raise JsonableError(_(
|
2018-03-04 22:59:07 +01:00
|
|
|
"Stream '%(stream_name)s' is already present in default stream group '%(group_name)s'")
|
|
|
|
% {'stream_name': stream.name, 'group_name': group.name})
|
2017-11-01 18:20:34 +01:00
|
|
|
group.streams.add(stream)
|
|
|
|
|
|
|
|
group.save()
|
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
def do_remove_streams_from_default_stream_group(realm: Realm, group: DefaultStreamGroup,
|
|
|
|
streams: List[Stream]) -> None:
|
2017-11-01 18:20:34 +01:00
|
|
|
for stream in streams:
|
|
|
|
if stream not in group.streams.all():
|
2017-11-16 01:05:08 +01:00
|
|
|
raise JsonableError(_(
|
2018-03-04 22:59:07 +01:00
|
|
|
"Stream '%(stream_name)s' is not present in default stream group '%(group_name)s'")
|
|
|
|
% {'stream_name': stream.name, 'group_name': group.name})
|
2017-11-01 18:20:34 +01:00
|
|
|
group.streams.remove(stream)
|
|
|
|
|
|
|
|
group.save()
|
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-15 19:57:52 +01:00
|
|
|
def do_change_default_stream_group_name(realm: Realm, group: DefaultStreamGroup,
|
2018-05-11 02:01:29 +02:00
|
|
|
new_group_name: str) -> None:
|
2017-11-14 21:06:02 +01:00
|
|
|
if group.name == new_group_name:
|
|
|
|
raise JsonableError(_("This default stream group is already named '%s'") % (new_group_name,))
|
|
|
|
|
|
|
|
if DefaultStreamGroup.objects.filter(name=new_group_name, realm=realm).exists():
|
|
|
|
raise JsonableError(_("Default stream group '%s' already exists") % (new_group_name,))
|
|
|
|
|
|
|
|
group.name = new_group_name
|
|
|
|
group.save()
|
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-15 19:57:52 +01:00
|
|
|
def do_change_default_stream_group_description(realm: Realm, group: DefaultStreamGroup,
|
2018-05-11 02:01:29 +02:00
|
|
|
new_description: str) -> None:
|
2017-11-14 20:51:34 +01:00
|
|
|
group.description = new_description
|
|
|
|
group.save()
|
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-14 20:33:09 +01:00
|
|
|
def do_remove_default_stream_group(realm: Realm, group: DefaultStreamGroup) -> None:
|
|
|
|
group.delete()
|
2017-11-01 18:20:34 +01:00
|
|
|
notify_default_stream_groups(realm)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_default_streams_for_realm(realm_id: int) -> List[Stream]:
|
2013-04-05 17:04:50 +02:00
|
|
|
return [default.stream for default in
|
2017-11-15 19:57:52 +01:00
|
|
|
DefaultStream.objects.select_related("stream", "stream__realm").filter(
|
|
|
|
realm_id=realm_id)]
|
2014-01-27 18:02:41 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_default_subs(user_profile: UserProfile) -> List[Stream]:
|
2014-01-27 18:02:41 +01:00
|
|
|
# Right now default streams are realm-wide. This wrapper gives us flexibility
|
|
|
|
# to some day further customize how we set up default streams for new users.
|
2017-09-17 00:34:13 +02:00
|
|
|
return get_default_streams_for_realm(user_profile.realm_id)
|
2013-01-11 23:36:41 +01:00
|
|
|
|
2016-05-20 22:08:42 +02:00
|
|
|
# returns default streams in JSON-serializable format
|
2017-11-05 11:15:10 +01:00
|
|
|
def streams_to_dicts_sorted(streams: List[Stream]) -> List[Dict[str, Any]]:
|
2016-05-20 22:08:42 +02:00
|
|
|
return sorted([stream.to_dict() for stream in streams], key=lambda elt: elt["name"])
|
|
|
|
|
2017-11-01 18:20:34 +01:00
|
|
|
def default_stream_groups_to_dicts_sorted(groups: List[DefaultStreamGroup]) -> List[Dict[str, Any]]:
|
|
|
|
return sorted([group.to_dict() for group in groups], key=lambda elt: elt["name"])
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_activity_interval(user_profile: UserProfile,
|
|
|
|
log_time: datetime.datetime) -> None:
|
2017-04-15 07:20:16 +02:00
|
|
|
effective_end = log_time + UserActivityInterval.MIN_INTERVAL_LENGTH
|
2013-09-27 22:22:52 +02:00
|
|
|
# This code isn't perfect, because with various races we might end
|
|
|
|
# up creating two overlapping intervals, but that shouldn't happen
|
|
|
|
# often, and can be corrected for in post-processing.
|
2013-09-04 00:00:44 +02:00
|
|
|
try:
|
2013-10-08 21:19:56 +02:00
|
|
|
last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
|
|
|
|
# There are two ways our intervals could overlap:
|
|
|
|
# (1) The start of the new interval could be inside the old interval
|
|
|
|
# (2) The end of the new interval could be inside the old interval
|
|
|
|
# In either case, we just extend the old interval to include the new interval.
|
|
|
|
if ((log_time <= last.end and log_time >= last.start) or
|
2016-12-03 18:19:09 +01:00
|
|
|
(effective_end <= last.end and effective_end >= last.start)):
|
2013-09-27 22:22:52 +02:00
|
|
|
last.end = max(last.end, effective_end)
|
|
|
|
last.start = min(last.start, log_time)
|
|
|
|
last.save(update_fields=["start", "end"])
|
2013-09-04 00:00:44 +02:00
|
|
|
return
|
|
|
|
except IndexError:
|
|
|
|
pass
|
|
|
|
|
2013-10-08 21:19:56 +02:00
|
|
|
# Otherwise, the intervals don't overlap, so we should make a new one
|
2013-09-04 00:00:44 +02:00
|
|
|
UserActivityInterval.objects.create(user_profile=user_profile, start=log_time,
|
|
|
|
end=effective_end)
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_activity')
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_activity(user_profile: UserProfile,
|
|
|
|
client: Client,
|
2018-05-11 02:01:29 +02:00
|
|
|
query: str,
|
2017-11-05 11:15:10 +01:00
|
|
|
log_time: datetime.datetime) -> None:
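"""Increments the UserActivity counter for this (user, client, query)
triple and records the time of the last visit."""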
|
2013-11-01 19:02:11 +01:00
|
|
|
(activity, created) = UserActivity.objects.get_or_create(
|
|
|
|
user_profile=user_profile,
|
|
|
|
client=client,
|
|
|
|
query=query,
|
|
|
|
defaults={'last_visit': log_time, 'count': 0})
|
|
|
|
|
2013-01-11 23:36:41 +01:00
|
|
|
activity.count += 1
|
2013-01-11 21:16:42 +01:00
|
|
|
activity.last_visit = log_time
|
2013-03-21 21:29:28 +01:00
|
|
|
activity.save(update_fields=["last_visit", "count"])
|
2013-01-11 21:16:42 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def send_presence_changed(user_profile: UserProfile, presence: UserPresence) -> None:
|
2013-04-05 00:13:03 +02:00
|
|
|
presence_dict = presence.to_dict()
|
2014-01-24 23:24:44 +01:00
|
|
|
event = dict(type="presence", email=user_profile.email,
|
|
|
|
server_timestamp=time.time(),
|
2017-04-25 11:50:30 +02:00
|
|
|
presence={presence_dict['client']: presence_dict})
|
2018-11-02 23:33:54 +01:00
|
|
|
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def consolidate_client(client: Client) -> Client:
|
2014-02-13 14:28:08 +01:00
|
|
|
# The web app reports a client as 'website'.
|
|
|
|
# The desktop app reports a client as ZulipDesktop,
|
|
|
|
# due to it setting a custom user agent. We want both
|
|
|
|
# to count as web users.
|
|
|
|
|
|
|
|
# Alias ZulipDesktop to website
|
|
|
|
if client.name in ['ZulipDesktop']:
|
|
|
|
return get_client('website')
|
|
|
|
else:
|
|
|
|
return client
|
|
|
|
|
2013-04-16 22:58:21 +02:00
|
|
|
@statsd_increment('user_presence')
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_update_user_presence(user_profile: UserProfile,
|
|
|
|
client: Client,
|
|
|
|
log_time: datetime.datetime,
|
|
|
|
status: int) -> None:
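"""Creates or updates the UserPresence row for this (user, client)
pair, and broadcasts a presence event to the realm when the user is
new or has just come back online."""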
|
2014-02-13 14:28:08 +01:00
|
|
|
client = consolidate_client(client)
|
2013-11-01 19:02:11 +01:00
|
|
|
(presence, created) = UserPresence.objects.get_or_create(
|
|
|
|
user_profile=user_profile,
|
|
|
|
client=client,
|
|
|
|
defaults={'timestamp': log_time,
|
|
|
|
'status': status})
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2013-08-20 20:57:26 +02:00
|
|
|
stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
|
2013-04-05 00:13:03 +02:00
|
|
|
was_idle = presence.status == UserPresence.IDLE
|
|
|
|
became_online = (status == UserPresence.ACTIVE) and (stale_status or was_idle)
|
2013-04-03 22:00:02 +02:00
|
|
|
|
2014-03-17 18:35:43 +01:00
|
|
|
# If an object was created, it has already been saved.
|
|
|
|
#
|
|
|
|
# We suppress changes from ACTIVE to IDLE before stale_status is reached;
|
|
|
|
# this protects us from the user having two clients open: one active, the
|
|
|
|
# other idle. Without this check, we would constantly toggle their status
|
|
|
|
# between the two states.
|
|
|
|
if (not created and stale_status) or was_idle or status == presence.status:
|
2013-06-24 19:10:25 +02:00
|
|
|
# The following block attempts to only update the "status"
|
|
|
|
# field in the event that it actually changed. This is
|
|
|
|
# important to avoid flushing the UserPresence cache when the
|
|
|
|
# data it would return to a client hasn't actually changed
|
|
|
|
# (see the UserPresence post_save hook for details).
|
2013-06-24 19:05:41 +02:00
|
|
|
presence.timestamp = log_time
|
2013-06-24 19:10:25 +02:00
|
|
|
update_fields = ["timestamp"]
|
|
|
|
if presence.status != status:
|
|
|
|
presence.status = status
|
|
|
|
update_fields.append("status")
|
|
|
|
presence.save(update_fields=update_fields)
|
2013-02-08 23:44:15 +01:00
|
|
|
|
2018-04-20 20:55:29 +02:00
|
|
|
if not user_profile.realm.presence_disabled and (created or became_online):
|
2013-04-03 22:00:02 +02:00
|
|
|
# Push event to all users in the realm so they see the new user
|
|
|
|
# appear in the presence list immediately, or the newly online
|
2013-09-15 20:49:04 +02:00
|
|
|
# user without delay. Note that we won't send an update here for a
|
|
|
|
# timestamp update, because we rely on the browser to ping us every 50
|
|
|
|
# seconds for realm-wide status updates, and those updates should have
|
|
|
|
# recent timestamps, which means the browser won't think active users
|
|
|
|
# have gone idle. If we were more aggressive in this function about
|
|
|
|
# sending timestamp updates, we could eliminate the ping responses, but
|
|
|
|
# that's not a high priority for now, considering that most of our non-MIT
|
|
|
|
# realms are pretty small.
|
2013-04-03 22:00:02 +02:00
|
|
|
send_presence_changed(user_profile, presence)
|
|
|
|
|
def update_user_activity_interval(user_profile: UserProfile, log_time: datetime.datetime) -> None:
    event = {'user_profile_id': user_profile.id,
             'time': datetime_to_timestamp(log_time)}
    queue_json_publish("user_activity_interval", event)

def update_user_presence(user_profile: UserProfile, client: Client, log_time: datetime.datetime,
                         status: int, new_user_input: bool) -> None:
    event = {'user_profile_id': user_profile.id,
             'status': status,
             'time': datetime_to_timestamp(log_time),
             'client': client.name}
    queue_json_publish("user_presence", event)

    if new_user_input:
        update_user_activity_interval(user_profile, log_time)

def do_update_pointer(user_profile: UserProfile, client: Client,
                      pointer: int, update_flags: bool=False) -> None:
    prev_pointer = user_profile.pointer
    user_profile.pointer = pointer
    user_profile.save(update_fields=["pointer"])

    if update_flags:  # nocoverage
        # This block of code is compatibility code for the
        # legacy/original Zulip Android app.  It's a shim
        # that will mark as read any messages up until the pointer
        # move; we expect to remove this feature entirely before long,
        # when we drop support for the old Android app entirely.
        app_message_ids = UserMessage.objects.filter(
            user_profile=user_profile,
            message__id__gt=prev_pointer,
            message__id__lte=pointer).extra(where=[
                UserMessage.where_unread(),
                UserMessage.where_active_push_notification(),
            ]).values_list("message_id", flat=True)

        UserMessage.objects.filter(user_profile=user_profile,
                                   message__id__gt=prev_pointer,
                                   message__id__lte=pointer).extra(where=[UserMessage.where_unread()]) \
                           .update(flags=F('flags').bitor(UserMessage.flags.read))
        do_clear_mobile_push_notifications_for_ids(user_profile, app_message_ids)

    event = dict(type='pointer', pointer=pointer)
    send_event(user_profile.realm, event, [user_profile.id])

def do_update_user_status(user_profile: UserProfile,
                          away: Optional[bool],
                          status_text: Optional[str],
                          client_id: int) -> None:
    if away:
        status = UserStatus.AWAY
    else:
        status = UserStatus.NORMAL

    realm = user_profile.realm

    update_user_status(
        user_profile_id=user_profile.id,
        status=status,
        status_text=status_text,
        client_id=client_id,
    )

    event = dict(
        type='user_status',
        user_id=user_profile.id,
    )

    if away is not None:
        event['away'] = away

    if status_text is not None:
        event['status_text'] = status_text

    send_event(realm, event, active_user_ids(realm.id))

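# Event shape sketch (field values here are invented for illustration): a
# call with away=True and status_text='at lunch' broadcasts
#
#     {'type': 'user_status', 'user_id': 10,
#      'away': True, 'status_text': 'at lunch'}
#
# to all active users in the realm; fields left as None are omitted.
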
def do_mark_all_as_read(user_profile: UserProfile, client: Client) -> int:
    log_statsd_event('bankruptcy')

    msgs = UserMessage.objects.filter(
        user_profile=user_profile
    ).extra(
        where=[UserMessage.where_unread()]
    )

    count = msgs.update(
        flags=F('flags').bitor(UserMessage.flags.read)
    )

    event = dict(
        type='update_message_flags',
        operation='add',
        flag='read',
        messages=[],  # we don't send messages, since the client reloads anyway
        all=True
    )
    send_event(user_profile.realm, event, [user_profile.id])

    statsd.incr("mark_all_as_read", count)

    all_push_message_ids = UserMessage.objects.filter(
        user_profile=user_profile,
    ).extra(
        where=[UserMessage.where_active_push_notification()],
    ).values_list("message_id", flat=True)[0:10000]
    do_clear_mobile_push_notifications_for_ids(user_profile, all_push_message_ids)

    return count

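# A note on the F('flags').bitor(...) update above (bit values illustrative):
# if UserMessage.flags.read were the 1 bit, a row with flags == 0 becomes
# flags == 1, while rows that already had the bit set are unchanged, so the
# update is idempotent and runs as a single SQL statement.
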
def do_mark_stream_messages_as_read(user_profile: UserProfile,
                                    client: Client,
                                    stream: Stream,
                                    topic_name: Optional[str]=None) -> int:
    log_statsd_event('mark_stream_as_read')

    msgs = UserMessage.objects.filter(
        user_profile=user_profile
    )

    recipient = get_stream_recipient(stream.id)
    msgs = msgs.filter(message__recipient=recipient)

    if topic_name:
        msgs = filter_by_topic_name_via_message(
            query=msgs,
            topic_name=topic_name,
        )

    msgs = msgs.extra(
        where=[UserMessage.where_unread()]
    )

    message_ids = list(msgs.values_list('message__id', flat=True))

    count = msgs.update(
        flags=F('flags').bitor(UserMessage.flags.read)
    )

    event = dict(
        type='update_message_flags',
        operation='add',
        flag='read',
        messages=message_ids,
        all=False,
    )
    send_event(user_profile.realm, event, [user_profile.id])
    do_clear_mobile_push_notifications_for_ids(user_profile, message_ids)

    statsd.incr("mark_stream_as_read", count)
    return count

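# Usage sketch (objects invented for illustration): marking one topic read
#
#     count = do_mark_stream_messages_as_read(user_profile, client,
#                                             stream, topic_name='lunch')
#
# returns how many UserMessage rows in that stream/topic were newly flagged.
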
def do_clear_mobile_push_notifications_for_ids(user_profile: UserProfile,
                                               message_ids: List[int]) -> None:
    filtered_message_ids = list(UserMessage.objects.filter(
        message_id__in=message_ids,
        user_profile=user_profile,
    ).extra(
        where=[UserMessage.where_active_push_notification()],
    ).values_list('message_id', flat=True))

    num_detached = settings.MAX_UNBATCHED_REMOVE_NOTIFICATIONS - 1
    for message_id in filtered_message_ids[:num_detached]:
        # Older clients (all clients older than 2019-02-13) will only
        # see the first message ID in a given notification-message.
        # To help them out, send a few of these separately.
        queue_json_publish("missedmessage_mobile_notifications", {
            "type": "remove",
            "user_profile_id": user_profile.id,
            "message_ids": [message_id],
        })
    if filtered_message_ids[num_detached:]:
        queue_json_publish("missedmessage_mobile_notifications", {
            "type": "remove",
            "user_profile_id": user_profile.id,
            "message_ids": filtered_message_ids[num_detached:],
        })

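# Batching illustration (assuming, hypothetically, that
# settings.MAX_UNBATCHED_REMOVE_NOTIFICATIONS == 5): for 8 eligible message
# IDs, the first 4 are queued as single-ID "remove" events for the benefit
# of older clients, and the remaining 4 go out together in one batched event.
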
def do_update_message_flags(user_profile: UserProfile,
                            client: Client,
                            operation: str,
                            flag: str,
                            messages: List[int]) -> int:
    valid_flags = [item for item in UserMessage.flags
                   if item not in UserMessage.NON_API_FLAGS]
    if flag not in valid_flags:
        raise JsonableError(_("Invalid flag: '%s'") % (flag,))
    if flag in UserMessage.NON_EDITABLE_FLAGS:
        raise JsonableError(_("Flag not editable: '%s'") % (flag,))
    flagattr = getattr(UserMessage.flags, flag)

    assert messages is not None
    msgs = UserMessage.objects.filter(user_profile=user_profile,
                                      message__id__in=messages)
    # This next block allows you to star any message, even those you
    # didn't receive (e.g. because you're looking at a public stream
    # you're not subscribed to, etc.).  The problem is that starring
    # is a flag boolean on UserMessage, and UserMessage rows are
    # normally created only when you receive a message to support
    # searching your personal history.  So we need to create one.  We
    # add UserMessage.flags.historical, so that features that need
    # "messages you actually received" can exclude these UserMessages.
    if msgs.count() == 0:
        if len(messages) != 1:
            raise JsonableError(_("Invalid message(s)"))
        if flag != "starred":
            raise JsonableError(_("Invalid message(s)"))
        # Validate that the user could have read the relevant message.
        message = access_message(user_profile, messages[0])[0]

        # OK, this is a message that you legitimately have access
        # to via narrowing to the stream it is on, even though you
        # didn't actually receive it.  So we create a historical,
        # read UserMessage row for you to star.
        UserMessage.objects.create(user_profile=user_profile,
                                   message=message,
                                   flags=UserMessage.flags.historical | UserMessage.flags.read)

    if operation == 'add':
        count = msgs.update(flags=F('flags').bitor(flagattr))
    elif operation == 'remove':
        count = msgs.update(flags=F('flags').bitand(~flagattr))
    else:
        raise AssertionError("Invalid message flags operation")

    event = {'type': 'update_message_flags',
             'operation': operation,
             'flag': flag,
             'messages': messages,
             'all': False}
    send_event(user_profile.realm, event, [user_profile.id])

    if flag == "read" and operation == "add":
        do_clear_mobile_push_notifications_for_ids(user_profile, messages)

    statsd.incr("flags.%s.%s" % (flag, operation), count)
    return count

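# Hedged usage sketch (the message ID is invented): starring a single
# message you can access but never received still succeeds, via the
# historical-row fallback above:
#
#     do_update_message_flags(user_profile, client,
#                             operation='add', flag='starred',
#                             messages=[1234])
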
def subscribed_to_stream(user_profile: UserProfile, stream_id: int) -> bool:
    return Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream_id).exists()

def truncate_content(content: str, max_length: int, truncation_message: str) -> str:
    if len(content) > max_length:
        content = content[:max_length - len(truncation_message)] + truncation_message
    return content

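# Worked example (illustrative inputs): truncate_content("abcdefgh", 5, "...")
# keeps max_length - len(truncation_message) == 2 original characters and
# returns "ab...", so the result, including the marker, fits in max_length.
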
def truncate_body(body: str) -> str:
    return truncate_content(body, MAX_MESSAGE_LENGTH, "\n[message truncated]")

def truncate_topic(topic: str) -> str:
    return truncate_content(topic, MAX_TOPIC_NAME_LENGTH, "...")

MessageUpdateUserInfoResult = TypedDict('MessageUpdateUserInfoResult', {
    'message_user_ids': Set[int],
    'mention_user_ids': Set[int],
})

def get_user_info_for_message_updates(message_id: int) -> MessageUpdateUserInfoResult:

    # We exclude UserMessage.flags.historical rows since those
    # users did not receive the message originally, and thus
    # probably are not relevant for reprocessed alert_words,
    # mentions and similar rendering features.  This may be a
    # decision we change in the future.
    query = UserMessage.objects.filter(
        message=message_id,
        flags=~UserMessage.flags.historical
    ).values('user_profile_id', 'flags')
    rows = list(query)

    message_user_ids = {
        row['user_profile_id']
        for row in rows
    }

    mask = UserMessage.flags.mentioned | UserMessage.flags.wildcard_mentioned

    mention_user_ids = {
        row['user_profile_id']
        for row in rows
        if int(row['flags']) & mask
    }

    return dict(
        message_user_ids=message_user_ids,
        mention_user_ids=mention_user_ids,
    )

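# Sketch of the mask test above (bit values invented for illustration): if
# mentioned were the 8 bit and wildcard_mentioned the 16 bit, a row with
# flags == 9 (read | mentioned) passes `int(row['flags']) & mask` and its
# user lands in mention_user_ids.
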
def update_user_message_flags(message: Message, ums: Iterable[UserMessage]) -> None:
    wildcard = message.mentions_wildcard
    mentioned_ids = message.mentions_user_ids
    ids_with_alert_words = message.user_ids_with_alert_words
    changed_ums = set()  # type: Set[UserMessage]

    def update_flag(um: UserMessage, should_set: bool, flag: int) -> None:
        if should_set:
            if not (um.flags & flag):
                um.flags |= flag
                changed_ums.add(um)
        else:
            if (um.flags & flag):
                um.flags &= ~flag
                changed_ums.add(um)

    for um in ums:
        has_alert_word = um.user_profile_id in ids_with_alert_words
        update_flag(um, has_alert_word, UserMessage.flags.has_alert_word)

        mentioned = um.user_profile_id in mentioned_ids
        update_flag(um, mentioned, UserMessage.flags.mentioned)

        update_flag(um, wildcard, UserMessage.flags.wildcard_mentioned)

    for um in changed_ums:
        um.save(update_fields=['flags'])

def update_to_dict_cache(changed_messages: List[Message]) -> List[int]:
    """Updates the message as stored in the to_dict cache (for serving
    messages)."""
    items_for_remote_cache = {}
    message_ids = []
    for changed_message in changed_messages:
        message_ids.append(changed_message.id)
        key = to_dict_cache_key_id(changed_message.id)
        value = MessageDict.to_dict_uncached(changed_message)
        items_for_remote_cache[key] = (value,)

    cache_set_many(items_for_remote_cache)
    return message_ids

# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic
def do_update_embedded_data(user_profile: UserProfile,
                            message: Message,
                            content: Optional[str],
                            rendered_content: Optional[str]) -> None:
    event = {
        'type': 'update_message',
        'sender': user_profile.email,
        'message_id': message.id}  # type: Dict[str, Any]
    changed_messages = [message]

    ums = UserMessage.objects.filter(message=message.id)

    if content is not None:
        update_user_message_flags(message, ums)
        message.content = content
        message.rendered_content = rendered_content
        message.rendered_content_version = bugdown_version
        event["content"] = content
        event["rendered_content"] = rendered_content

    message.save(update_fields=["content", "rendered_content"])

    event['message_ids'] = update_to_dict_cache(changed_messages)

    def user_info(um: UserMessage) -> Dict[str, Any]:
        return {
            'id': um.user_profile_id,
            'flags': um.flags_list()
        }
    send_event(user_profile.realm, event, list(map(user_info, ums)))

# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic
def do_update_message(user_profile: UserProfile, message: Message, topic_name: Optional[str],
                      propagate_mode: str, content: Optional[str],
                      rendered_content: Optional[str], prior_mention_user_ids: Set[int],
                      mention_user_ids: Set[int]) -> int:
    """
    The main function for message editing.  A message edit event can
    modify:
    * the message's content (in which case the caller will have
      set both content and rendered_content),
    * the topic, in which case the caller will have set topic_name,
    * or both.

    With topic edits, propagate_mode determines whether other messages
    also have their topics edited.
    """
    event = {'type': 'update_message',
             # TODO: We probably want to remove the 'sender' field
             # after confirming it isn't used by any consumers.
             'sender': user_profile.email,
             'user_id': user_profile.id,
             'message_id': message.id}  # type: Dict[str, Any]
    edit_history_event = {
        'user_id': user_profile.id,
    }  # type: Dict[str, Any]
    changed_messages = [message]

    stream_being_edited = None
    if message.is_stream_message():
        stream_id = message.recipient.type_id
        stream_being_edited = Stream.objects.get(id=stream_id)
        event['stream_name'] = stream_being_edited.name

    ums = UserMessage.objects.filter(message=message.id)

    if content is not None:
        assert rendered_content is not None
        update_user_message_flags(message, ums)

        # One could imagine checking realm.allow_edit_history here and
        # modifying the events based on that setting, but doing so
        # doesn't really make sense.  We need to send the edit event
        # to clients regardless, and a client already had access to
        # the original/pre-edit content of the message anyway.  That
        # setting must be enforced on the client side, and making a
        # change here simply complicates the logic for clients parsing
        # edit history events.
        event['orig_content'] = message.content
        event['orig_rendered_content'] = message.rendered_content
        edit_history_event["prev_content"] = message.content
        edit_history_event["prev_rendered_content"] = message.rendered_content
        edit_history_event["prev_rendered_content_version"] = message.rendered_content_version
        message.content = content
        message.rendered_content = rendered_content
        message.rendered_content_version = bugdown_version
        event["content"] = content
        event["rendered_content"] = rendered_content
        event['prev_rendered_content_version'] = message.rendered_content_version
        event['is_me_message'] = Message.is_status_message(content, rendered_content)

        prev_content = edit_history_event['prev_content']
        if Message.content_has_attachment(prev_content) or Message.content_has_attachment(message.content):
            check_attachment_reference_change(prev_content, message)

        if message.is_stream_message():
            if topic_name is not None:
                new_topic_name = topic_name
            else:
                new_topic_name = message.topic_name()

            stream_topic = StreamTopicTarget(
                stream_id=stream_id,
                topic_name=new_topic_name,
            )  # type: Optional[StreamTopicTarget]
        else:
            stream_topic = None

        # TODO: We may want a slightly leaner version of this function
        # for updates.
        info = get_recipient_info(
            recipient=message.recipient,
            sender_id=message.sender_id,
            stream_topic=stream_topic,
        )

        event['push_notify_user_ids'] = list(info['push_notify_user_ids'])
        event['stream_push_user_ids'] = list(info['stream_push_user_ids'])
        event['stream_email_user_ids'] = list(info['stream_email_user_ids'])
        event['prior_mention_user_ids'] = list(prior_mention_user_ids)
        event['mention_user_ids'] = list(mention_user_ids)
        event['presence_idle_user_ids'] = filter_presence_idle_user_ids(info['active_user_ids'])

    if topic_name is not None:
        orig_topic_name = message.topic_name()
        topic_name = truncate_topic(topic_name)
        event["propagate_mode"] = propagate_mode
        message.set_topic_name(topic_name)
        event["stream_id"] = message.recipient.type_id

        # These fields have legacy field names.
        event[ORIG_TOPIC] = orig_topic_name
        event[TOPIC_NAME] = topic_name
        event[TOPIC_LINKS] = bugdown.topic_links(message.sender.realm_id, topic_name)
        edit_history_event[LEGACY_PREV_TOPIC] = orig_topic_name

        if propagate_mode in ["change_later", "change_all"]:
            messages_list = update_messages_for_topic_edit(
                message=message,
                propagate_mode=propagate_mode,
                orig_topic_name=orig_topic_name,
                topic_name=topic_name,
            )

            changed_messages += messages_list

    message.last_edit_time = timezone_now()
    assert message.last_edit_time is not None  # assert needed because stubs for django are missing
    event['edit_timestamp'] = datetime_to_timestamp(message.last_edit_time)
    edit_history_event['timestamp'] = event['edit_timestamp']
    if message.edit_history is not None:
        edit_history = ujson.loads(message.edit_history)
        edit_history.insert(0, edit_history_event)
    else:
        edit_history = [edit_history_event]
    message.edit_history = ujson.dumps(edit_history)

    # This does message.save(update_fields=[...])
    save_message_for_edit_use_case(message=message)

    event['message_ids'] = update_to_dict_cache(changed_messages)

    def user_info(um: UserMessage) -> Dict[str, Any]:
        return {
            'id': um.user_profile_id,
            'flags': um.flags_list()
        }

    def subscriber_info(user_id: int) -> Dict[str, Any]:
        return {
            'id': user_id,
            'flags': ['read']
        }

    # The following block arranges that users who are subscribed to a
    # stream and can see history from before they subscribed get
    # live-updates when old messages are edited (e.g. if the user does
    # a topic edit themselves).
    #
    # We still don't send an update event to users who are not
    # subscribed to this stream and don't have a UserMessage row.  This
    # means if a non-subscriber is viewing the narrow, they won't get
    # real-time updates.  This is a balance between sending
    # message-edit notifications for every public stream to every user
    # in the organization (too expensive, and also not what we do for
    # newly sent messages anyway) and having magical live-updates
    # where possible.
    users_to_be_notified = list(map(user_info, ums))
    if stream_being_edited is not None:
        if stream_being_edited.is_history_public_to_subscribers:
            subscribers = get_active_subscriptions_for_stream_id(stream_id)
            # We exclude long-term idle users, since they by definition have no active clients.
            subscribers = subscribers.exclude(user_profile__long_term_idle=True)
            # Remove duplicates by excluding the IDs of users already in the
            # users_to_be_notified list.  This handles the case where a user
            # both has a UserMessage row and is a current subscriber.
            subscribers = subscribers.exclude(user_profile_id__in=[um.user_profile_id for um in ums])
            # All users who are subscribed to the stream must be notified when a message is edited.
            subscribers_ids = [user.user_profile_id for user in subscribers]
            users_to_be_notified += list(map(subscriber_info, subscribers_ids))

    send_event(user_profile.realm, event, users_to_be_notified)
    return len(changed_messages)

def do_delete_messages(user_profile: UserProfile, messages: Iterable[Message]) -> None:
    message_ids = []
    for message in messages:
        message_ids.append(message.id)
        message_type = "stream"
        if not message.is_stream_message():
            message_type = "private"

        event = {
            'type': 'delete_message',
            'sender': message.sender.email,
            'sender_id': message.sender_id,
            'message_id': message.id,
            'message_type': message_type,
        }  # type: Dict[str, Any]
        if message_type == "stream":
            event['stream_id'] = message.recipient.type_id
            event['topic'] = message.topic_name()
        else:
            event['recipient_id'] = message.recipient_id

        # TODO: Each part of the following should be changed to bulk
        # queries, since right now if you delete 1000 messages, you'll
        # end up doing 1000 database queries in a loop and timing out.
        ums = [{'id': um.user_profile_id} for um in
               UserMessage.objects.filter(message=message.id)]
        move_messages_to_archive([message.id])
        send_event(user_profile.realm, event, ums)

def do_delete_messages_by_sender(user: UserProfile) -> None:
    message_ids = Message.objects.filter(sender=user).values_list('id', flat=True).order_by('id')
    if message_ids:
        move_messages_to_archive(message_ids)

def get_streams_traffic(stream_ids: Set[int]) -> Dict[int, int]:
    stat = COUNT_STATS['messages_in_stream:is_bot:day']
    traffic_from = timezone_now() - datetime.timedelta(days=28)

    query = StreamCount.objects.filter(property=stat.property,
                                       end_time__gt=traffic_from)
    query = query.filter(stream_id__in=stream_ids)

    traffic_list = query.values('stream_id').annotate(value=Sum('value'))
    traffic_dict = {}
    for traffic in traffic_list:
        traffic_dict[traffic["stream_id"]] = traffic["value"]

    return traffic_dict

def round_to_2_significant_digits(number: int) -> int:
    return int(round(number, 2 - len(str(number))))

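# Worked examples (illustrative): round_to_2_significant_digits(1234)
# computes int(round(1234, -2)) == 1200, while round_to_2_significant_digits(87)
# computes int(round(87, 0)) and returns 87 unchanged.
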
STREAM_TRAFFIC_CALCULATION_MIN_AGE_DAYS = 7

def get_average_weekly_stream_traffic(stream_id: int, stream_date_created: datetime.datetime,
                                      recent_traffic: Dict[int, int]) -> Optional[int]:
    try:
        stream_traffic = recent_traffic[stream_id]
    except KeyError:
        stream_traffic = 0

    stream_age = (timezone_now() - stream_date_created).days

    if stream_age >= 28:
        average_weekly_traffic = int(stream_traffic // 4)
    elif stream_age >= STREAM_TRAFFIC_CALCULATION_MIN_AGE_DAYS:
        average_weekly_traffic = int(stream_traffic * 7 // stream_age)
    else:
        return None

    if average_weekly_traffic == 0 and stream_traffic > 0:
        average_weekly_traffic = 1

    return round_to_2_significant_digits(average_weekly_traffic)

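# Worked example (numbers invented): a stream created 10 days ago with 30
# messages in the 28-day window yields int(30 * 7 // 10) == 21, which the
# rounding helper leaves as 21; a stream only 3 days old returns None, since
# there is too little data to extrapolate a weekly average.
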
def is_old_stream(stream_date_created: datetime.datetime) -> bool:
    return (timezone_now() - stream_date_created).days \
        >= STREAM_TRAFFIC_CALCULATION_MIN_AGE_DAYS

SubHelperT = Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]]

def get_web_public_subs(realm: Realm) -> SubHelperT:
    color_idx = 0

    def get_next_color() -> str:
        nonlocal color_idx
        color = STREAM_ASSIGNMENT_COLORS[color_idx]
        color_idx = (color_idx + 1) % len(STREAM_ASSIGNMENT_COLORS)
        return color

    subscribed = [
        {'name': stream.name,
         'is_muted': False,
         'invite_only': False,
         'is_announcement_only': stream.is_announcement_only,
         'color': get_next_color(),
         'desktop_notifications': True,
         'audible_notifications': True,
         'push_notifications': False,
         'pin_to_top': False,
         'stream_id': stream.id,
         'description': stream.description,
         'rendered_description': stream.rendered_description,
         'is_old_stream': is_old_stream(stream.date_created),
         'first_message_id': stream.first_message_id,
         'stream_weekly_traffic': get_average_weekly_stream_traffic(stream.id,
                                                                    stream.date_created,
                                                                    {}),
         'email_address': ''}
        for stream in Stream.objects.filter(realm=realm, is_web_public=True, deactivated=False)]
    return (subscribed, [], [])

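# Color assignment note (a sketch of the behavior above): get_next_color()
# walks STREAM_ASSIGNMENT_COLORS round-robin, so with a hypothetical palette
# of 3 colors, the 4th web-public stream wraps back to the first color.
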
# In general, it's better to avoid using .values() because it makes
# the code pretty ugly, but in this case, it has significant
# performance impact for loading / for users with large numbers of
# subscriptions, so it's worth optimizing.
def gather_subscriptions_helper(user_profile: UserProfile,
                                include_subscribers: bool=True) -> SubHelperT:
    sub_dicts = get_stream_subscriptions_for_user(user_profile).values(
        "recipient_id", "is_muted", "color", "desktop_notifications",
        "audible_notifications", "push_notifications", "email_notifications",
        "active", "pin_to_top"
    ).order_by("recipient_id")

    sub_dicts = list(sub_dicts)
    sub_recipient_ids = [
        sub['recipient_id']
        for sub in sub_dicts
    ]
    stream_recipient = StreamRecipientMap()
    stream_recipient.populate_for_recipient_ids(sub_recipient_ids)

    stream_ids = set()  # type: Set[int]
    for sub in sub_dicts:
        sub['stream_id'] = stream_recipient.stream_id_for(sub['recipient_id'])
        stream_ids.add(sub['stream_id'])

    recent_traffic = get_streams_traffic(stream_ids=stream_ids)

    all_streams = get_active_streams(user_profile.realm).select_related(
        "realm").values("id", "name", "invite_only", "is_announcement_only", "realm_id",
                        "email_token", "description", "rendered_description", "date_created",
                        "history_public_to_subscribers", "first_message_id", "is_web_public")

    stream_dicts = [stream for stream in all_streams if stream['id'] in stream_ids]
    stream_hash = {}
    for stream in stream_dicts:
        stream_hash[stream["id"]] = stream

    all_streams_id = [stream["id"] for stream in all_streams]

    subscribed = []
    unsubscribed = []
    never_subscribed = []

    # Deactivated streams aren't in stream_hash.
    streams = [stream_hash[sub["stream_id"]] for sub in sub_dicts
               if sub["stream_id"] in stream_hash]
    streams_subscribed_map = dict((sub["stream_id"], sub["active"]) for sub in sub_dicts)

    # Add never subscribed streams to streams_subscribed_map
    streams_subscribed_map.update({stream['id']: False for stream in all_streams if stream not in streams})

    if include_subscribers:
        subscriber_map = bulk_get_subscriber_user_ids(
            all_streams,
            user_profile,
            streams_subscribed_map,
            stream_recipient
        )  # type: Mapping[int, Optional[List[int]]]
    else:
        # If we're not including subscribers, always return None,
        # which the below code needs to check for anyway.
        subscriber_map = defaultdict(lambda: None)

    sub_unsub_stream_ids = set()
    for sub in sub_dicts:
        sub_unsub_stream_ids.add(sub["stream_id"])
        stream = stream_hash.get(sub["stream_id"])
        if not stream:
            # This stream has been deactivated, don't include it.
            continue

        subscribers = subscriber_map[stream["id"]]  # type: Optional[List[int]]

        # Important: don't show the subscribers if the stream is invite only
        # and this user isn't on it anymore (or a realm administrator).
        if stream["invite_only"] and not (sub["active"] or user_profile.is_realm_admin):
            subscribers = None

        # Guest users lose access to subscribers when they are unsubscribed.
        if not sub["active"] and user_profile.is_guest:
            subscribers = None

        stream_dict = {'name': stream["name"],
                       'in_home_view': not sub["is_muted"],
                       'is_muted': sub["is_muted"],
                       'invite_only': stream["invite_only"],
                       'is_web_public': stream["is_web_public"],
                       'is_announcement_only': stream["is_announcement_only"],
                       'color': sub["color"],
                       'desktop_notifications': sub["desktop_notifications"],
                       'audible_notifications': sub["audible_notifications"],
                       'push_notifications': sub["push_notifications"],
                       'email_notifications': sub["email_notifications"],
                       'pin_to_top': sub["pin_to_top"],
                       'stream_id': stream["id"],
                       'first_message_id': stream["first_message_id"],
                       'description': stream["description"],
                       'rendered_description': stream["rendered_description"],
                       'is_old_stream': is_old_stream(stream["date_created"]),
                       'stream_weekly_traffic': get_average_weekly_stream_traffic(stream["id"],
                                                                                  stream["date_created"],
                                                                                  recent_traffic),
                       'email_address': encode_email_address_helper(stream["name"], stream["email_token"]),
                       'history_public_to_subscribers': stream['history_public_to_subscribers']}

        if subscribers is not None:
            stream_dict['subscribers'] = subscribers
        if sub["active"]:
            subscribed.append(stream_dict)
        else:
            unsubscribed.append(stream_dict)

    all_streams_id_set = set(all_streams_id)
    if user_profile.can_access_public_streams():
        never_subscribed_stream_ids = all_streams_id_set - sub_unsub_stream_ids
    else:
        never_subscribed_stream_ids = set()
    never_subscribed_streams = [ns_stream_dict for ns_stream_dict in all_streams
                                if ns_stream_dict['id'] in never_subscribed_stream_ids]

    for stream in never_subscribed_streams:
        is_public = (not stream['invite_only'])
        if is_public or user_profile.is_realm_admin:
            stream_dict = {'name': stream['name'],
                           'invite_only': stream['invite_only'],
                           'is_web_public': stream['is_web_public'],
                           'is_announcement_only': stream['is_announcement_only'],
                           'stream_id': stream['id'],
                           'first_message_id': stream["first_message_id"],
                           'is_old_stream': is_old_stream(stream["date_created"]),
                           'stream_weekly_traffic': get_average_weekly_stream_traffic(stream["id"],
                                                                                      stream["date_created"],
                                                                                      recent_traffic),
                           'description': stream['description'],
                           'rendered_description': stream["rendered_description"],
                           'history_public_to_subscribers': stream['history_public_to_subscribers']}
            if is_public or user_profile.is_realm_admin:
                subscribers = subscriber_map[stream["id"]]
                if subscribers is not None:
                    stream_dict['subscribers'] = subscribers
            never_subscribed.append(stream_dict)

    return (sorted(subscribed, key=lambda x: x['name']),
            sorted(unsubscribed, key=lambda x: x['name']),
            sorted(never_subscribed, key=lambda x: x['name']))

def gather_subscriptions(user_profile: UserProfile) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
    subscribed, unsubscribed, never_subscribed = gather_subscriptions_helper(user_profile)
    user_ids = set()
    for subs in [subscribed, unsubscribed, never_subscribed]:
        for sub in subs:
            if 'subscribers' in sub:
                for subscriber in sub['subscribers']:
                    user_ids.add(subscriber)
    email_dict = get_emails_from_user_ids(list(user_ids))

    for subs in [subscribed, unsubscribed]:
        for sub in subs:
            if 'subscribers' in sub:
                sub['subscribers'] = sorted([email_dict[user_id] for user_id in sub['subscribers']])

    return (subscribed, unsubscribed)

def get_active_presence_idle_user_ids(realm: Realm,
                                      sender_id: int,
                                      message_type: str,
                                      active_user_ids: Set[int],
                                      user_flags: Dict[int, List[str]]) -> List[int]:
    '''
    Given a list of active_user_ids, we build up a subset
    of those users who fit these criteria:

        * They are likely to need notifications (either due
          to mentions or being PM'ed).
        * They are no longer "present" according to the
          UserPresence table.
    '''

    if realm.presence_disabled:
        return []

    is_pm = message_type == 'private'

    user_ids = set()
    for user_id in active_user_ids:
        flags = user_flags.get(user_id, [])  # type: Iterable[str]
        mentioned = 'mentioned' in flags
        private_message = is_pm and user_id != sender_id
        if mentioned or private_message:
            user_ids.add(user_id)

    return filter_presence_idle_user_ids(user_ids)

def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
    if not user_ids:
        return []

    # 140 seconds is consistent with presence.js:OFFLINE_THRESHOLD_SECS
    recent = timezone_now() - datetime.timedelta(seconds=140)
    rows = UserPresence.objects.filter(
        user_profile_id__in=user_ids,
        status=UserPresence.ACTIVE,
        timestamp__gte=recent
    ).distinct('user_profile_id').values('user_profile_id')
    active_user_ids = {row['user_profile_id'] for row in rows}
    idle_user_ids = user_ids - active_user_ids
    return sorted(list(idle_user_ids))

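# Hedged illustration: given user_ids == {1, 2, 3}, if only user 2 has an
# ACTIVE UserPresence row newer than 140 seconds, the function returns
# [1, 3] -- the users who look idle and so may need offline notifications.
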
def get_status_dict(requesting_user_profile: UserProfile) -> Dict[str, Dict[str, Dict[str, Any]]]:
    if requesting_user_profile.realm.presence_disabled:
        # Return an empty dict if presence is disabled in this realm
        return defaultdict(dict)


    return UserPresence.get_status_dict_by_realm(requesting_user_profile.realm_id)

def get_cross_realm_dicts() -> List[Dict[str, Any]]:
    users = bulk_get_users(list(settings.CROSS_REALM_BOT_EMAILS), None,
                           base_query=UserProfile.objects.filter(
                               realm__string_id=settings.SYSTEM_BOT_REALM)).values()
    return [{'email': user.email,
             'user_id': user.id,
             'is_admin': user.is_realm_admin,
             'is_bot': user.is_bot,
             'avatar_url': avatar_url(user),
             'timezone': user.timezone,
             'date_joined': user.date_joined.isoformat(),
             'full_name': user.full_name}
            for user in users
            # Important: We filter here, in addition to in
            # `base_query`, because of how bulk_get_users shares its
            # cache with other UserProfile caches.
            if user.realm.string_id == settings.SYSTEM_BOT_REALM]

def do_send_confirmation_email(invitee: PreregistrationUser,
                               referrer: UserProfile) -> None:
    """
    Send the confirmation/welcome e-mail to an invited user.
    """
    activation_url = create_confirmation_link(invitee, referrer.realm.host, Confirmation.INVITATION)
    context = {'referrer_full_name': referrer.full_name, 'referrer_email': referrer.email,
               'activate_url': activation_url, 'referrer_realm_name': referrer.realm.name}
    from_name = "%s (via Zulip)" % (referrer.full_name,)
    send_email('zerver/emails/invitation', to_emails=[invitee.email], from_name=from_name,
               from_address=FromAddress.tokenized_no_reply_address(),
               language=referrer.realm.default_language, context=context)

def email_not_system_bot(email: str) -> None:
    if is_cross_realm_bot_email(email):
        raise ValidationError('%s is an email address reserved for system bots' % (email,))

def validate_email_for_realm(target_realm: Realm, email: str) -> None:
    email_not_system_bot(email)

    try:
        existing_user_profile = get_user_by_delivery_email(email, target_realm)
    except UserProfile.DoesNotExist:
        return

    if existing_user_profile.is_active:
        if existing_user_profile.is_mirror_dummy:
            raise AssertionError("Mirror dummy user is already active!")
        # Other users should not already exist at all.
        raise ValidationError('%s already has an account' % (email,),
                              code=_("Already has an account."))
    elif not existing_user_profile.is_mirror_dummy:
        raise ValidationError('The account for %s has been deactivated' % (email,),
                              code=_("Account has been deactivated."))

def validate_email(user_profile: UserProfile, email: str) -> Tuple[Optional[str], Optional[str]]:
    try:
        validators.validate_email(email)
    except ValidationError:
        return _("Invalid address."), None

    try:
        email_allowed_for_realm(email, user_profile.realm)
    except DomainNotAllowedForRealmError:
        return _("Outside your domain."), None
    except DisposableEmailError:
        return _("Please use your real email address."), None
    except EmailContainsPlusError:
        return _("Email addresses containing + are not allowed."), None

    try:
        validate_email_for_realm(user_profile.realm, email)
    except ValidationError as error:
        return None, error.code

    return None, None

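# Hedged usage note (illustrative; the address is hypothetical):
# validate_email returns an (error, skipped) pair, so callers can separate
# hard failures from addresses that merely already have accounts.
#
#     error, skipped = validate_email(user_profile, 'someone@example.com')
#     if error:       # e.g. "Invalid address." -- fails the whole batch
#         ...
#     elif skipped:   # e.g. "Already has an account." -- skipped quietly
#         ...
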
class InvitationError(JsonableError):
    code = ErrorCode.INVITATION_FAILED
    data_fields = ['errors', 'sent_invitations']

    def __init__(self, msg: str, errors: List[Tuple[str, str]], sent_invitations: bool) -> None:
        self._msg = msg  # type: str
        self.errors = errors  # type: List[Tuple[str, str]]
        self.sent_invitations = sent_invitations  # type: bool

def estimate_recent_invites(realms: Iterable[Realm], *, days: int) -> int:
    '''An upper bound on the number of invites sent in the last `days` days'''
    recent_invites = RealmCount.objects.filter(
        realm__in=realms,
        property='invites_sent::day',
        end_time__gte=timezone_now() - datetime.timedelta(days=days)
    ).aggregate(Sum('value'))['value__sum']
    if recent_invites is None:
        return 0
    return recent_invites

def check_invite_limit(realm: Realm, num_invitees: int) -> None:
    '''Discourage using invitation emails as a vector for carrying spam.'''
    msg = _("You do not have enough remaining invites. "
            "Please contact %s to have your limit raised. "
            "No invitations were sent.") % (settings.ZULIP_ADMINISTRATOR,)
    if not settings.OPEN_REALM_CREATION:
        return

    recent_invites = estimate_recent_invites([realm], days=1)
    if num_invitees + recent_invites > realm.max_invites:
        raise InvitationError(msg, [], sent_invitations=False)

    default_max = settings.INVITES_DEFAULT_REALM_DAILY_MAX
    newrealm_age = datetime.timedelta(days=settings.INVITES_NEW_REALM_DAYS)
    if realm.date_created <= timezone_now() - newrealm_age:
        # If this isn't a "newly-created" realm, we're done. The
        # remaining code applies an aggregate limit across all
        # "new" realms, to address sudden bursts of spam realms.
        return

    if realm.max_invites > default_max:
        # If a user is on a realm where we've bumped up
        # max_invites, then we exempt them from invite limits.
        return

    new_realms = Realm.objects.filter(
        date_created__gte=timezone_now() - newrealm_age,
        _max_invites__lte=default_max,
    ).all()

    for days, count in settings.INVITES_NEW_REALM_LIMIT_DAYS:
        recent_invites = estimate_recent_invites(new_realms, days=days)
        if num_invitees + recent_invites > count:
            raise InvitationError(msg, [], sent_invitations=False)

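# Worked example with hypothetical numbers: if realm.max_invites is 100 and
# estimate_recent_invites reports 95 invites over the last day, asking for
# 6 more trips the per-realm check (6 + 95 > 100) and raises InvitationError
# before anything is queued; asking for 5 would fall through to the
# aggregate new-realm checks above.
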
def do_invite_users(user_profile: UserProfile,
                    invitee_emails: SizedTextIterable,
                    streams: Iterable[Stream],
                    invite_as: Optional[int]=PreregistrationUser.INVITE_AS['MEMBER']) -> None:

    check_invite_limit(user_profile.realm, len(invitee_emails))

    realm = user_profile.realm
    if not realm.invite_required:
        # Inhibit joining an open realm to send spam invitations.
        min_age = datetime.timedelta(days=settings.INVITES_MIN_USER_AGE_DAYS)
        if (user_profile.date_joined > timezone_now() - min_age
                and not user_profile.is_realm_admin):
            raise InvitationError(
                _("Your account is too new to send invites for this organization. "
                  "Ask an organization admin, or a more experienced user."),
                [], sent_invitations=False)

    validated_emails = []  # type: List[str]
    errors = []  # type: List[Tuple[str, str]]
    skipped = []  # type: List[Tuple[str, str]]
    for email in invitee_emails:
        if email == '':
            continue
        email_error, email_skipped = validate_email(user_profile, email)
        if not (email_error or email_skipped):
            validated_emails.append(email)
        elif email_error:
            errors.append((email, email_error))
        elif email_skipped:
            skipped.append((email, email_skipped))

    if errors:
        raise InvitationError(
            _("Some emails did not validate, so we didn't send any invitations."),
            errors + skipped, sent_invitations=False)

    if skipped and len(skipped) == len(invitee_emails):
        # All e-mails were skipped, so we didn't actually invite anyone.
        raise InvitationError(_("We weren't able to invite anyone."),
                              skipped, sent_invitations=False)

    # We do this here rather than in the invite queue processor since this
    # is used for rate limiting invitations, rather than keeping track of
    # when exactly invitations were sent.
    do_increment_logging_stat(user_profile.realm, COUNT_STATS['invites_sent::day'],
                              None, timezone_now(), increment=len(validated_emails))

    # Now that we are past all the possible errors, we actually create
    # the PreregistrationUser objects and trigger the email invitations.
    for email in validated_emails:
        # The logged in user is the referrer.
        prereg_user = PreregistrationUser(email=email, referred_by=user_profile,
                                          invited_as=invite_as,
                                          realm=user_profile.realm)
        prereg_user.save()
        stream_ids = [stream.id for stream in streams]
        prereg_user.streams.set(stream_ids)

        event = {"prereg_id": prereg_user.id, "referrer_id": user_profile.id}
        queue_json_publish("invites", event)

    if skipped:
        raise InvitationError(_("Some of those addresses are already using Zulip, "
                                "so we didn't send them an invitation. We did send "
                                "invitations to everyone else!"),
                              skipped, sent_invitations=True)

    notify_invites_changed(user_profile)

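# Illustrative call (hypothetical addresses and stream object): inviting two
# users as regular members; failures surface as an InvitationError carrying
# per-address detail in .errors.
#
#     do_invite_users(admin, ['a@example.com', 'b@example.com'],
#                     streams=[stream],
#                     invite_as=PreregistrationUser.INVITE_AS['MEMBER'])
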
def do_get_user_invites(user_profile: UserProfile) -> List[Dict[str, Any]]:
    days_to_activate = settings.INVITATION_LINK_VALIDITY_DAYS
    active_value = getattr(confirmation_settings, 'STATUS_ACTIVE', 1)

    lowest_datetime = timezone_now() - datetime.timedelta(days=days_to_activate)
    prereg_users = PreregistrationUser.objects.exclude(status=active_value).filter(
        invited_at__gte=lowest_datetime,
        referred_by__realm=user_profile.realm)

    invites = []

    for invitee in prereg_users:
        invites.append(dict(email=invitee.email,
                            ref=invitee.referred_by.email,
                            invited=datetime_to_timestamp(invitee.invited_at),
                            id=invitee.id,
                            invited_as=invitee.invited_as,
                            is_multiuse=False))

    multiuse_confirmation_objs = Confirmation.objects.filter(realm=user_profile.realm,
                                                             type=Confirmation.MULTIUSE_INVITE,
                                                             date_sent__gte=lowest_datetime)
    for confirmation_obj in multiuse_confirmation_objs:
        invite = confirmation_obj.content_object
        invites.append(dict(ref=invite.referred_by.email,
                            invited=datetime_to_timestamp(confirmation_obj.date_sent),
                            id=invite.id,
                            link_url=confirmation_url(confirmation_obj.confirmation_key,
                                                      user_profile.realm.host,
                                                      Confirmation.MULTIUSE_INVITE),
                            invited_as=invite.invited_as,
                            is_multiuse=True))
    return invites

def do_create_multiuse_invite_link(referred_by: UserProfile, invited_as: int,
                                   streams: Optional[List[Stream]]=[]) -> str:
    realm = referred_by.realm
    invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)
    if streams:
        invite.streams.set(streams)
    invite.invited_as = invited_as
    invite.save()
    notify_invites_changed(referred_by)
    return create_confirmation_link(invite, realm.host, Confirmation.MULTIUSE_INVITE)

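# Minimal sketch (hypothetical caller): a reusable invite link with no
# preset stream subscriptions, signing people up as regular members.
#
#     link = do_create_multiuse_invite_link(
#         admin, PreregistrationUser.INVITE_AS['MEMBER'])
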
def do_revoke_user_invite(prereg_user: PreregistrationUser) -> None:
    email = prereg_user.email

    # Delete both the confirmation objects and the prereg_user object.
    # TODO: Probably we actually want to set the confirmation objects
    # to a "revoked" status so that we can give the invited user a better
    # error message.
    content_type = ContentType.objects.get_for_model(PreregistrationUser)
    Confirmation.objects.filter(content_type=content_type,
                                object_id=prereg_user.id).delete()
    prereg_user.delete()
    clear_scheduled_invitation_emails(email)
    notify_invites_changed(prereg_user)

def do_revoke_multi_use_invite(multiuse_invite: MultiuseInvite) -> None:
    content_type = ContentType.objects.get_for_model(MultiuseInvite)
    Confirmation.objects.filter(content_type=content_type,
                                object_id=multiuse_invite.id).delete()
    multiuse_invite.delete()
    notify_invites_changed(multiuse_invite.referred_by)

def do_resend_user_invite_email(prereg_user: PreregistrationUser) -> int:
    # These two asserts are structurally true for the caller's code path.
    assert prereg_user.referred_by is not None
    assert prereg_user.realm is not None

    check_invite_limit(prereg_user.referred_by.realm, 1)

    prereg_user.invited_at = timezone_now()
    prereg_user.save()

    do_increment_logging_stat(prereg_user.realm, COUNT_STATS['invites_sent::day'],
                              None, prereg_user.invited_at)

    clear_scheduled_invitation_emails(prereg_user.email)
    # We don't store the custom email body, so just set it to None
    event = {"prereg_id": prereg_user.id, "referrer_id": prereg_user.referred_by.id, "email_body": None}
    queue_json_publish("invites", event)

    return datetime_to_timestamp(prereg_user.invited_at)

def notify_realm_emoji(realm: Realm) -> None:
    event = dict(type="realm_emoji", op="update",
                 realm_emoji=realm.get_emoji())
    send_event(realm, event, active_user_ids(realm.id))

def check_add_realm_emoji(realm: Realm,
                          name: str,
                          author: UserProfile,
                          image_file: File) -> Optional[RealmEmoji]:
    realm_emoji = RealmEmoji(realm=realm, name=name, author=author)
    realm_emoji.full_clean()
    realm_emoji.save()

    emoji_file_name = get_emoji_file_name(image_file.name, realm_emoji.id)
    emoji_uploaded_successfully = False
    try:
        upload_emoji_image(image_file, emoji_file_name, author)
        emoji_uploaded_successfully = True
    finally:
        if not emoji_uploaded_successfully:
            realm_emoji.delete()
            return None
        else:
            realm_emoji.file_name = emoji_file_name
            realm_emoji.save(update_fields=['file_name'])
            notify_realm_emoji(realm_emoji.realm)
    return realm_emoji

def do_remove_realm_emoji(realm: Realm, name: str) -> None:
    emoji = RealmEmoji.objects.get(realm=realm, name=name, deactivated=False)
    emoji.deactivated = True
    emoji.save(update_fields=['deactivated'])
    notify_realm_emoji(realm)

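# Illustrative round-trip (hypothetical emoji name and file path):
#
#     with open('/tmp/party_parrot.png', 'rb') as f:
#         check_add_realm_emoji(realm, 'party_parrot', author, File(f))
#     do_remove_realm_emoji(realm, 'party_parrot')
#
# Note that removal only flips `deactivated` rather than deleting the row,
# so the record survives for any references in old messages.
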
def notify_alert_words(user_profile: UserProfile, words: Iterable[str]) -> None:
    event = dict(type="alert_words", alert_words=words)
    send_event(user_profile.realm, event, [user_profile.id])

def do_add_alert_words(user_profile: UserProfile, alert_words: Iterable[str]) -> None:
    words = add_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, words)

def do_remove_alert_words(user_profile: UserProfile, alert_words: Iterable[str]) -> None:
    words = remove_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, words)

def do_set_alert_words(user_profile: UserProfile, alert_words: List[str]) -> None:
    set_user_alert_words(user_profile, alert_words)
    notify_alert_words(user_profile, alert_words)

def do_mute_topic(user_profile: UserProfile, stream: Stream, recipient: Recipient, topic: str) -> None:
    add_topic_mute(user_profile, stream.id, recipient.id, topic)
    event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
    send_event(user_profile.realm, event, [user_profile.id])

def do_unmute_topic(user_profile: UserProfile, stream: Stream, topic: str) -> None:
    remove_topic_mute(user_profile, stream.id, topic)
    event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
    send_event(user_profile.realm, event, [user_profile.id])

def do_mark_hotspot_as_read(user: UserProfile, hotspot: str) -> None:
    UserHotspot.objects.get_or_create(user=user, hotspot=hotspot)
    event = dict(type="hotspots", hotspots=get_next_hotspots(user))
    send_event(user.realm, event, [user.id])

def notify_realm_filters(realm: Realm) -> None:
    realm_filters = realm_filters_for_realm(realm.id)
    event = dict(type="realm_filters", realm_filters=realm_filters)
    send_event(realm, event, active_user_ids(realm.id))

# NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
# RegExp syntax. In addition to JS-compatible syntax, the following features are available:
#   * Named groups will be converted to numbered groups automatically
#   * Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
def do_add_realm_filter(realm: Realm, pattern: str, url_format_string: str) -> int:
    pattern = pattern.strip()
    url_format_string = url_format_string.strip()
    realm_filter = RealmFilter(
        realm=realm, pattern=pattern,
        url_format_string=url_format_string)
    realm_filter.full_clean()
    realm_filter.save()
    notify_realm_filters(realm)

    return realm_filter.id

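# Illustrative linkifier (hypothetical pattern and URL): turn references
# like TICKET-1234 into tracker links; named groups in the pattern feed the
# %(name)s placeholders in url_format_string.
#
#     do_add_realm_filter(realm, r'TICKET-(?P<id>[0-9]+)',
#                         'https://tracker.example.com/ticket/%(id)s')
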
def do_remove_realm_filter(realm: Realm, pattern: Optional[str]=None,
                           id: Optional[int]=None) -> None:
    if pattern is not None:
        RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
    else:
        RealmFilter.objects.get(realm=realm, pk=id).delete()
    notify_realm_filters(realm)

def get_emails_from_user_ids(user_ids: Sequence[int]) -> Dict[int, str]:
    # We may eventually use memcached to speed this up, but the DB is fast.
    return UserProfile.emails_from_ids(user_ids)

def do_add_realm_domain(realm: Realm, domain: str, allow_subdomains: bool) -> RealmDomain:
    realm_domain = RealmDomain.objects.create(realm=realm, domain=domain,
                                              allow_subdomains=allow_subdomains)
    event = dict(type="realm_domains", op="add",
                 realm_domain=dict(domain=realm_domain.domain,
                                   allow_subdomains=realm_domain.allow_subdomains))
    send_event(realm, event, active_user_ids(realm.id))
    return realm_domain

def do_change_realm_domain(realm_domain: RealmDomain, allow_subdomains: bool) -> None:
    realm_domain.allow_subdomains = allow_subdomains
    realm_domain.save(update_fields=['allow_subdomains'])
    event = dict(type="realm_domains", op="change",
                 realm_domain=dict(domain=realm_domain.domain,
                                   allow_subdomains=realm_domain.allow_subdomains))
    send_event(realm_domain.realm, event, active_user_ids(realm_domain.realm_id))

def do_remove_realm_domain(realm_domain: RealmDomain) -> None:
    realm = realm_domain.realm
    domain = realm_domain.domain
    realm_domain.delete()
    if RealmDomain.objects.filter(realm=realm).count() == 0 and realm.emails_restricted_to_domains:
        # If this was the last realm domain, we mark the realm as no
        # longer restricted to domain, because the feature doesn't do
        # anything if there are no domains, and this is probably less
        # confusing than the alternative.
        do_set_realm_property(realm, 'emails_restricted_to_domains', False)
    event = dict(type="realm_domains", op="remove", domain=domain)
    send_event(realm, event, active_user_ids(realm.id))

def get_occupied_streams(realm: Realm) -> QuerySet:
    # TODO: Make a generic stub for QuerySet
    """ Get streams with subscribers """
    subs_filter = Subscription.objects.filter(active=True, user_profile__realm=realm,
                                              user_profile__is_active=True).values('recipient_id')
    stream_ids = Recipient.objects.filter(
        type=Recipient.STREAM, id__in=subs_filter).values('type_id')

    return Stream.objects.filter(id__in=stream_ids, realm=realm, deactivated=False)

def get_web_public_streams(realm: Realm) -> List[Dict[str, Any]]:
    query = Stream.objects.filter(realm=realm, deactivated=False, is_web_public=True)
    streams = [row.to_dict() for row in query]
    return streams

def do_get_streams(
        user_profile: UserProfile, include_public: bool=True,
        include_subscribed: bool=True, include_all_active: bool=False,
        include_default: bool=False, include_owner_subscribed: bool=False
) -> List[Dict[str, Any]]:
    if include_all_active and not user_profile.is_api_super_user:
        raise JsonableError(_("User not authorized for this query"))

    include_public = include_public and user_profile.can_access_public_streams()
    # Start out with all streams in the realm with subscribers
    query = get_occupied_streams(user_profile.realm)

    if not include_all_active:
        user_subs = get_stream_subscriptions_for_user(user_profile).filter(
            active=True,
        ).select_related('recipient')

        # We construct a query as the or (|) of the various sources
        # this user requested streams from.
        query_filter = None  # type: Optional[Q]

        def add_filter_option(option: Q) -> None:
            nonlocal query_filter
            if query_filter is None:
                query_filter = option
            else:
                query_filter |= option

        if include_subscribed:
            recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
            add_filter_option(recipient_check)
        if include_public:
            invite_only_check = Q(invite_only=False)
            add_filter_option(invite_only_check)
        if include_owner_subscribed and user_profile.is_bot:
            assert user_profile.bot_owner is not None
            owner_subs = get_stream_subscriptions_for_user(user_profile.bot_owner).filter(
                active=True,
            ).select_related('recipient')
            owner_subscribed_check = Q(id__in=[sub.recipient.type_id for sub in owner_subs])
            add_filter_option(owner_subscribed_check)

        if query_filter is not None:
            query = query.filter(query_filter)
        else:
            # Don't bother going to the database with no valid sources
            query = []

    streams = [row.to_dict() for row in query]
    streams.sort(key=lambda elt: elt["name"])
    if include_default:
        is_default = {}
        default_streams = get_default_streams_for_realm(user_profile.realm_id)
        for default_stream in default_streams:
            is_default[default_stream.id] = True
        for stream in streams:
            stream['is_default'] = is_default.get(stream["stream_id"], False)

    return streams

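# The add_filter_option() closure above folds several stream sources into a
# single Q object. A standalone sketch of the same pattern (hypothetical
# values, illustration only):
#
#     query_filter = Q(invite_only=False) | Q(id__in=[1, 2, 3])
#     Stream.objects.filter(query_filter)  # one query, union of conditions
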
def notify_attachment_update(user_profile: UserProfile, op: str,
                             attachment_dict: Dict[str, Any]) -> None:
    event = {
        'type': 'attachment',
        'op': op,
        'attachment': attachment_dict,
        'upload_space_used': user_profile.realm.currently_used_upload_space_bytes(),
    }
    send_event(user_profile.realm, event, [user_profile.id])

def do_claim_attachments(message: Message) -> None:
    attachment_url_list = attachment_url_re.findall(message.content)

    for url in attachment_url_list:
        path_id = attachment_url_to_path_id(url)
        user_profile = message.sender
        is_message_realm_public = False
        if message.is_stream_message():
            is_message_realm_public = Stream.objects.get(id=message.recipient.type_id).is_public()

        if not validate_attachment_request(user_profile, path_id):
            # Technically, there are 2 cases here:
            # * The user put something in their message that has the form
            #   of an upload, but doesn't correspond to a file that exists.
            #   validate_attachment_request will return None.
            # * The user is trying to send a link to a file they don't have permission to
            #   access themselves. validate_attachment_request will return False.
            #
            # Either case is unusual and suggests a UI bug that got
            # the user in this situation, so we log in these cases.
            logging.warning("User %s tried to share upload %s in message %s, but lacks permission" % (
                user_profile.id, path_id, message.id))
            continue

        attachment = claim_attachment(user_profile, path_id, message, is_message_realm_public)
        notify_attachment_update(user_profile, "update", attachment.to_dict())

def do_delete_old_unclaimed_attachments(weeks_ago: int) -> None:
    old_unclaimed_attachments = get_old_unclaimed_attachments(weeks_ago)

    for attachment in old_unclaimed_attachments:
        delete_message_image(attachment.path_id)
        attachment.delete()

def check_attachment_reference_change(prev_content: str, message: Message) -> None:
    new_content = message.content
    prev_attachments = set(attachment_url_re.findall(prev_content))
    new_attachments = set(attachment_url_re.findall(new_content))

    to_remove = list(prev_attachments - new_attachments)
    path_ids = []
    for url in to_remove:
        path_id = attachment_url_to_path_id(url)
        path_ids.append(path_id)

    attachments_to_update = Attachment.objects.filter(path_id__in=path_ids).select_for_update()
    message.attachment_set.remove(*attachments_to_update)

    to_add = list(new_attachments - prev_attachments)
    if len(to_add) > 0:
        do_claim_attachments(message)

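# Worked example of the set arithmetic above (hypothetical attachments): if
# an edit changes the referenced uploads from {A, B} to {B, C}, then
# to_remove == [A] (only A is unlinked from the message) and to_add == [C],
# which triggers a fresh do_claim_attachments() pass over the new content.
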
def notify_realm_custom_profile_fields(realm: Realm, operation: str) -> None:
    fields = custom_profile_fields_for_realm(realm.id)
    event = dict(type="custom_profile_fields",
                 op=operation,
                 fields=[f.as_dict() for f in fields])
    send_event(realm, event, active_user_ids(realm.id))

def try_add_realm_custom_profile_field(realm: Realm, name: str, field_type: int,
                                       hint: str='',
                                       field_data: Optional[ProfileFieldData]=None) -> CustomProfileField:
    field = CustomProfileField(realm=realm, name=name, field_type=field_type)
    field.hint = hint
    if (field.field_type == CustomProfileField.CHOICE or
            field.field_type == CustomProfileField.EXTERNAL_ACCOUNT):
        field.field_data = ujson.dumps(field_data or {})

    field.save()
    field.order = field.id
    field.save(update_fields=['order'])
    notify_realm_custom_profile_fields(realm, 'add')
    return field

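# Illustrative sketch (hypothetical field; the field_data shape shown here
# is an assumption about the CHOICE format, not documentation of it):
#
#     try_add_realm_custom_profile_field(
#         realm, 'T-shirt size', CustomProfileField.CHOICE,
#         field_data={'0': {'text': 'S', 'order': '1'},
#                     '1': {'text': 'M', 'order': '2'}})
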
def do_remove_realm_custom_profile_field(realm: Realm, field: CustomProfileField) -> None:
    """
    Deleting a field will also delete the user profile data
    associated with it in CustomProfileFieldValue model.
    """
    field.delete()
    notify_realm_custom_profile_fields(realm, 'delete')

def do_remove_realm_custom_profile_fields(realm: Realm) -> None:
    CustomProfileField.objects.filter(realm=realm).delete()

def try_update_realm_custom_profile_field(realm: Realm, field: CustomProfileField,
                                          name: str, hint: str='',
                                          field_data: Optional[ProfileFieldData]=None) -> None:
    field.name = name
    field.hint = hint
    if (field.field_type == CustomProfileField.CHOICE or
            field.field_type == CustomProfileField.EXTERNAL_ACCOUNT):
        field.field_data = ujson.dumps(field_data or {})
    field.save()
    notify_realm_custom_profile_fields(realm, 'update')

def try_reorder_realm_custom_profile_fields(realm: Realm, order: List[int]) -> None:
    order_mapping = {field_id: pos for pos, field_id in enumerate(order)}
    fields = CustomProfileField.objects.filter(realm=realm)
    for field in fields:
        if field.id not in order_mapping:
            raise JsonableError(_("Invalid order mapping."))
    for field in fields:
        field.order = order_mapping[field.id]
        field.save(update_fields=['order'])
    notify_realm_custom_profile_fields(realm, 'update')

def notify_user_update_custom_profile_data(user_profile: UserProfile,
                                           field: Dict[str, Union[int, str, List[int], None]]) -> None:
    data = dict(id=field['id'])
    if field['type'] == CustomProfileField.USER:
        data['value'] = ujson.dumps(field['value'])
    else:
        data['value'] = field['value']
    if field['rendered_value']:
        data['rendered_value'] = field['rendered_value']
    payload = dict(user_id=user_profile.id, custom_profile_field=data)
    event = dict(type="realm_user", op="update", person=payload)
    send_event(user_profile.realm, event, active_user_ids(user_profile.realm.id))

def do_update_user_custom_profile_data(user_profile: UserProfile,
                                       data: List[Dict[str, Union[int, str, List[int]]]]) -> None:
    with transaction.atomic():
        for field in data:
            field_value, created = CustomProfileFieldValue.objects.get_or_create(
                user_profile=user_profile,
                field_id=field['id'])
            field_value.value = field['value']
            if field_value.field.is_renderable():
                field_value.rendered_value = render_stream_description(str(field['value']))
                field_value.save(update_fields=['value', 'rendered_value'])
            else:
                field_value.save(update_fields=['value'])
            notify_user_update_custom_profile_data(user_profile, {
                "id": field_value.field_id,
                "value": field_value.value,
                "rendered_value": field_value.rendered_value,
                "type": field_value.field.field_type})

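# Illustrative call (hypothetical field ids): both writes happen inside one
# transaction, and each write also fans out a realm_user update event.
#
#     do_update_user_custom_profile_data(user_profile, [
#         {'id': 1, 'value': 'Engineering'},
#         {'id': 2, 'value': [11, 12]},  # a USER-type field takes id lists
#     ])
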
def check_remove_custom_profile_field_value(user_profile: UserProfile,
                                            field_id: Union[int, str, List[int]]
                                            ) -> None:
    try:
        field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
        field_value = CustomProfileFieldValue.objects.get(field=field, user_profile=user_profile)
        field_value.delete()
        notify_user_update_custom_profile_data(user_profile, {'id': field_id,
                                                              'value': None,
                                                              'rendered_value': None,
                                                              'type': field.field_type})
    except CustomProfileField.DoesNotExist:
        raise JsonableError(_('Field id {id} not found.').format(id=field_id))
    except CustomProfileFieldValue.DoesNotExist:
        pass

def do_send_create_user_group_event(user_group: UserGroup, members: List[UserProfile]) -> None:
    event = dict(type="user_group",
                 op="add",
                 group=dict(name=user_group.name,
                            members=[member.id for member in members],
                            description=user_group.description,
                            id=user_group.id,
                            ),
                 )
    send_event(user_group.realm, event, active_user_ids(user_group.realm_id))

def check_add_user_group(realm: Realm, name: str, initial_members: List[UserProfile],
                         description: str) -> None:
    try:
        user_group = create_user_group(name, initial_members, realm, description=description)
        do_send_create_user_group_event(user_group, initial_members)
    except django.db.utils.IntegrityError:
        raise JsonableError(_("User group '%s' already exists.") % (name,))

def do_send_user_group_update_event(user_group: UserGroup, data: Dict[str, Any]) -> None:
    event = dict(type="user_group", op='update', group_id=user_group.id, data=data)
    send_event(user_group.realm, event, active_user_ids(user_group.realm_id))

def do_update_user_group_name(user_group: UserGroup, name: str) -> None:
    try:
        user_group.name = name
        user_group.save(update_fields=['name'])
    except django.db.utils.IntegrityError:
        raise JsonableError(_("User group '%s' already exists.") % (name,))
    do_send_user_group_update_event(user_group, dict(name=name))

def do_update_user_group_description(user_group: UserGroup, description: str) -> None:
    user_group.description = description
    user_group.save(update_fields=['description'])
    do_send_user_group_update_event(user_group, dict(description=description))

def do_update_outgoing_webhook_service(bot_profile: UserProfile,
                                       service_interface: int,
                                       service_payload_url: str) -> None:
    # TODO: First service is chosen because currently one bot can only have one service.
    # Update this once multiple services are supported.
    service = get_bot_services(bot_profile.id)[0]
    service.base_url = service_payload_url
    service.interface = service_interface
    service.save()
    send_event(bot_profile.realm,
               dict(type='realm_bot',
                    op='update',
                    bot=dict(email=bot_profile.email,
                             user_id=bot_profile.id,
                             services=[dict(base_url=service.base_url,
                                            interface=service.interface,
                                            token=service.token)],
                             ),
                    ),
               bot_owner_user_ids(bot_profile))

def do_update_bot_config_data(bot_profile: UserProfile,
                              config_data: Dict[str, str]) -> None:
    for key, value in config_data.items():
        set_bot_config(bot_profile, key, value)
    updated_config_data = get_bot_config(bot_profile)
    send_event(bot_profile.realm,
               dict(type='realm_bot',
                    op='update',
                    bot=dict(email=bot_profile.email,
                             user_id=bot_profile.id,
                             services=[dict(config_data=updated_config_data)],
                             ),
                    ),
               bot_owner_user_ids(bot_profile))

def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]:
    user_profile = get_user_profile_by_id(user_profile_id)
    services = get_bot_services(user_profile_id)
    service_dicts = []  # type: List[Dict[str, Any]]
    if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
        service_dicts = [{'base_url': service.base_url,
                          'interface': service.interface,
                          'token': service.token,
                          }
                         for service in services]
    elif user_profile.bot_type == UserProfile.EMBEDDED_BOT:
        try:
            service_dicts = [{'config_data': get_bot_config(user_profile),
                              'service_name': services[0].name
                              }]
        # A ConfigError just means that there are no config entries for user_profile.
        except ConfigError:
            pass
    return service_dicts

def get_service_dicts_for_bots(bot_dicts: List[Dict[str, Any]],
                               realm: Realm) -> Dict[int, List[Dict[str, Any]]]:
    bot_profile_ids = [bot_dict['id'] for bot_dict in bot_dicts]
    bot_services_by_uid = defaultdict(list)  # type: Dict[int, List[Service]]
    for service in Service.objects.filter(user_profile_id__in=bot_profile_ids):
        bot_services_by_uid[service.user_profile_id].append(service)

    embedded_bot_ids = [bot_dict['id'] for bot_dict in bot_dicts
                        if bot_dict['bot_type'] == UserProfile.EMBEDDED_BOT]
    embedded_bot_configs = get_bot_configs(embedded_bot_ids)

    service_dicts_by_uid = {}  # type: Dict[int, List[Dict[str, Any]]]
    for bot_dict in bot_dicts:
        bot_profile_id = bot_dict["id"]
        bot_type = bot_dict["bot_type"]
        services = bot_services_by_uid[bot_profile_id]
        service_dicts = []  # type: List[Dict[str, Any]]
        if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
            service_dicts = [{'base_url': service.base_url,
                              'interface': service.interface,
                              'token': service.token,
                              }
                             for service in services]
        elif bot_type == UserProfile.EMBEDDED_BOT:
            if bot_profile_id in embedded_bot_configs.keys():
                bot_config = embedded_bot_configs[bot_profile_id]
                service_dicts = [{'config_data': bot_config,
                                  'service_name': services[0].name
                                  }]
        service_dicts_by_uid[bot_profile_id] = service_dicts
    return service_dicts_by_uid

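# Design note: this is the bulk counterpart of get_service_dicts_for_bot
# above. Prefetching the Service rows and embedded-bot configs for every
# bot at once avoids issuing one query per bot when building realm-wide
# bot lists.
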
def get_owned_bot_dicts(user_profile: UserProfile,
                        include_all_realm_bots_if_admin: bool=True) -> List[Dict[str, Any]]:
    if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
        result = get_bot_dicts_in_realm(user_profile.realm)
    else:
        result = UserProfile.objects.filter(realm=user_profile.realm, is_bot=True,
                                            bot_owner=user_profile).values(*bot_dict_fields)
    services_by_ids = get_service_dicts_for_bots(result, user_profile.realm)
    return [{'email': botdict['email'],
             'user_id': botdict['id'],
             'full_name': botdict['full_name'],
             'bot_type': botdict['bot_type'],
             'is_active': botdict['is_active'],
             'api_key': botdict['api_key'],
             'default_sending_stream': botdict['default_sending_stream__name'],
             'default_events_register_stream': botdict['default_events_register_stream__name'],
             'default_all_public_streams': botdict['default_all_public_streams'],
             'owner': botdict['bot_owner__email'],
             'avatar_url': avatar_url_from_dict(botdict),
             'services': services_by_ids[botdict['id']],
             }
            for botdict in result]

def do_send_user_group_members_update_event(event_name: str,
                                            user_group: UserGroup,
                                            user_ids: List[int]) -> None:
    event = dict(type="user_group",
                 op=event_name,
                 group_id=user_group.id,
                 user_ids=user_ids)
    send_event(user_group.realm, event, active_user_ids(user_group.realm_id))

def bulk_add_members_to_user_group(user_group: UserGroup,
                                   user_profiles: List[UserProfile]) -> None:
    memberships = [UserGroupMembership(user_group_id=user_group.id,
                                       user_profile=user_profile)
                   for user_profile in user_profiles]
    UserGroupMembership.objects.bulk_create(memberships)

    user_ids = [up.id for up in user_profiles]
    do_send_user_group_members_update_event('add_members', user_group, user_ids)

def remove_members_from_user_group(user_group: UserGroup,
                                   user_profiles: List[UserProfile]) -> None:
    UserGroupMembership.objects.filter(
        user_group_id=user_group.id,
        user_profile__in=user_profiles).delete()

    user_ids = [up.id for up in user_profiles]
    do_send_user_group_members_update_event('remove_members', user_group, user_ids)

def do_send_delete_user_group_event(realm: Realm, user_group_id: int,
                                    realm_id: int) -> None:
    event = dict(type="user_group",
                 op="remove",
                 group_id=user_group_id)
    send_event(realm, event, active_user_ids(realm_id))

def check_delete_user_group(user_group_id: int, user_profile: UserProfile) -> None:
    user_group = access_user_group_by_id(user_group_id, user_profile)
    user_group.delete()
    do_send_delete_user_group_event(user_profile.realm, user_group_id, user_profile.realm.id)

def missing_any_realm_internal_bots() -> bool:
    bot_emails = [bot['email_template'] % (settings.INTERNAL_BOT_DOMAIN,)
                  for bot in settings.REALM_INTERNAL_BOTS]
    bot_counts = dict(UserProfile.objects.filter(email__in=bot_emails)
                                 .values_list('email')
                                 .annotate(Count('id')))
    realm_count = Realm.objects.count()
    return any(bot_counts.get(email, 0) < realm_count for email in bot_emails)

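# Reading the check above: bot_counts maps each internal-bot email to the
# number of UserProfile rows bearing it across all realms; any count below
# Realm.objects.count() means at least one realm is missing that bot.
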
def do_send_realm_reactivation_email(realm: Realm) -> None:
    url = create_confirmation_link(realm, realm.host, Confirmation.REALM_REACTIVATION)
    context = {'confirmation_url': url,
               'realm_uri': realm.uri,
               'realm_name': realm.name}
    send_email_to_admins(
        'zerver/emails/realm_reactivation', realm,
        from_address=FromAddress.tokenized_no_reply_address(),
        from_name="Zulip Account Security", context=context)

def get_zoom_video_call_url(realm: Realm) -> str:
    response = request_zoom_video_call_url(
        realm.zoom_user_id,
        realm.zoom_api_key,
        realm.zoom_api_secret
    )

    if response is None:
        return ''

    return response['join_url']

def notify_realm_export(user_profile: UserProfile) -> None:
    # In the future, we may want to send this event to all realm admins.
    event = dict(type='realm_export',
                 exports=get_realm_exports_serialized(user_profile))
    send_event(user_profile.realm, event, [user_profile.id])