2017-11-16 19:51:44 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
|
2017-02-12 01:59:28 +01:00
|
|
|
# high-level documentation on how this system works.
|
2017-02-10 23:04:46 +01:00
|
|
|
|
|
|
|
import copy
|
|
|
|
|
|
|
|
from django.utils.translation import ugettext as _
|
|
|
|
from django.conf import settings
|
|
|
|
from importlib import import_module
|
|
|
|
from typing import (
|
2019-02-02 23:53:55 +01:00
|
|
|
Any, Callable, Dict, Iterable, Optional, Sequence, Set
|
2017-02-10 23:04:46 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
# Load the session backend module named by Django's SESSION_ENGINE setting.
session_engine = import_module(settings.SESSION_ENGINE)
|
|
|
|
|
|
|
|
from zerver.lib.alert_words import user_alert_words
|
2020-01-13 22:11:19 +01:00
|
|
|
from zerver.lib.avatar import avatar_url
|
2018-01-07 19:24:14 +01:00
|
|
|
from zerver.lib.bot_config import load_bot_config_template
|
2017-01-24 01:48:35 +01:00
|
|
|
from zerver.lib.hotspots import get_next_hotspots
|
2019-08-18 15:09:18 +02:00
|
|
|
from zerver.lib.integrations import EMBEDDED_BOTS, WEBHOOK_INTEGRATIONS
|
2017-05-23 03:02:01 +02:00
|
|
|
from zerver.lib.message import (
|
Simplify how we apply events for unread messages.
The logic to apply events to page_params['unread_msgs'] was
complicated due to the aggregated data structures that we pass
down to the client.
Now we defer the aggregation logic until after we apply the
events. This leads to some simplifications in that codepath,
as well as some performance enhancements.
The intermediate data structure has sets and dictionaries that
generally are keyed by message_id, so most message-related
updates are O(1) in nature.
Also, by waiting to compute the counts until the end, it's a
bit less messy to try to keep track of increments/decrements.
Instead, we just update the dictionaries and sets during the
event-apply phase.
This change also fixes some corner cases:
* We now respect mutes when updating counts.
* For message updates, instead of bluntly updating
the whole topic bucket, we update individual
message ids.
Unfortunately, this change doesn't seem to address the pesky
test that fails sporadically on Travis, related to mention
updates. It will change the symptom, slightly, though.
2017-10-05 00:34:19 +02:00
|
|
|
aggregate_unread_data,
|
2017-05-23 03:02:01 +02:00
|
|
|
apply_unread_message_event,
|
Simplify how we apply events for unread messages.
The logic to apply events to page_params['unread_msgs'] was
complicated due to the aggregated data structures that we pass
down to the client.
Now we defer the aggregation logic until after we apply the
events. This leads to some simplifications in that codepath,
as well as some performance enhancements.
The intermediate data structure has sets and dictionaries that
generally are keyed by message_id, so most message-related
updates are O(1) in nature.
Also, by waiting to compute the counts until the end, it's a
bit less messy to try to keep track of increments/decrements.
Instead, we just update the dictionaries and sets during the
event-apply phase.
This change also fixes some corner cases:
* We now respect mutes when updating counts.
* For message updates, instead of bluntly updating
the whole topic bucket, we update individual
message ids.
Unfortunately, this change doesn't seem to address the pesky
test that fails sporadically on Travis, related to mention
updates. It will change the symptom, slightly, though.
2017-10-05 00:34:19 +02:00
|
|
|
get_raw_unread_data,
|
2019-03-20 04:15:58 +01:00
|
|
|
get_recent_conversations_recipient_id,
|
|
|
|
get_recent_private_conversations,
|
2018-08-14 23:57:20 +02:00
|
|
|
get_starred_message_ids,
|
2019-08-03 02:24:00 +02:00
|
|
|
remove_message_id_from_unread_mgs,
|
2017-05-23 03:02:01 +02:00
|
|
|
)
|
2019-01-27 18:57:15 +01:00
|
|
|
from zerver.lib.narrow import check_supported_events_narrow_filter, read_stop_words
|
2020-02-06 17:41:55 +01:00
|
|
|
from zerver.lib.presence import (
|
|
|
|
get_presences_for_realm,
|
2020-02-06 17:52:12 +01:00
|
|
|
get_presence_for_user,
|
2020-02-06 17:41:55 +01:00
|
|
|
)
|
2018-05-08 20:45:13 +02:00
|
|
|
from zerver.lib.push_notifications import push_notifications_enabled
|
2019-03-12 02:48:01 +01:00
|
|
|
from zerver.lib.soft_deactivation import reactivate_user_if_soft_deactivated
|
2017-02-21 03:41:20 +01:00
|
|
|
from zerver.lib.realm_icon import realm_icon_url
|
2019-08-19 19:46:45 +02:00
|
|
|
from zerver.lib.realm_logo import get_realm_logo_url
|
2017-02-10 23:04:46 +01:00
|
|
|
from zerver.lib.request import JsonableError
|
2019-02-13 10:22:16 +01:00
|
|
|
from zerver.lib.stream_subscription import handle_stream_notifications_compatibility
|
2018-11-08 15:01:45 +01:00
|
|
|
from zerver.lib.topic import TOPIC_NAME
|
2017-08-24 17:58:40 +02:00
|
|
|
from zerver.lib.topic_mutes import get_topic_mutes
|
2017-05-23 03:02:01 +02:00
|
|
|
from zerver.lib.actions import (
|
2020-01-13 22:11:19 +01:00
|
|
|
do_get_streams,
|
|
|
|
get_default_streams_for_realm,
|
2020-01-13 22:11:19 +01:00
|
|
|
gather_subscriptions_helper,
|
2020-02-06 15:54:57 +01:00
|
|
|
streams_to_dicts_sorted,
|
2018-02-10 15:44:58 +01:00
|
|
|
default_stream_groups_to_dicts_sorted,
|
|
|
|
get_owned_bot_dicts,
|
2018-01-11 21:36:11 +01:00
|
|
|
get_available_notification_sounds,
|
2017-05-23 03:02:01 +02:00
|
|
|
)
|
2020-01-13 22:11:19 +01:00
|
|
|
from zerver.lib.users import get_cross_realm_dicts, get_raw_user_data
|
2017-11-07 07:56:26 +01:00
|
|
|
from zerver.lib.user_groups import user_groups_in_realm_serialized
|
2019-01-21 19:06:03 +01:00
|
|
|
from zerver.lib.user_status import get_user_info_dict
|
2017-02-10 23:04:46 +01:00
|
|
|
from zerver.tornado.event_queue import request_event_queue, get_user_events
|
2020-02-03 16:39:43 +01:00
|
|
|
from zerver.models import (
|
2020-02-29 15:34:00 +01:00
|
|
|
Client, Message, Realm, UserProfile, UserMessage,
|
2020-02-03 16:39:43 +01:00
|
|
|
get_user_profile_by_id, realm_filters_for_realm,
|
|
|
|
custom_profile_fields_for_realm, get_realm_domains,
|
2018-04-30 11:48:00 +02:00
|
|
|
get_default_stream_groups, CustomProfileField, Stream
|
2020-02-03 16:39:43 +01:00
|
|
|
)
|
2017-10-24 20:59:11 +02:00
|
|
|
from zproject.backends import email_auth_enabled, password_auth_enabled
|
2020-04-20 00:57:28 +02:00
|
|
|
from version import ZULIP_VERSION, API_FEATURE_LEVEL
|
2019-05-27 10:59:55 +02:00
|
|
|
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
|
2017-02-10 23:04:46 +01:00
|
|
|
|
2019-03-02 18:23:57 +01:00
|
|
|
def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
    """Fill in the realm's logo-related entries on `state`: the day and
    night logo URLs, their sources, and the maximum logo upload size."""
    state['realm_logo_url'] = get_realm_logo_url(realm, night=False)
    state['realm_logo_source'] = realm.logo_source
    state['realm_night_logo_url'] = get_realm_logo_url(realm, night=True)
    state['realm_night_logo_source'] = realm.night_logo_source
    state['max_logo_file_size'] = settings.MAX_LOGO_FILE_SIZE
|
2017-11-05 11:15:10 +01:00
|
|
|
def always_want(msg_type: str) -> bool:
    """Interest predicate used by fetch_initial_state_data when the
    caller passes event_types=None and therefore wants data for every
    event type.

    It lives at module level (rather than being a lambda) so that it is
    easy to mock in tests.
    """
    return True
|
|
|
|
|
2017-02-10 23:04:46 +01:00
|
|
|
# Fetch initial data. When event_types is not specified, clients want
# all event types. Whenever you add new code to this function, you
# should also add corresponding events for changes in the data
# structures and new code to apply_events (and add a test in EventsRegisterTest).
def fetch_initial_state_data(user_profile: UserProfile,
                             event_types: Optional[Iterable[str]],
                             queue_id: str, client_gravatar: bool,
                             slim_presence: bool = False,
                             include_subscribers: bool = True) -> Dict[str, Any]:
    """Build the initial `state` dictionary sent to a newly registered
    client event queue.

    Each section below is guarded by want(<event_type>), so only the
    data the client asked for (via `event_types`) is computed; passing
    event_types=None selects every section.

    Returns the populated state dict, which always contains at least
    'queue_id'.
    """
    state: Dict[str, Any] = {'queue_id': queue_id}
    realm = user_profile.realm

    if event_types is None:
        # return True always
        want: Callable[[str], bool] = always_want
    else:
        want = set(event_types).__contains__

    if want('alert_words'):
        state['alert_words'] = user_alert_words(user_profile)

    if want('custom_profile_fields'):
        fields = custom_profile_fields_for_realm(realm.id)
        state['custom_profile_fields'] = [f.as_dict() for f in fields]
        state['custom_profile_field_types'] = CustomProfileField.FIELD_TYPE_CHOICES_DICT

    if want('hotspots'):
        state['hotspots'] = get_next_hotspots(user_profile)

    if want('message'):
        # The client should use get_messages() to fetch messages
        # starting with the max_message_id. They will get messages
        # newer than that ID via get_events()
        user_messages = UserMessage.objects \
            .filter(user_profile=user_profile) \
            .order_by('-message_id') \
            .values('message_id')[:1]
        if user_messages:
            state['max_message_id'] = user_messages[0]['message_id']
        else:
            # Sentinel: the user has no messages at all.
            state['max_message_id'] = -1

    if want('muted_topics'):
        state['muted_topics'] = get_topic_mutes(user_profile)

    if want('pointer'):
        state['pointer'] = user_profile.pointer

    if want('presence'):
        state['presences'] = get_presences_for_realm(realm, slim_presence)

    if want('realm'):
        # Settings declared in Realm.property_types are copied generically.
        for property_name in Realm.property_types:
            state['realm_' + property_name] = getattr(realm, property_name)

        # Don't send the zoom API secret to clients.
        if state.get('realm_zoom_api_secret'):
            state['realm_zoom_api_secret'] = ''

        # Most state is handled via the property_types framework;
        # these manual entries are for those realm settings that don't
        # fit into that framework.
        state['realm_authentication_methods'] = realm.authentication_methods_dict()
        state['realm_allow_message_editing'] = realm.allow_message_editing
        state['realm_allow_community_topic_editing'] = realm.allow_community_topic_editing
        state['realm_allow_message_deleting'] = realm.allow_message_deleting
        state['realm_message_content_edit_limit_seconds'] = realm.message_content_edit_limit_seconds
        state['realm_message_content_delete_limit_seconds'] = realm.message_content_delete_limit_seconds
        state['realm_icon_url'] = realm_icon_url(realm)
        state['realm_icon_source'] = realm.icon_source
        state['max_icon_file_size'] = settings.MAX_ICON_FILE_SIZE
        add_realm_logo_fields(state, realm)
        state['realm_bot_domain'] = realm.get_bot_domain()
        state['realm_uri'] = realm.uri
        state['realm_available_video_chat_providers'] = realm.VIDEO_CHAT_PROVIDERS
        state['realm_presence_disabled'] = realm.presence_disabled
        state['settings_send_digest_emails'] = settings.SEND_DIGEST_EMAILS
        # Digest emails require both the realm setting and the server setting.
        state['realm_digest_emails_enabled'] = realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        state['realm_is_zephyr_mirror_realm'] = realm.is_zephyr_mirror_realm
        state['realm_email_auth_enabled'] = email_auth_enabled(realm)
        state['realm_password_auth_enabled'] = password_auth_enabled(realm)
        state['realm_push_notifications_enabled'] = push_notifications_enabled()
        state['realm_upload_quota'] = realm.upload_quota_bytes()
        state['realm_plan_type'] = realm.plan_type
        state['plan_includes_wide_organization_logo'] = realm.plan_type != Realm.LIMITED
        state['upgrade_text_for_wide_organization_logo'] = str(Realm.UPGRADE_TEXT_STANDARD)
        state['realm_default_external_accounts'] = DEFAULT_EXTERNAL_ACCOUNTS

        # -1 is the sentinel for "no (active) notifications stream".
        if realm.notifications_stream and not realm.notifications_stream.deactivated:
            notifications_stream = realm.notifications_stream
            state['realm_notifications_stream_id'] = notifications_stream.id
        else:
            state['realm_notifications_stream_id'] = -1

        signup_notifications_stream = realm.get_signup_notifications_stream()
        if signup_notifications_stream:
            state['realm_signup_notifications_stream_id'] = signup_notifications_stream.id
        else:
            state['realm_signup_notifications_stream_id'] = -1

    if want('realm_domains'):
        state['realm_domains'] = get_realm_domains(realm)

    if want('realm_emoji'):
        state['realm_emoji'] = realm.get_emoji()

    if want('realm_filters'):
        state['realm_filters'] = realm_filters_for_realm(realm.id)

    if want('realm_user_groups'):
        state['realm_user_groups'] = user_groups_in_realm_serialized(realm)

    if want('realm_user'):
        state['raw_users'] = get_raw_user_data(realm, user_profile,
                                               client_gravatar=client_gravatar)

        # For the user's own avatar URL, we force
        # client_gravatar=False, since that saves some unnecessary
        # client-side code for handing medium-size avatars. See #8253
        # for details.
        state['avatar_source'] = user_profile.avatar_source
        state['avatar_url_medium'] = avatar_url(
            user_profile,
            medium=True,
            client_gravatar=False,
        )
        state['avatar_url'] = avatar_url(
            user_profile,
            medium=False,
            client_gravatar=False,
        )

        state['can_create_streams'] = user_profile.can_create_streams()
        state['can_subscribe_other_users'] = user_profile.can_subscribe_other_users()
        state['cross_realm_bots'] = list(get_cross_realm_dicts())
        state['is_admin'] = user_profile.is_realm_admin
        state['is_guest'] = user_profile.is_guest
        state['user_id'] = user_profile.id
        state['enter_sends'] = user_profile.enter_sends
        state['email'] = user_profile.email
        state['delivery_email'] = user_profile.delivery_email
        state['full_name'] = user_profile.full_name

    if want('realm_bot'):
        state['realm_bots'] = get_owned_bot_dicts(user_profile)

    # This does not yet have an apply_event counterpart, since currently,
    # new entries for EMBEDDED_BOTS can only be added directly in the codebase.
    if want('realm_embedded_bots'):
        realm_embedded_bots = []
        for bot in EMBEDDED_BOTS:
            realm_embedded_bots.append({'name': bot.name,
                                        'config': load_bot_config_template(bot.name)})
        state['realm_embedded_bots'] = realm_embedded_bots

    # This does not have an apply_events counterpart either since
    # this data is mostly static.
    if want('realm_incoming_webhook_bots'):
        realm_incoming_webhook_bots = []
        for integration in WEBHOOK_INTEGRATIONS:
            realm_incoming_webhook_bots.append({
                'name': integration.name,
                'config': {c[1]: c[0] for c in integration.config_options}
            })
        state['realm_incoming_webhook_bots'] = realm_incoming_webhook_bots

    if want('recent_private_conversations'):
        # A data structure containing records of this form:
        #
        #   [{'max_message_id': 700175, 'user_ids': [801]}]
        #
        # for all recent private message conversations, ordered by the
        # highest message ID in the conversation. The user_ids list
        # is the list of users other than the current user in the
        # private message conversation (so it is [] for PMs to self).
        # Note that raw_recent_private_conversations is an
        # intermediate form as a dictionary keyed by recipient_id,
        # which is more efficient to update, and is rewritten to the
        # final format in post_process_state.
        state['raw_recent_private_conversations'] = get_recent_private_conversations(user_profile)

    if want('subscription'):
        subscriptions, unsubscribed, never_subscribed = gather_subscriptions_helper(
            user_profile, include_subscribers=include_subscribers)
        state['subscriptions'] = subscriptions
        state['unsubscribed'] = unsubscribed
        state['never_subscribed'] = never_subscribed

    if want('update_message_flags') and want('message'):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates. This is due to the fact that new messages will not
        # generate a flag update so we need to use the flags field in the
        # message event.
        state['raw_unread_msgs'] = get_raw_unread_data(user_profile)

    if want('starred_messages'):
        state['starred_messages'] = get_starred_message_ids(user_profile)

    if want('stream'):
        state['streams'] = do_get_streams(user_profile)
        state['stream_name_max_length'] = Stream.MAX_NAME_LENGTH
        state['stream_description_max_length'] = Stream.MAX_DESCRIPTION_LENGTH
    if want('default_streams'):
        # Guests don't receive the realm's default streams.
        if user_profile.is_guest:
            state['realm_default_streams'] = []
        else:
            state['realm_default_streams'] = streams_to_dicts_sorted(
                get_default_streams_for_realm(realm.id))
    if want('default_stream_groups'):
        if user_profile.is_guest:
            state['realm_default_stream_groups'] = []
        else:
            state['realm_default_stream_groups'] = default_stream_groups_to_dicts_sorted(
                get_default_stream_groups(realm))

    if want('stop_words'):
        state['stop_words'] = read_stop_words()

    if want('update_display_settings'):
        for prop in UserProfile.property_types:
            state[prop] = getattr(user_profile, prop)
        state['emojiset_choices'] = user_profile.emojiset_choices()

    if want('update_global_notifications'):
        for notification in UserProfile.notification_setting_types:
            state[notification] = getattr(user_profile, notification)
        state['available_notification_sounds'] = get_available_notification_sounds()

    if want('user_status'):
        state['user_status'] = get_user_info_dict(realm_id=realm.id)

    if want('zulip_version'):
        state['zulip_version'] = ZULIP_VERSION
        state['zulip_feature_level'] = API_FEATURE_LEVEL

    return state
|
2017-12-19 18:52:26 +01:00
|
|
|
def apply_events(state: Dict[str, Any], events: Iterable[Dict[str, Any]],
                 user_profile: UserProfile, client_gravatar: bool,
                 slim_presence: bool, include_subscribers: bool = True,
                 fetch_event_types: Optional[Iterable[str]] = None) -> None:
    """Apply each of `events` to `state` in order, mutating it in place
    via apply_event.

    When `fetch_event_types` is given, events whose `type` is not in it
    are skipped entirely (see the TODO below for a known imprecision).
    """
    for event in events:
        if fetch_event_types is not None and event['type'] not in fetch_event_types:
            # TODO: continuing here is not, most precisely, correct.
            # In theory, an event of one type, e.g. `realm_user`,
            # could modify state that doesn't come from that
            # `fetch_event_types` value, e.g. the `our_person` part of
            # that code path. But it should be extremely rare, and
            # fixing that will require a nontrivial refactor of
            # `apply_event`. For now, be careful in your choice of
            # `fetch_event_types`.
            continue
        apply_event(state, event, user_profile,
                    client_gravatar, slim_presence, include_subscribers)
|
2017-02-20 20:09:48 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def apply_event(state: Dict[str, Any],
                event: Dict[str, Any],
                user_profile: UserProfile,
                client_gravatar: bool,
                slim_presence: bool,
                include_subscribers: bool) -> None:
    """Apply a single queued event to the /register `state` dict, in place.

    Dispatches on event['type'] (and usually event['op']) and mutates the
    corresponding portion of `state` so that the initial payload matches
    what a client that had processed the event live would compute.

    Args:
        state: the initial-state dict being brought up to date; keys may be
            absent when the client didn't request that event type, so many
            branches guard with `'key' in state`.
        event: the event dict from the event queue.
        user_profile: the user whose state this is.
        client_gravatar: if True, gravatar.com avatar URLs are nulled out,
            matching the client_gravatar behavior of the initial fetch.
        slim_presence: if True, presence entries are keyed by user id
            (as a string) instead of email.
        include_subscribers: whether subscriber lists are part of the
            subscription data in `state`.

    Raises:
        AssertionError: for an event type this function doesn't know about.
    """
    if event['type'] == "message":
        state['max_message_id'] = max(state['max_message_id'], event['message']['id'])
        if 'raw_unread_msgs' in state:
            apply_unread_message_event(
                user_profile,
                state['raw_unread_msgs'],
                event['message'],
                event['flags'],
            )

        if event['message']['type'] != "stream":
            if 'raw_recent_private_conversations' in state:
                # Handle maintaining the recent_private_conversations data structure.
                conversations = state['raw_recent_private_conversations']
                recipient_id = get_recent_conversations_recipient_id(
                    user_profile, event['message']['recipient_id'],
                    event['message']["sender_id"])

                if recipient_id not in conversations:
                    conversations[recipient_id] = dict(
                        user_ids=sorted([user_dict['id'] for user_dict in
                                         event['message']['display_recipient'] if
                                         user_dict['id'] != user_profile.id])
                    )
                conversations[recipient_id]['max_message_id'] = event['message']['id']
            return

        # Below, we handle maintaining first_message_id.
        for sub_dict in state.get('subscriptions', []):
            if event['message']['stream_id'] == sub_dict['stream_id']:
                if sub_dict['first_message_id'] is None:
                    sub_dict['first_message_id'] = event['message']['id']
        for stream_dict in state.get('streams', []):
            if event['message']['stream_id'] == stream_dict['stream_id']:
                if stream_dict['first_message_id'] is None:
                    stream_dict['first_message_id'] = event['message']['id']

    elif event['type'] == "hotspots":
        state['hotspots'] = event['hotspots']
    elif event['type'] == "custom_profile_fields":
        state['custom_profile_fields'] = event['fields']
    elif event['type'] == "pointer":
        state['pointer'] = max(state['pointer'], event['pointer'])
    elif event['type'] == "realm_user":
        person = event['person']
        person_user_id = person['user_id']

        if event['op'] == "add":
            # Deep-copy before mutating, so we don't corrupt the event
            # for any other consumer.
            person = copy.deepcopy(person)
            if client_gravatar:
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
            person['is_active'] = True
            if not person['is_bot']:
                person['profile_data'] = {}
            state['raw_users'][person_user_id] = person
        elif event['op'] == "remove":
            # Users are soft-deactivated in the payload, not deleted.
            state['raw_users'][person_user_id]['is_active'] = False
        elif event['op'] == 'update':
            is_me = (person_user_id == user_profile.id)

            if is_me:
                if ('avatar_url' in person and 'avatar_url' in state):
                    state['avatar_source'] = person['avatar_source']
                    state['avatar_url'] = person['avatar_url']
                    state['avatar_url_medium'] = person['avatar_url_medium']

                for field in ['is_admin', 'delivery_email', 'email', 'full_name']:
                    if field in person and field in state:
                        state[field] = person[field]

                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm.  This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if ('is_admin' in person and 'realm_bots' in state):
                    prev_state = state['raw_users'][user_profile.id]
                    was_admin = prev_state['is_admin']
                    now_admin = person['is_admin']

                    if was_admin and not now_admin:
                        state['realm_bots'] = []
                    if not was_admin and now_admin:
                        state['realm_bots'] = get_owned_bot_dicts(user_profile)

            if client_gravatar and 'avatar_url' in person:
                # Respect the client_gravatar setting in the `users` data.
                if 'gravatar.com' in person['avatar_url']:
                    person['avatar_url'] = None
                    person['avatar_url_medium'] = None

            if person_user_id in state['raw_users']:
                p = state['raw_users'][person_user_id]
                for field in p:
                    if field in person:
                        p[field] = person[field]
                if 'custom_profile_field' in person:
                    custom_field_id = person['custom_profile_field']['id']
                    custom_field_new_value = person['custom_profile_field']['value']
                    if 'rendered_value' in person['custom_profile_field']:
                        p['profile_data'][custom_field_id] = {
                            'value': custom_field_new_value,
                            'rendered_value': person['custom_profile_field']['rendered_value']
                        }
                    else:
                        p['profile_data'][custom_field_id] = {
                            'value': custom_field_new_value
                        }

    elif event['type'] == 'realm_bot':
        if event['op'] == 'add':
            state['realm_bots'].append(event['bot'])

        if event['op'] == 'remove':
            email = event['bot']['email']
            for bot in state['realm_bots']:
                if bot['email'] == email:
                    bot['is_active'] = False

        if event['op'] == 'delete':
            state['realm_bots'] = [item for item
                                   in state['realm_bots'] if item['email'] != event['bot']['email']]

        if event['op'] == 'update':
            for bot in state['realm_bots']:
                if bot['email'] == event['bot']['email']:
                    if 'owner_id' in event['bot']:
                        bot['owner'] = get_user_profile_by_id(event['bot']['owner_id']).email
                    else:
                        bot.update(event['bot'])

    elif event['type'] == 'stream':
        if event['op'] == 'create':
            for stream in event['streams']:
                if not stream['invite_only']:
                    stream_data = copy.deepcopy(stream)
                    if include_subscribers:
                        stream_data['subscribers'] = []
                    # New streams start with defaults for the derived fields
                    # the initial fetch would have computed.
                    stream_data['stream_weekly_traffic'] = None
                    stream_data['is_old_stream'] = False
                    stream_data['stream_post_policy'] = Stream.STREAM_POST_POLICY_EVERYONE
                    # Add stream to never_subscribed (if not invite_only)
                    state['never_subscribed'].append(stream_data)
                state['streams'].append(stream)
            state['streams'].sort(key=lambda elt: elt["name"])

        if event['op'] == 'delete':
            deleted_stream_ids = {stream['stream_id'] for stream in event['streams']}
            state['streams'] = [s for s in state['streams'] if s['stream_id'] not in deleted_stream_ids]
            state['never_subscribed'] = [stream for stream in state['never_subscribed'] if
                                         stream['stream_id'] not in deleted_stream_ids]

        if event['op'] == 'update':
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for obj in state['subscriptions']:
                if obj['name'].lower() == event['name'].lower():
                    obj[event['property']] = event['value']
                    if event['property'] == "description":
                        obj['rendered_description'] = event['rendered_description']
            # Also update the pure streams data
            for stream in state['streams']:
                if stream['name'].lower() == event['name'].lower():
                    prop = event['property']
                    if prop in stream:
                        stream[prop] = event['value']
                        if prop == 'description':
                            stream['rendered_description'] = event['rendered_description']
        elif event['op'] == "occupy":
            state['streams'] += event['streams']
        elif event['op'] == "vacate":
            stream_ids = [s["stream_id"] for s in event['streams']]
            state['streams'] = [s for s in state['streams'] if s["stream_id"] not in stream_ids]
    elif event['type'] == 'default_streams':
        state['realm_default_streams'] = event['default_streams']
    elif event['type'] == 'default_stream_groups':
        state['realm_default_stream_groups'] = event['default_stream_groups']
    elif event['type'] == 'realm':
        if event['op'] == "update":
            # Realm properties are exposed in state with a 'realm_' prefix.
            field = 'realm_' + event['property']
            state[field] = event['value']

            if event['property'] == 'plan_type':
                # Then there are some extra fields that also need to be set.
                state['plan_includes_wide_organization_logo'] = event['value'] != Realm.LIMITED
                state['realm_upload_quota'] = event['extra_data']['upload_quota']

            # Tricky interaction: Whether we can create streams can get changed here.
            if (field in ['realm_create_stream_policy',
                          'realm_waiting_period_threshold']) and 'can_create_streams' in state:
                state['can_create_streams'] = user_profile.can_create_streams()

            if (field in ['realm_invite_to_stream_policy',
                          'realm_waiting_period_threshold']) and 'can_subscribe_other_users' in state:
                state['can_subscribe_other_users'] = user_profile.can_subscribe_other_users()
        elif event['op'] == "update_dict":
            for key, value in event['data'].items():
                state['realm_' + key] = value

                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == 'authentication_methods':
                    state['realm_password_auth_enabled'] = (value['Email'] or value['LDAP'])
                    state['realm_email_auth_enabled'] = value['Email']
    elif event['type'] == "subscription":
        if not include_subscribers and event['op'] in ['peer_add', 'peer_remove']:
            # Peer events only matter for subscriber lists, which we
            # weren't asked to include.
            return

        if event['op'] in ["add"]:
            if not include_subscribers:
                # Avoid letting 'subscribers' entries end up in the list
                for i, sub in enumerate(event['subscriptions']):
                    event['subscriptions'][i] = copy.deepcopy(event['subscriptions'][i])
                    del event['subscriptions'][i]['subscribers']

        def name(sub: Dict[str, Any]) -> str:
            # Stream names are matched case-insensitively throughout.
            return sub['name'].lower()

        if event['op'] == "add":
            added_names = set(map(name, event["subscriptions"]))
            was_added = lambda s: name(s) in added_names

            # add the new subscriptions
            state['subscriptions'] += event['subscriptions']

            # remove them from unsubscribed if they had been there
            state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)]

            # remove them from never_subscribed if they had been there
            state['never_subscribed'] = [s for s in state['never_subscribed'] if not was_added(s)]

        elif event['op'] == "remove":
            removed_names = set(map(name, event["subscriptions"]))
            was_removed = lambda s: name(s) in removed_names

            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state['subscriptions']))

            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub['subscribers'].remove(user_profile.id)

            # We must effectively copy the removed subscriptions from subscriptions to
            # unsubscribe, since we only have the name in our data structure.
            state['unsubscribed'] += removed_subs

            # Now filter out the removed subscriptions from subscriptions.
            state['subscriptions'] = [s for s in state['subscriptions'] if not was_removed(s)]

        elif event['op'] == 'update':
            for sub in state['subscriptions']:
                if sub['name'].lower() == event['name'].lower():
                    sub[event['property']] = event['value']
        elif event['op'] == 'peer_add':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
            for sub in state['never_subscribed']:
                if (sub['name'] in event['subscriptions'] and
                        user_id not in sub['subscribers']):
                    sub['subscribers'].append(user_id)
        elif event['op'] == 'peer_remove':
            user_id = event['user_id']
            for sub in state['subscriptions']:
                if (sub['name'] in event['subscriptions'] and
                        user_id in sub['subscribers']):
                    sub['subscribers'].remove(user_id)
    elif event['type'] == "presence":
        if slim_presence:
            user_key = str(event['user_id'])
        else:
            user_key = event['email']
        state['presences'][user_key] = get_presence_for_user(
            event['user_id'], slim_presence)[user_key]
    elif event['type'] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if TOPIC_NAME in event:
            stream_dict = state['raw_unread_msgs']['stream_dict']
            topic = event[TOPIC_NAME]
            for message_id in event['message_ids']:
                if message_id in stream_dict:
                    stream_dict[message_id]['topic'] = topic
    elif event['type'] == "delete_message":
        # Recompute max_message_id from the database, since the deleted
        # message may have been the max.
        max_message = Message.objects.filter(
            usermessage__user_profile=user_profile).order_by('-id').first()
        if max_message:
            state['max_message_id'] = max_message.id
        else:
            state['max_message_id'] = -1

        if 'raw_unread_msgs' in state:
            remove_id = event['message_id']
            remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)

        # The remainder of this block is about maintaining recent_private_conversations
        if 'raw_recent_private_conversations' not in state or event['message_type'] != 'private':
            return

        recipient_id = get_recent_conversations_recipient_id(user_profile, event['recipient_id'],
                                                             event['sender_id'])

        # Ideally, we'd have test coverage for these two blocks.  To
        # do that, we'll need a test where we delete not-the-latest
        # messages or delete a private message not in
        # recent_private_conversations.
        if recipient_id not in state['raw_recent_private_conversations']:  # nocoverage
            return

        old_max_message_id = state['raw_recent_private_conversations'][recipient_id]['max_message_id']
        if old_max_message_id != event['message_id']:  # nocoverage
            return

        # OK, we just deleted what had been the max_message_id for
        # this recent conversation; we need to recompute that value
        # from scratch.  Definitely don't need to re-query everything,
        # but this case is likely rare enough that it's reasonable to do so.
        state['raw_recent_private_conversations'] = \
            get_recent_private_conversations(user_profile)
    elif event['type'] == "reaction":
        # The client will get the message with the reactions directly
        pass
    elif event['type'] == "submessage":
        # The client will get submessages with their messages
        pass
    elif event['type'] == 'typing':
        # Typing notification events are transient and thus ignored
        pass
    elif event['type'] == "attachment":
        # Attachment events are just for updating the "uploads" UI;
        # they are not sent directly.
        pass
    elif event['type'] == "update_message_flags":
        # We don't return messages in `/register`, so most flags we
        # can ignore, but we do need to update the unread_msgs data if
        # unread state is changed.
        if 'raw_unread_msgs' in state and event['flag'] == 'read' and event['operation'] == 'add':
            for remove_id in event['messages']:
                remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)
        if event['flag'] == 'starred' and 'starred_messages' in state:
            if event['operation'] == 'add':
                state['starred_messages'] += event['messages']
            if event['operation'] == 'remove':
                state['starred_messages'] = [message for message in state['starred_messages']
                                             if not (message in event['messages'])]
    elif event['type'] == "realm_domains":
        if event['op'] == 'add':
            state['realm_domains'].append(event['realm_domain'])
        elif event['op'] == 'change':
            for realm_domain in state['realm_domains']:
                if realm_domain['domain'] == event['realm_domain']['domain']:
                    realm_domain['allow_subdomains'] = event['realm_domain']['allow_subdomains']
        elif event['op'] == 'remove':
            state['realm_domains'] = [realm_domain for realm_domain in state['realm_domains']
                                      if realm_domain['domain'] != event['domain']]
    elif event['type'] == "realm_emoji":
        state['realm_emoji'] = event['realm_emoji']
    elif event['type'] == 'realm_export':
        # These realm export events are only available to
        # administrators, and aren't included in page_params.
        pass
    elif event['type'] == "alert_words":
        state['alert_words'] = event['alert_words']
    elif event['type'] == "muted_topics":
        state['muted_topics'] = event["muted_topics"]
    elif event['type'] == "realm_filters":
        state['realm_filters'] = event["realm_filters"]
    elif event['type'] == "update_display_settings":
        assert event['setting_name'] in UserProfile.property_types
        state[event['setting_name']] = event['setting']
    elif event['type'] == "update_global_notifications":
        assert event['notification_name'] in UserProfile.notification_setting_types
        state[event['notification_name']] = event['setting']
    elif event['type'] == "invites_changed":
        pass
    elif event['type'] == "user_group":
        if event['op'] == 'add':
            state['realm_user_groups'].append(event['group'])
            # Keep the group list sorted by id, matching the initial fetch.
            state['realm_user_groups'].sort(key=lambda group: group['id'])
        elif event['op'] == 'update':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group.update(event['data'])
        elif event['op'] == 'add_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group['members'].extend(event['user_ids'])
                    user_group['members'].sort()
        elif event['op'] == 'remove_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    members = set(user_group['members'])
                    user_group['members'] = list(members - set(event['user_ids']))
                    user_group['members'].sort()
        elif event['op'] == 'remove':
            state['realm_user_groups'] = [ug for ug in state['realm_user_groups']
                                          if ug['id'] != event['group_id']]
    elif event['type'] == 'user_status':
        user_id = event['user_id']
        user_status = state['user_status']
        # Both fields are optional in the event; None means "unchanged".
        away = event.get('away')
        status_text = event.get('status_text')

        if user_id not in user_status:
            user_status[user_id] = dict()

        if away is not None:
            if away:
                user_status[user_id]['away'] = True
            else:
                user_status[user_id].pop('away', None)

        if status_text is not None:
            if status_text == '':
                user_status[user_id].pop('status_text', None)
            else:
                user_status[user_id]['status_text'] = status_text

        # Drop users with no remaining status data entirely.
        if not user_status[user_id]:
            user_status.pop(user_id, None)

        state['user_status'] = user_status
    else:
        raise AssertionError("Unexpected event type %s" % (event['type'],))
|
2017-02-10 23:04:46 +01:00
|
|
|
|
2017-12-19 18:52:26 +01:00
|
|
|
def do_events_register(user_profile: UserProfile, user_client: Client,
                       apply_markdown: bool = True,
                       client_gravatar: bool = False,
                       slim_presence: bool = False,
                       event_types: Optional[Iterable[str]] = None,
                       queue_lifespan_secs: int = 0,
                       all_public_streams: bool = False,
                       include_subscribers: bool = True,
                       notification_settings_null: bool = False,
                       narrow: Optional[Iterable[Sequence[str]]] = None,
                       fetch_event_types: Optional[Iterable[str]] = None) -> Dict[str, Any]:
    """Register an event queue and compute the client's initial state.

    Allocates an event queue in Tornado, fetches the initial state data,
    applies any events that arrived during the fetch, and post-processes
    the result into the client-facing shape.

    Args:
        narrow: optional narrow filter terms; None (the default) means no
            narrow.  (Previously this parameter had a mutable `[]` default,
            which is the classic shared-mutable-default pitfall; None is
            equivalent and safe.)
        fetch_event_types: which event types to include in the initial
            state fetch; event_types (which also controls the queue's
            future events) is used as a fallback.

    Returns:
        The state dict, including 'queue_id' and 'last_event_id'.

    Raises:
        JsonableError: if Tornado could not allocate an event queue.
    """
    if narrow is None:
        narrow = []

    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If real email addresses are not available to the user, their
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile, user_client,
                                   apply_markdown, client_gravatar, slim_presence,
                                   queue_lifespan_secs, event_types, all_public_streams,
                                   narrow=narrow)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    reactivate_user_if_soft_deactivated(user_profile)

    ret = fetch_initial_state_data(user_profile, event_types_set, queue_id,
                                   client_gravatar=client_gravatar,
                                   slim_presence=slim_presence,
                                   include_subscribers=include_subscribers)

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(ret, events, user_profile, include_subscribers=include_subscribers,
                 client_gravatar=client_gravatar, slim_presence=slim_presence,
                 fetch_event_types=fetch_event_types)

    post_process_state(user_profile, ret, notification_settings_null)

    # Tell the client where to resume: the id of the last event already
    # folded into the initial state, or -1 if none were pending.
    ret['last_event_id'] = events[-1]['id'] if events else -1
    return ret
|
|
|
|
|
2019-02-13 10:22:16 +01:00
|
|
|
def post_process_state(user_profile: "UserProfile", ret: Dict[str, Any],
                       notification_settings_null: bool) -> None:
    '''
    Convert the intermediate state in `ret` (as produced by
    fetch_initial_state_data and then mutated by apply_events) into
    the final payload shape sent to clients.  Mutates `ret` in place,
    deleting the intermediate `raw_*` keys once they are reformatted.

    NOTE:

    Below is an example of post-processing initial state data AFTER we
    apply events.  For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.

    Then, only at the end, we put it in the form that's more appropriate
    for client.
    '''
    if 'raw_unread_msgs' in ret:
        ret['unread_msgs'] = aggregate_unread_data(ret['raw_unread_msgs'])
        del ret['raw_unread_msgs']

    # See the note above; the same technique applies below.
    if 'raw_users' in ret:
        user_dicts = sorted(ret['raw_users'].values(), key=lambda x: x['user_id'])

        ret['realm_users'] = [d for d in user_dicts if d['is_active']]
        ret['realm_non_active_users'] = [d for d in user_dicts if not d['is_active']]

        # Be aware that we do intentional aliasing in the below code:
        # the dicts in user_dicts are the same objects that were
        # partitioned into the two lists above, so we can now safely
        # remove the `is_active` field from all of them in one pass.
        #
        # We remove the field because it's already implied by which
        # list a user landed in, and sending it to clients makes
        # clients prone to bugs where they "trust" the field but don't
        # actually update it in live updates.  It also wastes bandwidth.
        for d in user_dicts:
            d.pop('is_active')

        del ret['raw_users']

    if 'raw_recent_private_conversations' in ret:
        # Reformat recent_private_conversations to be a list of
        # dictionaries (sorted newest-message-first), rather than a dict.
        ret['recent_private_conversations'] = sorted(
            (dict(value) for value in ret['raw_recent_private_conversations'].values()),
            key=lambda x: x["max_message_id"],
            reverse=True,
        )
        del ret['raw_recent_private_conversations']

    if not notification_settings_null and 'subscriptions' in ret:
        # Backwards-compatibility pass for clients that expect the
        # legacy per-stream notification settings format.
        for stream_dict in ret['subscriptions'] + ret['unsubscribed']:
            handle_stream_notifications_compatibility(user_profile, stream_dict,
                                                      notification_settings_null)
|