# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
# high-level documentation on how this system works.
import copy
from typing import Any, Callable, Dict, Iterable, Optional, Sequence, Set

from django.conf import settings
from django.utils.translation import ugettext as _

from version import API_FEATURE_LEVEL, ZULIP_VERSION
from zerver.lib.actions import (
    default_stream_groups_to_dicts_sorted,
    do_get_streams,
    gather_subscriptions_helper,
    get_available_notification_sounds,
    get_default_streams_for_realm,
    get_owned_bot_dicts,
    get_web_public_streams,
    get_web_public_subs,
    streams_to_dicts_sorted,
)
from zerver.lib.alert_words import user_alert_words
from zerver.lib.avatar import avatar_url
from zerver.lib.bot_config import load_bot_config_template
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.integrations import EMBEDDED_BOTS, WEBHOOK_INTEGRATIONS
from zerver.lib.message import (
    aggregate_unread_data,
    apply_unread_message_event,
    extract_unread_data_from_um_rows,
    get_raw_unread_data,
    get_recent_conversations_recipient_id,
    get_recent_private_conversations,
    get_starred_message_ids,
    remove_message_id_from_unread_mgs,
)
from zerver.lib.narrow import check_supported_events_narrow_filter, read_stop_words
from zerver.lib.presence import get_presence_for_user, get_presences_for_realm
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import get_realm_logo_source, get_realm_logo_url
from zerver.lib.request import JsonableError
from zerver.lib.soft_deactivation import reactivate_user_if_soft_deactivated
from zerver.lib.stream_subscription import handle_stream_notifications_compatibility
from zerver.lib.topic import TOPIC_NAME
from zerver.lib.topic_mutes import get_topic_mutes
from zerver.lib.user_groups import user_groups_in_realm_serialized
from zerver.lib.user_status import get_user_info_dict
from zerver.lib.users import get_cross_realm_dicts, get_raw_user_data, is_administrator_role
from zerver.models import (
    Client,
    CustomProfileField,
    Message,
    Realm,
    Stream,
    UserMessage,
    UserProfile,
    custom_profile_fields_for_realm,
    get_default_stream_groups,
    get_realm_domains,
    realm_filters_for_realm,
)
from zerver.tornado.django_api import get_user_events, request_event_queue
from zproject.backends import email_auth_enabled, password_auth_enabled


def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
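    """Populate the realm's day/night logo URLs and sources, plus the
    server's maximum logo file size, into `state`."""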
    state['realm_logo_url'] = get_realm_logo_url(realm, night=False)
    state['realm_logo_source'] = get_realm_logo_source(realm, night=False)
    state['realm_night_logo_url'] = get_realm_logo_url(realm, night=True)
    state['realm_night_logo_source'] = get_realm_logo_source(realm, night=True)
    state['max_logo_file_size'] = settings.MAX_LOGO_FILE_SIZE


def always_want(msg_type: str) -> bool:
    '''
    This function is used as a helper in
    fetch_initial_state_data, when the user passes
    in None for event_types, and we want to fetch
    info for every event type.  Defining this at module
    level makes it easier to mock.
    '''
    return True


def fetch_initial_state_data(
    user_profile: Optional[UserProfile],
    *,
    realm: Optional[Realm] = None,
    event_types: Optional[Iterable[str]] = None,
    queue_id: Optional[str] = "",
    client_gravatar: bool = False,
    user_avatar_url_field_optional: bool = False,
    slim_presence: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
) -> Dict[str, Any]:
    """When `event_types` is None, fetches the core data powering the
    webapp's `page_params` and `/api/v1/register` (for mobile/terminal
    apps).  Can also fetch a subset as determined by `event_types`.

    The user_profile=None code path is used for logged-out public
    access to streams with is_web_public=True.

    Whenever you add new code to this function, you should also add
    corresponding events for changes in the data structures and new
    code to apply_events (and add a test in test_events.py).
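
    A sketch of a typical "fetch everything" call (illustrative only;
    real call sites such as do_events_register derive these arguments
    from the client's request):

        state = fetch_initial_state_data(
            user_profile,
            realm=user_profile.realm,
            event_types=None,
            client_gravatar=True,
        )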
    """
    if realm is None:
        assert user_profile is not None
        realm = user_profile.realm

    state: Dict[str, Any] = {'queue_id': queue_id}

    if event_types is None:
        # always_want returns True for every event type.
        want: Callable[[str], bool] = always_want
    else:
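        # set(event_types).__contains__ is a one-argument callable, so
        # want('realm') is simply 'realm' in event_types.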
        want = set(event_types).__contains__

    # Show the version info unconditionally.
    state['zulip_version'] = ZULIP_VERSION
    state['zulip_feature_level'] = API_FEATURE_LEVEL

    if want('alert_words'):
        state['alert_words'] = [] if user_profile is None else user_alert_words(user_profile)

    if want('custom_profile_fields'):
        fields = custom_profile_fields_for_realm(realm.id)
        state['custom_profile_fields'] = [f.as_dict() for f in fields]
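        # Rows of CustomProfileField.ALL_FIELD_TYPES are tuples; the dict
        # below is keyed by each field type's keyword (index 4) and carries
        # its numeric ID (index 0) and name (index 1).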
        state['custom_profile_field_types'] = {
            item[4]: {"id": item[0], "name": str(item[1])} for item in CustomProfileField.ALL_FIELD_TYPES
        }

    if want('hotspots'):
        # Even if we offered special hotspots for guests without an
        # account, we'd maybe need to store their state using cookies
        # or local storage, rather than in the database.
        state['hotspots'] = [] if user_profile is None else get_next_hotspots(user_profile)

    if want('message'):
        # Since the introduction of `anchor="latest"` in the API,
        # `max_message_id` is primarily used for generating `local_id`
        # values that are higher than this.  We likely can eventually
        # remove this parameter from the API.
        user_messages = []
        if user_profile is not None:
            user_messages = UserMessage.objects \
                .filter(user_profile=user_profile) \
                .order_by('-message_id') \
                .values('message_id')[:1]
        if user_messages:
            state['max_message_id'] = user_messages[0]['message_id']
        else:
            state['max_message_id'] = -1

    if want('muted_topics'):
        state['muted_topics'] = [] if user_profile is None else get_topic_mutes(user_profile)

    if want('presence'):
        state['presences'] = {} if user_profile is None else get_presences_for_realm(realm, slim_presence)

    if want('realm'):
        for property_name in Realm.property_types:
            state['realm_' + property_name] = getattr(realm, property_name)

        # Most state is handled via the property_types framework;
        # these manual entries are for those realm settings that don't
        # fit into that framework.
        state['realm_authentication_methods'] = realm.authentication_methods_dict()

        # We pretend these features are disabled because guests can't
        # access them.  In the future, we may want to move this logic
        # to the frontends, so that we can correctly display what
        # these fields are in the settings.
        state['realm_allow_message_editing'] = False if user_profile is None else realm.allow_message_editing
        state['realm_allow_community_topic_editing'] = False if user_profile is None else realm.allow_community_topic_editing
        state['realm_allow_message_deleting'] = False if user_profile is None else realm.allow_message_deleting

        state['realm_message_content_edit_limit_seconds'] = realm.message_content_edit_limit_seconds
        state['realm_message_content_delete_limit_seconds'] = realm.message_content_delete_limit_seconds
        state['realm_community_topic_editing_limit_seconds'] = \
            Realm.DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS

        # This setting determines whether to send presence data and
        # whether to display the users list in the right sidebar; we
        # want both behaviors for logged-out users.  We may in the
        # future choose to move this logic to the frontend.
        state['realm_presence_disabled'] = True if user_profile is None else realm.presence_disabled

        state['realm_icon_url'] = realm_icon_url(realm)
        state['realm_icon_source'] = realm.icon_source
        state['max_icon_file_size'] = settings.MAX_ICON_FILE_SIZE
        add_realm_logo_fields(state, realm)
        state['realm_bot_domain'] = realm.get_bot_domain()
        state['realm_uri'] = realm.uri
        state['realm_available_video_chat_providers'] = realm.VIDEO_CHAT_PROVIDERS
        state['settings_send_digest_emails'] = settings.SEND_DIGEST_EMAILS
        state['realm_digest_emails_enabled'] = realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        state['realm_is_zephyr_mirror_realm'] = realm.is_zephyr_mirror_realm
        state['realm_email_auth_enabled'] = email_auth_enabled(realm)
        state['realm_password_auth_enabled'] = password_auth_enabled(realm)
        state['realm_push_notifications_enabled'] = push_notifications_enabled()
        state['realm_upload_quota'] = realm.upload_quota_bytes()
        state['realm_plan_type'] = realm.plan_type
        state['zulip_plan_is_not_limited'] = realm.plan_type != Realm.LIMITED
        state['upgrade_text_for_wide_organization_logo'] = str(Realm.UPGRADE_TEXT_STANDARD)
        state['realm_default_external_accounts'] = DEFAULT_EXTERNAL_ACCOUNTS
        state['jitsi_server_url'] = settings.JITSI_SERVER_URL.rstrip('/')
        state['development_environment'] = settings.DEVELOPMENT
        state['server_generation'] = settings.SERVER_GENERATION
        state['password_min_length'] = settings.PASSWORD_MIN_LENGTH
        state['password_min_guesses'] = settings.PASSWORD_MIN_GUESSES
        state['max_file_upload_size_mib'] = settings.MAX_FILE_UPLOAD_SIZE
        state['max_avatar_file_size_mib'] = settings.MAX_AVATAR_FILE_SIZE
        state['server_inline_image_preview'] = settings.INLINE_IMAGE_PREVIEW
        state['server_inline_url_embed_preview'] = settings.INLINE_URL_EMBED_PREVIEW
        state['server_avatar_changes_disabled'] = settings.AVATAR_CHANGES_DISABLED
        state['server_name_changes_disabled'] = settings.NAME_CHANGES_DISABLED

        if realm.notifications_stream and not realm.notifications_stream.deactivated:
            notifications_stream = realm.notifications_stream
            state['realm_notifications_stream_id'] = notifications_stream.id
        else:
            state['realm_notifications_stream_id'] = -1

        signup_notifications_stream = realm.get_signup_notifications_stream()
        if signup_notifications_stream:
            state['realm_signup_notifications_stream_id'] = signup_notifications_stream.id
        else:
            state['realm_signup_notifications_stream_id'] = -1

    if want('realm_domains'):
        state['realm_domains'] = get_realm_domains(realm)

    if want('realm_emoji'):
        state['realm_emoji'] = realm.get_emoji()

    if want('realm_filters'):
        state['realm_filters'] = realm_filters_for_realm(realm.id)

    if want('realm_user_groups'):
        state['realm_user_groups'] = user_groups_in_realm_serialized(realm)

    if user_profile is not None:
        settings_user = user_profile
    else:
        # When user_profile is None, we want to serve the values for
        # various settings as the defaults.  Instead of copying the
        # default values from models.py here, we access these default
        # values from a temporary UserProfile object that will not be
        # saved to the database.
        #
        # We also can set various fields to avoid duplicating code
        # unnecessarily.
        settings_user = UserProfile(
            full_name="Anonymous User",
            email="username@example.com",
            delivery_email="username@example.com",
            realm=realm,
            # We tag logged-out users as guests because most guest
            # restrictions apply to these users as well, and it lets
            # us avoid unnecessary conditionals.
            role=UserProfile.ROLE_GUEST,
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            # ID=0 is not used in real Zulip databases, ensuring this is unique.
            id=0,
        )
    if want('realm_user'):
        state['raw_users'] = get_raw_user_data(realm, user_profile,
                                               client_gravatar=client_gravatar,
                                               user_avatar_url_field_optional=user_avatar_url_field_optional)
        state['cross_realm_bots'] = list(get_cross_realm_dicts())

        # For the user's own avatar URL, we force
        # client_gravatar=False, since that saves some unnecessary
        # client-side code for handling medium-size avatars.  See #8253
        # for details.
        state['avatar_source'] = settings_user.avatar_source
        state['avatar_url_medium'] = avatar_url(
            settings_user,
            medium=True,
            client_gravatar=False,
        )
        state['avatar_url'] = avatar_url(
            settings_user,
            medium=False,
            client_gravatar=False,
        )

        state['can_create_streams'] = settings_user.can_create_streams()
        state['can_subscribe_other_users'] = settings_user.can_subscribe_other_users()
        state['is_admin'] = settings_user.is_realm_admin
        state['is_owner'] = settings_user.is_realm_owner
        state['is_guest'] = settings_user.is_guest
        state['user_id'] = settings_user.id
        state['enter_sends'] = settings_user.enter_sends
        state['email'] = settings_user.email
        state['delivery_email'] = settings_user.delivery_email
        state['full_name'] = settings_user.full_name

    if want('realm_bot'):
        state['realm_bots'] = [] if user_profile is None else get_owned_bot_dicts(user_profile)

    # This does not yet have an apply_event counterpart, since currently,
    # new entries for EMBEDDED_BOTS can only be added directly in the codebase.
    if want('realm_embedded_bots'):
        realm_embedded_bots = []
        for bot in EMBEDDED_BOTS:
            realm_embedded_bots.append({'name': bot.name,
                                        'config': load_bot_config_template(bot.name)})
        state['realm_embedded_bots'] = realm_embedded_bots

    # This does not have an apply_events counterpart either, since
    # this data is mostly static.
    if want('realm_incoming_webhook_bots'):
        realm_incoming_webhook_bots = []
        for integration in WEBHOOK_INTEGRATIONS:
            realm_incoming_webhook_bots.append({
                'name': integration.name,
                'config': {c[1]: c[0] for c in integration.config_options},
            })
        state['realm_incoming_webhook_bots'] = realm_incoming_webhook_bots

    if want('recent_private_conversations'):
        # A data structure containing records of this form:
        #
        #   [{'max_message_id': 700175, 'user_ids': [801]}]
        #
        # for all recent private message conversations, ordered by the
        # highest message ID in the conversation.  The user_ids list
        # is the list of users other than the current user in the
        # private message conversation (so it is [] for PMs to self).
        # Note that raw_recent_private_conversations is an
        # intermediate form as a dictionary keyed by recipient_id,
        # which is more efficient to update, and is rewritten to the
        # final format in post_process_state.
        state['raw_recent_private_conversations'] = {} if user_profile is None else get_recent_private_conversations(user_profile)

    if want('subscription'):
        if user_profile is not None:
            sub_info = gather_subscriptions_helper(
                user_profile,
                include_subscribers=include_subscribers,
            )
        else:
            sub_info = get_web_public_subs(realm)

        state['subscriptions'] = sub_info.subscriptions
        state['unsubscribed'] = sub_info.unsubscribed
        state['never_subscribed'] = sub_info.never_subscribed

    if want('update_message_flags') and want('message'):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates.  This is because new messages do not generate a
        # flag update, so we need to use the flags field in the message
        # event.

        if user_profile is not None:
            state['raw_unread_msgs'] = get_raw_unread_data(user_profile)
        else:
            # For logged-out visitors, we treat all messages as read;
            # calling this helper lets us return empty objects in the
            # appropriate format.
            state['raw_unread_msgs'] = extract_unread_data_from_um_rows([], user_profile)

    if want('starred_messages'):
        state['starred_messages'] = [] if user_profile is None else get_starred_message_ids(user_profile)

    if want('stream'):
        if include_streams:
            # The webapp doesn't use the data from here; instead,
            # it uses data from state["subscriptions"] and other
            # places.
            if user_profile is not None:
                state['streams'] = do_get_streams(user_profile)
            else:
                # TODO: This line isn't used by the webapp because it
                # gets these data via the `subscriptions` key; it will
                # be used when the mobile apps support logged-out
                # access.
                state['streams'] = get_web_public_streams(realm)  # nocoverage
        state['stream_name_max_length'] = Stream.MAX_NAME_LENGTH
        state['stream_description_max_length'] = Stream.MAX_DESCRIPTION_LENGTH

    if want('default_streams'):
        if settings_user.is_guest:
            # Guest users and logged-out users don't have access to
            # all default streams, so we pretend the organization
            # doesn't have any.
            state['realm_default_streams'] = []
        else:
            state['realm_default_streams'] = streams_to_dicts_sorted(
                get_default_streams_for_realm(realm.id))

    if want('default_stream_groups'):
        if settings_user.is_guest:
            state['realm_default_stream_groups'] = []
        else:
            state['realm_default_stream_groups'] = default_stream_groups_to_dicts_sorted(
                get_default_stream_groups(realm))

    if want('stop_words'):
        state['stop_words'] = read_stop_words()

    if want('update_display_settings'):
        for prop in UserProfile.property_types:
            state[prop] = getattr(settings_user, prop)
        state['emojiset_choices'] = UserProfile.emojiset_choices()

    if want('update_global_notifications'):
        for notification in UserProfile.notification_setting_types:
            state[notification] = getattr(settings_user, notification)
        state['available_notification_sounds'] = get_available_notification_sounds()

    if want('user_status'):
        # We require creating an account to access statuses.
        state['user_status'] = {} if user_profile is None else get_user_info_dict(realm_id=realm.id)

    if want('video_calls'):
        state['has_zoom_token'] = settings_user.zoom_token is not None

    return state


def apply_events(state: Dict[str, Any], events: Iterable[Dict[str, Any]],
                 user_profile: UserProfile, client_gravatar: bool,
                 slim_presence: bool, include_subscribers: bool = True,
                 fetch_event_types: Optional[Iterable[str]] = None) -> None:
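    """Apply each event in `events` to `state`, mutating it in place so
    that it matches what fetch_initial_state_data would return after
    those events.
    """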
    for event in events:
        if fetch_event_types is not None and event['type'] not in fetch_event_types:
            # TODO: continuing here is not, most precisely, correct.
            # In theory, an event of one type, e.g. `realm_user`,
            # could modify state that doesn't come from that
            # `fetch_event_types` value, e.g. the `our_person` part of
            # that code path.  But it should be extremely rare, and
            # fixing that will require a nontrivial refactor of
            # `apply_event`.  For now, be careful in your choice of
            # `fetch_event_types`.
            continue
        apply_event(state, event, user_profile,
                    client_gravatar, slim_presence, include_subscribers)


def apply_event(state: Dict[str, Any],
                event: Dict[str, Any],
                user_profile: UserProfile,
                client_gravatar: bool,
                slim_presence: bool,
                include_subscribers: bool) -> None:
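    """Apply a single event to `state` in place.  Each branch below is
    the live-update counterpart of a data structure populated in
    fetch_initial_state_data.
    """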
    if event['type'] == "message":
        state['max_message_id'] = max(state['max_message_id'], event['message']['id'])
        if 'raw_unread_msgs' in state:
            apply_unread_message_event(
                user_profile,
                state['raw_unread_msgs'],
                event['message'],
                event['flags'],
            )

        if event['message']['type'] != "stream":
            if 'raw_recent_private_conversations' in state:
                # Handle maintaining the recent_private_conversations data structure.
                conversations = state['raw_recent_private_conversations']
                recipient_id = get_recent_conversations_recipient_id(
                    user_profile, event['message']['recipient_id'],
                    event['message']["sender_id"])

                if recipient_id not in conversations:
                    conversations[recipient_id] = dict(
                        user_ids=sorted(user_dict['id'] for user_dict in
                                        event['message']['display_recipient'] if
                                        user_dict['id'] != user_profile.id),
                    )
                conversations[recipient_id]['max_message_id'] = event['message']['id']
            return

        # Below, we handle maintaining first_message_id.
        for sub_dict in state.get('subscriptions', []):
            if event['message']['stream_id'] == sub_dict['stream_id']:
                if sub_dict['first_message_id'] is None:
                    sub_dict['first_message_id'] = event['message']['id']
        for stream_dict in state.get('streams', []):
            if event['message']['stream_id'] == stream_dict['stream_id']:
                if stream_dict['first_message_id'] is None:
                    stream_dict['first_message_id'] = event['message']['id']

    elif event['type'] == "hotspots":
        state['hotspots'] = event['hotspots']
    elif event['type'] == "custom_profile_fields":
        state['custom_profile_fields'] = event['fields']
    elif event['type'] == "realm_user":
        person = event['person']
        person_user_id = person['user_id']

        if event['op'] == "add":
            person = copy.deepcopy(person)
            if client_gravatar:
                if person['avatar_url'].startswith("https://secure.gravatar.com"):
                    person['avatar_url'] = None
            person['is_active'] = True
            if not person['is_bot']:
                person['profile_data'] = {}
            state['raw_users'][person_user_id] = person
        elif event['op'] == "remove":
            state['raw_users'][person_user_id]['is_active'] = False
        elif event['op'] == 'update':
            is_me = (person_user_id == user_profile.id)

            if is_me:
                if 'avatar_url' in person and 'avatar_url' in state:
                    state['avatar_source'] = person['avatar_source']
                    state['avatar_url'] = person['avatar_url']
                    state['avatar_url_medium'] = person['avatar_url_medium']

                if 'role' in person:
                    state['is_admin'] = is_administrator_role(person['role'])
                    state['is_owner'] = person['role'] == UserProfile.ROLE_REALM_OWNER
                    state['is_guest'] = person['role'] == UserProfile.ROLE_GUEST
                    # Recompute properties based on is_admin/is_guest
                    state['can_create_streams'] = user_profile.can_create_streams()
                    state['can_subscribe_other_users'] = user_profile.can_subscribe_other_users()

                    # TODO: Probably rather than writing the perfect
                    # live-update code for the case of racing with the
                    # current user changing roles, we should just do a
                    # full refetch.
                    if 'never_subscribed' in state:
                        sub_info = gather_subscriptions_helper(
                            user_profile,
                            include_subscribers=include_subscribers,
                        )
                        state['subscriptions'] = sub_info.subscriptions
                        state['unsubscribed'] = sub_info.unsubscribed
                        state['never_subscribed'] = sub_info.never_subscribed

                    if 'streams' in state:
                        state['streams'] = do_get_streams(user_profile)

                for field in ['delivery_email', 'email', 'full_name']:
                    if field in person and field in state:
                        state[field] = person[field]

                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm.  This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if 'role' in person and 'realm_bots' in state:
                    prev_state = state['raw_users'][user_profile.id]
                    was_admin = prev_state['is_admin']
                    now_admin = is_administrator_role(person['role'])

                    if was_admin and not now_admin:
                        state['realm_bots'] = []
                    if not was_admin and now_admin:
                        state['realm_bots'] = get_owned_bot_dicts(user_profile)

            if client_gravatar and 'avatar_url' in person:
                # Respect the client_gravatar setting in the `users` data.
                if person['avatar_url'].startswith("https://secure.gravatar.com"):
                    person['avatar_url'] = None
                    person['avatar_url_medium'] = None

            if person_user_id in state['raw_users']:
                p = state['raw_users'][person_user_id]
                for field in p:
                    if field in person:
                        p[field] = person[field]
                if 'role' in person:
                    p['is_admin'] = is_administrator_role(person['role'])
                    p['is_owner'] = person['role'] == UserProfile.ROLE_REALM_OWNER
                    p['is_guest'] = person['role'] == UserProfile.ROLE_GUEST
                if 'custom_profile_field' in person:
                    custom_field_id = person['custom_profile_field']['id']
                    custom_field_new_value = person['custom_profile_field']['value']
                    if 'rendered_value' in person['custom_profile_field']:
                        p['profile_data'][str(custom_field_id)] = {
                            'value': custom_field_new_value,
                            'rendered_value': person['custom_profile_field']['rendered_value'],
                        }
                    else:
                        p['profile_data'][str(custom_field_id)] = {
                            'value': custom_field_new_value,
                        }

    elif event['type'] == 'realm_bot':
        if event['op'] == 'add':
            state['realm_bots'].append(event['bot'])

        if event['op'] == 'remove':
            user_id = event['bot']['user_id']
            for bot in state['realm_bots']:
                if bot['user_id'] == user_id:
                    bot['is_active'] = False

        if event['op'] == 'delete':
            state['realm_bots'] = [item for item
                                   in state['realm_bots'] if item['user_id'] != event['bot']['user_id']]

        if event['op'] == 'update':
            for bot in state['realm_bots']:
                if bot['user_id'] == event['bot']['user_id']:
                    if 'owner_id' in event['bot']:
                        bot_owner_id = event['bot']['owner_id']
                        bot['owner_id'] = bot_owner_id
                    else:
                        bot.update(event['bot'])

    elif event['type'] == 'stream':
        if event['op'] == 'create':
            for stream in event['streams']:
                if not stream['invite_only']:
                    stream_data = copy.deepcopy(stream)
                    if include_subscribers:
                        stream_data['subscribers'] = []

                    # We know the stream has no traffic, and this
                    # field is not present in the event.
                    #
                    # TODO: Probably this should just be added to the event.
                    stream_data['stream_weekly_traffic'] = None

                    # Add stream to never_subscribed (if not invite_only)
                    state['never_subscribed'].append(stream_data)
                if 'streams' in state:
                    state['streams'].append(stream)

            if 'streams' in state:
                state['streams'].sort(key=lambda elt: elt["name"])

        if event['op'] == 'delete':
            deleted_stream_ids = {stream['stream_id'] for stream in event['streams']}
            if 'streams' in state:
                state['streams'] = [s for s in state['streams'] if s['stream_id'] not in deleted_stream_ids]
            state['never_subscribed'] = [stream for stream in state['never_subscribed'] if
                                         stream['stream_id'] not in deleted_stream_ids]

        if event['op'] == 'update':
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for obj in state['subscriptions']:
                if obj['name'].lower() == event['name'].lower():
                    obj[event['property']] = event['value']
                    if event['property'] == "description":
                        obj['rendered_description'] = event['rendered_description']
            # Also update the pure streams data
            if 'streams' in state:
                for stream in state['streams']:
                    if stream['name'].lower() == event['name'].lower():
                        prop = event['property']
                        if prop in stream:
                            stream[prop] = event['value']
                            if prop == 'description':
                                stream['rendered_description'] = event['rendered_description']
    elif event['type'] == 'default_streams':
        state['realm_default_streams'] = event['default_streams']
    elif event['type'] == 'default_stream_groups':
        state['realm_default_stream_groups'] = event['default_stream_groups']
    elif event['type'] == 'realm':
        if event['op'] == "update":
            field = 'realm_' + event['property']
            state[field] = event['value']

            if event['property'] == 'plan_type':
                # Then there are some extra fields that also need to be set.
                state['zulip_plan_is_not_limited'] = event['value'] != Realm.LIMITED
                state['realm_upload_quota'] = event['extra_data']['upload_quota']

            policy_permission_dict = {'create_stream_policy': 'can_create_streams',
                                      'invite_to_stream_policy': 'can_subscribe_other_users'}

            # Tricky interaction: whether this user can create streams
            # and can subscribe other users can change here.

            if field == 'realm_waiting_period_threshold':
                for policy, permission in policy_permission_dict.items():
                    if permission in state:
                        state[permission] = user_profile.has_permission(policy)

            if event['property'] in policy_permission_dict.keys():
                if policy_permission_dict[event['property']] in state:
                    state[policy_permission_dict[event['property']]] = user_profile.has_permission(
                        event['property'])

        elif event['op'] == "update_dict":
            for key, value in event['data'].items():
                state['realm_' + key] = value
                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == 'authentication_methods':
                    state['realm_password_auth_enabled'] = (value['Email'] or value['LDAP'])
                    state['realm_email_auth_enabled'] = value['Email']
    elif event['type'] == "subscription":
        if not include_subscribers and event['op'] in ['peer_add', 'peer_remove']:
            return

        if event['op'] in ["add"]:
            if not include_subscribers:
                # Avoid letting 'subscribers' entries end up in the list
                for i, sub in enumerate(event['subscriptions']):
                    event['subscriptions'][i] = copy.deepcopy(event['subscriptions'][i])
                    del event['subscriptions'][i]['subscribers']
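
        # Subscription add/remove events identify streams by name, so the
        # helpers below match on the lowercased stream name.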
        def name(sub: Dict[str, Any]) -> str:
            return sub['name'].lower()

        if event['op'] == "add":
            added_names = set(map(name, event["subscriptions"]))
            was_added = lambda s: name(s) in added_names

            # add the new subscriptions
            state['subscriptions'] += event['subscriptions']

            # remove them from unsubscribed if they had been there
            state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)]

            # remove them from never_subscribed if they had been there
            state['never_subscribed'] = [s for s in state['never_subscribed'] if not was_added(s)]

        elif event['op'] == "remove":
            removed_names = set(map(name, event["subscriptions"]))
            was_removed = lambda s: name(s) in removed_names

            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state['subscriptions']))

            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub['subscribers'].remove(user_profile.id)

            # We must effectively copy the removed subscriptions from subscriptions to
            # unsubscribed, since we only have the name in our data structure.
            state['unsubscribed'] += removed_subs

            # Now filter out the removed subscriptions from subscriptions.
            state['subscriptions'] = [s for s in state['subscriptions'] if not was_removed(s)]

        elif event['op'] == 'update':
            for sub in state['subscriptions']:
                if sub['name'].lower() == event['name'].lower():
                    sub[event['property']] = event['value']
        elif event['op'] == 'peer_add':
            stream_ids = set(event["stream_ids"])
            user_ids = set(event["user_ids"])
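            # Add the new subscriber IDs to every affected stream's
            # subscriber list, in all three subscription buckets, keeping
            # each list sorted.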
            for sub_dict in [state["subscriptions"], state['unsubscribed'], state["never_subscribed"]]:
                for sub in sub_dict:
                    if sub["stream_id"] in stream_ids:
                        subscribers = set(sub["subscribers"]) | user_ids
                        sub["subscribers"] = sorted(subscribers)
        elif event['op'] == 'peer_remove':
            stream_ids = set(event["stream_ids"])
            user_ids = set(event["user_ids"])
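            # Mirror image of peer_add: drop the removed subscriber IDs
            # from every affected stream's subscriber list.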
            for sub_dict in [state["subscriptions"], state['unsubscribed'], state['never_subscribed']]:
                for sub in sub_dict:
                    if sub["stream_id"] in stream_ids:
                        subscribers = set(sub["subscribers"]) - user_ids
                        sub["subscribers"] = sorted(subscribers)
    elif event['type'] == "presence":
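        # Presence data is keyed by user ID (as a string) in the modern
        # "slim" format and by email in the legacy format.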
        if slim_presence:
            user_key = str(event['user_id'])
        else:
            user_key = event['email']
        state['presences'][user_key] = get_presence_for_user(
            event['user_id'], slim_presence)[user_key]
    elif event['type'] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if 'new_stream_id' in event:
            stream_dict = state['raw_unread_msgs']['stream_dict']
            stream_id = event['new_stream_id']
            for message_id in event['message_ids']:
                if message_id in stream_dict:
                    stream_dict[message_id]['stream_id'] = stream_id
|
|
|
|
|
2018-11-08 15:01:45 +01:00
|
|
|
        if TOPIC_NAME in event:
            stream_dict = state['raw_unread_msgs']['stream_dict']
            topic = event[TOPIC_NAME]
            for message_id in event['message_ids']:
                if message_id in stream_dict:
                    stream_dict[message_id]['topic'] = topic
elif event['type'] == "delete_message":
|
2020-06-10 13:47:08 +02:00
|
|
|
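        # Clients that declared the bulk_message_deletion capability receive
        # a single event carrying a 'message_ids' list; legacy clients get
        # one event per deleted message, with a single 'message_id'.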
        if 'message_id' in event:
            message_ids = [event['message_id']]
        else:
            message_ids = event['message_ids']  # nocoverage
        max_message = Message.objects.filter(
            usermessage__user_profile=user_profile).order_by('-id').first()
        if max_message:
            state['max_message_id'] = max_message.id
        else:
            state['max_message_id'] = -1

        if 'raw_unread_msgs' in state:
            for remove_id in message_ids:
                remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)

        # The remainder of this block is about maintaining recent_private_conversations
        if 'raw_recent_private_conversations' not in state or event['message_type'] != 'private':
            return

        recipient_id = get_recent_conversations_recipient_id(user_profile, event['recipient_id'],
                                                             event['sender_id'])

        # Ideally, we'd have test coverage for these two blocks.  To
        # do that, we'll need a test where we delete not-the-latest
        # messages or delete a private message not in
        # recent_private_conversations.
        if recipient_id not in state['raw_recent_private_conversations']:  # nocoverage
            return

        old_max_message_id = state['raw_recent_private_conversations'][recipient_id]['max_message_id']
        if old_max_message_id not in message_ids:  # nocoverage
            return

        # OK, we just deleted what had been the max_message_id for
        # this recent conversation; we need to recompute that value
        # from scratch.  Definitely don't need to re-query everything,
        # but this case is likely rare enough that it's reasonable to do so.
        state['raw_recent_private_conversations'] = \
            get_recent_private_conversations(user_profile)
elif event['type'] == "reaction":
|
|
|
|
# The client will get the message with the reactions directly
|
2018-02-12 10:53:36 +01:00
|
|
|
pass
|
|
|
|
elif event['type'] == "submessage":
|
|
|
|
# The client will get submessages with their messages
|
2017-02-20 20:09:48 +01:00
|
|
|
pass
|
2017-03-18 03:50:41 +01:00
|
|
|
elif event['type'] == 'typing':
|
|
|
|
# Typing notification events are transient and thus ignored
|
|
|
|
pass
|
2018-05-04 22:57:36 +02:00
|
|
|
elif event['type'] == "attachment":
|
|
|
|
# Attachment events are just for updating the "uploads" UI;
|
|
|
|
# they are not sent directly.
|
|
|
|
pass
|
2017-02-20 20:09:48 +01:00
|
|
|
elif event['type'] == "update_message_flags":
|
Simplify how we apply events for unread messages.
The logic to apply events to page_params['unread_msgs'] was
complicated due to the aggregated data structures that we pass
down to the client.
Now we defer the aggregation logic until after we apply the
events. This leads to some simplifications in that codepath,
as well as some performance enhancements.
The intermediate data structure has sets and dictionaries that
generally are keyed by message_id, so most message-related
updates are O(1) in nature.
Also, by waiting to compute the counts until the end, it's a
bit less messy to try to keep track of increments/decrements.
Instead, we just update the dictionaries and sets during the
event-apply phase.
This change also fixes some corner cases:
* We now respect mutes when updating counts.
* For message updates, instead of bluntly updating
the whole topic bucket, we update individual
message ids.
Unfortunately, this change doesn't seem to address the pesky
test that fails sporadically on Travis, related to mention
updates. It will change the symptom, slightly, though.
2017-10-05 00:34:19 +02:00
|
|
|
# We don't return messages in `/register`, so most flags we
|
|
|
|
# can ignore, but we do need to update the unread_msgs data if
|
|
|
|
# unread state is changed.
|
2020-08-19 21:51:47 +02:00
|
|
|
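        # For example, marking two messages as read might arrive as
        #     {'type': 'update_message_flags', 'op': 'add',
        #      'flag': 'read', 'messages': [101, 102]}
        # (illustrative IDs); each listed message is then dropped from
        # raw_unread_msgs.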
        if 'raw_unread_msgs' in state and event['flag'] == 'read' and event['op'] == 'add':
            for remove_id in event['messages']:
                remove_message_id_from_unread_mgs(state['raw_unread_msgs'], remove_id)

        if event['flag'] == 'starred' and 'starred_messages' in state:
            if event['op'] == 'add':
                state['starred_messages'] += event['messages']
            if event['op'] == 'remove':
                state['starred_messages'] = [message for message in state['starred_messages']
                                             if message not in event['messages']]
elif event['type'] == "realm_domains":
|
|
|
|
if event['op'] == 'add':
|
2017-03-31 20:10:29 +02:00
|
|
|
state['realm_domains'].append(event['realm_domain'])
|
2017-02-20 20:09:48 +01:00
|
|
|
elif event['op'] == 'change':
|
|
|
|
for realm_domain in state['realm_domains']:
|
2017-03-31 20:10:29 +02:00
|
|
|
if realm_domain['domain'] == event['realm_domain']['domain']:
|
|
|
|
realm_domain['allow_subdomains'] = event['realm_domain']['allow_subdomains']
|
2017-02-20 20:09:48 +01:00
|
|
|
elif event['op'] == 'remove':
|
2017-03-31 20:10:29 +02:00
|
|
|
state['realm_domains'] = [realm_domain for realm_domain in state['realm_domains']
|
|
|
|
if realm_domain['domain'] != event['domain']]
|
2017-02-20 20:09:48 +01:00
|
|
|
elif event['type'] == "realm_emoji":
|
|
|
|
state['realm_emoji'] = event['realm_emoji']
|
2019-06-24 02:51:13 +02:00
|
|
|
elif event['type'] == 'realm_export':
|
|
|
|
# These realm export events are only available to
|
|
|
|
# administrators, and aren't included in page_params.
|
|
|
|
pass
|
2017-02-20 20:09:48 +01:00
|
|
|
elif event['type'] == "alert_words":
|
|
|
|
state['alert_words'] = event['alert_words']
|
|
|
|
elif event['type'] == "muted_topics":
|
|
|
|
state['muted_topics'] = event["muted_topics"]
|
|
|
|
elif event['type'] == "realm_filters":
|
|
|
|
state['realm_filters'] = event["realm_filters"]
|
|
|
|
elif event['type'] == "update_display_settings":
|
2017-07-07 23:26:44 +02:00
|
|
|
assert event['setting_name'] in UserProfile.property_types
|
|
|
|
state[event['setting_name']] = event['setting']
|
2017-02-20 20:09:48 +01:00
|
|
|
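        # e.g. a hypothetical {'setting_name': 'twenty_four_hour_time',
        # 'setting': True} event just mirrors the new value into state.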
elif event['type'] == "update_global_notifications":
|
2017-07-07 23:26:44 +02:00
|
|
|
assert event['notification_name'] in UserProfile.notification_setting_types
|
|
|
|
state[event['notification_name']] = event['setting']
|
2017-12-14 22:22:17 +01:00
|
|
|
elif event['type'] == "invites_changed":
|
|
|
|
pass
|
2017-11-14 07:31:31 +01:00
|
|
|
elif event['type'] == "user_group":
|
|
|
|
if event['op'] == 'add':
|
|
|
|
state['realm_user_groups'].append(event['group'])
|
|
|
|
state['realm_user_groups'].sort(key=lambda group: group['id'])
|
2017-11-14 08:00:18 +01:00
|
|
|
elif event['op'] == 'update':
|
|
|
|
for user_group in state['realm_user_groups']:
|
|
|
|
if user_group['id'] == event['group_id']:
|
|
|
|
user_group.update(event['data'])
|
2017-11-14 08:01:39 +01:00
|
|
|
        elif event['op'] == 'add_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    user_group['members'].extend(event['user_ids'])
                    user_group['members'].sort()
        elif event['op'] == 'remove_members':
            for user_group in state['realm_user_groups']:
                if user_group['id'] == event['group_id']:
                    members = set(user_group['members'])
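                    # Set difference does not preserve order, so re-sort;
                    # e.g. members [3, 5, 8] minus user_ids [5] leaves
                    # [3, 8] (illustrative IDs).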
                    user_group['members'] = sorted(members - set(event['user_ids']))
        elif event['op'] == 'remove':
            state['realm_user_groups'] = [ug for ug in state['realm_user_groups']
                                          if ug['id'] != event['group_id']]
    elif event['type'] == 'user_status':
        user_id_str = str(event['user_id'])
        user_status = state['user_status']
        away = event.get('away')
        status_text = event.get('status_text')

        if user_id_str not in user_status:
            user_status[user_id_str] = {}

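        # A hypothetical event like
        #     {'type': 'user_status', 'user_id': 10,
        #      'away': True, 'status_text': 'in a meeting'}
        # leaves user_status['10'] == {'away': True,
        # 'status_text': 'in a meeting'}; clearing both fields later
        # removes the '10' entry entirely (see below).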
        if away is not None:
            if away:
                user_status[user_id_str]['away'] = True
            else:
                user_status[user_id_str].pop('away', None)

        if status_text is not None:
            if status_text == '':
                user_status[user_id_str].pop('status_text', None)
            else:
                user_status[user_id_str]['status_text'] = status_text

        if not user_status[user_id_str]:
            user_status.pop(user_id_str, None)

        state['user_status'] = user_status
    elif event['type'] == 'has_zoom_token':
        state['has_zoom_token'] = event['value']
    else:
        raise AssertionError("Unexpected event type {}".format(event['type']))


def do_events_register(
        user_profile: UserProfile,
        user_client: Client,
        apply_markdown: bool = True,
        client_gravatar: bool = False,
        slim_presence: bool = False,
        event_types: Optional[Iterable[str]] = None,
        queue_lifespan_secs: int = 0,
        all_public_streams: bool = False,
        include_subscribers: bool = True,
        include_streams: bool = True,
        client_capabilities: Dict[str, bool] = {},
        narrow: Iterable[Sequence[str]] = [],
        fetch_event_types: Optional[Iterable[str]] = None,
) -> Dict[str, Any]:
    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    notification_settings_null = client_capabilities.get('notification_settings_null', False)
    bulk_message_deletion = client_capabilities.get('bulk_message_deletion', False)
    user_avatar_url_field_optional = client_capabilities.get('user_avatar_url_field_optional', False)

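    # A client might pass, e.g.,
    #     client_capabilities={'notification_settings_null': True,
    #                          'bulk_message_deletion': True}
    # (illustrative); any capability the client omits falls back to the
    # legacy behavior.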
    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If real email addresses are not available to the user, their
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(user_profile, user_client,
                                   apply_markdown, client_gravatar, slim_presence,
                                   queue_lifespan_secs, event_types, all_public_streams,
                                   narrow=narrow,
                                   bulk_message_deletion=bulk_message_deletion)

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

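    # fetch_event_types (when provided) governs only the initial state we
    # fetch below, while event_types governs the queue registered above; so
    # a client could, e.g., subscribe to just 'message' events while still
    # fetching 'realm_user' data once at registration.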
    # Fill up the UserMessage rows if a soft-deactivated user has returned
    reactivate_user_if_soft_deactivated(user_profile)

    ret = fetch_initial_state_data(
        user_profile,
        event_types=event_types_set,
        queue_id=queue_id,
        client_gravatar=client_gravatar,
        user_avatar_url_field_optional=user_avatar_url_field_optional,
        slim_presence=slim_presence,
        include_subscribers=include_subscribers,
        include_streams=include_streams,
    )

    # Apply events that came in while we were fetching initial data
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(ret, events, user_profile, include_subscribers=include_subscribers,
                 client_gravatar=client_gravatar, slim_presence=slim_presence,
                 fetch_event_types=fetch_event_types)

    post_process_state(user_profile, ret, notification_settings_null)

    if len(events) > 0:
        ret['last_event_id'] = events[-1]['id']
    else:
        ret['last_event_id'] = -1
    return ret


def post_process_state(user_profile: Optional[UserProfile], ret: Dict[str, Any],
                       notification_settings_null: bool) -> None:
    '''
    NOTE:

    Below is an example of post-processing initial state data AFTER we
    apply events.  For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.

    Then, only at the end, we put it in the form that's more appropriate
    for clients.
    '''
    if 'raw_unread_msgs' in ret:
        ret['unread_msgs'] = aggregate_unread_data(ret['raw_unread_msgs'])
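        # raw_unread_msgs keeps easily-updated per-message data keyed by
        # message ID; aggregate_unread_data() folds it into the aggregated
        # form (with counts) that clients actually consume.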
        del ret['raw_unread_msgs']

    '''
    See the note above; the same technique applies below.
    '''
    if 'raw_users' in ret:
        user_dicts = list(ret['raw_users'].values())
        user_dicts = sorted(user_dicts, key=lambda x: x['user_id'])

        ret['realm_users'] = [d for d in user_dicts if d['is_active']]
        ret['realm_non_active_users'] = [d for d in user_dicts if not d['is_active']]

        '''
        Be aware that we do intentional aliasing in the below code.
        We can now safely remove the `is_active` field from all the
        dicts that got partitioned into the two lists above.

        We remove the field because it's already implied, and sending
        it to clients makes clients prone to bugs where they "trust"
        the field but don't actually update it in live updates.  It also
        wastes bandwidth.
        '''
        for d in user_dicts:
            d.pop('is_active')

        del ret['raw_users']

    if 'raw_recent_private_conversations' in ret:
        # Reformat recent_private_conversations to be a list of dictionaries, rather than a dict.
        ret['recent_private_conversations'] = sorted((
            dict(
                **value,
            ) for (recipient_id, value) in ret['raw_recent_private_conversations'].items()
        ), key=lambda x: -x["max_message_id"])
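        # e.g. a raw mapping like {recipient_id: {'max_message_id': 123, ...}}
        # (illustrative shape) becomes a flat list sorted with the most
        # recently active conversation first.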
        del ret['raw_recent_private_conversations']

    if not notification_settings_null and 'subscriptions' in ret:
        for stream_dict in ret['subscriptions'] + ret['unsubscribed']:
            handle_stream_notifications_compatibility(user_profile, stream_dict,
                                                      notification_settings_null)