# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
# high-level documentation on how this system works.
import copy
import logging
import time
from typing import Any, Callable, Collection, Dict, Iterable, Mapping, Optional, Sequence, Set

from django.conf import settings
from django.utils.translation import gettext as _

from version import API_FEATURE_LEVEL, ZULIP_MERGE_BASE, ZULIP_VERSION
from zerver.actions.default_streams import default_stream_groups_to_dicts_sorted
from zerver.actions.realm_settings import get_realm_authentication_methods_for_page_params_api
from zerver.actions.users import get_owned_bot_dicts
from zerver.lib import emoji
from zerver.lib.alert_words import user_alert_words
from zerver.lib.avatar import avatar_url
from zerver.lib.bot_config import load_bot_config_template
from zerver.lib.compatibility import is_outdated_server
from zerver.lib.default_streams import get_default_streams_for_realm_as_dicts
from zerver.lib.exceptions import JsonableError
from zerver.lib.external_accounts import get_default_external_accounts
from zerver.lib.integrations import (
    EMBEDDED_BOTS,
    WEBHOOK_INTEGRATIONS,
    get_all_event_types_for_integration,
)
from zerver.lib.message import (
    add_message_to_unread_msgs,
    aggregate_unread_data,
    apply_unread_message_event,
    extract_unread_data_from_um_rows,
    get_raw_unread_data,
    get_recent_conversations_recipient_id,
    get_recent_private_conversations,
    get_starred_message_ids,
    remove_message_id_from_unread_mgs,
)
from zerver.lib.muted_users import get_user_mutes
from zerver.lib.narrow_helpers import NarrowTerm, read_stop_words
from zerver.lib.narrow_predicate import check_narrow_for_events
from zerver.lib.onboarding_steps import get_next_onboarding_steps
from zerver.lib.presence import get_presence_for_user, get_presences_for_realm
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import get_realm_logo_source, get_realm_logo_url
from zerver.lib.scheduled_messages import get_undelivered_scheduled_messages
from zerver.lib.soft_deactivation import reactivate_user_if_soft_deactivated
from zerver.lib.sounds import get_available_notification_sounds
from zerver.lib.stream_subscription import handle_stream_notifications_compatibility
from zerver.lib.streams import do_get_streams, get_web_public_streams
from zerver.lib.subscription_info import (
    build_unsubscribed_sub_from_stream_dict,
    gather_subscriptions_helper,
    get_web_public_subs,
)
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.timezone import canonicalize_timezone
from zerver.lib.topic import TOPIC_NAME
from zerver.lib.user_groups import (
    get_group_setting_value_for_api,
    get_recursive_membership_groups,
    get_server_supported_permission_settings,
    user_groups_in_realm_serialized,
)
from zerver.lib.user_status import get_all_users_status_dict
from zerver.lib.user_topics import get_topic_mutes, get_user_topics
from zerver.lib.users import (
    get_cross_realm_dicts,
    get_data_for_inaccessible_user,
    get_users_for_api,
    is_administrator_role,
    max_message_id_for_user,
)
from zerver.lib.utils import optional_bytes_to_mib
from zerver.models import (
    Client,
    CustomProfileField,
    Draft,
    Realm,
    RealmUserDefault,
    Recipient,
    Stream,
    Subscription,
    UserProfile,
    UserStatus,
    UserTopic,
)
from zerver.models.constants import MAX_TOPIC_NAME_LENGTH
from zerver.models.custom_profile_fields import custom_profile_fields_for_realm
from zerver.models.linkifiers import linkifiers_for_realm
from zerver.models.realm_emoji import get_all_custom_emoji_for_realm
from zerver.models.realm_playgrounds import get_realm_playgrounds
from zerver.models.realms import (
    CommonMessagePolicyEnum,
    EditTopicPolicyEnum,
    get_corresponding_policy_value_for_group_setting,
    get_realm_domains,
    get_realm_with_settings,
)
from zerver.models.streams import get_default_stream_groups
from zerver.tornado.django_api import get_user_events, request_event_queue
from zproject.backends import email_auth_enabled, password_auth_enabled


def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
    state["realm_logo_url"] = get_realm_logo_url(realm, night=False)
    state["realm_logo_source"] = get_realm_logo_source(realm, night=False)
    state["realm_night_logo_url"] = get_realm_logo_url(realm, night=True)
    state["realm_night_logo_source"] = get_realm_logo_source(realm, night=True)
    state["max_logo_file_size_mib"] = settings.MAX_LOGO_FILE_SIZE_MIB


def always_want(msg_type: str) -> bool:
    """
    This function is used as a helper in
    fetch_initial_state_data, when the user passes
    in None for event_types, and we want to fetch
    info for every event type. Defining this at module
    level makes it easier to mock.
    """
    return True


def fetch_initial_state_data(
    user_profile: Optional[UserProfile],
    *,
    realm: Realm,
    event_types: Optional[Iterable[str]] = None,
    queue_id: Optional[str] = "",
    client_gravatar: bool = False,
    user_avatar_url_field_optional: bool = False,
    user_settings_object: bool = False,
    slim_presence: bool = False,
    presence_last_update_id_fetched_by_client: Optional[int] = None,
    include_subscribers: bool = True,
    include_streams: bool = True,
    spectator_requested_language: Optional[str] = None,
    pronouns_field_type_supported: bool = True,
    linkifier_url_template: bool = False,
    user_list_incomplete: bool = False,
) -> Dict[str, Any]:
    """When `event_types` is None, fetches the core data powering the
    web app's `page_params` and `/api/v1/register` (for mobile/terminal
    apps). Can also fetch a subset as determined by `event_types`.

    The user_profile=None code path is used for logged-out public
    access to streams with is_web_public=True.

    Whenever you add new code to this function, you should also add
    corresponding events for changes in the data structures and new
    code to apply_events (and add a test in test_events.py).
    """
    state: Dict[str, Any] = {"queue_id": queue_id}

    if event_types is None:
        # return True always
        want: Callable[[str], bool] = always_want
    else:
        want = set(event_types).__contains__
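
    # Illustrative example (an assumption about typical usage, not code
    # exercised here): with event_types=["realm", "presence"],
    # want("realm") is True and want("message") is False, so only the
    # matching sections below are computed and included in the payload.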

    # Show the version info unconditionally.
    state["zulip_version"] = ZULIP_VERSION
    state["zulip_feature_level"] = API_FEATURE_LEVEL
    state["zulip_merge_base"] = ZULIP_MERGE_BASE

    if want("alert_words"):
        state["alert_words"] = [] if user_profile is None else user_alert_words(user_profile)

    if want("custom_profile_fields"):
        if user_profile is None:
            # Spectators can't access full user profiles or
            # personal settings, so we send an empty list.
            state["custom_profile_fields"] = []
        else:
            fields = custom_profile_fields_for_realm(realm.id)
            state["custom_profile_fields"] = [f.as_dict() for f in fields]
        state["custom_profile_field_types"] = {
            item[4]: {"id": item[0], "name": str(item[1])}
            for item in CustomProfileField.ALL_FIELD_TYPES
        }

        if not pronouns_field_type_supported:
            for field in state["custom_profile_fields"]:
                if field["type"] == CustomProfileField.PRONOUNS:
                    field["type"] = CustomProfileField.SHORT_TEXT

            del state["custom_profile_field_types"]["PRONOUNS"]
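
    # Illustrative shape of state["custom_profile_field_types"] (an
    # assumption based on CustomProfileField.ALL_FIELD_TYPES, not an
    # exhaustive schema):
    #     {"SHORT_TEXT": {"id": 1, "name": "Short text"}, ...}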

    if want("onboarding_steps"):
        # Even if we offered special onboarding steps for guests without an
        # account, we'd maybe need to store their state using cookies
        # or local storage, rather than in the database.
        state["onboarding_steps"] = (
            [] if user_profile is None else get_next_onboarding_steps(user_profile)
        )

    if want("message"):
        # Since the introduction of `anchor="latest"` in the API,
        # `max_message_id` is primarily used for generating `local_id`
        # values that are higher than this. We likely can eventually
        # remove this parameter from the API.
        state["max_message_id"] = max_message_id_for_user(user_profile)

    if want("drafts"):
        if user_profile is None:
            state["drafts"] = []
        else:
            # Note: if a user ever disables syncing drafts, then all of
            # their old drafts stored on the server will be deleted and
            # retained only in local storage, in which case this queryset
            # would just be empty.
            user_draft_objects = Draft.objects.filter(user_profile=user_profile).order_by(
                "-last_edit_time"
            )[: settings.MAX_DRAFTS_IN_REGISTER_RESPONSE]
            user_draft_dicts = [draft.to_dict() for draft in user_draft_objects]
            state["drafts"] = user_draft_dicts

    if want("scheduled_messages"):
        state["scheduled_messages"] = (
            [] if user_profile is None else get_undelivered_scheduled_messages(user_profile)
        )

    if want("muted_topics") and (
        # Suppress muted_topics data for clients that explicitly
        # support user_topic. This allows clients to request both the
        # user_topic and muted_topics, and receive the duplicate
        # muted_topics data only from older servers that don't yet
        # support user_topic.
        event_types is None or not want("user_topic")
    ):
        state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)

    if want("muted_users"):
        state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile)

    if want("presence"):
        if presence_last_update_id_fetched_by_client is not None:
            # The client submitting this parameter means it wants to
            # use the modern API.
            slim_presence = True

        if user_profile is not None:
            presences, presence_last_update_id_fetched_by_server = get_presences_for_realm(
                realm,
                slim_presence,
                last_update_id_fetched_by_client=presence_last_update_id_fetched_by_client,
                requesting_user_profile=user_profile,
            )
            state["presences"] = presences
            state["presence_last_update_id"] = presence_last_update_id_fetched_by_server
        else:
            state["presences"] = {}

        # Send server_timestamp, to match the format of `GET /presence` requests.
        state["server_timestamp"] = time.time()

    if want("realm"):
        # The realm bundle includes both realm properties and server
        # properties, since it's rare that one would want one and not
        # the other. We expect most clients to want it.
        #
        # A note on naming: For some settings, one could imagine
        # having a server-level value and a realm-level value (with
        # the server value serving as the default for the realm
        # value). For such settings, we prefer the following naming
        # scheme:
        #
        # * realm_inline_image_preview (current realm setting)
        # * server_inline_image_preview (server-level default)
        #
        # In situations where for backwards-compatibility reasons we
        # have an unadorned name, we should arrange that clients using
        # that unadorned name work correctly (i.e. that should be the
        # currently active setting, not a server-level default).
        #
        # Other settings, which are just server-level settings or data
        # about the version of Zulip, can be named without prefixes,
        # e.g. giphy_rating_options or development_environment.
        for property_name in Realm.property_types:
            state["realm_" + property_name] = getattr(realm, property_name)

        for (
            setting_name,
            permission_configuration,
        ) in Realm.REALM_PERMISSION_GROUP_SETTINGS.items():
            if setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS_WITH_NEW_API_FORMAT:
                setting_value = getattr(realm, setting_name)
                state["realm_" + setting_name] = get_group_setting_value_for_api(setting_value)
                continue

            state["realm_" + setting_name] = getattr(realm, permission_configuration.id_field_name)

        state["realm_create_public_stream_policy"] = (
            get_corresponding_policy_value_for_group_setting(
                realm, "can_create_public_channel_group", Realm.COMMON_POLICY_TYPES
            )
        )
        state["realm_create_private_stream_policy"] = (
            get_corresponding_policy_value_for_group_setting(
                realm, "can_create_private_channel_group", Realm.COMMON_POLICY_TYPES
            )
        )

        # Most state is handled via the property_types framework;
        # these manual entries are for those realm settings that don't
        # fit into that framework.
        realm_authentication_methods_dict = realm.authentication_methods_dict()
        state["realm_authentication_methods"] = (
            get_realm_authentication_methods_for_page_params_api(
                realm, realm_authentication_methods_dict
            )
        )

        # We pretend these features are disabled because anonymous
        # users can't access them. In the future, we may want to move
        # this logic to the frontends, so that we can correctly
        # display what these fields are in the settings.
        state["realm_allow_message_editing"] = (
            False if user_profile is None else realm.allow_message_editing
        )
        state["realm_edit_topic_policy"] = (
            EditTopicPolicyEnum.ADMINS_ONLY if user_profile is None else realm.edit_topic_policy
        )
        state["realm_delete_own_message_policy"] = (
            CommonMessagePolicyEnum.ADMINS_ONLY
            if user_profile is None
            else realm.delete_own_message_policy
        )

        # This setting determines whether to send presence and also
        # whether to display the users list in the right sidebar; we
        # want both behaviors for logged-out users. We may in the
        # future choose to move this logic to the frontend.
        state["realm_presence_disabled"] = True if user_profile is None else realm.presence_disabled

        # Important: Encode units in the client-facing API name.
        state["max_avatar_file_size_mib"] = settings.MAX_AVATAR_FILE_SIZE_MIB
        state["max_file_upload_size_mib"] = settings.MAX_FILE_UPLOAD_SIZE
        state["max_icon_file_size_mib"] = settings.MAX_ICON_FILE_SIZE_MIB
        upload_quota_bytes = realm.upload_quota_bytes()
        state["realm_upload_quota_mib"] = optional_bytes_to_mib(upload_quota_bytes)

        state["realm_icon_url"] = realm_icon_url(realm)
        state["realm_icon_source"] = realm.icon_source
        add_realm_logo_fields(state, realm)

        # TODO/compatibility: realm_uri is a deprecated alias for realm_url that
        # can be removed once there are no longer clients relying on it.
        state["realm_url"] = state["realm_uri"] = realm.url
        state["realm_bot_domain"] = realm.get_bot_domain()
        state["realm_available_video_chat_providers"] = realm.VIDEO_CHAT_PROVIDERS
        state["settings_send_digest_emails"] = settings.SEND_DIGEST_EMAILS

        state["realm_digest_emails_enabled"] = (
            realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        )
        state["realm_email_auth_enabled"] = email_auth_enabled(
            realm, realm_authentication_methods_dict
        )
        state["realm_password_auth_enabled"] = password_auth_enabled(
            realm, realm_authentication_methods_dict
        )

        state["server_generation"] = settings.SERVER_GENERATION
        state["realm_is_zephyr_mirror_realm"] = realm.is_zephyr_mirror_realm
        state["development_environment"] = settings.DEVELOPMENT
        state["realm_org_type"] = realm.org_type
        state["realm_plan_type"] = realm.plan_type
        state["zulip_plan_is_not_limited"] = realm.plan_type != Realm.PLAN_TYPE_LIMITED
        state["upgrade_text_for_wide_organization_logo"] = str(Realm.UPGRADE_TEXT_STANDARD)

        if realm.push_notifications_enabled_end_timestamp is not None:
            state["realm_push_notifications_enabled_end_timestamp"] = datetime_to_timestamp(
                realm.push_notifications_enabled_end_timestamp
            )
        else:
            state["realm_push_notifications_enabled_end_timestamp"] = None

        state["password_min_length"] = settings.PASSWORD_MIN_LENGTH
        state["password_min_guesses"] = settings.PASSWORD_MIN_GUESSES
        state["server_inline_image_preview"] = settings.INLINE_IMAGE_PREVIEW
        state["server_inline_url_embed_preview"] = settings.INLINE_URL_EMBED_PREVIEW
        state["server_avatar_changes_disabled"] = settings.AVATAR_CHANGES_DISABLED
        state["server_name_changes_disabled"] = settings.NAME_CHANGES_DISABLED
        state["server_web_public_streams_enabled"] = settings.WEB_PUBLIC_STREAMS_ENABLED
        state["giphy_rating_options"] = realm.get_giphy_rating_options()

        state["server_emoji_data_url"] = emoji.data_url()

        state["server_needs_upgrade"] = is_outdated_server(user_profile)
        state["event_queue_longpoll_timeout_seconds"] = (
            settings.EVENT_QUEUE_LONGPOLL_TIMEOUT_SECONDS
        )

        # TODO: This probably belongs on the server object.
        state["realm_default_external_accounts"] = get_default_external_accounts()

        server_default_jitsi_server_url = (
            settings.JITSI_SERVER_URL.rstrip("/") if settings.JITSI_SERVER_URL is not None else None
        )
        state["server_jitsi_server_url"] = server_default_jitsi_server_url
        state["jitsi_server_url"] = (
            realm.jitsi_server_url
            if realm.jitsi_server_url is not None
            else server_default_jitsi_server_url
        )
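
        # Illustrative: with JITSI_SERVER_URL = "https://meet.jit.si/"
        # and no realm-level override, clients would receive
        # jitsi_server_url = "https://meet.jit.si" (trailing slash
        # stripped); a realm-level jitsi_server_url, when set, takes
        # precedence over the server default.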

        new_stream_announcements_stream = realm.get_new_stream_announcements_stream()
        if new_stream_announcements_stream:
            state["realm_new_stream_announcements_stream_id"] = new_stream_announcements_stream.id
        else:
            state["realm_new_stream_announcements_stream_id"] = -1

        signup_announcements_stream = realm.get_signup_announcements_stream()
        if signup_announcements_stream:
            state["realm_signup_announcements_stream_id"] = signup_announcements_stream.id
        else:
            state["realm_signup_announcements_stream_id"] = -1

        zulip_update_announcements_stream = realm.get_zulip_update_announcements_stream()
        if zulip_update_announcements_stream:
            state["realm_zulip_update_announcements_stream_id"] = (
                zulip_update_announcements_stream.id
            )
        else:
            state["realm_zulip_update_announcements_stream_id"] = -1

        state["max_stream_name_length"] = Stream.MAX_NAME_LENGTH
        state["max_stream_description_length"] = Stream.MAX_DESCRIPTION_LENGTH
        state["max_topic_length"] = MAX_TOPIC_NAME_LENGTH
        state["max_message_length"] = settings.MAX_MESSAGE_LENGTH
        if realm.demo_organization_scheduled_deletion_date is not None:
            state["demo_organization_scheduled_deletion_date"] = datetime_to_timestamp(
                realm.demo_organization_scheduled_deletion_date
            )
        state["realm_date_created"] = datetime_to_timestamp(realm.date_created)

        # Presence system parameters for client behavior.
        state["server_presence_ping_interval_seconds"] = settings.PRESENCE_PING_INTERVAL_SECS
        state["server_presence_offline_threshold_seconds"] = settings.OFFLINE_THRESHOLD_SECS
        # Typing notifications protocol parameters for client behavior.
        state["server_typing_started_expiry_period_milliseconds"] = (
            settings.TYPING_STARTED_EXPIRY_PERIOD_MILLISECONDS
        )
        state["server_typing_stopped_wait_period_milliseconds"] = (
            settings.TYPING_STOPPED_WAIT_PERIOD_MILLISECONDS
        )
        state["server_typing_started_wait_period_milliseconds"] = (
            settings.TYPING_STARTED_WAIT_PERIOD_MILLISECONDS
        )

        state["server_supported_permission_settings"] = get_server_supported_permission_settings()
    if want("realm_user_settings_defaults"):
        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        state["realm_user_settings_defaults"] = {}
        for property_name in RealmUserDefault.property_types:
            state["realm_user_settings_defaults"][property_name] = getattr(
                realm_user_default, property_name
            )

        state["realm_user_settings_defaults"]["emojiset_choices"] = (
            RealmUserDefault.emojiset_choices()
        )
        state["realm_user_settings_defaults"]["available_notification_sounds"] = (
            get_available_notification_sounds()
        )

    if want("realm_domains"):
        state["realm_domains"] = get_realm_domains(realm)

    if want("realm_emoji"):
        state["realm_emoji"] = get_all_custom_emoji_for_realm(realm.id)

    if want("realm_linkifiers"):
        if linkifier_url_template:
            state["realm_linkifiers"] = linkifiers_for_realm(realm.id)
        else:
            # When the URL template format is not supported by the client,
            # return an empty list because the new format is incompatible
            # with the old URL format strings and the client would not
            # render it properly.
            state["realm_linkifiers"] = []

    # Backwards compatibility code.
    if want("realm_filters"):
        # Always return an empty list because the new URL template format is
        # incompatible with the old URL format string, and legacy clients
        # that use the backwards-compatible `realm_filters` event would not
        # render it properly.
        state["realm_filters"] = []

    if want("realm_playgrounds"):
        state["realm_playgrounds"] = get_realm_playgrounds(realm)

    if want("realm_user_groups"):
        state["realm_user_groups"] = user_groups_in_realm_serialized(realm)

    if user_profile is not None:
        settings_user = user_profile
    else:
        assert spectator_requested_language is not None
        # When user_profile=None, we want to serve the values for various
        # settings as the defaults. Instead of copying the default values
        # from models/users.py here, we access these default values from a
        # temporary UserProfile object that will not be saved to the database.
        #
        # We can also set various fields to avoid duplicating code
        # unnecessarily.
        settings_user = UserProfile(
            full_name="Anonymous User",
            email="username@example.com",
            delivery_email="username@example.com",
            realm=realm,
            # We tag logged-out users as guests because most guest
            # restrictions apply to these users as well, and it lets
            # us avoid unnecessary conditionals.
            role=UserProfile.ROLE_GUEST,
            is_billing_admin=False,
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            # ID=0 is not used in real Zulip databases, ensuring this is unique.
            id=0,
            default_language=spectator_requested_language,
            # Set home view to recent conversations for spectators regardless of default.
            web_home_view="recent_topics",
        )
    if want("realm_user"):
        state["raw_users"] = get_users_for_api(
            realm,
            user_profile,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            # Don't send custom profile field values to spectators.
            include_custom_profile_fields=user_profile is not None,
            user_list_incomplete=user_list_incomplete,
        )
        state["cross_realm_bots"] = list(get_cross_realm_dicts())

        # For the user's own avatar URL, we force
        # client_gravatar=False, since that saves some unnecessary
        # client-side code for handling medium-size avatars. See #8253
        # for details.
        state["avatar_source"] = settings_user.avatar_source
        state["avatar_url_medium"] = avatar_url(
            settings_user,
            medium=True,
            client_gravatar=False,
        )
        state["avatar_url"] = avatar_url(
            settings_user,
            medium=False,
            client_gravatar=False,
        )

        settings_user_recursive_group_ids = set(
            get_recursive_membership_groups(settings_user).values_list("id", flat=True)
        )

        state["can_create_private_streams"] = (
            realm.can_create_private_channel_group_id in settings_user_recursive_group_ids
        )
        state["can_create_public_streams"] = (
            realm.can_create_public_channel_group_id in settings_user_recursive_group_ids
        )

        state["can_create_web_public_streams"] = settings_user.can_create_web_public_streams()
        # TODO/compatibility: Deprecated in Zulip 5.0 (feature level
        # 102); we can remove this once we no longer need to support
        # legacy mobile app versions that read the old property.
        state["can_create_streams"] = (
            state["can_create_private_streams"]
            or state["can_create_public_streams"]
            or state["can_create_web_public_streams"]
        )
        state["can_subscribe_other_users"] = settings_user.can_subscribe_other_users()
        state["can_invite_others_to_realm"] = settings_user.can_invite_users_by_email()
        state["is_admin"] = settings_user.is_realm_admin
        state["is_owner"] = settings_user.is_realm_owner
        state["is_moderator"] = settings_user.is_moderator
        state["is_guest"] = settings_user.is_guest
        state["is_billing_admin"] = settings_user.is_billing_admin
        state["user_id"] = settings_user.id
        state["email"] = settings_user.email
        state["delivery_email"] = settings_user.delivery_email
        state["full_name"] = settings_user.full_name

    if want("realm_bot"):
        state["realm_bots"] = [] if user_profile is None else get_owned_bot_dicts(user_profile)

    # This does not yet have an apply_event counterpart, since currently,
    # new entries for EMBEDDED_BOTS can only be added directly in the codebase.
    if want("realm_embedded_bots"):
        state["realm_embedded_bots"] = [
            {"name": bot.name, "config": load_bot_config_template(bot.name)}
            for bot in EMBEDDED_BOTS
        ]

    # This does not have an apply_events counterpart either since this
    # data is mostly static. This excludes the legacy webhook
    # integrations as those do not follow the same URL construction
    # patterns as other integrations.
    if want("realm_incoming_webhook_bots"):
        state["realm_incoming_webhook_bots"] = [
            {
                "name": integration.name,
                "display_name": integration.display_name,
                "all_event_types": get_all_event_types_for_integration(integration),
                "config": {c[1]: c[0] for c in integration.config_options},
            }
            for integration in WEBHOOK_INTEGRATIONS
            if integration.legacy is False
        ]
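
    # Illustrative entry (the names are an example, not an exhaustive
    # schema): {"name": "github", "display_name": "GitHub",
    # "all_event_types": [...], "config": {...}}.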

    if want("recent_private_conversations"):
        # A data structure containing records of this form:
        #
        # [{'max_message_id': 700175, 'user_ids': [801]}]
        #
        # for all recent direct message conversations, ordered by the
        # highest message ID in the conversation. The user_ids list
        # is the list of users other than the current user in the
        # direct message conversation (so it is [] for direct messages
        # to self).
        #
        # Note that raw_recent_private_conversations is an
        # intermediate form as a dictionary keyed by recipient_id,
        # which is more efficient to update, and is rewritten to the
        # final format in post_process_state.
        state["raw_recent_private_conversations"] = (
            {} if user_profile is None else get_recent_private_conversations(user_profile)
        )

    if want("subscription"):
        if user_profile is not None:
            sub_info = gather_subscriptions_helper(
                user_profile,
                include_subscribers=include_subscribers,
            )
        else:
            sub_info = get_web_public_subs(realm)

        state["subscriptions"] = sub_info.subscriptions
        state["unsubscribed"] = sub_info.unsubscribed
        state["never_subscribed"] = sub_info.never_subscribed

    if want("update_message_flags") and want("message"):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates. This is due to the fact that new messages will not
        # generate a flag update, so we need to use the flags field in the
        # message event.

        if user_profile is not None:
            state["raw_unread_msgs"] = get_raw_unread_data(user_profile)
        else:
            # For logged-out visitors, we treat all messages as read;
            # calling this helper lets us return empty objects in the
            # appropriate format.
            state["raw_unread_msgs"] = extract_unread_data_from_um_rows([], user_profile)

    if want("starred_messages"):
        state["starred_messages"] = (
            [] if user_profile is None else get_starred_message_ids(user_profile)
        )

    if want("stream") and include_streams:
        # The web app doesn't use the data from here; instead,
        # it uses data from state["subscriptions"] and other
        # places.
        if user_profile is not None:
            state["streams"] = do_get_streams(
                user_profile,
                include_web_public=True,
                include_all_active=user_profile.is_realm_admin,
            )
        else:
            # TODO: This line isn't used by the web app because it
            # gets these data via the `subscriptions` key; it will
            # be used when the mobile apps support logged-out
            # access.
            state["streams"] = get_web_public_streams(realm)  # nocoverage
    if want("default_streams"):
        if settings_user.is_guest:
            # Guest users and logged-out users don't have access to
            # all default streams, so we pretend the organization
            # doesn't have any.
            state["realm_default_streams"] = []
        else:
            state["realm_default_streams"] = get_default_streams_for_realm_as_dicts(realm.id)

    if want("default_stream_groups"):
        if settings_user.is_guest:
            state["realm_default_stream_groups"] = []
        else:
            state["realm_default_stream_groups"] = default_stream_groups_to_dicts_sorted(
                get_default_stream_groups(realm)
            )

    if want("stop_words"):
        state["stop_words"] = read_stop_words()

    if want("update_display_settings") and not user_settings_object:
        for prop in UserProfile.display_settings_legacy:
            state[prop] = getattr(settings_user, prop)
        state["emojiset_choices"] = UserProfile.emojiset_choices()
        state["timezone"] = canonicalize_timezone(settings_user.timezone)

    if want("update_global_notifications") and not user_settings_object:
        for notification in UserProfile.notification_settings_legacy:
            state[notification] = getattr(settings_user, notification)
        state["available_notification_sounds"] = get_available_notification_sounds()

    if want("user_settings"):
        state["user_settings"] = {}

        for prop in UserProfile.property_types:
            state["user_settings"][prop] = getattr(settings_user, prop)

        state["user_settings"]["emojiset_choices"] = UserProfile.emojiset_choices()
        state["user_settings"]["timezone"] = canonicalize_timezone(settings_user.timezone)
        state["user_settings"]["available_notification_sounds"] = (
            get_available_notification_sounds()
        )

    if want("user_status"):
        # We require creating an account to access statuses.
        state["user_status"] = (
            {}
            if user_profile is None
            else get_all_users_status_dict(realm=realm, user_profile=user_profile)
        )

    if want("user_topic"):
        state["user_topics"] = [] if user_profile is None else get_user_topics(user_profile)

    if want("video_calls"):
        state["has_zoom_token"] = settings_user.zoom_token is not None

    if want("giphy"):
        # Normally, it would be a nasty security bug to send a
        # server's API key to end users. However, GIPHY's API key
        # security model is precisely to do that; every service
        # publishes its API key (and GIPHY's client-side JS libraries
        # require the API key to work). This security model makes
        # sense because GIPHY API keys are all essentially equivalent
        # in letting one search for GIFs; GIPHY only requires API keys
        # to exist at all so that they can deactivate them in cases of
        # abuse.
        state["giphy_api_key"] = settings.GIPHY_API_KEY if settings.GIPHY_API_KEY else ""

    if user_profile is None:
        # To ensure we have the correct user state set.
        assert state["is_admin"] is False
        assert state["is_owner"] is False
        assert state["is_guest"] is True

    return state
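
# A minimal usage sketch (illustrative only; the exact parameters and
# the real call sites, e.g. the /register code path, are assumptions
# here, not part of this module):
#
#     state = fetch_initial_state_data(
#         user_profile,
#         realm=user_profile.realm,
#         event_types=None,  # None means "fetch everything"
#         client_gravatar=True,
#     )
#
# The caller then registers an event queue and uses apply_events (below)
# to bring `state` up to date with events delivered in the meantime.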


def apply_events(
    user_profile: UserProfile,
    *,
    state: Dict[str, Any],
    events: Iterable[Dict[str, Any]],
    fetch_event_types: Optional[Collection[str]],
    client_gravatar: bool,
    slim_presence: bool,
    include_subscribers: bool,
    linkifier_url_template: bool,
    user_list_incomplete: bool,
) -> None:
|
2017-02-10 23:04:46 +01:00
|
|
|
for event in events:
|
2021-02-12 08:20:45 +01:00
|
|
|
if fetch_event_types is not None and event["type"] not in fetch_event_types:
|
2017-04-26 23:29:25 +02:00
|
|
|
# TODO: continuing here is not, most precisely, correct.
|
|
|
|
# In theory, an event of one type, e.g. `realm_user`,
|
|
|
|
# could modify state that doesn't come from that
|
|
|
|
# `fetch_event_types` value, e.g. the `our_person` part of
|
|
|
|
# that code path. But it should be extremely rare, and
|
|
|
|
# fixing that will require a nontrivial refactor of
|
|
|
|
# `apply_event`. For now, be careful in your choice of
|
|
|
|
# `fetch_event_types`.
|
|
|
|
continue
|
2021-01-19 23:32:25 +01:00
|
|
|
apply_event(
|
|
|
|
user_profile,
|
|
|
|
state=state,
|
|
|
|
event=event,
|
|
|
|
client_gravatar=client_gravatar,
|
|
|
|
slim_presence=slim_presence,
|
|
|
|
include_subscribers=include_subscribers,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete=user_list_incomplete,
|
2021-01-19 23:32:25 +01:00
|
|
|
)
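
# A minimal usage sketch (added for illustration; not part of the upstream
# flow): apply_events reconciles an already-fetched /register state with
# events that arrived while that state was being computed. The event payload
# below is a hypothetical example.
#
#     state = fetch_initial_state_data(user_profile, ...)
#     pending_events = [{"type": "alert_words", "alert_words": ["deploy"]}]
#     apply_events(
#         user_profile,
#         state=state,
#         events=pending_events,
#         fetch_event_types=None,
#         client_gravatar=True,
#         slim_presence=False,
#         include_subscribers=True,
#         linkifier_url_template=True,
#         user_list_incomplete=False,
#     )
#     # state now reflects the alert_words event.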


def apply_event(
    user_profile: UserProfile,
    *,
    state: Dict[str, Any],
    event: Dict[str, Any],
    client_gravatar: bool,
    slim_presence: bool,
    include_subscribers: bool,
    linkifier_url_template: bool,
    user_list_incomplete: bool,
) -> None:
    if event["type"] == "message":
        state["max_message_id"] = max(state["max_message_id"], event["message"]["id"])
        if "raw_unread_msgs" in state and "read" not in event["flags"]:
            apply_unread_message_event(
                user_profile,
                state["raw_unread_msgs"],
                event["message"],
                event["flags"],
            )

        if event["message"]["type"] != "stream":
            if "raw_recent_private_conversations" in state:
                # Handle maintaining the recent_private_conversations data structure.
                conversations = state["raw_recent_private_conversations"]
                recipient_id = get_recent_conversations_recipient_id(
                    user_profile, event["message"]["recipient_id"], event["message"]["sender_id"]
                )

                if recipient_id not in conversations:
                    conversations[recipient_id] = dict(
                        user_ids=sorted(
                            user_dict["id"]
                            for user_dict in event["message"]["display_recipient"]
                            if user_dict["id"] != user_profile.id
                        ),
                    )
                conversations[recipient_id]["max_message_id"] = event["message"]["id"]
            return

        # Below, we handle maintaining first_message_id.
        for sub_dict in state.get("subscriptions", []):
            if (
                event["message"]["stream_id"] == sub_dict["stream_id"]
                and sub_dict["first_message_id"] is None
            ):
                sub_dict["first_message_id"] = event["message"]["id"]
        for stream_dict in state.get("streams", []):
            if (
                event["message"]["stream_id"] == stream_dict["stream_id"]
                and stream_dict["first_message_id"] is None
            ):
                stream_dict["first_message_id"] = event["message"]["id"]
    elif event["type"] == "heartbeat":
        # It may be impossible for a heartbeat event to actually reach
        # this code path. But in any case, they're noops.
        pass
    elif event["type"] == "drafts":
        if event["op"] == "add":
            state["drafts"].extend(event["drafts"])
        else:
            if event["op"] == "update":
                event_draft_idx = event["draft"]["id"]

                def _draft_update_action(i: int) -> None:
                    state["drafts"][i] = event["draft"]

            elif event["op"] == "remove":
                event_draft_idx = event["draft_id"]

                def _draft_update_action(i: int) -> None:
                    del state["drafts"][i]

            # We have to perform a linear search for the draft that
            # was either edited or removed since we have a list
            # ordered by the last edited timestamp and not id.
            state_draft_idx = None
            for idx, draft in enumerate(state["drafts"]):
                if draft["id"] == event_draft_idx:
                    state_draft_idx = idx
                    break
            assert state_draft_idx is not None
            _draft_update_action(state_draft_idx)
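
    # Illustrative example (added; values are hypothetical): the two draft
    # event shapes the linear search above has to handle.
    #
    #     {"type": "drafts", "op": "update", "draft": {"id": 17, ...}}
    #     {"type": "drafts", "op": "remove", "draft_id": 17}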
    elif event["type"] == "scheduled_messages":
        if event["op"] == "add":
            # Bulk addition of scheduled messages is not used by normal
            # clients, so we expect exactly one scheduled message here.
            assert len(event["scheduled_messages"]) == 1

            state["scheduled_messages"].append(event["scheduled_messages"][0])
            # Sort in ascending order of scheduled_delivery_timestamp.
            state["scheduled_messages"].sort(
                key=lambda scheduled_message: scheduled_message["scheduled_delivery_timestamp"]
            )

        if event["op"] == "update":
            for idx, scheduled_message in enumerate(state["scheduled_messages"]):
                if (
                    scheduled_message["scheduled_message_id"]
                    == event["scheduled_message"]["scheduled_message_id"]
                ):
                    state["scheduled_messages"][idx] = event["scheduled_message"]
                    # If scheduled_delivery_timestamp was changed, we need to sort again.
                    if (
                        scheduled_message["scheduled_delivery_timestamp"]
                        != event["scheduled_message"]["scheduled_delivery_timestamp"]
                    ):
                        state["scheduled_messages"].sort(
                            key=lambda scheduled_message: scheduled_message[
                                "scheduled_delivery_timestamp"
                            ]
                        )
                    break

        if event["op"] == "remove":
            for idx, scheduled_message in enumerate(state["scheduled_messages"]):
                if scheduled_message["scheduled_message_id"] == event["scheduled_message_id"]:
                    del state["scheduled_messages"][idx]
    elif event["type"] == "onboarding_steps":
        state["onboarding_steps"] = event["onboarding_steps"]
    elif event["type"] == "custom_profile_fields":
        state["custom_profile_fields"] = event["fields"]
        custom_profile_field_ids = {field["id"] for field in state["custom_profile_fields"]}

        if "raw_users" in state:
            for user_dict in state["raw_users"].values():
                if "profile_data" not in user_dict:
                    continue
                profile_data = user_dict["profile_data"]
                for field_id, field_data in list(profile_data.items()):
                    if int(field_id) not in custom_profile_field_ids:
                        del profile_data[field_id]
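
    # Illustrative example (added; IDs are hypothetical): if field 11 was just
    # deleted, a cached profile_data like {"11": {...}, "12": {...}} is pruned
    # to {"12": {...}} by the loop above.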
    elif event["type"] == "realm_user":
        person = event["person"]
        person_user_id = person["user_id"]

        if event["op"] == "add":
            person = copy.deepcopy(person)

            if client_gravatar:
                email_address_visibility = UserProfile.objects.get(
                    id=person_user_id
                ).email_address_visibility
                if email_address_visibility != UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
                    client_gravatar = False

            if client_gravatar and person["avatar_url"].startswith("https://secure.gravatar.com/"):
                person["avatar_url"] = None
            person["is_active"] = True
            if not person["is_bot"]:
                person["profile_data"] = {}
            state["raw_users"][person_user_id] = person
        elif event["op"] == "update":
            is_me = person_user_id == user_profile.id

            if is_me:
                if "avatar_url" in person and "avatar_url" in state:
                    state["avatar_source"] = person["avatar_source"]
                    state["avatar_url"] = person["avatar_url"]
                    state["avatar_url_medium"] = person["avatar_url_medium"]

                if "role" in person:
                    state["is_admin"] = is_administrator_role(person["role"])
                    state["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
                    state["is_moderator"] = person["role"] == UserProfile.ROLE_MODERATOR
                    state["is_guest"] = person["role"] == UserProfile.ROLE_GUEST
                    # Recompute properties based on is_admin/is_guest
                    state["can_create_private_streams"] = user_profile.can_create_private_streams()
                    state["can_create_public_streams"] = user_profile.can_create_public_streams()
                    state["can_create_web_public_streams"] = (
                        user_profile.can_create_web_public_streams()
                    )
                    state["can_create_streams"] = (
                        state["can_create_private_streams"]
                        or state["can_create_public_streams"]
                        or state["can_create_web_public_streams"]
                    )
                    state["can_subscribe_other_users"] = user_profile.can_subscribe_other_users()
                    state["can_invite_others_to_realm"] = user_profile.can_invite_users_by_email()

                    if state["is_guest"]:
                        state["realm_default_streams"] = []
                    else:
                        state["realm_default_streams"] = get_default_streams_for_realm_as_dicts(
                            user_profile.realm_id
                        )

                for field in ["delivery_email", "email", "full_name", "is_billing_admin"]:
                    if field in person and field in state:
                        state[field] = person[field]

                if "new_email" in person:
                    state["email"] = person["new_email"]

                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm. This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if "role" in person and "realm_bots" in state:
                    prev_state = state["raw_users"][user_profile.id]
                    was_admin = prev_state["is_admin"]
                    now_admin = is_administrator_role(person["role"])

                    if was_admin and not now_admin:
                        state["realm_bots"] = []
                    if not was_admin and now_admin:
                        state["realm_bots"] = get_owned_bot_dicts(user_profile)

            if person_user_id in state["raw_users"]:
                p = state["raw_users"][person_user_id]

                if "avatar_url" in person:
                    # Respect the client_gravatar setting in the `users` data.
                    if client_gravatar:
                        email_address_visibility = UserProfile.objects.get(
                            id=person_user_id
                        ).email_address_visibility
                        if (
                            email_address_visibility
                            != UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE
                        ):
                            client_gravatar = False

                    if client_gravatar and person["avatar_url"].startswith(
                        "https://secure.gravatar.com/"
                    ):
                        person["avatar_url"] = None
                        person["avatar_url_medium"] = None

                for field in p:
                    if field in person:
                        p[field] = person[field]

                if "role" in person:
                    p["is_admin"] = is_administrator_role(person["role"])
                    p["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
                    p["is_guest"] = person["role"] == UserProfile.ROLE_GUEST

                if "is_billing_admin" in person:
                    p["is_billing_admin"] = person["is_billing_admin"]

                if "custom_profile_field" in person:
                    custom_field_id = str(person["custom_profile_field"]["id"])
                    custom_field_new_value = person["custom_profile_field"]["value"]
                    if custom_field_new_value is None and "profile_data" in p:
                        p["profile_data"].pop(custom_field_id, None)
                    elif "rendered_value" in person["custom_profile_field"]:
                        p["profile_data"][custom_field_id] = {
                            "value": custom_field_new_value,
                            "rendered_value": person["custom_profile_field"]["rendered_value"],
                        }
                    else:
                        p["profile_data"][custom_field_id] = {
                            "value": custom_field_new_value,
                        }

                if "new_email" in person:
                    p["email"] = person["new_email"]

                if "is_active" in person and not person["is_active"] and include_subscribers:
                    for sub in state["subscriptions"]:
                        sub["subscribers"] = [
                            user_id for user_id in sub["subscribers"] if user_id != person_user_id
                        ]
        elif event["op"] == "remove":
            if person_user_id in state["raw_users"]:
                if user_list_incomplete:
                    del state["raw_users"][person_user_id]
                else:
                    inaccessible_user_dict = get_data_for_inaccessible_user(
                        user_profile.realm, person_user_id
                    )
                    state["raw_users"][person_user_id] = inaccessible_user_dict

            if include_subscribers:
                for sub in state["subscriptions"]:
                    sub["subscribers"] = [
                        user_id for user_id in sub["subscribers"] if user_id != person_user_id
                    ]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
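
    # Illustrative example (added; values are hypothetical): a realm_user
    # update event carrying a role change, as handled above.
    #
    #     {
    #         "type": "realm_user",
    #         "op": "update",
    #         "person": {"user_id": 10, "role": UserProfile.ROLE_MODERATOR},
    #     }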
    elif event["type"] == "realm_bot":
        if event["op"] == "add":
            state["realm_bots"].append(event["bot"])
        elif event["op"] == "delete":
            state["realm_bots"] = [
                item for item in state["realm_bots"] if item["user_id"] != event["bot"]["user_id"]
            ]
        elif event["op"] == "update":
            for bot in state["realm_bots"]:
                if bot["user_id"] == event["bot"]["user_id"]:
                    if "owner_id" in event["bot"]:
                        bot_owner_id = event["bot"]["owner_id"]
                        bot["owner_id"] = bot_owner_id
                    else:
                        bot.update(event["bot"])
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "stream":
        if event["op"] == "create":
            for stream in event["streams"]:
                stream_data = copy.deepcopy(stream)
                if include_subscribers:
                    stream_data["subscribers"] = []

                # Here we need to query the database to check whether the
                # user was previously subscribed. If they were, we need to
                # include the stream in the unsubscribed list after adding
                # personal subscription metadata (such as the configured
                # stream color; most of the other personal settings have no
                # effect when not subscribed).
                unsubscribed_stream_sub = Subscription.objects.filter(
                    user_profile=user_profile,
                    recipient__type_id=stream["stream_id"],
                    recipient__type=Recipient.STREAM,
                ).values(
                    *Subscription.API_FIELDS,
                    "recipient_id",
                    "active",
                )

                if len(unsubscribed_stream_sub) == 1:
                    unsubscribed_stream_dict = build_unsubscribed_sub_from_stream_dict(
                        user_profile, unsubscribed_stream_sub[0], stream_data
                    )
                    if include_subscribers:
                        unsubscribed_stream_dict["subscribers"] = []
                    state["unsubscribed"].append(unsubscribed_stream_dict)
                else:
                    assert len(unsubscribed_stream_sub) == 0
                    state["never_subscribed"].append(stream_data)

                if "streams" in state:
                    state["streams"].append(stream)

            state["unsubscribed"].sort(key=lambda elt: elt["name"])
            state["never_subscribed"].sort(key=lambda elt: elt["name"])
            if "streams" in state:
                state["streams"].sort(key=lambda elt: elt["name"])

        if event["op"] == "delete":
            deleted_stream_ids = {stream["stream_id"] for stream in event["streams"]}
            if "streams" in state:
                state["streams"] = [
                    s for s in state["streams"] if s["stream_id"] not in deleted_stream_ids
                ]

            state["subscriptions"] = [
                stream
                for stream in state["subscriptions"]
                if stream["stream_id"] not in deleted_stream_ids
            ]

            state["unsubscribed"] = [
                stream
                for stream in state["unsubscribed"]
                if stream["stream_id"] not in deleted_stream_ids
            ]

            state["never_subscribed"] = [
                stream
                for stream in state["never_subscribed"]
                if stream["stream_id"] not in deleted_stream_ids
            ]

        if event["op"] == "update":
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for sub_list in [
                state["subscriptions"],
                state["unsubscribed"],
                state["never_subscribed"],
            ]:
                for obj in sub_list:
                    if obj["name"].lower() == event["name"].lower():
                        obj[event["property"]] = event["value"]
                        if event["property"] == "description":
                            obj["rendered_description"] = event["rendered_description"]
                        if event.get("history_public_to_subscribers") is not None:
                            obj["history_public_to_subscribers"] = event[
                                "history_public_to_subscribers"
                            ]
                        if event.get("is_web_public") is not None:
                            obj["is_web_public"] = event["is_web_public"]
            # Also update the pure streams data
            if "streams" in state:
                for stream in state["streams"]:
                    if stream["name"].lower() == event["name"].lower():
                        prop = event["property"]
                        if prop in stream:
                            stream[prop] = event["value"]
                            if prop == "description":
                                stream["rendered_description"] = event["rendered_description"]
                            if event.get("history_public_to_subscribers") is not None:
                                stream["history_public_to_subscribers"] = event[
                                    "history_public_to_subscribers"
                                ]
                            if event.get("is_web_public") is not None:
                                stream["is_web_public"] = event["is_web_public"]
    elif event["type"] == "default_streams":
        state["realm_default_streams"] = event["default_streams"]
    elif event["type"] == "default_stream_groups":
        state["realm_default_stream_groups"] = event["default_stream_groups"]
    elif event["type"] == "realm":
        if event["op"] == "update":
            field = "realm_" + event["property"]
            state[field] = event["value"]

            if event["property"] == "plan_type":
                # Then there are some extra fields that also need to be set.
                state["zulip_plan_is_not_limited"] = event["value"] != Realm.PLAN_TYPE_LIMITED
                # upload_quota is in bytes, so we need to convert it to MiB.
                upload_quota_bytes = event["extra_data"]["upload_quota"]
                state["realm_upload_quota_mib"] = optional_bytes_to_mib(upload_quota_bytes)

            if field == "realm_jitsi_server_url":
                state["jitsi_server_url"] = (
                    state["realm_jitsi_server_url"]
                    if state["realm_jitsi_server_url"] is not None
                    else state["server_jitsi_server_url"]
                )

            policy_permission_dict = {
                "create_web_public_stream_policy": "can_create_web_public_streams",
                "invite_to_stream_policy": "can_subscribe_other_users",
                "invite_to_realm_policy": "can_invite_others_to_realm",
            }

            # Tricky interaction: Whether we can create streams and can
            # subscribe other users can get changed here.

            if field == "realm_waiting_period_threshold":
                for policy, permission in policy_permission_dict.items():
                    if permission in state:
                        state[permission] = user_profile.has_permission(policy)

            if (
                event["property"] in policy_permission_dict
                and policy_permission_dict[event["property"]] in state
            ):
                state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
                    event["property"]
                )

            # Finally, we need to recompute this value from its inputs.
            state["can_create_streams"] = (
                state["can_create_private_streams"]
                or state["can_create_public_streams"]
                or state["can_create_web_public_streams"]
            )
        elif event["op"] == "update_dict":
            for key, value in event["data"].items():
                state["realm_" + key] = value
                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == "authentication_methods":
                    state["realm_password_auth_enabled"] = (
                        value["Email"]["enabled"] or value["LDAP"]["enabled"]
                    )
                    state["realm_email_auth_enabled"] = value["Email"]["enabled"]

                if key in ["can_create_public_channel_group", "can_create_private_channel_group"]:
                    if key == "can_create_public_channel_group":
                        state["realm_create_public_stream_policy"] = (
                            get_corresponding_policy_value_for_group_setting(
                                user_profile.realm,
                                "can_create_public_channel_group",
                                Realm.COMMON_POLICY_TYPES,
                            )
                        )
                        state["can_create_public_streams"] = user_profile.has_permission(key)
                    else:
                        state["realm_create_private_stream_policy"] = (
                            get_corresponding_policy_value_for_group_setting(
                                user_profile.realm,
                                "can_create_private_channel_group",
                                Realm.COMMON_POLICY_TYPES,
                            )
                        )
                        state["can_create_private_streams"] = user_profile.has_permission(key)

                    state["can_create_streams"] = (
                        state["can_create_private_streams"]
                        or state["can_create_public_streams"]
                        or state["can_create_web_public_streams"]
                    )
        elif event["op"] == "deactivated":
            # The realm has just been deactivated. If our request had
            # arrived a moment later, we'd have rendered the
            # deactivation UI; if it'd been a moment sooner, we'd
            # have rendered the app and then immediately gotten this
            # event (or actually, more likely, an auth error on GET
            # /events) and immediately reloaded into the same
            # deactivation UI. Passing achieves the same result.
            pass
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
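
    # Illustrative example (added; values are hypothetical): a realm
    # "update_dict" event batching several realm property changes, each of
    # which the loop above copies into "realm_"-prefixed state keys.
    #
    #     {
    #         "type": "realm",
    #         "op": "update_dict",
    #         "data": {"name": "Acme", "invite_required": True},
    #     }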
    elif event["type"] == "realm_user_settings_defaults":
        if event["op"] == "update":
            state["realm_user_settings_defaults"][event["property"]] = event["value"]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "subscription":
        if event["op"] == "add":
            added_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
            was_added = lambda s: s["stream_id"] in added_stream_ids

            existing_stream_ids = {sub["stream_id"] for sub in state["subscriptions"]}

            # add the new subscriptions
            for sub in event["subscriptions"]:
                if sub["stream_id"] not in existing_stream_ids:
                    if "subscribers" in sub and not include_subscribers:
                        sub = copy.deepcopy(sub)
                        del sub["subscribers"]
                    state["subscriptions"].append(sub)

            # remove them from unsubscribed if they had been there
            state["unsubscribed"] = [s for s in state["unsubscribed"] if not was_added(s)]

            # remove them from never_subscribed if they had been there
            state["never_subscribed"] = [s for s in state["never_subscribed"] if not was_added(s)]

        elif event["op"] == "remove":
            removed_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
            was_removed = lambda s: s["stream_id"] in removed_stream_ids

            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state["subscriptions"]))

            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub["subscribers"].remove(user_profile.id)

            state["unsubscribed"] += removed_subs

            # Now filter out the removed subscriptions from subscriptions.
            state["subscriptions"] = [s for s in state["subscriptions"] if not was_removed(s)]

        elif event["op"] == "update":
            for sub in state["subscriptions"]:
                if sub["stream_id"] == event["stream_id"]:
                    sub[event["property"]] = event["value"]
        elif event["op"] == "peer_add":
            if include_subscribers:
                stream_ids = set(event["stream_ids"])
                user_ids = set(event["user_ids"])

                for sub_dict in [
                    state["subscriptions"],
                    state["unsubscribed"],
                    state["never_subscribed"],
                ]:
                    for sub in sub_dict:
                        if sub["stream_id"] in stream_ids:
                            subscribers = set(sub["subscribers"]) | user_ids
                            sub["subscribers"] = sorted(subscribers)
        elif event["op"] == "peer_remove":
            if include_subscribers:
                stream_ids = set(event["stream_ids"])
                user_ids = set(event["user_ids"])

                for sub_dict in [
                    state["subscriptions"],
                    state["unsubscribed"],
                    state["never_subscribed"],
                ]:
                    for sub in sub_dict:
                        if sub["stream_id"] in stream_ids:
                            subscribers = set(sub["subscribers"]) - user_ids
                            sub["subscribers"] = sorted(subscribers)
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "presence":
        # Note: fetch_initial_state_data includes a presence_last_update_id
        # value, reflecting the max last_update_id of the UserPresence
        # objects in the data. Events don't carry information about the
        # last_update_id of the UserPresence object to which they
        # correspond, so we don't (and can't) attempt to update that
        # initial presence data here.
        #
        # This means that the state resulting from fetch_initial_state_data
        # + apply_events will not match the state of a hypothetical
        # fetch_initial_state_data fetch that included the fully updated
        # data. This is intended and not a bug.
        if slim_presence:
            user_key = str(event["user_id"])
        else:
            user_key = event["email"]
        state["presences"][user_key] = get_presence_for_user(event["user_id"], slim_presence)[
            user_key
        ]
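
    # Illustrative note (added; values are hypothetical): with slim_presence,
    # state["presences"] is keyed by a stringified user ID such as "10";
    # otherwise it is keyed by email such as "iago@example.com".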
    elif event["type"] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if "raw_unread_msgs" in state and "new_stream_id" in event:
            stream_dict = state["raw_unread_msgs"]["stream_dict"]
            stream_id = event["new_stream_id"]
            for message_id in event["message_ids"]:
                if message_id in stream_dict:
                    stream_dict[message_id]["stream_id"] = stream_id

        if "raw_unread_msgs" in state and TOPIC_NAME in event:
            stream_dict = state["raw_unread_msgs"]["stream_dict"]
            topic_name = event[TOPIC_NAME]
            for message_id in event["message_ids"]:
                if message_id in stream_dict:
                    stream_dict[message_id]["topic"] = topic_name
    elif event["type"] == "delete_message":
        if "message_id" in event:
            message_ids = [event["message_id"]]
        else:
            message_ids = event["message_ids"]  # nocoverage
        state["max_message_id"] = max_message_id_for_user(user_profile)

        if "raw_unread_msgs" in state:
            for remove_id in message_ids:
                remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)

        # The remainder of this block is about maintaining recent_private_conversations
        if "raw_recent_private_conversations" not in state or event["message_type"] != "private":
            return

        # OK, we just deleted what had been the max_message_id for
        # this recent conversation; we need to recompute that value
        # from scratch. We don't strictly need to re-query everything,
        # but this case is likely rare enough that it's reasonable to do so.
        state["raw_recent_private_conversations"] = get_recent_private_conversations(user_profile)
    elif event["type"] == "reaction":
        # The client will get the message with the reactions directly
        pass
    elif event["type"] == "submessage":
        # The client will get submessages with their messages
        pass
    elif event["type"] == "typing":
        # Typing notification events are transient and thus ignored
        pass
    elif event["type"] == "attachment":
        # Attachment events are just for updating the "uploads" UI;
        # they are not sent directly.
        pass
    elif event["type"] == "update_message_flags":
        # We don't return messages in `/register`, so most flags we
        # can ignore, but we do need to update the unread_msgs data if
        # unread state is changed.
        if "raw_unread_msgs" in state and event["flag"] == "read" and event["op"] == "add":
            for remove_id in event["messages"]:
                remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)
        if "raw_unread_msgs" in state and event["flag"] == "read" and event["op"] == "remove":
            for message_id_str, message_details in event["message_details"].items():
                add_message_to_unread_msgs(
                    user_profile.id,
                    state["raw_unread_msgs"],
                    int(message_id_str),
                    message_details,
                )
        if event["flag"] == "starred" and "starred_messages" in state:
            if event["op"] == "add":
                state["starred_messages"] += event["messages"]
            if event["op"] == "remove":
                state["starred_messages"] = [
                    message
                    for message in state["starred_messages"]
                    if message not in event["messages"]
                ]
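
    # Illustrative example (added; values are hypothetical): marking messages
    # as read arrives as an "add" of the "read" flag, which the branch above
    # translates into removals from the raw unread data.
    #
    #     {"type": "update_message_flags", "op": "add", "flag": "read",
    #      "messages": [1234, 1235]}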
    elif event["type"] == "realm_domains":
        if event["op"] == "add":
            state["realm_domains"].append(event["realm_domain"])
        elif event["op"] == "change":
            for realm_domain in state["realm_domains"]:
                if realm_domain["domain"] == event["realm_domain"]["domain"]:
                    realm_domain["allow_subdomains"] = event["realm_domain"]["allow_subdomains"]
        elif event["op"] == "remove":
            state["realm_domains"] = [
                realm_domain
                for realm_domain in state["realm_domains"]
                if realm_domain["domain"] != event["domain"]
            ]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "realm_emoji":
        state["realm_emoji"] = event["realm_emoji"]
    elif event["type"] == "realm_export":
        # These realm export events are only available to
        # administrators, and aren't included in page_params.
        pass
    elif event["type"] == "alert_words":
        state["alert_words"] = event["alert_words"]
    elif event["type"] == "muted_topics":
        state["muted_topics"] = event["muted_topics"]
    elif event["type"] == "muted_users":
        state["muted_users"] = event["muted_users"]
    elif event["type"] == "realm_linkifiers":
        # We only send realm_linkifiers events to clients that indicate
        # support for linkifiers with URL templates. Otherwise, silently
        # ignore the event.
        if linkifier_url_template:
            state["realm_linkifiers"] = event["realm_linkifiers"]
    elif event["type"] == "realm_playgrounds":
        state["realm_playgrounds"] = event["realm_playgrounds"]
    elif event["type"] == "update_display_settings":
        if event["setting_name"] != "timezone":
            assert event["setting_name"] in UserProfile.display_settings_legacy
        state[event["setting_name"]] = event["setting"]
    elif event["type"] == "update_global_notifications":
        assert event["notification_name"] in UserProfile.notification_settings_legacy
        state[event["notification_name"]] = event["setting"]
    elif event["type"] == "user_settings":
        # The timezone setting is not included in the property_types dict,
        # because it is not part of the UserBaseSettings class.
        if event["property"] != "timezone":
            assert event["property"] in UserProfile.property_types
        if event["property"] in {
            **UserProfile.display_settings_legacy,
            **UserProfile.notification_settings_legacy,
        }:
            state[event["property"]] = event["value"]
        state["user_settings"][event["property"]] = event["value"]
    elif event["type"] == "invites_changed":
        pass
    elif event["type"] == "user_group":
        if event["op"] == "add":
            state["realm_user_groups"].append(event["group"])
            state["realm_user_groups"].sort(key=lambda group: group["id"])
        elif event["op"] == "update":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    user_group.update(event["data"])
        elif event["op"] == "add_members":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    user_group["members"].extend(event["user_ids"])
                    user_group["members"].sort()
        elif event["op"] == "remove_members":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    members = set(user_group["members"])
                    user_group["members"] = sorted(members - set(event["user_ids"]))
        elif event["op"] == "add_subgroups":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    user_group["direct_subgroup_ids"].extend(event["direct_subgroup_ids"])
                    user_group["direct_subgroup_ids"].sort()
        elif event["op"] == "remove_subgroups":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    subgroups = set(user_group["direct_subgroup_ids"])
                    user_group["direct_subgroup_ids"] = sorted(
                        subgroups - set(event["direct_subgroup_ids"])
                    )
        elif event["op"] == "remove":
            state["realm_user_groups"] = [
                ug for ug in state["realm_user_groups"] if ug["id"] != event["group_id"]
            ]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "user_status":
        user_id_str = str(event["user_id"])
        user_status = state["user_status"]
        away = event.get("away")
        status_text = event.get("status_text")
        emoji_name = event.get("emoji_name")
        emoji_code = event.get("emoji_code")
        reaction_type = event.get("reaction_type")

        if user_id_str not in user_status:
            user_status[user_id_str] = {}

        if away is not None:
            if away:
                user_status[user_id_str]["away"] = True
            else:
                user_status[user_id_str].pop("away", None)

        if status_text is not None:
            if status_text == "":
                user_status[user_id_str].pop("status_text", None)
            else:
                user_status[user_id_str]["status_text"] = status_text

        if emoji_name is not None:
            if emoji_name == "":
                user_status[user_id_str].pop("emoji_name", None)
            else:
                user_status[user_id_str]["emoji_name"] = emoji_name

        if emoji_code is not None:
            if emoji_code == "":
                user_status[user_id_str].pop("emoji_code", None)
            else:
                user_status[user_id_str]["emoji_code"] = emoji_code

        if reaction_type is not None:
            if reaction_type == UserStatus.UNICODE_EMOJI and emoji_name == "":
                user_status[user_id_str].pop("reaction_type", None)
            else:
                user_status[user_id_str]["reaction_type"] = reaction_type

        if not user_status[user_id_str]:
            user_status.pop(user_id_str, None)

        state["user_status"] = user_status
    elif event["type"] == "user_topic":
        if event["visibility_policy"] == UserTopic.VisibilityPolicy.INHERIT:
            user_topics_state = state["user_topics"]
            for i in range(len(user_topics_state)):
                if (
                    user_topics_state[i]["stream_id"] == event["stream_id"]
                    and user_topics_state[i]["topic_name"] == event["topic_name"]
                ):
                    del user_topics_state[i]
                    break
        else:
            fields = ["stream_id", "topic_name", "visibility_policy", "last_updated"]
            state["user_topics"].append({x: event[x] for x in fields})
|
2021-02-12 08:20:45 +01:00
|
|
|
elif event["type"] == "has_zoom_token":
|
|
|
|
state["has_zoom_token"] = event["value"]
|
2024-02-14 20:27:17 +01:00
|
|
|
elif event["type"] == "web_reload_client":
|
|
|
|
# This is an unlikely race, where the queue was created with a
|
|
|
|
# previous Tornado process, which restarted, and subsequently
|
|
|
|
# was told by restart-server to tell its old clients to
|
|
|
|
# reload. We warn, since we do not expect this race to be
|
|
|
|
# possible, but the worst expected outcome is that the client
|
|
|
|
# retains the old JS instead of reloading.
|
|
|
|
logging.warning("Got a web_reload_client event during apply_events")
|
2024-02-27 15:51:17 +01:00
|
|
|
elif event["type"] == "restart":
|
|
|
|
# The Tornado process restarted. This has no effect; we ignore it.
|
|
|
|
pass
|
2017-02-20 20:09:48 +01:00
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
raise AssertionError("Unexpected event type {}".format(event["type"]))


def do_events_register(
    user_profile: Optional[UserProfile],
    realm: Realm,
    user_client: Client,
    apply_markdown: bool = True,
    client_gravatar: bool = False,
    slim_presence: bool = False,
    presence_last_update_id_fetched_by_client: Optional[int] = None,
    event_types: Optional[Sequence[str]] = None,
    queue_lifespan_secs: int = 0,
    all_public_streams: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
    client_capabilities: Mapping[str, bool] = {},
    narrow: Collection[NarrowTerm] = [],
    fetch_event_types: Optional[Collection[str]] = None,
    spectator_requested_language: Optional[str] = None,
    pronouns_field_type_supported: bool = True,
) -> Dict[str, Any]:
    # Technically we don't need to check this here because
    # build_narrow_predicate will check it, but it's nicer from an
    # error-handling perspective to do it before contacting Tornado.
    check_narrow_for_events(narrow)

    notification_settings_null = client_capabilities.get("notification_settings_null", False)
    bulk_message_deletion = client_capabilities.get("bulk_message_deletion", False)
    user_avatar_url_field_optional = client_capabilities.get(
        "user_avatar_url_field_optional", False
    )
    stream_typing_notifications = client_capabilities.get("stream_typing_notifications", False)
    user_settings_object = client_capabilities.get("user_settings_object", False)
    linkifier_url_template = client_capabilities.get("linkifier_url_template", False)
    user_list_incomplete = client_capabilities.get("user_list_incomplete", False)
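
    # Illustrative sketch (hypothetical payload): a modern client might send
    #   client_capabilities={"notification_settings_null": True,
    #                        "bulk_message_deletion": True,
    #                        "user_settings_object": True,
    #                        "linkifier_url_template": True}
    # and any capability it omits simply defaults to False above.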

    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None
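
    # E.g. (hypothetical values): fetch_event_types=["message"] with
    # event_types=["message", "presence"] fetches only message-related
    # initial state, while the queue registered below still receives
    # future events of both types.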

    # Fetch the realm object again to prefetch all the group settings
    # which support anonymous groups, to avoid unnecessary DB queries.
    realm = get_realm_with_settings(realm_id=realm.id)

    if user_profile is None:
        # TODO: Unify the two fetch_initial_state_data code paths.
        assert client_gravatar is False
        assert include_subscribers is False
        assert include_streams is False
        ret = fetch_initial_state_data(
            user_profile,
            realm=realm,
            event_types=event_types_set,
            queue_id=None,
            # Force client_gravatar=False for security reasons.
            client_gravatar=client_gravatar,
            linkifier_url_template=linkifier_url_template,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            user_settings_object=user_settings_object,
            user_list_incomplete=user_list_incomplete,
            # These presence params are a noop, because presence is not included.
            slim_presence=True,
            presence_last_update_id_fetched_by_client=None,
            # Force include_subscribers=False for security reasons.
            include_subscribers=include_subscribers,
            # Force include_streams=False for security reasons.
            include_streams=include_streams,
            spectator_requested_language=spectator_requested_language,
        )

        post_process_state(user_profile, ret, notification_settings_null=False)
        return ret

    # Fill up the UserMessage rows if a soft-deactivated user has returned.
    reactivate_user_if_soft_deactivated(user_profile)

    legacy_narrow = [[nt.operator, nt.operand] for nt in narrow]
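    # E.g. (hypothetical values): [NarrowTerm(operator="stream", operand="devel")]
    # becomes [["stream", "devel"]], the legacy format Tornado expects.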

    # Note that we pass event_types, not fetch_event_types here, since
    # that's what controls which future events are sent.
    queue_id = request_event_queue(
        user_profile,
        user_client,
        apply_markdown,
        client_gravatar,
        slim_presence,
        queue_lifespan_secs,
        event_types,
        all_public_streams,
        narrow=legacy_narrow,
        bulk_message_deletion=bulk_message_deletion,
        stream_typing_notifications=stream_typing_notifications,
        user_settings_object=user_settings_object,
        pronouns_field_type_supported=pronouns_field_type_supported,
        linkifier_url_template=linkifier_url_template,
        user_list_incomplete=user_list_incomplete,
    )

    if queue_id is None:
        raise JsonableError(_("Could not allocate event queue"))

    ret = fetch_initial_state_data(
        user_profile,
        realm=realm,
        event_types=event_types_set,
        queue_id=queue_id,
        client_gravatar=client_gravatar,
        user_avatar_url_field_optional=user_avatar_url_field_optional,
        user_settings_object=user_settings_object,
        slim_presence=slim_presence,
        presence_last_update_id_fetched_by_client=presence_last_update_id_fetched_by_client,
        include_subscribers=include_subscribers,
        include_streams=include_streams,
        pronouns_field_type_supported=pronouns_field_type_supported,
        linkifier_url_template=linkifier_url_template,
        user_list_incomplete=user_list_incomplete,
    )

    # Apply events that came in while we were fetching initial data.
    events = get_user_events(user_profile, queue_id, -1)
    apply_events(
        user_profile,
        state=ret,
        events=events,
        fetch_event_types=fetch_event_types,
        client_gravatar=client_gravatar,
        slim_presence=slim_presence,
        include_subscribers=include_subscribers,
        linkifier_url_template=linkifier_url_template,
        user_list_incomplete=user_list_incomplete,
    )

    post_process_state(user_profile, ret, notification_settings_null)

    if len(events) > 0:
        ret["last_event_id"] = events[-1]["id"]
    else:
        ret["last_event_id"] = -1
    return ret


def post_process_state(
    user_profile: Optional[UserProfile], ret: Dict[str, Any], notification_settings_null: bool
) -> None:
    """
    NOTE:

    Below is an example of post-processing initial state data AFTER we
    apply events. For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.

    Then, only at the end, do we put it in the form that's more
    appropriate for clients.
    """
    if "raw_unread_msgs" in ret:
        ret["unread_msgs"] = aggregate_unread_data(ret["raw_unread_msgs"])
        del ret["raw_unread_msgs"]

    """
    See the note above; the same technique applies below.
    """
    if "raw_users" in ret:
        user_dicts = sorted(ret["raw_users"].values(), key=lambda x: x["user_id"])

        ret["realm_users"] = [d for d in user_dicts if d["is_active"]]
        ret["realm_non_active_users"] = [d for d in user_dicts if not d["is_active"]]

        """
        Be aware that we do intentional aliasing in the code below:
        we can now safely remove the `is_active` field from all the
        dicts that got partitioned into the two lists above.

        We remove the field because it's already implied, and sending
        it to clients makes clients prone to bugs where they "trust"
        the field but don't actually update it in live updates. It
        also wastes bandwidth.
        """
        for d in user_dicts:
            d.pop("is_active")

        del ret["raw_users"]

    if "raw_recent_private_conversations" in ret:
        # Reformat recent_private_conversations to be a list of
        # dictionaries, rather than a dict.
        ret["recent_private_conversations"] = sorted(
            (
                dict(
                    **value,
                )
                for (recipient_id, value) in ret["raw_recent_private_conversations"].items()
            ),
            key=lambda x: -x["max_message_id"],
        )
        del ret["raw_recent_private_conversations"]

    if not notification_settings_null and "subscriptions" in ret:
        for stream_dict in ret["subscriptions"] + ret["unsubscribed"]:
            handle_stream_notifications_compatibility(
                user_profile, stream_dict, notification_settings_null
            )