2020-09-25 21:53:00 +02:00
|
|
|
# This module is a collection of testing helpers for validating the
|
|
|
|
# schema of "events" sent by Zulip's server-to-client push system.
|
|
|
|
#
|
|
|
|
# By policy, every event generated by Zulip's API should be validated
|
|
|
|
# by a test in test_events.py with a schema checker here (which is
|
|
|
|
# validated, in turn, against the OpenAPI documentation for GET
|
2021-05-14 00:16:30 +02:00
|
|
|
# /events in zulip.yaml and the fixtures used by the Zulip web app
|
2020-09-25 21:53:00 +02:00
|
|
|
# frontend).
|
|
|
|
#
|
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html
|
2020-09-28 17:49:30 +02:00
|
|
|
#
|
|
|
|
# The general paradigm here is that if you have an event with type foo_bar
|
|
|
|
# then you declare foo_bar_event to be an instance of event_dict_type. And
|
|
|
|
# then you make a checker function by saying:
|
|
|
|
#
|
|
|
|
# check_foo_bar = make_checker(foo_bar_event)
|
|
|
|
#
|
|
|
|
# And then the caller can use the checker as follows:
|
|
|
|
#
|
|
|
|
# check_foo_bar(var_name, event)
|
|
|
|
#
|
|
|
|
# For more complicated events, you may write custom checkers that check
|
|
|
|
# aspects of the data that go beyond simply validating that the data
|
|
|
|
# matches an event_dict_type based schema. This typically happens with
|
|
|
|
# events where you either have a Union type or optional_keys.
|
|
|
|
#
|
|
|
|
# See check_delete_message and check_presence for examples of this
|
|
|
|
# paradigm.
|
2020-09-25 21:53:00 +02:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
from typing import Sequence
|
2020-07-30 18:11:19 +02:00
|
|
|
|
|
|
|
from zerver.lib.data_types import (
|
|
|
|
DictType,
|
|
|
|
EnumType,
|
|
|
|
Equals,
|
|
|
|
ListType,
|
2020-08-05 19:56:34 +02:00
|
|
|
NumberType,
|
2020-07-30 18:11:19 +02:00
|
|
|
OptionalType,
|
2020-08-06 13:40:42 +02:00
|
|
|
StringDictType,
|
2020-08-06 20:31:12 +02:00
|
|
|
TupleType,
|
2020-07-30 18:11:19 +02:00
|
|
|
UnionType,
|
|
|
|
UrlType,
|
|
|
|
check_data,
|
|
|
|
event_dict_type,
|
|
|
|
make_checker,
|
2020-07-08 02:14:11 +02:00
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
from zerver.lib.topic import ORIG_TOPIC, TOPIC_LINKS, TOPIC_NAME
|
2022-07-05 16:42:29 +02:00
|
|
|
from zerver.models import Realm, RealmUserDefault, Stream, UserProfile
|
2020-06-30 18:38:42 +02:00
|
|
|
|
2020-07-08 12:53:52 +02:00
|
|
|
# These fields are used for "stream" events, and are included in the
|
|
|
|
# larger "subscription" events that also contain personal settings.
|
2023-07-27 16:42:21 +02:00
|
|
|
default_stream_fields: Sequence[tuple[str, object]] = [
    ("can_remove_subscribers_group", int),
    ("creator_id", OptionalType(int)),
    ("date_created", int),
    ("description", str),
    ("first_message_id", OptionalType(int)),
    ("history_public_to_subscribers", bool),
    ("invite_only", bool),
    ("is_announcement_only", bool),
    ("is_web_public", bool),
    ("message_retention_days", OptionalType(int)),
    ("name", str),
    ("rendered_description", str),
    ("stream_id", int),
    ("stream_post_policy", int),
]
|
|
|
|
|
2023-07-27 16:42:21 +02:00
|
|
|
# Stream fields plus traffic data; used for the "stream" event payloads
# and extended further by subscription_fields.
basic_stream_fields: Sequence[tuple[str, object]] = [
    *default_stream_fields,
    ("stream_weekly_traffic", OptionalType(int)),
]
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
# Per-subscription records: the stream fields above plus the user's
# personal settings for the stream.
subscription_fields: Sequence[tuple[str, object]] = [
    *basic_stream_fields,
    ("audible_notifications", OptionalType(bool)),
    ("color", str),
    ("desktop_notifications", OptionalType(bool)),
    ("email_notifications", OptionalType(bool)),
    ("in_home_view", bool),
    ("is_muted", bool),
    ("pin_to_top", bool),
    ("push_notifications", OptionalType(bool)),
    # We may try to remove subscribers from some events in
    # the future for clients that don't want subscriber
    # info.
    ("subscribers", ListType(int)),
    ("wildcard_mentions_notify", OptionalType(bool)),
]
|
|
|
|
|
2020-06-30 18:38:42 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# A scalar value: bool, int, or str.
value_type = UnionType(
    [
        bool,
        int,
        str,
    ]
)

# Like value_type, but also permits None.
optional_value_type = UnionType(
    [
        bool,
        int,
        str,
        Equals(None),
    ]
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
alert_words_event = event_dict_type(
|
2020-07-18 17:11:41 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("alert_words")),
|
|
|
|
("alert_words", ListType(str)),
|
2020-07-18 17:11:41 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_alert_words = make_checker(alert_words_event)
|
2020-07-18 17:11:41 +02:00
|
|
|
|
2020-08-06 13:08:42 +02:00
|
|
|
attachment_message_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("date_sent", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
attachment_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("name", str),
|
|
|
|
("size", int),
|
|
|
|
("path_id", str),
|
|
|
|
("create_time", int),
|
|
|
|
("messages", ListType(attachment_message_type)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
attachment_add_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("attachment")),
|
|
|
|
("op", Equals("add")),
|
|
|
|
("attachment", attachment_type),
|
|
|
|
("upload_space_used", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_attachment_add = make_checker(attachment_add_event)
|
|
|
|
|
|
|
|
attachment_remove_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("attachment")),
|
|
|
|
("op", Equals("remove")),
|
|
|
|
("attachment", DictType([("id", int)])),
|
|
|
|
("upload_space_used", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_attachment_remove = make_checker(attachment_remove_event)
|
|
|
|
|
|
|
|
attachment_update_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("attachment")),
|
|
|
|
("op", Equals("update")),
|
|
|
|
("attachment", attachment_type),
|
|
|
|
("upload_space_used", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_attachment_update = make_checker(attachment_update_event)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
custom_profile_field_type = DictType(
|
2020-07-18 17:02:28 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("id", int),
|
|
|
|
("type", int),
|
|
|
|
("name", str),
|
|
|
|
("hint", str),
|
|
|
|
("field_data", str),
|
|
|
|
("order", int),
|
2024-03-19 14:22:03 +01:00
|
|
|
("required", bool),
|
2020-07-30 18:11:19 +02:00
|
|
|
],
|
2022-07-12 21:04:47 +02:00
|
|
|
optional_keys=[
|
|
|
|
("display_in_profile_summary", bool),
|
|
|
|
],
|
2020-07-18 17:02:28 +02:00
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
custom_profile_fields_event = event_dict_type(
|
2020-07-18 17:02:28 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("custom_profile_fields")),
|
|
|
|
("fields", ListType(custom_profile_field_type)),
|
2020-07-18 17:02:28 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_custom_profile_fields = make_checker(custom_profile_fields_event)
|
2020-07-18 17:02:28 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
_check_stream_group = DictType(
|
2020-08-01 14:33:03 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("name", str),
|
|
|
|
("id", int),
|
|
|
|
("description", str),
|
2023-07-27 16:42:21 +02:00
|
|
|
("streams", ListType(DictType(default_stream_fields))),
|
2020-08-01 14:33:03 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
default_stream_groups_event = event_dict_type(
|
2020-08-01 14:33:03 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("default_stream_groups")),
|
|
|
|
("default_stream_groups", ListType(_check_stream_group)),
|
2020-08-01 14:33:03 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_default_stream_groups = make_checker(default_stream_groups_event)
|
2020-08-01 14:33:03 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
default_streams_event = event_dict_type(
|
2020-08-01 14:36:13 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("default_streams")),
|
2023-07-27 16:42:21 +02:00
|
|
|
("default_streams", ListType(DictType(default_stream_fields))),
|
2020-08-01 14:36:13 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_default_streams = make_checker(default_streams_event)
|
2020-08-01 14:36:13 +02:00
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# The event type has an unusual number of optional fields. The
|
|
|
|
# message_id/message_ids fields are conditional on the
|
|
|
|
# bulk_message_deletion client_capability, whereas the other fields
|
|
|
|
# are conditional on private vs. stream messages.
|
2020-08-14 15:12:27 +02:00
|
|
|
delete_message_event = event_dict_type(
    required_keys=[
        ("type", Equals("delete_message")),
        ("message_type", EnumType(["private", "stream"])),
    ],
    optional_keys=[
        # Sent to clients lacking the bulk_message_deletion capability.
        ("message_id", int),
        # Sent to clients with the bulk_message_deletion capability.
        ("message_ids", ListType(int)),
        # Present only for stream messages.
        ("stream_id", int),
        ("topic", str),
    ],
)
# Private: check_delete_message (below) wraps this schema check with
# validation of exactly which optional keys must be present.
_check_delete_message = make_checker(delete_message_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_delete_message(
    var_name: str,
    event: dict[str, object],
    message_type: str,
    num_message_ids: int,
    is_legacy: bool,
) -> None:
    """Validate a delete_message event, including its conditional keys.

    Runs the generic schema check first, then verifies that exactly the
    right optional keys are present for the given message_type and for
    whether the client uses the legacy single-message_id protocol.
    """
    _check_delete_message(var_name, event)

    assert event["message_type"] == message_type

    expected_keys = {"id", "type", "message_type"}
    if message_type == "stream":
        expected_keys.update({"stream_id", "topic"})
    elif message_type != "private":
        raise AssertionError("unexpected message_type")

    if is_legacy:
        # Legacy clients receive one event per deleted message.
        assert num_message_ids == 1
        expected_keys.add("message_id")
    else:
        message_ids = event["message_ids"]
        assert isinstance(message_ids, list)
        assert len(message_ids) == num_message_ids
        expected_keys.add("message_ids")

    assert expected_keys == set(event.keys())
|
|
|
|
|
|
|
|
|
2023-10-03 17:07:22 +02:00
|
|
|
draft_fields = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("type", EnumType(["", "stream", "private"])),
|
|
|
|
("to", ListType(int)),
|
|
|
|
("topic", str),
|
|
|
|
("content", str),
|
|
|
|
],
|
|
|
|
optional_keys=[
|
|
|
|
("timestamp", int),
|
|
|
|
],
|
|
|
|
)
|
|
|
|
|
|
|
|
drafts_add_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("drafts")),
|
|
|
|
("op", Equals("add")),
|
|
|
|
("drafts", ListType(draft_fields)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_draft_add = make_checker(drafts_add_event)
|
|
|
|
|
|
|
|
drafts_update_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("drafts")),
|
|
|
|
("op", Equals("update")),
|
|
|
|
("draft", draft_fields),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_draft_update = make_checker(drafts_update_event)
|
|
|
|
|
|
|
|
drafts_remove_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("drafts")),
|
|
|
|
("op", Equals("remove")),
|
|
|
|
("draft_id", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_draft_remove = make_checker(drafts_remove_event)
|
|
|
|
|
|
|
|
|
2020-08-16 17:26:24 +02:00
|
|
|
has_zoom_token_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("has_zoom_token")),
|
|
|
|
("value", bool),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
_check_has_zoom_token = make_checker(has_zoom_token_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_has_zoom_token(
    var_name: str,
    event: dict[str, object],
    value: bool,
) -> None:
    """Validate a has_zoom_token event and confirm its boolean payload."""
    _check_has_zoom_token(var_name, event)
    actual_value = event["value"]
    assert actual_value == value
|
|
|
|
|
|
|
|
|
2021-07-02 02:13:55 +02:00
|
|
|
heartbeat_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("heartbeat")),
|
|
|
|
]
|
|
|
|
)
|
2023-01-02 20:50:23 +01:00
|
|
|
_check_heartbeat = make_checker(heartbeat_event)
|
2021-07-02 02:13:55 +02:00
|
|
|
|
|
|
|
|
|
|
|
def check_heartbeat(
    var_name: str,
    event: dict[str, object],
) -> None:
    # Heartbeat events carry no payload beyond their type, so the
    # schema check is the entire validation.
    _check_heartbeat(var_name, event)
|
2021-07-02 02:13:55 +02:00
|
|
|
|
|
|
|
|
2023-12-02 11:30:35 +01:00
|
|
|
_onboarding_steps = DictType(
|
2020-08-05 19:56:34 +02:00
|
|
|
required_keys=[
|
2023-12-05 12:47:03 +01:00
|
|
|
("type", str),
|
2020-08-05 19:56:34 +02:00
|
|
|
("name", str),
|
2024-05-10 15:37:43 +02:00
|
|
|
]
|
2020-08-05 19:56:34 +02:00
|
|
|
)
|
|
|
|
|
2023-12-02 11:30:35 +01:00
|
|
|
onboarding_steps_event = event_dict_type(
|
2020-08-05 19:56:34 +02:00
|
|
|
required_keys=[
|
2023-12-02 11:30:35 +01:00
|
|
|
("type", Equals("onboarding_steps")),
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2023-12-02 11:30:35 +01:00
|
|
|
"onboarding_steps",
|
|
|
|
ListType(_onboarding_steps),
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
2020-08-05 19:56:34 +02:00
|
|
|
]
|
|
|
|
)
|
2023-12-02 11:30:35 +01:00
|
|
|
check_onboarding_steps = make_checker(onboarding_steps_event)
|
2020-08-05 19:56:34 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
invites_changed_event = event_dict_type(
|
2020-07-18 16:33:03 +02:00
|
|
|
required_keys=[
|
|
|
|
# the most boring event...no metadata
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("invites_changed")),
|
2020-07-18 16:33:03 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_invites_changed = make_checker(invites_changed_event)
|
2020-07-18 16:33:03 +02:00
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# This type, like other instances of TupleType, is a legacy feature of
|
|
|
|
# a very old Zulip API; we plan to replace it with an object as those
|
|
|
|
# are more extensible.
|
2020-08-06 20:31:12 +02:00
|
|
|
muted_topic_type = TupleType(
|
|
|
|
[
|
|
|
|
str, # stream name
|
|
|
|
str, # topic name
|
|
|
|
int, # timestamp
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
muted_topics_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("muted_topics")),
|
|
|
|
("muted_topics", ListType(muted_topic_type)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_muted_topics = make_checker(muted_topics_event)
|
|
|
|
|
2022-02-25 21:48:56 +01:00
|
|
|
user_topic_event = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("type", Equals("user_topic")),
|
|
|
|
("stream_id", int),
|
|
|
|
("topic_name", str),
|
|
|
|
("last_updated", int),
|
|
|
|
("visibility_policy", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
check_user_topic = make_checker(user_topic_event)
|
|
|
|
|
2021-03-27 12:23:32 +01:00
|
|
|
muted_user_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("timestamp", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
muted_users_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("muted_users")),
|
|
|
|
("muted_users", ListType(muted_user_type)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_muted_users = make_checker(muted_users_event)
|
|
|
|
|
2021-01-26 07:32:29 +01:00
|
|
|
_check_topic_links = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("text", str),
|
|
|
|
("url", str),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2023-10-25 14:02:18 +02:00
|
|
|
basic_message_fields = [
|
2020-07-30 18:11:19 +02:00
|
|
|
("avatar_url", OptionalType(str)),
|
|
|
|
("client", str),
|
|
|
|
("content", str),
|
|
|
|
("content_type", Equals("text/html")),
|
|
|
|
("id", int),
|
|
|
|
("is_me_message", bool),
|
|
|
|
("reactions", ListType(dict)),
|
|
|
|
("recipient_id", int),
|
|
|
|
("sender_realm_str", str),
|
|
|
|
("sender_email", str),
|
|
|
|
("sender_full_name", str),
|
|
|
|
("sender_id", int),
|
|
|
|
(TOPIC_NAME, str),
|
2021-01-26 07:32:29 +01:00
|
|
|
(TOPIC_LINKS, ListType(_check_topic_links)),
|
2020-07-30 18:11:19 +02:00
|
|
|
("submessages", ListType(dict)),
|
|
|
|
("timestamp", int),
|
|
|
|
("type", str),
|
2020-07-10 16:10:58 +02:00
|
|
|
]
|
|
|
|
|
2023-10-25 14:02:18 +02:00
|
|
|
message_fields = [
|
|
|
|
*basic_message_fields,
|
|
|
|
("display_recipient", str),
|
|
|
|
("stream_id", int),
|
|
|
|
]
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
message_event = event_dict_type(
|
2020-07-10 16:10:58 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("message")),
|
|
|
|
("flags", ListType(str)),
|
|
|
|
("message", DictType(message_fields)),
|
2020-07-10 16:10:58 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_message = make_checker(message_event)
|
2020-07-10 16:10:58 +02:00
|
|
|
|
2023-10-25 14:02:18 +02:00
|
|
|
_check_direct_message_display_recipient = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("is_mirror_dummy", bool),
|
|
|
|
("email", str),
|
|
|
|
("full_name", str),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
direct_message_fields = [
|
|
|
|
*basic_message_fields,
|
|
|
|
("display_recipient", ListType(_check_direct_message_display_recipient)),
|
|
|
|
]
|
|
|
|
direct_message_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("message")),
|
|
|
|
("flags", ListType(str)),
|
|
|
|
("message", DictType(direct_message_fields)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_direct_message = make_checker(direct_message_event)
|
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# This legacy presence structure is intended to be replaced by a more
|
|
|
|
# sensible data structure.
|
2020-08-13 19:29:07 +02:00
|
|
|
presence_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("status", EnumType(["active", "idle"])),
|
|
|
|
("timestamp", int),
|
|
|
|
("client", str),
|
|
|
|
("pushable", bool),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
presence_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("presence")),
|
|
|
|
("user_id", int),
|
|
|
|
("server_timestamp", NumberType()),
|
|
|
|
("presence", StringDictType(presence_type)),
|
|
|
|
],
|
|
|
|
optional_keys=[
|
|
|
|
("email", str),
|
|
|
|
],
|
|
|
|
)
|
|
|
|
_check_presence = make_checker(presence_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_presence(
    var_name: str,
    event: dict[str, object],
    has_email: bool,
    presence_key: str,
    status: str,
) -> None:
    """Validate a presence event, including its single presence entry."""
    _check_presence(var_name, event)

    assert has_email == ("email" in event)

    presence = event["presence"]
    assert isinstance(presence, dict)

    # Our tests only have one presence value.
    ((actual_key, actual_value),) = presence.items()
    assert actual_key == presence_key
    assert actual_value["status"] == status
|
2020-08-13 19:29:07 +02:00
|
|
|
|
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# Type for the legacy user field; the `user_id` field is intended to
|
|
|
|
# replace this and we expect to remove this once clients have migrated
|
|
|
|
# to support the modern API.
|
|
|
|
reaction_legacy_user_type = DictType(
|
2020-07-17 09:23:12 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("email", str),
|
|
|
|
("full_name", str),
|
|
|
|
("user_id", int),
|
2020-07-17 09:23:12 +02:00
|
|
|
]
|
2020-09-25 21:53:00 +02:00
|
|
|
# We should probably declare is_mirror_dummy as an optional field here.
|
2020-07-17 09:23:12 +02:00
|
|
|
)
|
|
|
|
|
2020-08-17 15:11:19 +02:00
|
|
|
reaction_add_event = event_dict_type(
|
2020-07-17 09:23:12 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("reaction")),
|
2020-08-17 15:11:19 +02:00
|
|
|
("op", Equals("add")),
|
2020-07-30 18:11:19 +02:00
|
|
|
("message_id", int),
|
|
|
|
("emoji_name", str),
|
|
|
|
("emoji_code", str),
|
2023-10-02 21:45:11 +02:00
|
|
|
("reaction_type", EnumType(["unicode_emoji", "realm_emoji", "zulip_extra_emoji"])),
|
2020-07-30 18:11:19 +02:00
|
|
|
("user_id", int),
|
2020-09-25 21:53:00 +02:00
|
|
|
("user", reaction_legacy_user_type),
|
2020-07-17 09:23:12 +02:00
|
|
|
]
|
|
|
|
)
|
2020-08-17 15:11:19 +02:00
|
|
|
check_reaction_add = make_checker(reaction_add_event)
|
2020-07-17 09:23:12 +02:00
|
|
|
|
|
|
|
|
2020-08-17 15:11:19 +02:00
|
|
|
reaction_remove_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("reaction")),
|
|
|
|
("op", Equals("remove")),
|
|
|
|
("message_id", int),
|
|
|
|
("emoji_name", str),
|
|
|
|
("emoji_code", str),
|
2023-10-02 21:45:11 +02:00
|
|
|
("reaction_type", EnumType(["unicode_emoji", "realm_emoji", "zulip_extra_emoji"])),
|
2020-08-17 15:11:19 +02:00
|
|
|
("user_id", int),
|
2020-09-25 21:53:00 +02:00
|
|
|
("user", reaction_legacy_user_type),
|
2020-08-17 15:11:19 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
check_reaction_remove = make_checker(reaction_remove_event)
|
2020-07-17 09:23:12 +02:00
|
|
|
|
2021-03-13 20:00:05 +01:00
|
|
|
realm_deactivated_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm")),
|
|
|
|
("op", Equals("deactivated")),
|
|
|
|
("realm_id", int),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_realm_deactivated = make_checker(realm_deactivated_event)
|
2020-07-17 09:23:12 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_services_outgoing_type = DictType(
|
2020-07-08 17:07:29 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("base_url", UrlType()),
|
|
|
|
("interface", int),
|
|
|
|
("token", str),
|
2020-07-08 17:07:29 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2020-08-13 18:12:22 +02:00
|
|
|
config_data_schema = StringDictType(str)
|
2020-07-08 17:07:29 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_services_embedded_type = DictType(
|
2020-07-08 17:07:29 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("service_name", str),
|
2020-08-06 13:40:42 +02:00
|
|
|
("config_data", config_data_schema),
|
2020-07-08 17:07:29 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
# Note that regular bots just get an empty list of services,
|
2020-07-30 18:11:19 +02:00
|
|
|
# so the sub_validator for ListType won't matter for them.
|
|
|
|
bot_services_type = ListType(
|
|
|
|
UnionType(
|
2020-07-08 17:07:29 +02:00
|
|
|
[
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_services_outgoing_type,
|
|
|
|
bot_services_embedded_type,
|
2020-07-08 17:07:29 +02:00
|
|
|
]
|
|
|
|
),
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_type = DictType(
|
2020-07-08 17:07:29 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("user_id", int),
|
|
|
|
("api_key", str),
|
|
|
|
("avatar_url", str),
|
|
|
|
("bot_type", int),
|
|
|
|
("default_all_public_streams", bool),
|
|
|
|
("default_events_register_stream", OptionalType(str)),
|
|
|
|
("default_sending_stream", OptionalType(str)),
|
|
|
|
("email", str),
|
|
|
|
("full_name", str),
|
|
|
|
("is_active", bool),
|
|
|
|
("owner_id", int),
|
|
|
|
("services", bot_services_type),
|
2020-07-08 17:07:29 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
realm_bot_add_event = event_dict_type(
|
2020-07-08 17:07:29 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("realm_bot")),
|
|
|
|
("op", Equals("add")),
|
|
|
|
("bot", bot_type),
|
2020-07-08 17:07:29 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
_check_realm_bot_add = make_checker(realm_bot_add_event)
|
2020-07-08 17:07:29 +02:00
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_realm_bot_add(
    var_name: str,
    event: dict[str, object],
) -> None:
    """Validate a realm_bot/add event, including the bot's services.

    The expected shape of the "services" list depends on the bot's type:
    regular bots get an empty list, while outgoing-webhook and embedded
    bots each carry exactly one service record of the matching shape.
    """
    _check_realm_bot_add(var_name, event)

    bot = event["bot"]
    assert isinstance(bot, dict)

    bot_type = bot["bot_type"]
    services = bot["services"]
    services_field = f"{var_name}['bot']['services']"

    if bot_type == UserProfile.DEFAULT_BOT:
        services_schema = Equals([])
    elif bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
        services_schema = ListType(bot_services_outgoing_type, length=1)
    elif bot_type == UserProfile.EMBEDDED_BOT:
        services_schema = ListType(bot_services_embedded_type, length=1)
    else:
        raise AssertionError(f"Unknown bot_type: {bot_type}")

    check_data(services_schema, services_field, services)
|
|
|
|
|
2020-07-08 02:14:11 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_type_for_delete = DictType(
|
2020-07-08 21:06:22 +02:00
|
|
|
required_keys=[
|
|
|
|
# for legacy reasons we have a dict here
|
|
|
|
# with only one key
|
2020-07-30 18:11:19 +02:00
|
|
|
("user_id", int),
|
2020-07-08 21:06:22 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
realm_bot_delete_event = event_dict_type(
|
2020-07-08 21:06:22 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("realm_bot")),
|
|
|
|
("op", Equals("delete")),
|
|
|
|
("bot", bot_type_for_delete),
|
2020-07-08 21:06:22 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
check_realm_bot_delete = make_checker(realm_bot_delete_event)
|
2020-07-08 21:06:22 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_type_for_remove = DictType(
|
2020-07-08 21:06:22 +02:00
|
|
|
required_keys=[
|
|
|
|
# Why does remove have full_name but delete doesn't?
|
|
|
|
# Why do we have both a remove and a delete event
|
|
|
|
# for bots? I don't know the answer as I write this.
|
2020-07-30 18:11:19 +02:00
|
|
|
("full_name", str),
|
|
|
|
("user_id", int),
|
2020-07-08 21:06:22 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
bot_type_for_update = DictType(
|
2020-07-08 17:47:56 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("user_id", int),
|
2020-07-08 17:47:56 +02:00
|
|
|
],
|
|
|
|
optional_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("api_key", str),
|
|
|
|
("avatar_url", str),
|
|
|
|
("default_all_public_streams", bool),
|
|
|
|
("default_events_register_stream", OptionalType(str)),
|
|
|
|
("default_sending_stream", OptionalType(str)),
|
|
|
|
("full_name", str),
|
2023-10-30 12:50:40 +01:00
|
|
|
("is_active", bool),
|
2020-07-30 18:11:19 +02:00
|
|
|
("owner_id", int),
|
|
|
|
("services", bot_services_type),
|
2020-07-08 17:47:56 +02:00
|
|
|
],
|
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
realm_bot_update_event = event_dict_type(
|
2020-07-08 17:47:56 +02:00
|
|
|
required_keys=[
|
2020-07-30 18:11:19 +02:00
|
|
|
("type", Equals("realm_bot")),
|
|
|
|
("op", Equals("update")),
|
|
|
|
("bot", bot_type_for_update),
|
2020-07-08 17:47:56 +02:00
|
|
|
]
|
|
|
|
)
|
2020-07-30 18:11:19 +02:00
|
|
|
_check_realm_bot_update = make_checker(realm_bot_update_event)
|
2020-07-08 17:47:56 +02:00
|
|
|
|
|
|
|
|
2020-08-08 13:41:21 +02:00
|
|
|
def check_realm_bot_update(
    # Check schema plus the field.
    var_name: str,
    event: dict[str, object],
    field: str,
) -> None:
    """Validate a realm_bot/update event and that exactly `field` changed."""
    # Check the overall schema first.
    _check_realm_bot_update(var_name, event)

    bot = event["bot"]
    assert isinstance(bot, dict)
    # Besides user_id, the update event carries only the changed field.
    assert set(bot.keys()) == {"user_id", field}
|
|
|
|
|
|
|
|
|
2020-08-17 16:07:25 +02:00
|
|
|
realm_domain_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("domain", str),
|
|
|
|
("allow_subdomains", bool),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
realm_domains_add_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_domains")),
|
|
|
|
("op", Equals("add")),
|
|
|
|
("realm_domain", realm_domain_type),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_realm_domains_add = make_checker(realm_domains_add_event)
|
|
|
|
|
|
|
|
realm_domains_change_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_domains")),
|
|
|
|
("op", Equals("change")),
|
|
|
|
("realm_domain", realm_domain_type),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_realm_domains_change = make_checker(realm_domains_change_event)
|
|
|
|
|
|
|
|
realm_domains_remove_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_domains")),
|
|
|
|
("op", Equals("remove")),
|
|
|
|
("domain", str),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
check_realm_domains_remove = make_checker(realm_domains_remove_event)
|
|
|
|
|
2020-10-28 04:00:46 +01:00
|
|
|
realm_playground_type = DictType(
|
2023-05-27 05:04:50 +02:00
|
|
|
required_keys=[("id", int), ("name", str), ("pygments_language", str), ("url_template", str)]
|
2020-10-28 04:00:46 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
realm_playgrounds_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_playgrounds")),
|
|
|
|
("realm_playgrounds", ListType(realm_playground_type)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
_check_realm_playgrounds = make_checker(realm_playgrounds_event)
|
|
|
|
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def check_realm_playgrounds(var_name: str, event: dict[str, object]) -> None:
    """Validate a realm_playgrounds event; the payload must be a list."""
    _check_realm_playgrounds(var_name, event)
    playgrounds = event["realm_playgrounds"]
    assert isinstance(playgrounds, list)
|
|
|
|
|
|
|
|
|
2020-08-18 15:16:02 +02:00
|
|
|
realm_emoji_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", str),
|
|
|
|
("name", str),
|
|
|
|
("source_url", str),
|
|
|
|
("deactivated", bool),
|
|
|
|
("author_id", int),
|
2021-12-29 16:16:15 +01:00
|
|
|
("still_url", OptionalType(str)),
|
2021-08-12 10:19:53 +02:00
|
|
|
],
|
2020-08-18 15:16:02 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
realm_emoji_update_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_emoji")),
|
|
|
|
("op", Equals("update")),
|
|
|
|
("realm_emoji", StringDictType(realm_emoji_type)),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
_check_realm_emoji_update = make_checker(realm_emoji_update_event)
|
|
|
|
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def check_realm_emoji_update(var_name: str, event: dict[str, object]) -> None:
    """
    The way we send realm emojis is kinda clumsy--we
    send a dict mapping the emoji id to a sub_dict with
    the fields (including the id). Ideally we can streamline
    this and just send a list of dicts. The clients can make
    a Map as needed.
    """
    _check_realm_emoji_update(var_name, event)

    realm_emoji = event["realm_emoji"]
    assert isinstance(realm_emoji, dict)
    # Each entry's "id" field must agree with its key in the outer dict.
    for emoji_id, emoji_dict in realm_emoji.items():
        assert emoji_dict["id"] == emoji_id
|
|
|
|
|
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
export_type = DictType(
|
|
|
|
required_keys=[
|
|
|
|
("id", int),
|
|
|
|
("export_time", NumberType()),
|
|
|
|
("acting_user_id", int),
|
|
|
|
("export_url", OptionalType(str)),
|
|
|
|
("deleted_timestamp", OptionalType(NumberType())),
|
|
|
|
("failed_timestamp", OptionalType(NumberType())),
|
|
|
|
("pending", bool),
|
|
|
|
]
|
|
|
|
)
|
|
|
|
|
|
|
|
realm_export_event = event_dict_type(
|
|
|
|
required_keys=[
|
|
|
|
("type", Equals("realm_export")),
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
|
|
|
"exports",
|
|
|
|
ListType(export_type),
|
|
|
|
),
|
2020-08-05 23:54:26 +02:00
|
|
|
]
|
|
|
|
)
|
|
|
|
_check_realm_export = make_checker(realm_export_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_realm_export(
    var_name: str,
    event: dict[str, object],
    has_export_url: bool,
    has_deleted_timestamp: bool,
    has_failed_timestamp: bool,
) -> None:
    """Validate a realm_export event, plus which optional fields are set.

    The three boolean flags say which of the nullable export fields the
    caller expects to be non-None in this particular event.
    """
    # First check the overall event schema, which allows the optional
    # fields to be None.
    _check_realm_export(var_name, event)

    # It's possible to have multiple data exports, but the events tests do
    # not exercise that case, so we strictly validate a single export here.
    exports = event["exports"]
    assert isinstance(exports, list)
    assert len(exports) == 1
    export = exports[0]

    # Now verify which fields actually have non-None values.
    assert has_export_url == (export["export_url"] is not None)
    assert has_deleted_timestamp == (export["deleted_timestamp"] is not None)
    assert has_failed_timestamp == (export["failed_timestamp"] is not None)
|
|
|
|
|
|
|
|
|
2021-03-30 12:51:54 +02:00
|
|
|
# A single linkifier: a regex pattern plus an RFC 6570 URL template.
realm_linkifier_type = DictType(
    required_keys=[
        ("pattern", str),
        ("url_template", str),
        ("id", int),
    ]
)

# realm_linkifiers events send the full (ordered) list of linkifiers.
realm_linkifiers_event = event_dict_type(
    [
        ("type", Equals("realm_linkifiers")),
        ("realm_linkifiers", ListType(realm_linkifier_type)),
    ]
)
check_realm_linkifiers = make_checker(realm_linkifiers_event)
|
|
|
|
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Extra data sent alongside plan_type changes.
plan_type_extra_data_type = DictType(
    required_keys=[
        ("upload_quota", int),
    ]
)

# realm/update events are flexible for values; we will use a more
# strict checker to check types in a context-specific manner.
realm_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm")),
        ("op", Equals("update")),
        ("property", str),
        ("value", value_type),
    ],
    optional_keys=[
        ("extra_data", plan_type_extra_data_type),
    ],
)
_check_realm_update = make_checker(realm_update_event)
|
2020-07-08 02:14:11 +02:00
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_realm_update(
    var_name: str,
    event: dict[str, object],
    prop: str,
) -> None:
    """
    Realm updates have these two fields:

        property
        value

    We check not only the basic schema, but also that the value
    actually matches the type from Realm.property_types that we have
    configured for the property.
    """
    _check_realm_update(var_name, event)

    assert prop == event["property"]
    value = event["value"]

    # plan_type is special: it is the only property that carries
    # extra_data alongside the value.
    if prop == "plan_type":
        assert isinstance(value, int)
        assert "extra_data" in event
        return

    assert "extra_data" not in event

    # These properties are not in Realm.property_types, so we validate
    # them directly as integers.
    if prop in {
        "new_stream_announcements_stream_id",
        "signup_announcements_stream_id",
        "zulip_update_announcements_stream_id",
        "org_type",
    }:
        assert isinstance(value, int)
        return

    expected_type = Realm.property_types[prop]

    if expected_type in (bool, int, str):
        assert isinstance(value, expected_type)
    elif expected_type == (int, type(None)):
        # Optional[int] properties never send None in update events.
        assert isinstance(value, int)
    elif expected_type == (str, type(None)):
        # Likewise for Optional[str] properties.
        assert isinstance(value, str)
    else:
        raise AssertionError(f"Unexpected property type {expected_type}")
|
2020-07-08 12:53:52 +02:00
|
|
|
|
|
|
|
|
2021-07-21 13:40:46 +02:00
|
|
|
# Schema for realm-level default updates to personal user settings.
realm_user_settings_defaults_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm_user_settings_defaults")),
        ("op", Equals("update")),
        ("property", str),
        ("value", value_type),
    ],
)
_check_realm_default_update = make_checker(realm_user_settings_defaults_update_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_realm_default_update(
    var_name: str,
    event: dict[str, object],
    prop: str,
) -> None:
    """Validate a realm_user_settings_defaults/update event for prop,
    including that the value matches the configured property type."""
    _check_realm_default_update(var_name, event)

    assert prop == event["property"]
    # default_language is not a simple property_types setting.
    assert prop != "default_language"
    assert prop in RealmUserDefault.property_types

    expected_type = RealmUserDefault.property_types[prop]
    assert isinstance(event["value"], expected_type)
|
|
|
|
|
|
|
|
|
2024-02-16 00:56:08 +01:00
|
|
|
# Per-backend authentication method state.
authentication_method_dict = DictType(
    required_keys=[
        ("enabled", bool),
        ("available", bool),
    ],
    optional_keys=[
        ("unavailable_reason", str),
    ],
)

# The full set of authentication backends we send to clients.
authentication_dict = DictType(
    required_keys=[
        ("Google", authentication_method_dict),
        ("Dev", authentication_method_dict),
        ("LDAP", authentication_method_dict),
        ("GitHub", authentication_method_dict),
        ("Email", authentication_method_dict),
    ]
)

authentication_data = DictType(
    required_keys=[
        # this single-key dictionary is an annoying consequence of us
        # using property_type of "default" for legacy reasons
        # (specifically, to avoid breaking old clients when we
        # introduced the `update_dict` format).
        ("authentication_methods", authentication_dict),
    ]
)
|
|
|
|
|
|
|
|
# Data payloads for realm icon and logo update_dict events.
icon_data = DictType(
    required_keys=[
        ("icon_url", str),
        ("icon_source", str),
    ]
)

logo_data = DictType(
    required_keys=[
        ("logo_url", str),
        ("logo_source", str),
    ]
)
|
|
|
|
|
2022-09-22 10:53:37 +02:00
|
|
|
# Single-field data payloads for the various message-editing settings
# sent via realm/update_dict with property "default".
allow_message_editing_data = DictType(
    required_keys=[
        ("allow_message_editing", bool),
    ]
)

message_content_edit_limit_seconds_data = DictType(
    required_keys=[
        ("message_content_edit_limit_seconds", OptionalType(int)),
    ]
)

edit_topic_policy_data = DictType(
    required_keys=[
        ("edit_topic_policy", int),
    ]
)

night_logo_data = DictType(
    required_keys=[
        ("night_logo_url", str),
        ("night_logo_source", str),
    ]
)
|
|
|
|
|
2024-05-23 16:21:25 +02:00
|
|
|
# A group-based permission setting value: either a bare group id, or an
# anonymous group described by member/subgroup id lists.
group_setting_type = UnionType(
    [
        int,
        DictType(
            required_keys=[
                ("direct_members", ListType(int)),
                ("direct_subgroups", ListType(int)),
            ]
        ),
    ]
)

group_setting_update_data_type = DictType(
    required_keys=[],
    optional_keys=[
        ("create_multiuse_invite_group", int),
        ("can_access_all_users_group", int),
        ("can_create_public_channel_group", group_setting_type),
        ("can_create_private_channel_group", group_setting_type),
        ("direct_message_initiator_group", group_setting_type),
        ("direct_message_permission_group", group_setting_type),
    ],
)

# Union of every data payload that can appear in a realm/update_dict
# event; check_realm_update_dict narrows this per-event.
update_dict_data = UnionType(
    [
        allow_message_editing_data,
        authentication_data,
        edit_topic_policy_data,
        icon_data,
        logo_data,
        message_content_edit_limit_seconds_data,
        night_logo_data,
        group_setting_update_data_type,
    ]
)

realm_update_dict_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm")),
        ("op", Equals("update_dict")),
        ("property", EnumType(["default", "icon", "logo", "night_logo"])),
        ("data", update_dict_data),
    ]
)
_check_realm_update_dict = make_checker(realm_update_dict_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_realm_update_dict(
    # handle union types
    var_name: str,
    event: dict[str, object],
) -> None:
    """Validate a realm/update_dict event.

    The generic schema accepts any member of update_dict_data, so after
    the basic check we narrow to the specific sub-schema implied by the
    event's property (and, for "default", by which keys are present in
    data) and re-validate data against it.
    """
    _check_realm_update_dict(var_name, event)

    if event["property"] == "default":
        assert isinstance(event["data"], dict)

        if "allow_message_editing" in event["data"]:
            sub_type = allow_message_editing_data
        elif "message_content_edit_limit_seconds" in event["data"]:
            sub_type = message_content_edit_limit_seconds_data
        elif "edit_topic_policy" in event["data"]:
            sub_type = edit_topic_policy_data
        elif "authentication_methods" in event["data"]:
            sub_type = authentication_data
        elif any(
            setting_name in event["data"] for setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS
        ):
            sub_type = group_setting_update_data_type
        else:
            raise AssertionError("unhandled fields in data")

    elif event["property"] == "icon":
        sub_type = icon_data
    elif event["property"] == "logo":
        sub_type = logo_data
    elif event["property"] == "night_logo":
        sub_type = night_logo_data
    else:
        # BUG FIX: this message was previously a plain string literal
        # missing the f prefix, so the property name never interpolated.
        raise AssertionError(f"unhandled property: {event['property']}")

    check_data(sub_type, f"{var_name}['data']", event["data"])
|
|
|
|
|
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# TODO: This type is missing optional fields:
|
|
|
|
#
|
|
|
|
# * delivery_email
|
|
|
|
# * bot-related fields.
|
|
|
|
# * Users with custom profile fields, where profile_data will
|
|
|
|
# be nonempty.
|
|
|
|
#
|
|
|
|
# Only because our test_events.py tests don't cover the relevant cases.
|
2020-08-14 02:14:06 +02:00
|
|
|
# TODO: This type is missing optional fields:
#
# * delivery_email
# * bot-related fields.
# * Users with custom profile fields, where profile_data will
#   be nonempty.
#
# Only because our test_events.py tests don't cover the relevant cases.
realm_user_type = DictType(
    required_keys=[
        ("user_id", int),
        ("email", str),
        ("avatar_url", OptionalType(str)),
        ("avatar_version", int),
        ("full_name", str),
        ("is_admin", bool),
        ("is_billing_admin", bool),
        ("is_owner", bool),
        ("is_bot", bool),
        ("is_guest", bool),
        ("role", EnumType(UserProfile.ROLE_TYPES)),
        ("is_active", bool),
        ("profile_data", StringDictType(dict)),
        ("timezone", str),
        ("date_joined", str),
        ("delivery_email", OptionalType(str)),
    ]
)

realm_user_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm_user")),
        ("op", Equals("add")),
        ("person", realm_user_type),
    ]
)
check_realm_user_add = make_checker(realm_user_add_event)

# Minimal person payload sent when a user is removed.
removed_user_type = DictType(
    required_keys=[
        ("user_id", int),
        ("full_name", str),
    ]
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# A single custom profile field value as sent in realm_user/update
# person payloads; rendered_value is present only for rendered fields.
custom_profile_field_type = DictType(
    required_keys=[
        ("id", int),
        ("value", OptionalType(str)),
    ],
    optional_keys=[
        ("rendered_value", str),
    ],
)
|
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# This block of types, each named by the dictionary key, makes it
|
|
|
|
# possible to validate the type of all the realm_user update events.
|
2020-08-05 12:31:09 +02:00
|
|
|
realm_user_person_types = dict(
|
|
|
|
# Note that all flavors of person include user_id.
|
|
|
|
avatar_fields=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("avatar_source", str),
|
|
|
|
("avatar_url", OptionalType(str)),
|
|
|
|
("avatar_url_medium", OptionalType(str)),
|
|
|
|
("avatar_version", int),
|
|
|
|
],
|
|
|
|
),
|
|
|
|
bot_owner_id=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("bot_owner_id", int),
|
|
|
|
],
|
|
|
|
),
|
|
|
|
custom_profile_field=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("custom_profile_field", custom_profile_field_type),
|
|
|
|
],
|
|
|
|
),
|
|
|
|
delivery_email=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
2022-05-25 13:13:31 +02:00
|
|
|
("delivery_email", OptionalType(str)),
|
2020-08-05 12:31:09 +02:00
|
|
|
],
|
|
|
|
),
|
2021-10-04 18:52:57 +02:00
|
|
|
email=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("new_email", str),
|
|
|
|
],
|
|
|
|
),
|
2020-08-05 12:31:09 +02:00
|
|
|
full_name=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("full_name", str),
|
|
|
|
],
|
|
|
|
),
|
2021-05-28 12:51:50 +02:00
|
|
|
is_billing_admin=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("is_billing_admin", bool),
|
|
|
|
],
|
|
|
|
),
|
2020-08-05 12:31:09 +02:00
|
|
|
role=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("role", EnumType(UserProfile.ROLE_TYPES)),
|
|
|
|
],
|
|
|
|
),
|
|
|
|
timezone=DictType(
|
|
|
|
required_keys=[
|
|
|
|
# we should probably eliminate email here
|
|
|
|
("user_id", int),
|
|
|
|
("email", str),
|
|
|
|
("timezone", str),
|
|
|
|
],
|
|
|
|
),
|
2023-10-30 12:50:40 +01:00
|
|
|
is_active=DictType(
|
|
|
|
required_keys=[
|
|
|
|
("user_id", int),
|
|
|
|
("is_active", bool),
|
|
|
|
],
|
|
|
|
),
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# realm_user/update: the "person" dict arrives in one of several flavors
# (one schema per entry in realm_user_person_types), so the event-level
# schema accepts any of them via a union.
realm_user_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm_user")),
        ("op", Equals("update")),
        ("person", UnionType(list(realm_user_person_types.values()))),
    ],
)
_check_realm_user_update = make_checker(realm_user_update_event)
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
|
|
|
|
|
|
|
|
def check_realm_user_update(
    var_name: str,
    event: dict[str, object],
    # Selects which of the per-flavor person schemas to apply.
    person_flavor: str,
) -> None:
    """Validate a realm_user/update event against the union schema, then
    re-validate its "person" payload against the specific flavor the
    caller expects."""
    _check_realm_user_update(var_name, event)

    person_schema = realm_user_person_types[person_flavor]
    check_data(
        person_schema,
        f"{var_name}['person']",
        event["person"],
    )
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
|
|
|
|
|
2023-10-24 05:11:34 +02:00
|
|
|
# realm_user/remove: sent when a user is deactivated/removed; the payload
# is the (reduced) removed-user schema rather than a full person dict.
realm_user_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("realm_user")),
        ("op", Equals("remove")),
        ("person", removed_user_type),
    ],
)
check_realm_user_remove = make_checker(realm_user_remove_event)
|
|
|
|
|
|
|
|
|
2021-03-13 22:32:51 +01:00
|
|
|
# restart: tells clients the server restarted; carries enough version
# metadata for clients to decide whether a reload is warranted.
restart_event = event_dict_type(
    required_keys=[
        ("type", Equals("restart")),
        ("zulip_version", str),
        ("zulip_merge_base", str),
        ("zulip_feature_level", int),
        ("server_generation", int),
    ],
)
check_restart_event = make_checker(restart_event)
|
|
|
|
|
2024-02-10 04:19:08 +01:00
|
|
|
# web_reload_client: instructs web clients to reload; "immediate" controls
# whether the reload happens right away or is deferred.
web_reload_client_event = event_dict_type(
    required_keys=[
        ("type", Equals("web_reload_client")),
        ("immediate", bool),
    ],
)
check_web_reload_client_event = make_checker(web_reload_client_event)
|
|
|
|
|
2023-10-03 17:02:25 +02:00
|
|
|
# Shared shape of a single scheduled message, used by both the "add" and
# "update" scheduled_messages events below.  "to" is a list of user IDs
# for direct messages, or a single stream ID for channel messages; "topic"
# is only present for channel messages.
scheduled_message_fields = DictType(
    required_keys=[
        ("scheduled_message_id", int),
        ("type", EnumType(["stream", "private"])),
        ("to", UnionType([ListType(int), int])),
        ("content", str),
        ("rendered_content", str),
        ("scheduled_delivery_timestamp", int),
        ("failed", bool),
    ],
    optional_keys=[
        ("topic", str),
    ],
)
|
|
|
|
|
|
|
|
# scheduled_messages/add: delivers a batch of newly created scheduled
# messages, each matching scheduled_message_fields.
scheduled_messages_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("scheduled_messages")),
        ("op", Equals("add")),
        ("scheduled_messages", ListType(scheduled_message_fields)),
    ],
)
check_scheduled_message_add = make_checker(scheduled_messages_add_event)
|
|
|
|
|
|
|
|
# scheduled_messages/update: unlike "add", carries exactly one modified
# scheduled message (singular key) rather than a list.
scheduled_messages_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("scheduled_messages")),
        ("op", Equals("update")),
        ("scheduled_message", scheduled_message_fields),
    ],
)
check_scheduled_message_update = make_checker(scheduled_messages_update_event)
|
|
|
|
|
|
|
|
# scheduled_messages/remove: only the ID of the deleted scheduled message
# is sent, not the full object.
scheduled_messages_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("scheduled_messages")),
        ("op", Equals("remove")),
        ("scheduled_message_id", int),
    ],
)
check_scheduled_message_remove = make_checker(scheduled_messages_remove_event)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# stream/create: announces one or more newly created streams, each with
# the basic stream fields.
stream_create_event = event_dict_type(
    required_keys=[
        ("type", Equals("stream")),
        ("op", Equals("create")),
        ("streams", ListType(DictType(basic_stream_fields))),
    ],
)
check_stream_create = make_checker(stream_create_event)
|
2020-07-08 13:35:37 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# stream/delete: mirrors stream/create — a batch of streams, each with the
# basic stream fields, that were just deleted.
stream_delete_event = event_dict_type(
    required_keys=[
        ("type", Equals("stream")),
        ("op", Equals("delete")),
        ("streams", ListType(DictType(basic_stream_fields))),
    ],
)
check_stream_delete = make_checker(stream_delete_event)
|
2020-08-01 14:42:06 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# stream/update: a single (property, value) change to one stream.  The
# optional keys only appear for particular properties; check_stream_update
# below enforces which keys go with which property.
stream_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("stream")),
        ("op", Equals("update")),
        ("property", str),
        ("value", optional_value_type),
        ("name", str),
        ("stream_id", int),
    ],
    optional_keys=[
        ("rendered_description", str),
        ("history_public_to_subscribers", bool),
        ("is_web_public", bool),
    ],
)
_check_stream_update = make_checker(stream_update_event)
|
2020-07-08 13:35:37 +02:00
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_stream_update(
    var_name: str,
    event: dict[str, object],
) -> None:
    """Validate a stream/update event with per-property rules: both the
    type of "value" and which extra keys may accompany the event depend
    on which stream property is being updated."""
    _check_stream_update(var_name, event)
    prop = event["property"]
    value = event["value"]

    # Keys beyond this baseline set must be justified by the specific
    # property being updated (checked per-branch below).
    extra_keys = set(event.keys()) - {
        "id",
        "type",
        "op",
        "property",
        "value",
        "name",
        "stream_id",
        "first_message_id",
    }

    if prop == "description":
        assert extra_keys == {"rendered_description"}
        assert isinstance(value, str)
    elif prop == "invite_only":
        assert extra_keys == {"history_public_to_subscribers", "is_web_public"}
        assert isinstance(value, bool)
    elif prop == "message_retention_days":
        assert extra_keys == set()
        # A None value is allowed here (retention disabled / unset).
        if value is not None:
            assert isinstance(value, int)
    elif prop == "name":
        assert extra_keys == set()
        assert isinstance(value, str)
    elif prop == "stream_post_policy":
        assert extra_keys == set()
        assert value in Stream.STREAM_POST_POLICY_TYPES
    elif prop == "can_remove_subscribers_group":
        assert extra_keys == set()
        assert isinstance(value, int)
    elif prop == "first_message_id":
        assert extra_keys == set()
        assert isinstance(value, int)
    else:
        raise AssertionError(f"Unknown property: {prop}")
|
2020-07-08 14:13:16 +02:00
|
|
|
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for submessage events.
submessage_event = event_dict_type(
    required_keys=[
        ("type", Equals("submessage")),
        ("message_id", int),
        ("submessage_id", int),
        ("sender_id", int),
        ("msg_type", str),
        ("content", str),
    ]
)
check_submessage = make_checker(submessage_event)
|
2020-07-18 16:27:59 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# A single fully-populated subscription row, as carried by
# subscription/add events.
single_subscription_type = DictType(
    required_keys=subscription_fields,
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for subscription/add events; the payload carries full
# subscription rows for the newly subscribed streams.
subscription_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("subscription")),
        ("op", Equals("add")),
        ("subscriptions", ListType(single_subscription_type)),
    ],
)
check_subscription_add = make_checker(subscription_add_event)
|
2020-07-08 14:20:25 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for subscription/peer_add events, announcing that the given
# users were subscribed to the given streams.
subscription_peer_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("subscription")),
        ("op", Equals("peer_add")),
        ("user_ids", ListType(int)),
        ("stream_ids", ListType(int)),
    ]
)
check_subscription_peer_add = make_checker(subscription_peer_add_event)
|
2020-07-08 15:04:35 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for subscription/peer_remove events, announcing that the given
# users were unsubscribed from the given streams.
subscription_peer_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("subscription")),
        ("op", Equals("peer_remove")),
        ("user_ids", ListType(int)),
        ("stream_ids", ListType(int)),
    ]
)
check_subscription_peer_remove = make_checker(subscription_peer_remove_event)
|
2020-07-08 15:04:35 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# The abbreviated subscription row sent in subscription/remove events.
remove_sub_type = DictType(
    required_keys=[
        # We should eventually just return stream_id here.
        ("name", str),
        ("stream_id", int),
    ]
)


subscription_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("subscription")),
        ("op", Equals("remove")),
        ("subscriptions", ListType(remove_sub_type)),
    ]
)
check_subscription_remove = make_checker(subscription_remove_event)
|
2020-07-08 15:29:13 +02:00
|
|
|
|
2020-09-25 21:53:00 +02:00
|
|
|
# TODO: Have better validation for value_type; we don't have
# test_events tests for non-bool fields like `color`.
subscription_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("subscription")),
        ("op", Equals("update")),
        ("property", str),
        ("stream_id", int),
        ("value", value_type),
    ]
)
_check_subscription_update = make_checker(subscription_update_event)
|
|
|
|
|
|
|
|
|
|
|
|
def check_subscription_update(
    var_name: str, event: dict[str, object], property: str, value: bool
) -> None:
    """Validate a subscription/update event and confirm that it updates
    the expected property to the expected value."""
    _check_subscription_update(var_name, event)
    expected = {"property": property, "value": value}
    for key, want in expected.items():
        assert event[key] == want
|
|
|
|
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# A user reference as used inside typing events.
typing_person_type = DictType(
    required_keys=[
        # we should eventually just send user_id
        ("email", str),
        ("user_id", int),
    ]
)


# The valid values for the message_type field of typing events.
equals_direct_or_stream = EnumType(
    [
        "direct",
        "stream",
    ]
)
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for typing/start events.  The optional keys cover both the
# direct-message variant (recipients) and the stream variant
# (stream_id/topic).
typing_start_event = event_dict_type(
    required_keys=[
        ("type", Equals("typing")),
        ("op", Equals("start")),
        ("message_type", equals_direct_or_stream),
        ("sender", typing_person_type),
    ],
    optional_keys=[
        ("recipients", ListType(typing_person_type)),
        ("stream_id", int),
        ("topic", str),
    ],
)
check_typing_start = make_checker(typing_start_event)
|
2020-07-18 16:39:06 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for typing/stop events; mirrors typing_start_event exactly
# except for the op.
typing_stop_event = event_dict_type(
    required_keys=[
        ("type", Equals("typing")),
        ("op", Equals("stop")),
        ("message_type", equals_direct_or_stream),
        ("sender", typing_person_type),
    ],
    optional_keys=[
        ("recipients", ListType(typing_person_type)),
        ("stream_id", int),
        ("topic", str),
    ],
)
check_typing_stop = make_checker(typing_stop_event)
|
2020-08-27 22:10:07 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for update_display_settings events; language_name accompanies
# only default_language changes (enforced in
# check_update_display_settings below).
update_display_settings_event = event_dict_type(
    required_keys=[
        ("type", Equals("update_display_settings")),
        ("setting_name", str),
        ("setting", value_type),
        ("user", str),
    ],
    optional_keys=[
        ("language_name", str),
    ],
)
_check_update_display_settings = make_checker(update_display_settings_event)
|
2020-07-08 15:29:13 +02:00
|
|
|
|
2021-07-26 08:35:27 +02:00
|
|
|
# Schema for user_settings/update events; language_name accompanies
# only default_language changes (enforced in check_user_settings_update
# below).
user_settings_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_settings")),
        ("op", Equals("update")),
        ("property", str),
        ("value", value_type),
    ],
    optional_keys=[
        ("language_name", str),
    ],
)


_check_user_settings_update = make_checker(user_settings_update_event)
|
|
|
|
|
2020-07-08 15:29:13 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_update_display_settings(
    var_name: str,
    event: dict[str, object],
) -> None:
    """
    Display setting events have a "setting" field that
    is more specifically typed according to the
    UserProfile.property_types dictionary.
    """
    _check_update_display_settings(var_name, event)
    name = event["setting_name"]
    assert isinstance(name, str)

    # "timezone" is not covered by property_types; it is always a str.
    expected_type = str if name == "timezone" else UserProfile.property_types[name]
    assert isinstance(event["setting"], expected_type)

    # Only default_language changes carry the language_name field.
    assert ("language_name" in event) == (name == "default_language")
|
2020-07-08 15:29:13 +02:00
|
|
|
|
|
|
|
|
2021-07-26 08:35:27 +02:00
|
|
|
def check_user_settings_update(
    var_name: str,
    event: dict[str, object],
) -> None:
    """Validate a user_settings/update event, checking the "value"
    field against the type declared in UserProfile.property_types."""
    _check_user_settings_update(var_name, event)
    name = event["property"]
    assert isinstance(name, str)

    # "timezone" is not covered by property_types; it is always a str.
    expected_type = str if name == "timezone" else UserProfile.property_types[name]
    assert isinstance(event["value"], expected_type)

    # Only default_language changes carry the language_name field.
    assert ("language_name" in event) == (name == "default_language")
|
2021-07-26 08:35:27 +02:00
|
|
|
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for update_global_notifications events.
update_global_notifications_event = event_dict_type(
    required_keys=[
        ("type", Equals("update_global_notifications")),
        ("notification_name", str),
        ("setting", value_type),
        ("user", str),
    ]
)
_check_update_global_notifications = make_checker(update_global_notifications_event)
|
2020-07-08 15:29:13 +02:00
|
|
|
|
|
|
|
|
|
|
|
def check_update_global_notifications(
    var_name: str,
    event: dict[str, object],
    desired_val: bool | int | str,
) -> None:
    """
    See UserProfile.notification_settings_legacy for
    more details.
    """
    _check_update_global_notifications(var_name, event)
    name, value = event["notification_name"], event["setting"]
    assert value == desired_val

    assert isinstance(name, str)
    # The legacy table supplies the concrete type for this setting.
    assert isinstance(value, UserProfile.notification_settings_legacy[name])
|
2020-07-10 18:35:58 +02:00
|
|
|
|
|
|
|
|
2022-01-14 15:23:49 +01:00
|
|
|
# user_id field is null for embedded variant of update_message
update_message_required_fields = [
    ("type", Equals("update_message")),
    ("user_id", OptionalType(int)),
    ("edit_timestamp", int),
    ("message_id", int),
    ("flags", ListType(str)),
    ("message_ids", ListType(int)),
    ("rendering_only", bool),
]

# Present when the edited message was sent to a stream (see
# check_update_message's is_stream_message flag).
update_message_stream_fields: list[tuple[str, object]] = [
    ("stream_id", int),
    ("stream_name", str),
]

# Present when the message content itself was edited (has_content).
update_message_content_fields: list[tuple[str, object]] = [
    ("is_me_message", bool),
    ("orig_content", str),
    ("orig_rendered_content", str),
    ("prev_rendered_content_version", int),
]

# Present both for content edits and for rendering-only (embedded
# data) updates.
update_message_content_or_embedded_data_fields: list[tuple[str, object]] = [
    ("content", str),
    ("rendered_content", str),
]

# Present when the topic was edited (has_topic).
update_message_topic_fields = [
    (TOPIC_LINKS, ListType(_check_topic_links)),
    (TOPIC_NAME, str),
]

# Present when the message was moved to a different stream
# (has_new_stream_id).
update_message_change_stream_fields: list[tuple[str, object]] = [
    ("new_stream_id", int),
]

# Present when either the stream or the topic changed.
update_message_change_stream_or_topic_fields: list[tuple[str, object]] = [
    (
        "propagate_mode",
        EnumType(
            [
                # The order here needs to match the OpenAPI definitions
                "change_one",
                "change_later",
                "change_all",
            ]
        ),
    ),
    (ORIG_TOPIC, str),
]

# All optional keys that any update_message variant may carry.
update_message_optional_fields = (
    update_message_stream_fields
    + update_message_content_fields
    + update_message_content_or_embedded_data_fields
    + update_message_topic_fields
    + update_message_change_stream_fields
    + update_message_change_stream_or_topic_fields
)
|
2020-07-10 18:35:58 +02:00
|
|
|
|
2022-01-14 15:23:49 +01:00
|
|
|
# The schema here includes the embedded variant of update_message
update_message_event = event_dict_type(
    required_keys=update_message_required_fields,
    optional_keys=update_message_optional_fields,
)
_check_update_message = make_checker(update_message_event)
|
2020-07-10 18:35:58 +02:00
|
|
|
|
|
|
|
|
|
|
|
def check_update_message(
    var_name: str,
    event: dict[str, object],
    is_stream_message: bool,
    has_content: bool,
    has_topic: bool,
    has_new_stream_id: bool,
    is_embedded_update_only: bool,
) -> None:
    """Validate an update_message event against the exact set of keys
    expected for the given edit scenario.  The boolean flags describe
    the kind of edit the caller performed; together they determine
    precisely which optional keys must be present."""
    # Always check the basic schema first.
    _check_update_message(var_name, event)

    actual_keys = set(event.keys())
    # Accumulate the keys this scenario requires, starting from the
    # always-present ones.
    expected_keys = {"id"}
    expected_keys.update(tup[0] for tup in update_message_required_fields)

    if is_stream_message:
        expected_keys.update(tup[0] for tup in update_message_stream_fields)

    if has_content:
        expected_keys.update(tup[0] for tup in update_message_content_fields)
        expected_keys.update(tup[0] for tup in update_message_content_or_embedded_data_fields)

    if has_topic:
        expected_keys.update(tup[0] for tup in update_message_topic_fields)
        expected_keys.update(tup[0] for tup in update_message_change_stream_or_topic_fields)

    if has_new_stream_id:
        expected_keys.update(tup[0] for tup in update_message_change_stream_fields)
        expected_keys.update(tup[0] for tup in update_message_change_stream_or_topic_fields)

    if is_embedded_update_only:
        # Rendering-only updates carry new content but no acting user.
        expected_keys.update(tup[0] for tup in update_message_content_or_embedded_data_fields)
        assert event["user_id"] is None
    else:
        assert isinstance(event["user_id"], int)

    assert event["rendering_only"] == is_embedded_update_only
    # The event must carry exactly the keys this scenario implies --
    # no more, no fewer.
    assert expected_keys == actual_keys
|
2020-07-10 18:35:58 +02:00
|
|
|
|
2020-07-17 09:13:10 +02:00
|
|
|
|
2020-08-18 18:08:39 +02:00
|
|
|
# Schema for update_message_flags/add events.
update_message_flags_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("update_message_flags")),
        ("op", Equals("add")),
        # "operation" carries the same value as "op"; it appears to be
        # a legacy duplicate of that field -- TODO confirm.
        ("operation", Equals("add")),
        ("flag", str),
        ("messages", ListType(int)),
        ("all", bool),
    ]
)
check_update_message_flags_add = make_checker(update_message_flags_add_event)
|
2020-07-17 09:13:10 +02:00
|
|
|
|
|
|
|
|
2020-08-18 18:08:39 +02:00
|
|
|
# Schema for update_message_flags/remove events.
update_message_flags_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("update_message_flags")),
        ("op", Equals("remove")),
        # "operation" carries the same value as "op"; it appears to be
        # a legacy duplicate of that field -- TODO confirm.
        ("operation", Equals("remove")),
        ("flag", str),
        ("messages", ListType(int)),
        ("all", bool),
    ],
    optional_keys=[
        # Per-message metadata, keyed by message id (as a string).
        (
            "message_details",
            StringDictType(
                DictType(
                    required_keys=[
                        ("type", EnumType(["private", "stream"])),
                    ],
                    optional_keys=[
                        ("mentioned", bool),
                        ("user_ids", ListType(int)),
                        ("stream_id", int),
                        ("topic", str),
                        ("unmuted_stream_msg", bool),
                    ],
                )
            ),
        )
    ],
)
check_update_message_flags_remove = make_checker(update_message_flags_remove_event)
|
2020-07-18 17:15:23 +02:00
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# A full user group object, as sent in user_group/add events.
group_type = DictType(
    required_keys=[
        ("id", int),
        ("name", str),
        ("members", ListType(int)),
        ("direct_subgroup_ids", ListType(int)),
        ("description", str),
        ("is_system_group", bool),
        ("can_mention_group", group_setting_type),
    ]
)


user_group_add_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("add")),
        ("group", group_type),
    ]
)
check_user_group_add = make_checker(user_group_add_event)
|
2020-07-18 17:19:30 +02:00
|
|
|
|
2020-08-14 13:18:52 +02:00
|
|
|
# Schema for user_group/add_members events.
user_group_add_members_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("add_members")),
        ("group_id", int),
        ("user_ids", ListType(int)),
    ]
)
check_user_group_add_members = make_checker(user_group_add_members_event)
|
|
|
|
|
2020-08-14 13:38:36 +02:00
|
|
|
# Schema for user_group/remove events.
user_group_remove_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("remove")),
        ("group_id", int),
    ]
)
check_user_group_remove = make_checker(user_group_remove_event)
|
|
|
|
|
2020-08-14 13:34:34 +02:00
|
|
|
# Schema for user_group/remove_members events.
user_group_remove_members_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("remove_members")),
        ("group_id", int),
        ("user_ids", ListType(int)),
    ]
)
check_user_group_remove_members = make_checker(user_group_remove_members_event)
|
|
|
|
|
2020-08-14 13:50:55 +02:00
|
|
|
# The "data" dict of a user_group/update event carries only the fields
# that actually changed (check_user_group_update enforces exactly one).
user_group_data_type = DictType(
    required_keys=[],
    optional_keys=[
        ("name", str),
        ("description", str),
        ("can_mention_group", group_setting_type),
    ],
)


user_group_update_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("update")),
        ("group_id", int),
        ("data", user_group_data_type),
    ]
)
_check_user_group_update = make_checker(user_group_update_event)
|
|
|
|
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def check_user_group_update(var_name: str, event: dict[str, object], field: str) -> None:
    """Validate a user_group/update event whose "data" dict updates
    exactly the one expected field."""
    _check_user_group_update(var_name, event)

    data = event["data"]
    assert isinstance(data, dict)

    assert {field} == set(data.keys())
|
|
|
|
|
|
|
|
|
2022-03-01 07:52:47 +01:00
|
|
|
# Schema for user_group/add_subgroups events.
user_group_add_subgroups_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("add_subgroups")),
        ("group_id", int),
        ("direct_subgroup_ids", ListType(int)),
    ]
)
check_user_group_add_subgroups = make_checker(user_group_add_subgroups_event)


# Schema for user_group/remove_subgroups events; mirrors the
# add_subgroups schema except for the op.
user_group_remove_subgroups_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_group")),
        ("op", Equals("remove_subgroups")),
        ("group_id", int),
        ("direct_subgroup_ids", ListType(int)),
    ]
)
check_user_group_remove_subgroups = make_checker(user_group_remove_subgroups_event)
|
|
|
|
|
|
|
|
|
2020-07-30 18:11:19 +02:00
|
|
|
# Schema for user_status events; which optional keys appear depends on
# the particular status change (see check_user_status below).
user_status_event = event_dict_type(
    required_keys=[
        ("type", Equals("user_status")),
        ("user_id", int),
    ],
    optional_keys=[
        ("away", bool),
        ("status_text", str),
        ("emoji_name", str),
        ("emoji_code", str),
        ("reaction_type", EnumType(["unicode_emoji", "realm_emoji", "zulip_extra_emoji"])),
    ],
)
_check_user_status = make_checker(user_status_event)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def check_user_status(var_name: str, event: dict[str, object], fields: set[str]) -> None:
    """Validate a user_status event, confirming that exactly the
    expected optional fields are present alongside the baseline keys."""
    _check_user_status(var_name, event)

    assert set(event.keys()) == {"id", "type", "user_id"} | fields
|