2017-11-16 19:54:24 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
|
2017-02-12 01:59:28 +01:00
|
|
|
# high-level documentation on how this system works.
|
2020-09-25 21:53:00 +02:00
|
|
|
#
|
|
|
|
# This module is closely integrated with zerver/lib/event_schema.py
|
|
|
|
# and zerver/lib/data_types.py systems for validating the schemas of
|
|
|
|
# events; it also uses the OpenAPI tools to validate our documentation.
|
2019-04-09 04:07:03 +02:00
|
|
|
import copy
|
2020-06-11 00:54:34 +02:00
|
|
|
import time
|
2024-07-12 02:30:25 +02:00
|
|
|
from collections.abc import Iterator
|
2024-04-30 01:07:06 +02:00
|
|
|
from contextlib import contextmanager
|
2023-11-19 19:45:19 +01:00
|
|
|
from datetime import timedelta
|
2020-06-11 00:54:34 +02:00
|
|
|
from io import StringIO
|
2024-07-12 02:30:25 +02:00
|
|
|
from typing import Any
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2016-06-03 08:00:04 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2023-04-20 04:40:41 +02:00
|
|
|
from dateutil.parser import parse as dateparser
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import override
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2022-04-14 23:35:09 +02:00
|
|
|
from zerver.actions.alert_words import do_add_alert_words, do_remove_alert_words
|
2022-04-14 23:55:07 +02:00
|
|
|
from zerver.actions.bots import (
|
|
|
|
do_change_bot_owner,
|
|
|
|
do_change_default_all_public_streams,
|
|
|
|
do_change_default_events_register_stream,
|
|
|
|
do_change_default_sending_stream,
|
|
|
|
)
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.actions.create_user import do_create_user, do_reactivate_user
|
2022-04-14 23:46:56 +02:00
|
|
|
from zerver.actions.custom_profile_fields import (
|
2023-07-31 19:39:57 +02:00
|
|
|
check_remove_custom_profile_field_value,
|
2022-04-14 23:46:56 +02:00
|
|
|
do_remove_realm_custom_profile_field,
|
|
|
|
do_update_user_custom_profile_data_if_changed,
|
|
|
|
try_add_realm_custom_profile_field,
|
|
|
|
try_update_realm_custom_profile_field,
|
|
|
|
)
|
2022-04-14 23:34:23 +02:00
|
|
|
from zerver.actions.default_streams import (
|
|
|
|
do_add_default_stream,
|
|
|
|
do_add_streams_to_default_stream_group,
|
|
|
|
do_change_default_stream_group_description,
|
|
|
|
do_change_default_stream_group_name,
|
|
|
|
do_create_default_stream_group,
|
|
|
|
do_remove_default_stream,
|
|
|
|
do_remove_default_stream_group,
|
|
|
|
do_remove_streams_from_default_stream_group,
|
|
|
|
lookup_default_stream_groups,
|
|
|
|
)
|
2022-04-14 23:36:07 +02:00
|
|
|
from zerver.actions.invites import (
|
|
|
|
do_create_multiuse_invite_link,
|
|
|
|
do_invite_users,
|
|
|
|
do_revoke_multi_use_invite,
|
|
|
|
do_revoke_user_invite,
|
|
|
|
)
|
2022-07-17 13:00:21 +02:00
|
|
|
from zerver.actions.message_delete import do_delete_messages
|
|
|
|
from zerver.actions.message_edit import do_update_embedded_data, do_update_message
|
2022-04-14 23:54:53 +02:00
|
|
|
from zerver.actions.message_flags import do_update_message_flags
|
2022-04-14 23:55:22 +02:00
|
|
|
from zerver.actions.muted_users import do_mute_user, do_unmute_user
|
2024-05-10 16:17:04 +02:00
|
|
|
from zerver.actions.onboarding_steps import do_mark_onboarding_step_as_read
|
2022-09-16 18:05:17 +02:00
|
|
|
from zerver.actions.presence import do_update_user_presence
|
2022-04-14 23:54:01 +02:00
|
|
|
from zerver.actions.reactions import do_add_reaction, do_remove_reaction
|
2022-04-14 23:57:26 +02:00
|
|
|
from zerver.actions.realm_domains import (
|
|
|
|
do_add_realm_domain,
|
|
|
|
do_change_realm_domain,
|
|
|
|
do_remove_realm_domain,
|
|
|
|
)
|
2022-04-14 23:40:49 +02:00
|
|
|
from zerver.actions.realm_emoji import check_add_realm_emoji, do_remove_realm_emoji
|
2022-04-14 23:39:22 +02:00
|
|
|
from zerver.actions.realm_icon import do_change_icon_source
|
2022-04-14 23:32:56 +02:00
|
|
|
from zerver.actions.realm_linkifiers import (
|
2023-08-10 04:09:25 +02:00
|
|
|
check_reorder_linkifiers,
|
2022-04-14 23:32:56 +02:00
|
|
|
do_add_linkifier,
|
|
|
|
do_remove_linkifier,
|
|
|
|
do_update_linkifier,
|
|
|
|
)
|
2022-04-14 23:37:16 +02:00
|
|
|
from zerver.actions.realm_logo import do_change_logo_source
|
2023-07-24 21:14:42 +02:00
|
|
|
from zerver.actions.realm_playgrounds import check_add_realm_playground, do_remove_realm_playground
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import (
|
2022-04-11 19:26:16 +02:00
|
|
|
do_change_realm_org_type,
|
2023-08-09 15:06:56 +02:00
|
|
|
do_change_realm_permission_group_setting,
|
2022-04-14 23:57:15 +02:00
|
|
|
do_change_realm_plan_type,
|
|
|
|
do_deactivate_realm,
|
2023-11-23 22:07:41 +01:00
|
|
|
do_set_push_notifications_enabled_end_timestamp,
|
2022-04-14 23:57:15 +02:00
|
|
|
do_set_realm_authentication_methods,
|
2024-02-07 12:13:02 +01:00
|
|
|
do_set_realm_new_stream_announcements_stream,
|
2022-04-14 23:57:15 +02:00
|
|
|
do_set_realm_property,
|
2024-02-07 17:11:43 +01:00
|
|
|
do_set_realm_signup_announcements_stream,
|
2022-04-14 23:57:15 +02:00
|
|
|
do_set_realm_user_default_setting,
|
2024-01-26 14:45:37 +01:00
|
|
|
do_set_realm_zulip_update_announcements_stream,
|
2022-04-14 23:57:15 +02:00
|
|
|
)
|
2024-09-24 17:01:58 +02:00
|
|
|
from zerver.actions.saved_snippets import do_create_saved_snippet, do_delete_saved_snippet
|
2023-04-20 04:40:41 +02:00
|
|
|
from zerver.actions.scheduled_messages import (
|
|
|
|
check_schedule_message,
|
|
|
|
delete_scheduled_message,
|
2023-05-16 21:18:09 +02:00
|
|
|
edit_scheduled_message,
|
2023-04-20 04:40:41 +02:00
|
|
|
)
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.actions.streams import (
|
|
|
|
bulk_add_subscriptions,
|
|
|
|
bulk_remove_subscriptions,
|
|
|
|
do_change_stream_description,
|
2023-02-17 12:46:14 +01:00
|
|
|
do_change_stream_group_based_setting,
|
2022-04-14 23:51:16 +02:00
|
|
|
do_change_stream_message_retention_days,
|
|
|
|
do_change_stream_permission,
|
|
|
|
do_change_stream_post_policy,
|
|
|
|
do_change_subscription_property,
|
|
|
|
do_deactivate_stream,
|
|
|
|
do_rename_stream,
|
|
|
|
)
|
2022-04-14 23:31:40 +02:00
|
|
|
from zerver.actions.submessage import do_add_submessage
|
2022-04-14 23:31:02 +02:00
|
|
|
from zerver.actions.typing import check_send_typing_notification, do_send_stream_typing_notification
|
2022-04-14 23:30:17 +02:00
|
|
|
from zerver.actions.user_groups import (
|
2022-03-01 07:52:47 +01:00
|
|
|
add_subgroups_to_user_group,
|
2023-09-29 01:45:53 +02:00
|
|
|
bulk_add_members_to_user_groups,
|
|
|
|
bulk_remove_members_from_user_groups,
|
2022-04-14 23:30:17 +02:00
|
|
|
check_add_user_group,
|
2023-06-15 05:24:23 +02:00
|
|
|
do_change_user_group_permission_setting,
|
2024-05-15 15:44:18 +02:00
|
|
|
do_deactivate_user_group,
|
2022-04-14 23:30:17 +02:00
|
|
|
do_update_user_group_description,
|
|
|
|
do_update_user_group_name,
|
2022-03-01 07:52:47 +01:00
|
|
|
remove_subgroups_from_user_group,
|
2022-04-14 23:30:17 +02:00
|
|
|
)
|
2022-04-14 23:49:26 +02:00
|
|
|
from zerver.actions.user_settings import (
|
|
|
|
do_change_avatar_fields,
|
|
|
|
do_change_full_name,
|
|
|
|
do_change_user_delivery_email,
|
|
|
|
do_change_user_setting,
|
|
|
|
do_regenerate_api_key,
|
|
|
|
)
|
2022-09-16 18:05:17 +02:00
|
|
|
from zerver.actions.user_status import do_update_user_status
|
2023-02-03 13:21:25 +01:00
|
|
|
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
|
2022-04-14 23:48:28 +02:00
|
|
|
from zerver.actions.users import (
|
2023-11-15 22:30:08 +01:00
|
|
|
do_change_is_billing_admin,
|
2022-04-14 23:48:28 +02:00
|
|
|
do_change_user_role,
|
|
|
|
do_deactivate_user,
|
|
|
|
do_update_outgoing_webhook_service,
|
|
|
|
)
|
2022-04-14 23:29:39 +02:00
|
|
|
from zerver.actions.video_calls import do_set_zoom_token
|
2023-08-17 02:34:42 +02:00
|
|
|
from zerver.lib.drafts import DraftData, do_create_drafts, do_delete_draft, do_edit_draft
|
2020-07-08 12:53:52 +02:00
|
|
|
from zerver.lib.event_schema import (
|
2020-07-18 17:11:41 +02:00
|
|
|
check_alert_words,
|
2020-08-06 13:08:42 +02:00
|
|
|
check_attachment_add,
|
|
|
|
check_attachment_remove,
|
|
|
|
check_attachment_update,
|
2020-07-18 17:02:28 +02:00
|
|
|
check_custom_profile_fields,
|
2020-08-01 14:33:03 +02:00
|
|
|
check_default_stream_groups,
|
2020-08-01 14:36:13 +02:00
|
|
|
check_default_streams,
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message,
|
2023-10-25 14:02:18 +02:00
|
|
|
check_direct_message,
|
2023-10-03 17:07:22 +02:00
|
|
|
check_draft_add,
|
|
|
|
check_draft_remove,
|
|
|
|
check_draft_update,
|
2020-08-16 17:26:24 +02:00
|
|
|
check_has_zoom_token,
|
2021-07-02 02:13:55 +02:00
|
|
|
check_heartbeat,
|
2020-07-18 16:33:03 +02:00
|
|
|
check_invites_changed,
|
2020-07-10 16:10:58 +02:00
|
|
|
check_message,
|
2020-08-06 20:31:12 +02:00
|
|
|
check_muted_topics,
|
2021-03-27 12:23:32 +01:00
|
|
|
check_muted_users,
|
2023-12-02 11:30:35 +01:00
|
|
|
check_onboarding_steps,
|
2020-08-13 19:29:07 +02:00
|
|
|
check_presence,
|
2020-08-17 15:11:19 +02:00
|
|
|
check_reaction_add,
|
|
|
|
check_reaction_remove,
|
2020-07-08 17:07:29 +02:00
|
|
|
check_realm_bot_add,
|
2020-07-08 21:06:22 +02:00
|
|
|
check_realm_bot_delete,
|
2020-07-08 17:47:56 +02:00
|
|
|
check_realm_bot_update,
|
2021-03-13 20:00:05 +01:00
|
|
|
check_realm_deactivated,
|
2021-07-21 13:40:46 +02:00
|
|
|
check_realm_default_update,
|
2020-08-17 16:07:25 +02:00
|
|
|
check_realm_domains_add,
|
|
|
|
check_realm_domains_change,
|
|
|
|
check_realm_domains_remove,
|
2020-08-18 15:16:02 +02:00
|
|
|
check_realm_emoji_update,
|
2020-08-05 23:54:26 +02:00
|
|
|
check_realm_export,
|
2021-03-30 12:51:54 +02:00
|
|
|
check_realm_linkifiers,
|
2020-10-28 04:00:46 +01:00
|
|
|
check_realm_playgrounds,
|
2020-07-08 12:53:52 +02:00
|
|
|
check_realm_update,
|
2020-08-16 14:52:09 +02:00
|
|
|
check_realm_update_dict,
|
2020-08-14 02:14:06 +02:00
|
|
|
check_realm_user_add,
|
2023-10-24 05:11:34 +02:00
|
|
|
check_realm_user_remove,
|
event_schema: Extract check_realm_user_update.
This is a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
check_realm_user_update,
|
2024-09-24 17:01:58 +02:00
|
|
|
check_saved_snippet_add,
|
|
|
|
check_saved_snippet_remove,
|
2023-10-03 17:02:25 +02:00
|
|
|
check_scheduled_message_add,
|
|
|
|
check_scheduled_message_remove,
|
|
|
|
check_scheduled_message_update,
|
2020-07-08 12:53:52 +02:00
|
|
|
check_stream_create,
|
2020-08-01 14:42:06 +02:00
|
|
|
check_stream_delete,
|
2020-07-08 13:35:37 +02:00
|
|
|
check_stream_update,
|
2020-07-18 16:27:59 +02:00
|
|
|
check_submessage,
|
2020-07-08 14:13:16 +02:00
|
|
|
check_subscription_add,
|
2020-07-08 15:04:35 +02:00
|
|
|
check_subscription_peer_add,
|
|
|
|
check_subscription_peer_remove,
|
2020-07-08 14:20:25 +02:00
|
|
|
check_subscription_remove,
|
2020-08-17 14:19:09 +02:00
|
|
|
check_subscription_update,
|
2020-07-18 16:39:06 +02:00
|
|
|
check_typing_start,
|
2020-08-27 22:10:07 +02:00
|
|
|
check_typing_stop,
|
2020-07-08 15:29:13 +02:00
|
|
|
check_update_display_settings,
|
2020-07-08 15:29:13 +02:00
|
|
|
check_update_global_notifications,
|
2020-07-10 18:35:58 +02:00
|
|
|
check_update_message,
|
2020-08-18 18:08:39 +02:00
|
|
|
check_update_message_flags_add,
|
|
|
|
check_update_message_flags_remove,
|
2020-07-18 17:19:30 +02:00
|
|
|
check_user_group_add,
|
2020-08-14 13:18:52 +02:00
|
|
|
check_user_group_add_members,
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_add_subgroups,
|
2020-08-14 13:38:36 +02:00
|
|
|
check_user_group_remove,
|
2020-08-14 13:34:34 +02:00
|
|
|
check_user_group_remove_members,
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_remove_subgroups,
|
2020-08-14 13:50:55 +02:00
|
|
|
check_user_group_update,
|
2021-07-26 08:35:27 +02:00
|
|
|
check_user_settings_update,
|
2020-07-18 17:15:23 +02:00
|
|
|
check_user_status,
|
2022-02-25 21:48:56 +01:00
|
|
|
check_user_topic,
|
2020-07-08 12:53:52 +02:00
|
|
|
)
|
2024-02-14 20:27:17 +01:00
|
|
|
from zerver.lib.events import apply_events, fetch_initial_state_data, post_process_state
|
2023-10-03 03:22:59 +02:00
|
|
|
from zerver.lib.markdown import render_message_markdown
|
2021-12-29 13:52:27 +01:00
|
|
|
from zerver.lib.mention import MentionBackend, MentionData
|
2023-02-10 14:33:24 +01:00
|
|
|
from zerver.lib.muted_users import get_mute_object
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
create_dummy_file,
|
|
|
|
get_subscription,
|
|
|
|
get_test_image_file,
|
2024-06-21 21:02:36 +02:00
|
|
|
read_test_image_file,
|
2023-03-01 07:34:25 +01:00
|
|
|
reset_email_visibility_to_everyone_in_zulip_realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
stdout_suppressed,
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
2023-11-23 22:07:41 +01:00
|
|
|
from zerver.lib.timestamp import convert_to_UTC, datetime_to_timestamp
|
2020-07-10 18:35:58 +02:00
|
|
|
from zerver.lib.topic import TOPIC_NAME
|
2022-07-08 17:17:46 +02:00
|
|
|
from zerver.lib.types import ProfileDataElementUpdateDict
|
2024-06-21 21:02:36 +02:00
|
|
|
from zerver.lib.upload import upload_message_attachment
|
2024-05-23 16:21:25 +02:00
|
|
|
from zerver.lib.user_groups import (
|
|
|
|
AnonymousSettingGroupDict,
|
|
|
|
get_group_setting_value_for_api,
|
|
|
|
get_role_based_system_groups_dict,
|
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Attachment,
|
2021-03-26 09:51:43 +01:00
|
|
|
CustomProfileField,
|
2024-06-21 21:02:36 +02:00
|
|
|
ImageAttachment,
|
2020-06-11 00:54:34 +02:00
|
|
|
Message,
|
|
|
|
MultiuseInvite,
|
2024-04-17 05:45:32 +02:00
|
|
|
NamedUserGroup,
|
2020-06-11 00:54:34 +02:00
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
RealmDomain,
|
2024-09-26 12:18:55 +02:00
|
|
|
RealmExport,
|
2023-08-11 01:10:21 +02:00
|
|
|
RealmFilter,
|
2020-10-28 04:00:46 +01:00
|
|
|
RealmPlayground,
|
2021-07-21 13:40:46 +02:00
|
|
|
RealmUserDefault,
|
2024-09-24 17:01:58 +02:00
|
|
|
SavedSnippet,
|
2020-06-11 00:54:34 +02:00
|
|
|
Service,
|
|
|
|
Stream,
|
|
|
|
UserMessage,
|
|
|
|
UserPresence,
|
|
|
|
UserProfile,
|
2021-06-22 18:42:31 +02:00
|
|
|
UserStatus,
|
2023-02-03 12:57:43 +01:00
|
|
|
UserTopic,
|
2014-02-04 20:52:02 +01:00
|
|
|
)
|
2023-12-15 04:33:19 +01:00
|
|
|
from zerver.models.clients import get_client
|
2023-12-15 01:55:59 +01:00
|
|
|
from zerver.models.groups import SystemGroups
|
2024-09-03 15:33:25 +02:00
|
|
|
from zerver.models.realm_audit_logs import AuditLogEventType
|
2023-12-15 03:57:04 +01:00
|
|
|
from zerver.models.streams import get_stream
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import get_user_by_delivery_email
|
2020-07-27 16:22:31 +02:00
|
|
|
from zerver.openapi.openapi import validate_against_openapi_schema
|
2024-09-13 10:36:58 +02:00
|
|
|
from zerver.tornado.django_api import send_event_rollback_unsafe
|
2017-10-12 01:37:44 +02:00
|
|
|
from zerver.tornado.event_queue import (
|
|
|
|
allocate_client_descriptor,
|
|
|
|
clear_client_event_queues_for_testing,
|
2021-07-02 02:13:55 +02:00
|
|
|
create_heartbeat_event,
|
2024-02-07 22:03:15 +01:00
|
|
|
mark_clients_to_reload,
|
2024-02-27 15:51:17 +01:00
|
|
|
send_restart_events,
|
2024-02-10 04:19:08 +01:00
|
|
|
send_web_reload_client_events,
|
2017-10-12 01:37:44 +02:00
|
|
|
)
|
2020-10-28 04:00:46 +01:00
|
|
|
from zerver.views.realm_playgrounds import access_playground_by_id
|
2024-06-21 21:02:36 +02:00
|
|
|
from zerver.worker.thumbnail import ensure_thumbnails
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-05-22 23:02:24 +02:00
|
|
|
|
2020-06-27 17:03:37 +02:00
|
|
|
class BaseAction(ZulipTestCase):
|
2020-09-28 21:35:55 +02:00
|
|
|
"""Core class for verifying the apply_event race handling logic as
|
2024-09-13 10:36:58 +02:00
|
|
|
well as the event formatting logic of any function using send_event_rollback_unsafe.
|
2020-09-28 21:35:55 +02:00
|
|
|
|
|
|
|
See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
|
|
|
|
for extensive design details for this testing system.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-10-12 19:43:45 +02:00
|
|
|
    @override
    def setUp(self) -> None:
        """Initialize the test case with a default acting user.

        Runs the standard ZulipTestCase setup, then binds
        self.user_profile to the "hamlet" example user, which the
        event-verification machinery uses as the acting user.
        """
        super().setUp()
        # NOTE(review): presumably individual tests rebind
        # self.user_profile when they need a different actor — confirm
        # against callers of verify_action.
        self.user_profile = self.example_user("hamlet")
|
2014-02-26 19:55:29 +01:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
@contextmanager
|
2021-02-12 08:19:30 +01:00
|
|
|
def verify_action(
|
|
|
|
self,
|
|
|
|
*,
|
2024-07-12 02:30:23 +02:00
|
|
|
event_types: list[str] | None = None,
|
2021-02-12 08:19:30 +01:00
|
|
|
include_subscribers: bool = True,
|
|
|
|
state_change_expected: bool = True,
|
|
|
|
notification_settings_null: bool = False,
|
|
|
|
client_gravatar: bool = True,
|
|
|
|
user_avatar_url_field_optional: bool = False,
|
|
|
|
slim_presence: bool = False,
|
|
|
|
include_streams: bool = True,
|
|
|
|
num_events: int = 1,
|
|
|
|
bulk_message_deletion: bool = True,
|
2021-04-18 18:12:35 +02:00
|
|
|
stream_typing_notifications: bool = True,
|
2021-07-24 19:51:25 +02:00
|
|
|
user_settings_object: bool = False,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported: bool = True,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising multiple ValidationErrors
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template: bool = True,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete: bool = False,
|
2024-02-07 22:03:15 +01:00
|
|
|
client_is_old: bool = False,
|
2024-09-12 11:46:48 +02:00
|
|
|
include_deactivated_groups: bool = False,
|
2024-07-12 02:30:17 +02:00
|
|
|
) -> Iterator[list[dict[str, Any]]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-12 01:37:44 +02:00
|
|
|
Make sure we have a clean slate of client descriptors for these tests.
|
|
|
|
If we don't do this, then certain failures will only manifest when you
|
2018-08-10 22:43:58 +02:00
|
|
|
run multiple tests within a single test function.
|
2019-03-01 18:21:31 +01:00
|
|
|
|
|
|
|
See also https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
|
|
|
|
for details on the design of this test system.
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-12 01:37:44 +02:00
|
|
|
clear_client_event_queues_for_testing()
|
|
|
|
|
2014-01-28 18:11:08 +01:00
|
|
|
client = allocate_client_descriptor(
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(
|
|
|
|
user_profile_id=self.user_profile.id,
|
|
|
|
realm_id=self.user_profile.realm_id,
|
|
|
|
event_types=event_types,
|
|
|
|
client_type_name="website",
|
|
|
|
apply_markdown=True,
|
|
|
|
client_gravatar=client_gravatar,
|
|
|
|
slim_presence=slim_presence,
|
|
|
|
all_public_streams=False,
|
|
|
|
queue_timeout=600,
|
|
|
|
last_connection_time=time.time(),
|
|
|
|
narrow=[],
|
|
|
|
bulk_message_deletion=bulk_message_deletion,
|
2021-04-18 18:12:35 +02:00
|
|
|
stream_typing_notifications=stream_typing_notifications,
|
2021-07-24 19:51:25 +02:00
|
|
|
user_settings_object=user_settings_object,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising multiple ValidationErrors
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete=user_list_incomplete,
|
2024-09-12 11:46:48 +02:00
|
|
|
include_deactivated_groups=include_deactivated_groups,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-01-24 06:34:26 +01:00
|
|
|
)
|
2020-06-10 13:47:08 +02:00
|
|
|
|
2014-01-31 23:23:39 +01:00
|
|
|
# hybrid_state = initial fetch state + re-applying events triggered by our action
|
|
|
|
# normal_state = do action then fetch at the end (the "normal" code path)
|
2017-11-02 20:55:44 +01:00
|
|
|
hybrid_state = fetch_initial_state_data(
|
2021-01-17 17:58:50 +01:00
|
|
|
self.user_profile,
|
2024-06-15 07:12:06 +02:00
|
|
|
realm=self.user_profile.realm,
|
2021-01-17 17:58:50 +01:00
|
|
|
event_types=event_types,
|
2019-11-05 21:17:15 +01:00
|
|
|
client_gravatar=client_gravatar,
|
2020-06-13 10:10:05 +02:00
|
|
|
user_avatar_url_field_optional=user_avatar_url_field_optional,
|
2020-02-02 17:29:05 +01:00
|
|
|
slim_presence=slim_presence,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
include_subscribers=include_subscribers,
|
2020-10-14 13:48:24 +02:00
|
|
|
include_streams=include_streams,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete=user_list_incomplete,
|
2024-09-12 11:46:48 +02:00
|
|
|
include_deactivated_groups=include_deactivated_groups,
|
2017-11-02 20:55:44 +01:00
|
|
|
)
|
2021-05-09 15:49:19 +02:00
|
|
|
|
2024-02-07 22:03:15 +01:00
|
|
|
if client_is_old:
|
|
|
|
mark_clients_to_reload([client.event_queue.id])
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
events: list[dict[str, Any]] = []
|
2024-04-30 01:07:06 +02:00
|
|
|
|
2024-09-13 10:36:58 +02:00
|
|
|
# We want even those `send_event_rollback_unsafe` calls which have
|
|
|
|
# been hooked to `transaction.on_commit` to execute in tests.
|
2023-04-05 13:36:01 +02:00
|
|
|
# See the comment in `ZulipTestCase.capture_send_event_calls`.
|
2021-05-09 15:49:19 +02:00
|
|
|
with self.captureOnCommitCallbacks(execute=True):
|
2024-04-30 01:07:06 +02:00
|
|
|
yield events
|
|
|
|
|
|
|
|
# Append to an empty list so the result is accessible through the
|
|
|
|
# reference we just yielded.
|
|
|
|
events += client.event_queue.contents()
|
2021-05-09 15:49:19 +02:00
|
|
|
|
2020-07-27 16:22:31 +02:00
|
|
|
content = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"queue_id": "123.12",
|
2020-08-07 01:09:47 +02:00
|
|
|
# The JSON wrapper helps in converting tuples to lists
|
2020-08-01 01:25:34 +02:00
|
|
|
# as tuples aren't valid JSON structure.
|
2021-02-12 08:20:45 +01:00
|
|
|
"events": orjson.loads(orjson.dumps(events)),
|
|
|
|
"msg": "",
|
|
|
|
"result": "success",
|
2020-07-27 16:22:31 +02:00
|
|
|
}
|
2023-10-19 21:05:40 +02:00
|
|
|
validate_against_openapi_schema(content, "/events", "get", "200")
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(events, num_events)
|
2019-04-09 04:07:03 +02:00
|
|
|
initial_state = copy.deepcopy(hybrid_state)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, initial_state, notification_settings_null)
|
2020-08-07 01:09:47 +02:00
|
|
|
before = orjson.dumps(initial_state)
|
2021-01-19 15:52:45 +01:00
|
|
|
apply_events(
|
|
|
|
self.user_profile,
|
|
|
|
state=hybrid_state,
|
|
|
|
events=events,
|
|
|
|
fetch_event_types=None,
|
|
|
|
client_gravatar=client_gravatar,
|
|
|
|
slim_presence=slim_presence,
|
|
|
|
include_subscribers=include_subscribers,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete=user_list_incomplete,
|
2024-09-12 11:46:48 +02:00
|
|
|
include_deactivated_groups=include_deactivated_groups,
|
2021-01-19 15:52:45 +01:00
|
|
|
)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, hybrid_state, notification_settings_null)
|
2020-08-07 01:09:47 +02:00
|
|
|
after = orjson.dumps(hybrid_state)
|
2017-02-21 19:35:17 +01:00
|
|
|
|
|
|
|
if state_change_expected:
|
2019-05-09 02:38:29 +02:00
|
|
|
if before == after: # nocoverage
|
2020-08-07 01:09:47 +02:00
|
|
|
print(orjson.dumps(initial_state, option=orjson.OPT_INDENT_2).decode())
|
2019-05-09 02:38:29 +02:00
|
|
|
print(events)
|
2021-02-12 08:19:30 +01:00
|
|
|
raise AssertionError(
|
2021-02-12 08:20:45 +01:00
|
|
|
"Test does not exercise enough code -- events do not change state."
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-02-21 19:35:17 +01:00
|
|
|
else:
|
2019-04-09 04:07:03 +02:00
|
|
|
try:
|
|
|
|
self.match_states(initial_state, copy.deepcopy(hybrid_state), events)
|
|
|
|
except AssertionError: # nocoverage
|
2021-02-12 08:20:45 +01:00
|
|
|
raise AssertionError("Test is invalid--state actually does change here.")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-02 20:55:44 +01:00
|
|
|
normal_state = fetch_initial_state_data(
|
2021-01-17 17:58:50 +01:00
|
|
|
self.user_profile,
|
2024-06-15 07:12:06 +02:00
|
|
|
realm=self.user_profile.realm,
|
2021-01-17 17:58:50 +01:00
|
|
|
event_types=event_types,
|
2019-11-05 21:17:15 +01:00
|
|
|
client_gravatar=client_gravatar,
|
2020-06-13 10:10:05 +02:00
|
|
|
user_avatar_url_field_optional=user_avatar_url_field_optional,
|
2020-02-02 17:29:05 +01:00
|
|
|
slim_presence=slim_presence,
|
2019-04-09 04:07:03 +02:00
|
|
|
include_subscribers=include_subscribers,
|
2020-10-14 13:48:24 +02:00
|
|
|
include_streams=include_streams,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2023-10-24 19:47:39 +02:00
|
|
|
user_list_incomplete=user_list_incomplete,
|
2024-09-12 11:46:48 +02:00
|
|
|
include_deactivated_groups=include_deactivated_groups,
|
2017-11-02 20:55:44 +01:00
|
|
|
)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, normal_state, notification_settings_null)
|
2017-10-06 21:24:56 +02:00
|
|
|
self.match_states(hybrid_state, normal_state, events)
|
2014-02-04 20:52:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def match_states(
|
2024-07-12 02:30:17 +02:00
|
|
|
self, state1: dict[str, Any], state2: dict[str, Any], events: list[dict[str, Any]]
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> None:
|
2024-07-12 02:30:17 +02:00
|
|
|
def normalize(state: dict[str, Any]) -> None:
|
2022-03-11 20:50:41 +01:00
|
|
|
if "never_subscribed" in state:
|
|
|
|
for u in state["never_subscribed"]:
|
|
|
|
if "subscribers" in u:
|
|
|
|
u["subscribers"].sort()
|
|
|
|
if "subscriptions" in state:
|
|
|
|
for u in state["subscriptions"]:
|
|
|
|
if "subscribers" in u:
|
|
|
|
u["subscribers"].sort()
|
|
|
|
state["subscriptions"] = {u["name"]: u for u in state["subscriptions"]}
|
|
|
|
if "unsubscribed" in state:
|
|
|
|
state["unsubscribed"] = {u["name"]: u for u in state["unsubscribed"]}
|
2021-02-12 08:20:45 +01:00
|
|
|
if "realm_bots" in state:
|
|
|
|
state["realm_bots"] = {u["email"]: u for u in state["realm_bots"]}
|
2021-05-20 20:01:51 +02:00
|
|
|
# Since time is different for every call, just fix the value
|
|
|
|
state["server_timestamp"] = 0
|
2024-06-05 21:36:22 +02:00
|
|
|
if "presence_last_update_id" in state:
|
|
|
|
# We don't adjust presence_last_update_id via apply_events,
|
|
|
|
# since events don't carry the relevant information.
|
|
|
|
# Fix the value just like server_timestamp.
|
|
|
|
state["presence_last_update_id"] = 0
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2014-02-04 19:09:30 +01:00
|
|
|
normalize(state1)
|
|
|
|
normalize(state2)
|
2017-10-06 21:24:56 +02:00
|
|
|
|
|
|
|
# If this assertions fails, we have unusual problems.
|
|
|
|
self.assertEqual(state1.keys(), state2.keys())
|
|
|
|
|
|
|
|
# The far more likely scenario is that some section of
|
2017-10-06 23:08:41 +02:00
|
|
|
# our enormous payload does not get updated properly. We
|
2017-10-06 21:24:56 +02:00
|
|
|
# want the diff here to be developer-friendly, hence
|
|
|
|
# the somewhat tedious code to provide useful output.
|
2017-10-06 22:59:26 +02:00
|
|
|
if state1 != state2: # nocoverage
|
2021-02-12 08:20:45 +01:00
|
|
|
print("\n---States DO NOT MATCH---")
|
|
|
|
print("\nEVENTS:\n")
|
2017-10-06 21:24:56 +02:00
|
|
|
|
|
|
|
# Printing out the events is a big help to
|
|
|
|
# developers.
|
|
|
|
import json
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-06 21:24:56 +02:00
|
|
|
for event in events:
|
|
|
|
print(json.dumps(event, indent=4))
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
print("\nMISMATCHES:\n")
|
2017-10-06 21:24:56 +02:00
|
|
|
for k in state1:
|
|
|
|
if state1[k] != state2[k]:
|
2021-02-12 08:20:45 +01:00
|
|
|
print("\nkey = " + k)
|
2017-10-06 21:24:56 +02:00
|
|
|
try:
|
|
|
|
self.assertEqual({k: state1[k]}, {k: state2[k]})
|
|
|
|
except AssertionError as e:
|
|
|
|
print(e)
|
2021-02-12 08:19:30 +01:00
|
|
|
print(
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2017-10-06 21:24:56 +02:00
|
|
|
NOTE:
|
|
|
|
|
|
|
|
This is an advanced test that verifies how
|
|
|
|
we apply events after fetching data. If you
|
|
|
|
do not know how to debug it, you can ask for
|
|
|
|
help on chat.
|
2021-06-09 22:11:26 +02:00
|
|
|
""",
|
|
|
|
flush=True,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-10-06 21:24:56 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
raise AssertionError("Mismatching states")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-27 17:03:37 +02:00
|
|
|
class NormalActionsTest(BaseAction):
|
2020-07-05 03:34:30 +02:00
|
|
|
def create_bot(self, email: str, **extras: Any) -> UserProfile:
|
2020-06-27 17:03:37 +02:00
|
|
|
return self.create_test_bot(email, self.user_profile, **extras)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_mentioned_send_message_events(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
2017-07-21 20:31:25 +02:00
|
|
|
|
2017-08-10 10:58:39 +02:00
|
|
|
for i in range(3):
|
2021-02-12 08:20:45 +01:00
|
|
|
content = "mentioning... @**" + user.full_name + "** hello " + str(i)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
"Verona",
|
|
|
|
content,
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
|
|
|
)
|
2023-06-07 19:19:33 +02:00
|
|
|
|
2023-12-10 14:53:52 +01:00
|
|
|
def test_automatically_follow_topic_where_mentioned(self) -> None:
|
|
|
|
user = self.example_user("hamlet")
|
|
|
|
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=user,
|
|
|
|
setting_name="automatically_follow_topics_where_mentioned",
|
|
|
|
setting_value=True,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
def get_num_events() -> int: # nocoverage
|
|
|
|
try:
|
|
|
|
user_topic = UserTopic.objects.get(
|
|
|
|
user_profile=user,
|
|
|
|
stream_id=get_stream("Verona", user.realm).id,
|
|
|
|
topic_name__iexact="test",
|
|
|
|
)
|
|
|
|
if user_topic.visibility_policy != UserTopic.VisibilityPolicy.FOLLOWED:
|
|
|
|
return 3
|
|
|
|
except UserTopic.DoesNotExist:
|
|
|
|
return 3
|
|
|
|
return 1
|
|
|
|
|
|
|
|
for i in range(3):
|
|
|
|
content = "mentioning... @**" + user.full_name + "** hello " + str(i)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=get_num_events()):
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
"Verona",
|
|
|
|
content,
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
|
|
|
)
|
2023-12-10 14:53:52 +01:00
|
|
|
|
2023-06-07 19:19:33 +02:00
|
|
|
def test_topic_wildcard_mentioned_send_message_events(self) -> None:
|
|
|
|
for i in range(3):
|
|
|
|
content = "mentioning... @**topic** hello " + str(i)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
"Verona",
|
|
|
|
content,
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
|
|
|
)
|
2017-07-21 20:31:25 +02:00
|
|
|
|
2023-06-03 16:51:38 +02:00
|
|
|
def test_stream_wildcard_mentioned_send_message_events(self) -> None:
|
2019-08-26 05:11:18 +02:00
|
|
|
for i in range(3):
|
2021-02-12 08:20:45 +01:00
|
|
|
content = "mentioning... @**all** hello " + str(i)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
"Verona",
|
|
|
|
content,
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
|
|
|
)
|
2019-08-26 05:11:18 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_pm_send_message_events(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
|
|
|
self.send_personal_message(
|
2024-05-15 19:24:37 +02:00
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.example_user("hamlet"),
|
|
|
|
"hola",
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2017-05-23 03:02:01 +02:00
|
|
|
|
2023-06-19 16:26:12 +02:00
|
|
|
# Verify direct message editing - content only edit
|
2021-12-15 21:17:21 +01:00
|
|
|
pm = Message.objects.order_by("-id")[0]
|
|
|
|
content = "new content"
|
2023-10-03 03:22:59 +02:00
|
|
|
rendering_result = render_message_markdown(pm, content)
|
2024-07-12 02:30:17 +02:00
|
|
|
prior_mention_user_ids: set[int] = set()
|
2021-12-29 13:52:27 +01:00
|
|
|
mention_backend = MentionBackend(self.user_profile.realm_id)
|
2021-12-15 21:17:21 +01:00
|
|
|
mention_data = MentionData(
|
2021-12-29 13:52:27 +01:00
|
|
|
mention_backend=mention_backend,
|
2021-12-15 21:17:21 +01:00
|
|
|
content=content,
|
2023-12-02 08:54:36 +01:00
|
|
|
message_sender=self.example_user("cordelia"),
|
2021-12-15 21:17:21 +01:00
|
|
|
)
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=False) as events:
|
|
|
|
do_update_message(
|
2021-12-15 21:17:21 +01:00
|
|
|
self.user_profile,
|
|
|
|
pm,
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
False,
|
|
|
|
False,
|
|
|
|
content,
|
|
|
|
rendering_result,
|
|
|
|
prior_mention_user_ids,
|
|
|
|
mention_data,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2021-12-15 21:17:21 +01:00
|
|
|
check_update_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
is_stream_message=False,
|
|
|
|
has_content=True,
|
|
|
|
has_topic=False,
|
|
|
|
has_new_stream_id=False,
|
2022-01-14 15:23:49 +01:00
|
|
|
is_embedded_update_only=False,
|
2021-12-15 21:17:21 +01:00
|
|
|
)
|
|
|
|
|
2024-07-04 14:05:48 +02:00
|
|
|
def test_direct_message_group_send_message_events(self) -> None:
|
|
|
|
direct_message_group = [
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("hamlet"),
|
|
|
|
self.example_user("othello"),
|
2017-05-23 03:02:01 +02:00
|
|
|
]
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
2024-07-04 14:05:48 +02:00
|
|
|
self.send_group_direct_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
direct_message_group,
|
|
|
|
"hola",
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
2024-05-15 19:24:37 +02:00
|
|
|
)
|
2017-05-23 03:02:01 +02:00
|
|
|
|
2023-10-25 14:02:18 +02:00
|
|
|
def test_user_creation_events_on_sending_messages(self) -> None:
|
|
|
|
self.set_up_db_for_testing_user_access()
|
|
|
|
polonius = self.example_user("polonius")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
|
|
|
|
self.user_profile = polonius
|
|
|
|
|
|
|
|
# Test that guest will not receive creation event
|
|
|
|
# for bots as they can access all the bots.
|
|
|
|
bot = self.create_test_bot("test2", cordelia, full_name="Test bot")
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=1) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_personal_message(bot, polonius, "hola", skip_capture_on_commit_callbacks=True)
|
2023-10-25 14:02:18 +02:00
|
|
|
check_direct_message("events[0]", events[0])
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=2) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_personal_message(
|
|
|
|
cordelia, polonius, "hola", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
|
|
|
check_realm_user_add("events[0]", events[0])
|
|
|
|
check_direct_message("events[1]", events[1])
|
|
|
|
self.assertEqual(events[0]["person"]["user_id"], cordelia.id)
|
2023-10-25 14:02:18 +02:00
|
|
|
|
|
|
|
othello = self.example_user("othello")
|
|
|
|
desdemona = self.example_user("desdemona")
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=3) as events:
|
2024-07-04 14:05:48 +02:00
|
|
|
self.send_group_direct_message(
|
2024-05-15 19:24:37 +02:00
|
|
|
othello, [polonius, desdemona, bot], "hola", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
|
|
|
check_realm_user_add("events[0]", events[0])
|
2023-10-25 14:02:18 +02:00
|
|
|
check_realm_user_add("events[1]", events[1])
|
2024-05-15 19:24:37 +02:00
|
|
|
check_direct_message("events[2]", events[2])
|
|
|
|
user_creation_user_ids = {events[0]["person"]["user_id"], events[1]["person"]["user_id"]}
|
2023-10-25 14:02:18 +02:00
|
|
|
self.assertEqual(user_creation_user_ids, {othello.id, desdemona.id})
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_stream_send_message_events(self) -> None:
|
2023-06-17 17:37:04 +02:00
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
for stream_name in ["Verona", "Denmark", "core team"]:
|
|
|
|
stream = get_stream(stream_name, hamlet.realm)
|
|
|
|
sub = get_subscription(stream.name, hamlet)
|
|
|
|
do_change_subscription_property(hamlet, sub, stream, "is_muted", True, acting_user=None)
|
|
|
|
|
|
|
|
def verify_events_generated_and_reset_visibility_policy(
|
2024-07-12 02:30:17 +02:00
|
|
|
events: list[dict[str, Any]], stream_name: str, topic_name: str
|
2023-06-17 17:37:04 +02:00
|
|
|
) -> None:
|
|
|
|
# event-type: muted_topics
|
|
|
|
check_muted_topics("events[0]", events[0])
|
|
|
|
# event-type: user_topic
|
|
|
|
check_user_topic("events[1]", events[1])
|
|
|
|
|
|
|
|
if events[2]["type"] == "message":
|
|
|
|
check_message("events[2]", events[2])
|
|
|
|
else:
|
|
|
|
# event-type: reaction
|
|
|
|
check_reaction_add("events[2]", events[2])
|
|
|
|
|
|
|
|
# Reset visibility policy
|
|
|
|
do_set_user_topic_visibility_policy(
|
|
|
|
hamlet,
|
|
|
|
get_stream(stream_name, hamlet.realm),
|
2024-01-15 12:17:50 +01:00
|
|
|
topic_name,
|
2023-06-17 17:37:04 +02:00
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.INHERIT,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Events generated during send message action depends on the 'automatically_follow_topics_policy'
|
|
|
|
# and 'automatically_unmute_topics_in_muted_streams_policy' settings. Here we test all the
|
|
|
|
# possible combinations.
|
|
|
|
|
|
|
|
# action: participation
|
|
|
|
# 'automatically_follow_topics_policy' | 'automatically_unmute_topics_in_muted_streams_policy' | visibility_policy
|
|
|
|
# ON_PARTICIPATION | ON_INITIATION | FOLLOWED
|
|
|
|
# ON_PARTICIPATION | ON_PARTICIPATION | FOLLOWED
|
|
|
|
# ON_PARTICIPATION | ON_SEND | FOLLOWED
|
|
|
|
# ON_PARTICIPATION | NEVER | FOLLOWED
|
|
|
|
message_id = self.send_stream_message(hamlet, "Verona", "hello", "topic")
|
|
|
|
message = Message.objects.get(id=message_id)
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_follow_topics_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
for setting_value in UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES:
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=setting_value,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
# Three events are generated:
|
|
|
|
# 2 for following the topic and 1 for adding reaction.
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=False, num_events=3) as events:
|
|
|
|
do_add_reaction(hamlet, message, "tada", "1f389", "unicode_emoji")
|
2023-06-17 17:37:04 +02:00
|
|
|
verify_events_generated_and_reset_visibility_policy(events, "Verona", "topic")
|
|
|
|
do_remove_reaction(hamlet, message, "1f389", "unicode_emoji")
|
|
|
|
|
|
|
|
# action: send
|
|
|
|
# 'automatically_follow_topics_policy' | 'automatically_unmute_topics_in_muted_streams_policy' | visibility_policy
|
|
|
|
# ON_SEND | ON_INITIATION | FOLLOWED
|
|
|
|
# ON_SEND | ON_PARTICIPATION | FOLLOWED
|
|
|
|
# ON_SEND | ON_SEND | FOLLOWED
|
|
|
|
# ON_SEND | NEVER | FOLLOWED
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_follow_topics_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_SEND,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
for setting_value in UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES:
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=setting_value,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
# Three events are generated:
|
|
|
|
# 2 for following the topic and 1 for the message sent.
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=False, num_events=3) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet, "Verona", "hello", "topic", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
verify_events_generated_and_reset_visibility_policy(events, "Verona", "topic")
|
|
|
|
|
|
|
|
# action: initiation
|
|
|
|
# 'automatically_follow_topics_policy' | 'automatically_unmute_topics_in_muted_streams_policy' | visibility_policy
|
|
|
|
# ON_INITIATION | ON_INITIATION | FOLLOWED
|
|
|
|
# ON_INITIATION | ON_PARTICIPATION | FOLLOWED
|
|
|
|
# ON_INITIATION | ON_SEND | FOLLOWED
|
|
|
|
# ON_INITIATION | NEVER | FOLLOWED
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_follow_topics_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_INITIATION,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
for index, setting_value in enumerate(
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES
|
|
|
|
):
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=setting_value,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
# Three events are generated:
|
|
|
|
# 2 for following the topic and 1 for the message sent.
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=False, num_events=3) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet,
|
|
|
|
"Denmark",
|
|
|
|
"hello",
|
|
|
|
f"new topic {index}",
|
|
|
|
skip_capture_on_commit_callbacks=True,
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
verify_events_generated_and_reset_visibility_policy(
|
|
|
|
events, "Denmark", f"new topic {index}"
|
|
|
|
)
|
|
|
|
|
|
|
|
# 'automatically_follow_topics_policy' | 'automatically_unmute_topics_in_muted_streams_policy' | visibility_policy
|
|
|
|
# NEVER | ON_INITIATION | UNMUTED
|
|
|
|
# NEVER | ON_PARTICIPATION | UNMUTED
|
|
|
|
# NEVER | ON_SEND | UNMUTED
|
|
|
|
# NEVER | NEVER | NA
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_follow_topics_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
for setting_value in [
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_INITIATION,
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION,
|
|
|
|
UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_SEND,
|
|
|
|
]:
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=setting_value,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
# Three events are generated:
|
|
|
|
# 2 for unmuting the topic and 1 for the message sent.
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=False, num_events=3) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet, "core team", "hello", "topic", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
verify_events_generated_and_reset_visibility_policy(events, "core team", "topic")
|
|
|
|
|
|
|
|
# If current_visibility_policy is already set to the value the policies would set.
|
|
|
|
do_set_user_topic_visibility_policy(
|
|
|
|
hamlet,
|
|
|
|
get_stream("core team", hamlet.realm),
|
|
|
|
"new Topic",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.UNMUTED,
|
|
|
|
)
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_ON_PARTICIPATION,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
# 1 event for the message sent
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=False, num_events=1) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet, "core team", "hello", "new Topic", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
|
|
|
|
do_change_user_setting(
|
|
|
|
user_profile=hamlet,
|
|
|
|
setting_name="automatically_unmute_topics_in_muted_streams_policy",
|
|
|
|
setting_value=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_NEVER,
|
|
|
|
acting_user=None,
|
2017-10-31 18:36:18 +01:00
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
# Only one message event is generated
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=True) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet, "core team", "hello", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2023-06-17 17:37:04 +02:00
|
|
|
# event-type: message
|
2021-02-12 08:20:45 +01:00
|
|
|
check_message("events[0]", events[0])
|
|
|
|
assert isinstance(events[0]["message"]["avatar_url"], str)
|
2017-04-20 17:31:41 +02:00
|
|
|
|
2021-10-26 09:15:16 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
self.example_user("hamlet"),
|
|
|
|
"email_address_visibility",
|
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_gravatar=True) as events:
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
hamlet, "core team", "hello", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_message("events[0]", events[0])
|
|
|
|
assert events[0]["message"]["avatar_url"] is None
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2023-09-25 11:27:15 +02:00
|
|
|
# Here we add coverage for the case where 'apply_unread_message_event'
|
|
|
|
# should be called and unread messages in unmuted or followed topic in
|
|
|
|
# muted stream is treated as unmuted stream message, thus added to 'unmuted_stream_msgs'.
|
2023-06-17 17:37:04 +02:00
|
|
|
stream = get_stream("Verona", hamlet.realm)
|
2023-09-25 11:27:15 +02:00
|
|
|
do_set_user_topic_visibility_policy(
|
2023-06-17 17:37:04 +02:00
|
|
|
hamlet,
|
2023-09-25 11:27:15 +02:00
|
|
|
stream,
|
|
|
|
"test",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.UNMUTED,
|
|
|
|
)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True):
|
2024-05-15 19:24:37 +02:00
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("aaron"), "Verona", "hello", skip_capture_on_commit_callbacks=True
|
|
|
|
)
|
2023-09-25 11:27:15 +02:00
|
|
|
|
2023-09-14 10:35:26 +02:00
|
|
|
def test_stream_update_message_events(self) -> None:
    """Verify the events emitted by the several flavors of stream message
    editing: content-only edits, topic-only edits, embedded-data-only
    updates, topic moves to another stream, and combined stream+topic
    moves with the update_message_flags event type excluded."""
    iago = self.example_user("iago")
    self.send_stream_message(iago, "Verona", "hello")

    # Verify stream message editing - content only
    message = Message.objects.order_by("-id")[0]
    content = "new content"
    rendering_result = render_message_markdown(message, content)
    prior_mention_user_ids: set[int] = set()
    mention_backend = MentionBackend(self.user_profile.realm_id)
    mention_data = MentionData(
        mention_backend=mention_backend,
        content=content,
        message_sender=iago,
    )

    with self.verify_action(state_change_expected=False) as events:
        do_update_message(
            self.user_profile,
            message,
            None,
            None,
            None,
            False,
            False,
            content,
            rendering_result,
            prior_mention_user_ids,
            mention_data,
        )
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=True,
        has_content=True,
        has_topic=False,
        has_new_stream_id=False,
        is_embedded_update_only=False,
    )

    # Verify stream message editing - topic only
    topic_name = "new_topic"
    propagate_mode = "change_all"

    with self.verify_action(state_change_expected=True) as events:
        do_update_message(
            self.user_profile,
            message,
            None,
            topic_name,
            propagate_mode,
            False,
            False,
            None,
            None,
            prior_mention_user_ids,
            mention_data,
        )
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=True,
        has_content=False,
        has_topic=True,
        has_new_stream_id=False,
        is_embedded_update_only=False,
    )

    # Verify special case of embedded content update
    content = "embed_content"
    rendering_result = render_message_markdown(message, content)
    with self.verify_action(state_change_expected=False) as events:
        do_update_embedded_data(self.user_profile, message, rendering_result)
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=False,
        has_content=False,
        has_topic=False,
        has_new_stream_id=False,
        is_embedded_update_only=True,
    )

    # Verify move topic to different stream.
    self.subscribe(self.user_profile, "Verona")
    self.subscribe(self.user_profile, "Denmark")
    self.send_stream_message(iago, "Verona")
    message_id = self.send_stream_message(self.user_profile, "Verona")
    message = Message.objects.get(id=message_id)
    stream = get_stream("Denmark", self.user_profile.realm)
    propagate_mode = "change_all"
    prior_mention_user_ids = set()

    with self.verify_action(
        state_change_expected=True,
        # There are 3 events generated for this action
        # * update_message: For updating existing messages
        # * 2 new message events: Breadcrumb messages in the new and old topics.
        num_events=3,
    ) as events:
        do_update_message(
            self.user_profile,
            message,
            stream,
            None,
            propagate_mode,
            True,
            True,
            None,
            None,
            set(),
            None,
        )
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=True,
        has_content=False,
        has_topic=False,
        has_new_stream_id=True,
        is_embedded_update_only=False,
    )

    # Move both stream and topic, with update_message_flags
    # excluded from event types.
    self.send_stream_message(self.user_profile, "Verona")
    message_id = self.send_stream_message(self.user_profile, "Verona")
    message = Message.objects.get(id=message_id)
    stream = get_stream("Denmark", self.user_profile.realm)
    propagate_mode = "change_all"
    prior_mention_user_ids = set()

    with self.verify_action(
        state_change_expected=True,
        # Skip "update_message_flags" to exercise the code path
        # where raw_unread_msgs does not exist in the state.
        event_types=["message", "update_message"],
        # There are 3 events generated for this action
        # * update_message: For updating existing messages
        # * 2 new message events: Breadcrumb messages in the new and old topics.
        num_events=3,
    ) as events:
        do_update_message(
            self.user_profile,
            message,
            stream,
            "final_topic",
            propagate_mode,
            True,
            True,
            None,
            None,
            set(),
            None,
        )
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=True,
        has_content=False,
        has_topic=True,
        has_new_stream_id=True,
        is_embedded_update_only=False,
    )
|
|
|
|
|
2024-06-21 21:02:36 +02:00
|
|
|
def test_thumbnail_event(self) -> None:
    """Verify that generating a thumbnail for an uploaded image emits an
    embedded-data-only update_message event for the message referencing it."""
    iago = self.example_user("iago")
    url = upload_message_attachment(
        "img.png", "image/png", read_test_image_file("img.png"), self.example_user("iago")
    )[0]
    # Strip the leading "/user_uploads/" from the URL to recover the
    # storage path_id; note len("/user_upload/") + 1 == len("/user_uploads/").
    path_id = url[len("/user_upload/") + 1 :]
    self.send_stream_message(
        iago, "Verona", f"[img.png]({url})", skip_capture_on_commit_callbacks=True
    )

    # Generating a thumbnail for an image sends a message update event
    with self.verify_action(state_change_expected=False) as events:
        ensure_thumbnails(ImageAttachment.objects.get(path_id=path_id))
    check_update_message(
        "events[0]",
        events[0],
        is_stream_message=False,
        has_content=False,
        has_topic=False,
        has_new_stream_id=False,
        is_embedded_update_only=True,
    )
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_update_message_flags(self) -> None:
    """Verify the events sent when adding and removing the "starred" flag
    on a direct message."""
    # Test message flag update events
    message = self.send_personal_message(
        self.example_user("cordelia"),
        self.example_user("hamlet"),
        "hello",
    )
    user_profile = self.example_user("hamlet")
    with self.verify_action(state_change_expected=True) as events:
        do_update_message_flags(user_profile, "add", "starred", [message])
    check_update_message_flags_add("events[0]", events[0])

    with self.verify_action(state_change_expected=True) as events:
        do_update_message_flags(user_profile, "remove", "starred", [message])
    check_update_message_flags_remove("events[0]", events[0])
|
2017-03-24 03:19:23 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_update_read_flag_removes_unread_msg_ids(self) -> None:
    """Verify read-flag add/remove events across message types (stream,
    1:1 direct, group direct), both for plain content and for content
    that mentions the user, so mention-related unread state is exercised."""
    user_profile = self.example_user("hamlet")
    mention = "@**" + user_profile.full_name + "**"

    for content in ["hello", mention]:
        message = self.send_stream_message(
            self.example_user("cordelia"),
            "Verona",
            content,
        )

        with self.verify_action(state_change_expected=True):
            do_update_message_flags(user_profile, "add", "read", [message])

        with self.verify_action(state_change_expected=True) as events:
            do_update_message_flags(user_profile, "remove", "read", [message])
        check_update_message_flags_remove("events[0]", events[0])

        personal_message = self.send_personal_message(
            from_user=self.example_user("cordelia"), to_user=user_profile, content=content
        )
        with self.verify_action(state_change_expected=True):
            do_update_message_flags(user_profile, "add", "read", [personal_message])

        with self.verify_action(state_change_expected=True) as events:
            do_update_message_flags(user_profile, "remove", "read", [personal_message])
        check_update_message_flags_remove("events[0]", events[0])

        group_direct_message = self.send_group_direct_message(
            from_user=self.example_user("cordelia"),
            to_users=[user_profile, self.example_user("othello")],
            content=content,
        )

        with self.verify_action(state_change_expected=True):
            do_update_message_flags(user_profile, "add", "read", [group_direct_message])

        with self.verify_action(state_change_expected=True) as events:
            do_update_message_flags(user_profile, "remove", "read", [group_direct_message])
        check_update_message_flags_remove("events[0]", events[0])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_send_message_to_existing_recipient(self) -> None:
    """Verify event handling when a message is sent to a recipient that
    already has prior messages (the Recipient row already exists)."""
    sender = self.example_user("cordelia")
    self.send_stream_message(
        sender,
        "Verona",
        "hello 1",
    )
    with self.verify_action(state_change_expected=True):
        self.send_stream_message(
            sender, "Verona", "hello 2", skip_capture_on_commit_callbacks=True
        )
|
2017-05-23 03:02:01 +02:00
|
|
|
|
2023-11-08 04:53:05 +01:00
|
|
|
def test_events_for_message_from_inaccessible_sender(self) -> None:
    """Verify that message events seen by a guest (polonius) hide the
    identity of a sender the guest cannot access (othello: "Unknown
    user", dummy email, fallback avatar), while an accessible sender
    (iago) is shown normally."""
    reset_email_visibility_to_everyone_in_zulip_realm()
    self.set_up_db_for_testing_user_access()
    othello = self.example_user("othello")
    self.user_profile = self.example_user("polonius")

    with self.verify_action() as events:
        self.send_stream_message(
            othello,
            "test_stream1",
            "hello 2",
            allow_unsubscribed_sender=True,
            skip_capture_on_commit_callbacks=True,
        )
    check_message("events[0]", events[0])
    message_obj = events[0]["message"]
    self.assertEqual(message_obj["sender_full_name"], "Unknown user")
    self.assertEqual(message_obj["sender_email"], f"user{othello.id}@zulip.testserver")
    self.assertTrue(message_obj["avatar_url"].endswith("images/unknown-user-avatar.png"))

    iago = self.example_user("iago")
    with self.verify_action() as events:
        self.send_stream_message(
            iago,
            "test_stream1",
            "hello 2",
            allow_unsubscribed_sender=True,
            skip_capture_on_commit_callbacks=True,
        )
    check_message("events[0]", events[0])
    message_obj = events[0]["message"]
    self.assertEqual(message_obj["sender_full_name"], iago.full_name)
    self.assertEqual(message_obj["sender_email"], iago.delivery_email)
    self.assertIsNone(message_obj["avatar_url"])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_add_reaction(self) -> None:
    """Verify the reaction-add event emitted when a user reacts to a
    message with a Unicode emoji."""
    message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
    message = Message.objects.get(id=message_id)
    with self.verify_action(state_change_expected=False) as events:
        do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
    check_reaction_add("events[0]", events[0])
|
2017-10-08 09:34:59 +02:00
|
|
|
|
2021-07-02 02:13:55 +02:00
|
|
|
def test_heartbeat_event(self) -> None:
    """Verify that a directly-sent heartbeat event reaches the client
    and matches the heartbeat schema; heartbeats never change state."""
    with self.verify_action(state_change_expected=False) as events:
        send_event_rollback_unsafe(
            self.user_profile.realm,
            create_heartbeat_event(),
            [self.user_profile.id],
        )
    check_heartbeat("events[0]", events[0])
|
|
|
|
|
2018-02-12 10:53:36 +01:00
|
|
|
def test_add_submessage(self) -> None:
    """Verify the submessage event emitted when a submessage (e.g. widget
    data) is attached to an existing stream message."""
    cordelia = self.example_user("cordelia")
    stream_name = "Verona"
    message_id = self.send_stream_message(
        sender=cordelia,
        stream_name=stream_name,
    )
    with self.verify_action(state_change_expected=False) as events:
        do_add_submessage(
            realm=cordelia.realm,
            sender_id=cordelia.id,
            message_id=message_id,
            msg_type="whatever",
            content='"stuff"',
        )
    check_submessage("events[0]", events[0])
|
2018-02-12 10:53:36 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_remove_reaction(self) -> None:
    """Verify the reaction-remove event emitted when a user removes a
    previously added Unicode emoji reaction."""
    message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
    message = Message.objects.get(id=message_id)
    do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
    with self.verify_action(state_change_expected=False) as events:
        do_remove_reaction(self.user_profile, message, "1f389", "unicode_emoji")
    check_reaction_remove("events[0]", events[0])
|
2017-10-08 09:34:59 +02:00
|
|
|
|
2017-12-14 22:22:17 +01:00
|
|
|
def test_invite_user_event(self) -> None:
    """Verify the invites_changed event emitted when an admin invites a
    new user by email to a couple of streams."""
    # Use an admin so this client receives invites_changed events.
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Scotland"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    with self.verify_action(state_change_expected=False) as events:
        do_invite_users(
            self.user_profile,
            ["foo@zulip.com"],
            streams,
            include_realm_default_subscriptions=False,
            invite_expires_in_minutes=invite_expires_in_minutes,
        )
    check_invites_changed("events[0]", events[0])
|
2017-12-14 22:22:17 +01:00
|
|
|
|
2019-02-15 19:09:25 +01:00
|
|
|
def test_create_multiuse_invite_event(self) -> None:
    """Verify the invites_changed event emitted when an admin creates a
    multiuse invitation link."""
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    with self.verify_action(state_change_expected=False) as events:
        do_create_multiuse_invite_link(
            self.user_profile,
            PreregistrationUser.INVITE_AS["MEMBER"],
            invite_expires_in_minutes,
            False,
            streams,
        )
    check_invites_changed("events[0]", events[0])
|
2019-02-15 19:09:25 +01:00
|
|
|
|
2022-01-14 22:54:49 +01:00
|
|
|
def test_deactivate_user_invites_changed_event(self) -> None:
    """Verify that deactivating a user who has outstanding invitations
    produces an invites_changed event (alongside the deactivation event,
    hence num_events=2)."""
    self.user_profile = self.example_user("iago")
    user_profile = self.example_user("cordelia")
    invite_expires_in_minutes = 2 * 24 * 60
    with self.captureOnCommitCallbacks(execute=True):
        do_invite_users(
            user_profile,
            ["foo@zulip.com"],
            [],
            include_realm_default_subscriptions=False,
            invite_expires_in_minutes=invite_expires_in_minutes,
        )

    with self.verify_action(num_events=2) as events:
        do_deactivate_user(user_profile, acting_user=None)
    check_invites_changed("events[0]", events[0])
|
|
|
|
|
2017-12-14 22:22:17 +01:00
|
|
|
def test_revoke_user_invite_event(self) -> None:
    """Verify the invites_changed event emitted when an email invitation
    is revoked."""
    # We need set self.user_profile to be an admin, so that
    # we receive the invites_changed event.
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    with self.captureOnCommitCallbacks(execute=True):
        do_invite_users(
            self.user_profile,
            ["foo@zulip.com"],
            streams,
            include_realm_default_subscriptions=False,
            invite_expires_in_minutes=invite_expires_in_minutes,
        )
    prereg_users = PreregistrationUser.objects.filter(
        referred_by__realm=self.user_profile.realm
    )
    with self.verify_action(state_change_expected=False) as events:
        do_revoke_user_invite(prereg_users[0])
    check_invites_changed("events[0]", events[0])
|
2019-02-15 19:09:25 +01:00
|
|
|
|
|
|
|
def test_revoke_multiuse_invite_event(self) -> None:
    """Verify the invites_changed event emitted when a multiuse invite
    link is revoked."""
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    do_create_multiuse_invite_link(
        self.user_profile,
        PreregistrationUser.INVITE_AS["MEMBER"],
        invite_expires_in_minutes,
        False,
        streams,
    )

    multiuse_object = MultiuseInvite.objects.get()
    with self.verify_action(state_change_expected=False) as events:
        do_revoke_multi_use_invite(multiuse_object)
    check_invites_changed("events[0]", events[0])
|
2017-12-14 22:22:17 +01:00
|
|
|
|
|
|
|
def test_invitation_accept_invite_event(self) -> None:
    """Verify that when an invited user signs up (do_create_user with a
    prereg_user), the inviter's client receives an invites_changed event
    as the last of the 6 events generated by user creation."""
    reset_email_visibility_to_everyone_in_zulip_realm()

    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Scotland"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    with self.captureOnCommitCallbacks(execute=True):
        do_invite_users(
            self.user_profile,
            ["foo@zulip.com"],
            streams,
            include_realm_default_subscriptions=False,
            invite_expires_in_minutes=invite_expires_in_minutes,
        )
    prereg_user = PreregistrationUser.objects.get(email="foo@zulip.com")

    with self.verify_action(state_change_expected=True, num_events=6) as events:
        do_create_user(
            "foo@zulip.com",
            "password",
            self.user_profile.realm,
            "full name",
            prereg_user=prereg_user,
            acting_user=None,
        )

    # The label must match the index actually checked; the original said
    # "events[6]" while asserting on events[5], which would point a
    # failure message at the wrong event.
    check_invites_changed("events[5]", events[5])
|
2017-12-14 22:22:17 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_typing_events(self) -> None:
    """Verify the typing start/stop events for direct-message typing
    notifications."""
    with self.verify_action(state_change_expected=False) as events:
        check_send_typing_notification(
            self.user_profile, [self.example_user("cordelia").id], "start"
        )
    check_typing_start("events[0]", events[0])
    with self.verify_action(state_change_expected=False) as events:
        check_send_typing_notification(
            self.user_profile, [self.example_user("cordelia").id], "stop"
        )
    check_typing_stop("events[0]", events[0])
|
2017-03-18 03:50:41 +01:00
|
|
|
|
2020-12-24 21:00:20 +01:00
|
|
|
def test_stream_typing_events(self) -> None:
    """Verify typing start/stop events for stream typing notifications,
    and that clients with stream_typing_notifications=False receive no
    events at all."""
    stream = get_stream("Denmark", self.user_profile.realm)
    topic_name = "streams typing"

    with self.verify_action(state_change_expected=False) as events:
        do_send_stream_typing_notification(
            self.user_profile,
            "start",
            stream,
            topic_name,
        )
    check_typing_start("events[0]", events[0])

    with self.verify_action(state_change_expected=False) as events:
        do_send_stream_typing_notification(
            self.user_profile,
            "stop",
            stream,
            topic_name,
        )
    check_typing_stop("events[0]", events[0])

    # Having client_capability `stream_typing_notification=False`
    # shouldn't produce any events.
    with self.verify_action(
        state_change_expected=False, stream_typing_notifications=False, num_events=0
    ) as events:
        do_send_stream_typing_notification(
            self.user_profile,
            "start",
            stream,
            topic_name,
        )
    self.assertEqual(events, [])

    with self.verify_action(
        state_change_expected=False, stream_typing_notifications=False, num_events=0
    ) as events:
        do_send_stream_typing_notification(
            self.user_profile,
            "stop",
            stream,
            topic_name,
        )
    self.assertEqual(events, [])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_custom_profile_fields_events(self) -> None:
|
2021-03-26 09:51:43 +01:00
|
|
|
realm = self.user_profile.realm
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
try_add_realm_custom_profile_field(
|
2021-03-26 18:03:27 +01:00
|
|
|
realm=realm, name="Expertise", field_type=CustomProfileField.LONG_TEXT
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_custom_profile_fields("events[0]", events[0])
|
2018-03-31 07:30:24 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
field = realm.customprofilefield_set.get(realm=realm, name="Biography")
|
2018-03-31 07:30:24 +02:00
|
|
|
name = field.name
|
2021-02-12 08:20:45 +01:00
|
|
|
hint = "Biography of the user"
|
2022-07-12 21:04:47 +02:00
|
|
|
display_in_profile_summary = False
|
2018-03-31 07:30:24 +02:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
try_update_realm_custom_profile_field(
|
2024-03-30 09:56:26 +01:00
|
|
|
realm=realm,
|
|
|
|
field=field,
|
|
|
|
name=name,
|
|
|
|
hint=hint,
|
|
|
|
display_in_profile_summary=display_in_profile_summary,
|
2022-07-12 21:04:47 +02:00
|
|
|
)
|
2021-03-26 09:51:43 +01:00
|
|
|
check_custom_profile_fields("events[0]", events[0])
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_remove_realm_custom_profile_field(realm, field)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_custom_profile_fields("events[0]", events[0])
|
2017-03-17 10:07:22 +01:00
|
|
|
|
2022-10-27 19:05:10 +02:00
|
|
|
def test_pronouns_type_support_in_custom_profile_fields_events(self) -> None:
|
|
|
|
realm = self.user_profile.realm
|
|
|
|
field = CustomProfileField.objects.get(realm=realm, name="Pronouns")
|
|
|
|
name = field.name
|
|
|
|
hint = "What pronouns should people use for you?"
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(pronouns_field_type_supported=True) as events:
|
|
|
|
try_update_realm_custom_profile_field(realm, field, name, hint=hint)
|
2022-10-27 19:05:10 +02:00
|
|
|
check_custom_profile_fields("events[0]", events[0])
|
2023-07-22 00:34:11 +02:00
|
|
|
[pronouns_field] = (
|
2022-10-27 19:05:10 +02:00
|
|
|
field_obj for field_obj in events[0]["fields"] if field_obj["id"] == field.id
|
2023-07-22 00:34:11 +02:00
|
|
|
)
|
2022-10-27 19:05:10 +02:00
|
|
|
self.assertEqual(pronouns_field["type"], CustomProfileField.PRONOUNS)
|
|
|
|
|
|
|
|
hint = "What pronouns should people use to refer you?"
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(pronouns_field_type_supported=False) as events:
|
|
|
|
try_update_realm_custom_profile_field(realm=realm, field=field, name=name, hint=hint)
|
2022-10-27 19:05:10 +02:00
|
|
|
check_custom_profile_fields("events[0]", events[0])
|
2023-07-22 00:34:11 +02:00
|
|
|
[pronouns_field] = (
|
2022-10-27 19:05:10 +02:00
|
|
|
field_obj for field_obj in events[0]["fields"] if field_obj["id"] == field.id
|
2023-07-22 00:34:11 +02:00
|
|
|
)
|
2022-10-27 19:05:10 +02:00
|
|
|
self.assertEqual(pronouns_field["type"], CustomProfileField.SHORT_TEXT)
|
|
|
|
|
2018-07-09 11:49:08 +02:00
|
|
|
def test_custom_profile_field_data_events(self) -> None:
|
2019-03-07 21:29:16 +01:00
|
|
|
field_id = self.user_profile.realm.customprofilefield_set.get(
|
2021-02-12 08:20:45 +01:00
|
|
|
realm=self.user_profile.realm, name="Biography"
|
2021-02-12 08:19:30 +01:00
|
|
|
).id
|
2022-07-08 17:17:46 +02:00
|
|
|
field: ProfileDataElementUpdateDict = {
|
2018-07-09 11:49:08 +02:00
|
|
|
"id": field_id,
|
|
|
|
"value": "New value",
|
|
|
|
}
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_update_user_custom_profile_data_if_changed(self.user_profile, [field])
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
events[0]["person"]["custom_profile_field"].keys(), {"id", "value", "rendered_value"}
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
)
|
2018-07-09 11:49:08 +02:00
|
|
|
|
2018-08-09 14:02:32 +02:00
|
|
|
# Test we pass correct stringify value in custom-user-field data event
|
2019-03-07 21:29:16 +01:00
|
|
|
field_id = self.user_profile.realm.customprofilefield_set.get(
|
2021-02-12 08:20:45 +01:00
|
|
|
realm=self.user_profile.realm, name="Mentor"
|
2021-02-12 08:19:30 +01:00
|
|
|
).id
|
2018-08-09 14:02:32 +02:00
|
|
|
field = {
|
|
|
|
"id": field_id,
|
|
|
|
"value": [self.example_user("ZOE").id],
|
|
|
|
}
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_update_user_custom_profile_data_if_changed(self.user_profile, [field])
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
|
|
|
self.assertEqual(events[0]["person"]["custom_profile_field"].keys(), {"id", "value"})
|
2018-08-09 14:02:32 +02:00
|
|
|
|
2023-07-31 19:39:57 +02:00
|
|
|
# Test event for removing custom profile data
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
2024-08-15 16:38:12 +02:00
|
|
|
check_remove_custom_profile_field_value(
|
|
|
|
self.user_profile, field_id, acting_user=self.user_profile
|
|
|
|
)
|
2023-07-31 19:39:57 +02:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
|
|
|
self.assertEqual(events[0]["person"]["custom_profile_field"].keys(), {"id", "value"})
|
|
|
|
|
2023-10-11 09:34:26 +02:00
|
|
|
# Test event for updating custom profile data for guests.
|
|
|
|
self.set_up_db_for_testing_user_access()
|
|
|
|
self.user_profile = self.example_user("polonius")
|
|
|
|
field = {
|
|
|
|
"id": field_id,
|
|
|
|
"value": "New value",
|
|
|
|
}
|
|
|
|
cordelia = self.example_user("cordelia")
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=0, state_change_expected=False) as events:
|
|
|
|
do_update_user_custom_profile_data_if_changed(cordelia, [field])
|
2023-10-11 09:34:26 +02:00
|
|
|
|
|
|
|
hamlet = self.example_user("hamlet")
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_update_user_custom_profile_data_if_changed(hamlet, [field])
|
2023-10-11 09:34:26 +02:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
|
|
|
self.assertEqual(events[0]["person"]["custom_profile_field"].keys(), {"id", "value"})
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_presence_events(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(slim_presence=False) as events:
|
|
|
|
do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2020-08-13 19:29:07 +02:00
|
|
|
|
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
|
|
|
)
|
2020-02-02 17:29:05 +01:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(slim_presence=True) as events:
|
|
|
|
do_update_user_presence(
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("cordelia"),
|
2020-06-27 17:32:39 +02:00
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
2020-06-11 16:03:47 +02:00
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2017-04-25 11:50:30 +02:00
|
|
|
|
2020-08-13 19:29:07 +02:00
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=False,
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
|
|
|
)
|
2020-02-03 17:09:18 +01:00
|
|
|
|
2020-08-13 19:29:07 +02:00
|
|
|
def test_presence_events_multiple_clients(self) -> None:
|
2020-06-11 16:03:47 +02:00
|
|
|
now = timezone_now()
|
2023-11-19 19:45:19 +01:00
|
|
|
initial_presence = now - timedelta(days=365)
|
2020-06-11 16:03:47 +02:00
|
|
|
UserPresence.objects.create(
|
|
|
|
user_profile=self.user_profile,
|
|
|
|
realm=self.user_profile.realm,
|
|
|
|
last_active_time=initial_presence,
|
|
|
|
last_connected_time=initial_presence,
|
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.api_post(
|
|
|
|
self.user_profile,
|
|
|
|
"/api/v1/users/me/presence",
|
2021-02-12 08:20:45 +01:00
|
|
|
{"status": "idle"},
|
2021-02-12 08:19:30 +01:00
|
|
|
HTTP_USER_AGENT="ZulipAndroid/1.0",
|
|
|
|
)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action():
|
|
|
|
do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=False, num_events=0):
|
|
|
|
do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("ZulipAndroid/1.0"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_IDLE_INT,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
|
|
|
with self.verify_action() as events:
|
|
|
|
do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("ZulipAndroid/1.0"),
|
2023-11-19 19:45:19 +01:00
|
|
|
timezone_now() + timedelta(seconds=301),
|
2020-06-11 16:03:47 +02:00
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-08-13 19:29:07 +02:00
|
|
|
|
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=True,
|
2020-06-11 16:03:47 +02:00
|
|
|
# We no longer store information about the client and we simply
|
|
|
|
# set the field to 'website' for backwards compatibility.
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
2020-08-13 19:29:07 +02:00
|
|
|
)
|
2017-03-24 05:26:32 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_register_events(self) -> None:
|
2024-03-29 13:00:12 +01:00
|
|
|
realm = self.user_profile.realm
|
|
|
|
realm.signup_announcements_stream = get_stream("core team", realm)
|
|
|
|
realm.save(update_fields=["signup_announcements_stream"])
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=5) as events:
|
|
|
|
self.register("test1@zulip.com", "test1")
|
2021-08-12 12:15:06 +02:00
|
|
|
self.assert_length(events, 5)
|
2021-04-29 17:22:48 +02:00
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_realm_user_add("events[0]", events[0])
|
2018-12-06 23:17:46 +01:00
|
|
|
new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
|
2020-03-12 14:17:25 +01:00
|
|
|
self.assertEqual(new_user_profile.delivery_email, "test1@zulip.com")
|
2018-12-06 23:17:46 +01:00
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_subscription_peer_add("events[3]", events[3])
|
2021-04-29 17:22:48 +02:00
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_message("events[4]", events[4])
|
2021-04-29 17:22:48 +02:00
|
|
|
self.assertIn(
|
2024-02-06 09:26:58 +01:00
|
|
|
f'data-user-id="{new_user_profile.id}">test1_zulip.com</span> joined this organization.',
|
2024-05-15 19:24:37 +02:00
|
|
|
events[4]["message"]["content"],
|
2021-04-29 17:22:48 +02:00
|
|
|
)
|
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_user_group_add_members("events[1]", events[1])
|
2022-08-15 15:54:50 +02:00
|
|
|
check_user_group_add_members("events[2]", events[2])
|
2021-08-12 12:15:06 +02:00
|
|
|
|
2018-12-06 23:17:46 +01:00
|
|
|
def test_register_events_email_address_visibility(self) -> None:
|
2021-10-26 09:15:16 +02:00
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=self.user_profile.realm)
|
|
|
|
do_set_realm_user_default_setting(
|
|
|
|
realm_user_default,
|
2021-02-12 08:19:30 +01:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_ADMINS,
|
2021-03-01 11:33:24 +01:00
|
|
|
acting_user=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2024-03-29 13:00:12 +01:00
|
|
|
realm = self.user_profile.realm
|
|
|
|
realm.signup_announcements_stream = get_stream("core team", realm)
|
|
|
|
realm.save(update_fields=["signup_announcements_stream"])
|
2018-12-06 23:17:46 +01:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=5) as events:
|
|
|
|
self.register("test1@zulip.com", "test1")
|
2021-08-12 12:15:06 +02:00
|
|
|
self.assert_length(events, 5)
|
2024-05-15 19:24:37 +02:00
|
|
|
check_realm_user_add("events[0]", events[0])
|
2018-12-06 23:17:46 +01:00
|
|
|
new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
|
2020-06-10 06:41:04 +02:00
|
|
|
self.assertEqual(new_user_profile.email, f"user{new_user_profile.id}@zulip.testserver")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_subscription_peer_add("events[3]", events[3])
|
2021-04-29 17:22:48 +02:00
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_message("events[4]", events[4])
|
2021-04-29 17:22:48 +02:00
|
|
|
self.assertIn(
|
2024-02-06 09:26:58 +01:00
|
|
|
f'data-user-id="{new_user_profile.id}">test1_zulip.com</span> joined this organization',
|
2024-05-15 19:24:37 +02:00
|
|
|
events[4]["message"]["content"],
|
2021-04-29 17:22:48 +02:00
|
|
|
)
|
|
|
|
|
2024-05-15 19:24:37 +02:00
|
|
|
check_user_group_add_members("events[1]", events[1])
|
2022-08-15 15:54:50 +02:00
|
|
|
check_user_group_add_members("events[2]", events[2])
|
2021-08-12 12:15:06 +02:00
|
|
|
|
2023-11-03 04:39:40 +01:00
|
|
|
def test_register_events_for_restricted_users(self) -> None:
|
|
|
|
self.set_up_db_for_testing_user_access()
|
|
|
|
self.user_profile = self.example_user("polonius")
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=3) as events:
|
|
|
|
self.register("test1@zulip.com", "test1")
|
2023-11-03 04:39:40 +01:00
|
|
|
|
|
|
|
check_realm_user_add("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["person"]["full_name"], "Unknown user")
|
|
|
|
|
|
|
|
check_user_group_add_members("events[1]", events[1])
|
|
|
|
check_user_group_add_members("events[2]", events[2])
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=2, user_list_incomplete=True) as events:
|
|
|
|
self.register("alice@zulip.com", "alice")
|
2023-10-24 19:47:39 +02:00
|
|
|
|
|
|
|
check_user_group_add_members("events[0]", events[0])
|
|
|
|
check_user_group_add_members("events[1]", events[1])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_alert_words_events(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_add_alert_words(self.user_profile, ["alert_word"])
|
2021-02-12 08:20:45 +01:00
|
|
|
check_alert_words("events[0]", events[0])
|
2014-03-06 17:07:43 +01:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_remove_alert_words(self.user_profile, ["alert_word"])
|
2021-02-12 08:20:45 +01:00
|
|
|
check_alert_words("events[0]", events[0])
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2024-09-24 17:01:58 +02:00
|
|
|
def test_saved_replies_events(self) -> None:
|
|
|
|
with self.verify_action() as events:
|
|
|
|
do_create_saved_snippet("Welcome message", "Welcome", self.user_profile)
|
|
|
|
check_saved_snippet_add("events[0]", events[0])
|
|
|
|
|
|
|
|
saved_snippet_id = (
|
|
|
|
SavedSnippet.objects.filter(user_profile=self.user_profile).order_by("id")[0].id
|
|
|
|
)
|
|
|
|
with self.verify_action() as events:
|
|
|
|
do_delete_saved_snippet(saved_snippet_id, self.user_profile)
|
|
|
|
check_saved_snippet_remove("events[0]", events[0])
|
|
|
|
|
2018-12-18 17:17:08 +01:00
|
|
|
def test_away_events(self) -> None:
|
|
|
|
client = get_client("website")
|
2022-09-22 11:56:58 +02:00
|
|
|
|
2023-04-08 15:52:48 +02:00
|
|
|
# Updating user status to away activates the codepath of disabling
|
|
|
|
# the presence_enabled user setting. Correctly simulating the presence
|
|
|
|
# event status for a typical user requires settings the user's date_joined
|
|
|
|
# further into the past. See test_change_presence_enabled for more details,
|
|
|
|
# since it tests that codepath directly.
|
2023-11-19 19:45:19 +01:00
|
|
|
self.user_profile.date_joined = timezone_now() - timedelta(days=15)
|
2023-04-08 15:52:48 +02:00
|
|
|
self.user_profile.save()
|
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Set all
|
|
|
|
away_val = True
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=4) as events:
|
|
|
|
do_update_user_status(
|
2020-06-27 17:32:39 +02:00
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-02-12 08:20:45 +01:00
|
|
|
status_text="out to lunch",
|
2021-06-22 18:42:31 +02:00
|
|
|
emoji_name="car",
|
|
|
|
emoji_code="1f697",
|
|
|
|
reaction_type=UserStatus.UNICODE_EMOJI,
|
2021-02-12 08:19:30 +01:00
|
|
|
client_id=client.id,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
|
2024-07-31 15:36:00 +02:00
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
check_update_global_notifications("events[1]", events[1], not away_val)
|
2021-06-22 18:42:31 +02:00
|
|
|
check_user_status(
|
2024-07-31 15:36:00 +02:00
|
|
|
"events[2]",
|
|
|
|
events[2],
|
2021-06-22 18:42:31 +02:00
|
|
|
{"away", "status_text", "emoji_name", "emoji_code", "reaction_type"},
|
|
|
|
)
|
2022-09-22 11:56:58 +02:00
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
|
|
|
|
|
|
|
# Remove all
|
|
|
|
away_val = False
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=4) as events:
|
|
|
|
do_update_user_status(
|
2021-06-22 18:42:31 +02:00
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-06-22 18:42:31 +02:00
|
|
|
status_text="",
|
|
|
|
emoji_name="",
|
|
|
|
emoji_code="",
|
|
|
|
reaction_type=UserStatus.UNICODE_EMOJI,
|
|
|
|
client_id=client.id,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
|
2024-07-31 15:36:00 +02:00
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
check_update_global_notifications("events[1]", events[1], not away_val)
|
2021-06-22 18:42:31 +02:00
|
|
|
check_user_status(
|
2024-07-31 15:36:00 +02:00
|
|
|
"events[2]",
|
|
|
|
events[2],
|
2021-06-22 18:42:31 +02:00
|
|
|
{"away", "status_text", "emoji_name", "emoji_code", "reaction_type"},
|
|
|
|
)
|
2022-09-22 11:56:58 +02:00
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Only set away
|
|
|
|
away_val = True
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=4) as events:
|
|
|
|
do_update_user_status(
|
2021-06-22 18:42:31 +02:00
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-06-22 18:42:31 +02:00
|
|
|
status_text=None,
|
|
|
|
emoji_name=None,
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
|
|
|
client_id=client.id,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
2024-07-31 15:36:00 +02:00
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
check_update_global_notifications("events[1]", events[1], not away_val)
|
|
|
|
check_user_status("events[2]", events[2], {"away"})
|
2022-09-22 11:56:58 +02:00
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Only set status_text
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_update_user_status(
|
2021-01-20 19:53:11 +01:00
|
|
|
user_profile=self.user_profile,
|
|
|
|
away=None,
|
|
|
|
status_text="at the beach",
|
2021-06-22 18:42:31 +02:00
|
|
|
emoji_name=None,
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
client_id=client.id,
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
|
|
|
check_user_status("events[0]", events[0], {"status_text"})
|
2018-12-18 17:17:08 +01:00
|
|
|
|
2023-10-17 12:56:39 +02:00
|
|
|
self.set_up_db_for_testing_user_access()
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
self.user_profile = self.example_user("polonius")
|
|
|
|
|
|
|
|
# Set the date_joined for cordelia here like we did at
|
|
|
|
# the start of this test.
|
2023-11-19 19:45:19 +01:00
|
|
|
cordelia.date_joined = timezone_now() - timedelta(days=15)
|
2023-10-17 12:56:39 +02:00
|
|
|
cordelia.save()
|
|
|
|
|
|
|
|
away_val = False
|
2024-07-14 20:30:42 +02:00
|
|
|
with (
|
|
|
|
self.settings(CAN_ACCESS_ALL_USERS_GROUP_LIMITS_PRESENCE=True),
|
|
|
|
self.verify_action(num_events=0, state_change_expected=False) as events,
|
|
|
|
):
|
|
|
|
do_update_user_status(
|
|
|
|
user_profile=cordelia,
|
|
|
|
away=away_val,
|
|
|
|
status_text="out to lunch",
|
|
|
|
emoji_name="car",
|
|
|
|
emoji_code="1f697",
|
|
|
|
reaction_type=UserStatus.UNICODE_EMOJI,
|
|
|
|
client_id=client.id,
|
|
|
|
)
|
2023-10-17 12:56:39 +02:00
|
|
|
|
|
|
|
away_val = True
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=1, state_change_expected=True) as events:
|
|
|
|
do_update_user_status(
|
2023-10-17 12:56:39 +02:00
|
|
|
user_profile=cordelia,
|
|
|
|
away=away_val,
|
|
|
|
status_text="at the beach",
|
|
|
|
emoji_name=None,
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
|
|
|
client_id=client.id,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2023-10-17 12:56:39 +02:00
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=True,
|
|
|
|
# We no longer store information about the client and we simply
|
|
|
|
# set the field to 'website' for backwards compatibility.
|
|
|
|
presence_key="website",
|
|
|
|
status="idle",
|
|
|
|
)
|
|
|
|
|
2017-11-14 07:31:31 +01:00
|
|
|
    def test_user_group_events(self) -> None:
        """Exercise the full lifecycle of a user group and check the event
        emitted at each step: creation (with both default and explicit
        permission settings), name/description/permission updates,
        membership and subgroup changes, and deactivation.
        """
        othello = self.example_user("othello")

        # Create a group with default permission settings.
        with self.verify_action() as events:
            check_add_user_group(
                self.user_profile.realm, "backend", [othello], "Backend team", acting_user=othello
            )
        check_user_group_add("events[0]", events[0])
        # can_manage_group defaults to an anonymous group; 12 is presumably
        # othello's user id in the test database -- TODO confirm.
        self.assertEqual(
            events[0]["group"]["can_manage_group"],
            AnonymousSettingGroupDict(direct_members=[12], direct_subgroups=[]),
        )
        everyone_group = NamedUserGroup.objects.get(
            name=SystemGroups.EVERYONE, realm=self.user_profile.realm, is_system_group=True
        )
        self.assertEqual(events[0]["group"]["can_mention_group"], everyone_group.id)
        moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS, realm=self.user_profile.realm, is_system_group=True
        )
        user_group = self.create_or_update_anonymous_group_for_setting(
            [othello], [moderators_group]
        )

        # Create a group with explicit anonymous-group permission settings.
        with self.verify_action() as events:
            check_add_user_group(
                self.user_profile.realm,
                "frontend",
                [othello],
                "",
                {"can_manage_group": user_group, "can_mention_group": user_group},
                acting_user=othello,
            )
        check_user_group_add("events[0]", events[0])
        self.assertEqual(
            events[0]["group"]["can_manage_group"],
            AnonymousSettingGroupDict(
                direct_members=[othello.id], direct_subgroups=[moderators_group.id]
            ),
        )
        self.assertEqual(
            events[0]["group"]["can_mention_group"],
            AnonymousSettingGroupDict(
                direct_members=[othello.id], direct_subgroups=[moderators_group.id]
            ),
        )

        # Test name update
        backend = NamedUserGroup.objects.get(name="backend")
        with self.verify_action() as events:
            do_update_user_group_name(backend, "backendteam", acting_user=None)
        check_user_group_update("events[0]", events[0], "name")

        # Test description update
        description = "Backend team to deal with backend code."
        with self.verify_action() as events:
            do_update_user_group_description(backend, description, acting_user=None)
        check_user_group_update("events[0]", events[0], "description")

        # Test can_mention_group setting update
        with self.verify_action() as events:
            do_change_user_group_permission_setting(
                backend,
                "can_mention_group",
                moderators_group,
                old_setting_api_value=everyone_group.id,
                acting_user=None,
            )
        check_user_group_update("events[0]", events[0], "can_mention_group")
        # A named system group is sent as a plain group id in the event data.
        self.assertEqual(events[0]["data"]["can_mention_group"], moderators_group.id)

        # Updating to an anonymous group instead sends the full membership dict.
        setting_group = self.create_or_update_anonymous_group_for_setting(
            [othello], [moderators_group]
        )
        with self.verify_action() as events:
            do_change_user_group_permission_setting(
                backend,
                "can_mention_group",
                setting_group,
                old_setting_api_value=moderators_group.id,
                acting_user=None,
            )
        check_user_group_update("events[0]", events[0], "can_mention_group")
        self.assertEqual(
            events[0]["data"]["can_mention_group"],
            AnonymousSettingGroupDict(
                direct_members=[othello.id], direct_subgroups=[moderators_group.id]
            ),
        )

        # Test add members
        hamlet = self.example_user("hamlet")
        with self.verify_action() as events:
            bulk_add_members_to_user_groups([backend], [hamlet.id], acting_user=None)
        check_user_group_add_members("events[0]", events[0])

        # Test remove members
        hamlet = self.example_user("hamlet")
        with self.verify_action() as events:
            bulk_remove_members_from_user_groups([backend], [hamlet.id], acting_user=None)

        check_user_group_remove_members("events[0]", events[0])

        api_design = check_add_user_group(
            hamlet.realm, "api-design", [hamlet], description="API design team", acting_user=othello
        )

        # Test add subgroups
        with self.verify_action() as events:
            add_subgroups_to_user_group(backend, [api_design], acting_user=None)
        check_user_group_add_subgroups("events[0]", events[0])

        # Test remove subgroups
        with self.verify_action() as events:
            remove_subgroups_from_user_group(backend, [api_design], acting_user=None)
        check_user_group_remove_subgroups("events[0]", events[0])

        # Test deactivate event
        with self.verify_action() as events:
            do_deactivate_user_group(backend, acting_user=None)
        # Clients not requesting deactivated groups see a plain "remove" event.
        check_user_group_remove("events[0]", events[0])

        # Clients that include deactivated groups instead get an update event
        # flipping the "deactivated" flag.
        with self.verify_action(include_deactivated_groups=True) as events:
            do_deactivate_user_group(api_design, acting_user=None)
        check_user_group_update("events[0]", events[0], "deactivated")

        # Updates to an already-deactivated group are not sent to clients
        # that excluded deactivated groups...
        with self.verify_action(num_events=0, state_change_expected=False):
            do_update_user_group_name(api_design, "api-deisgn-team", acting_user=None)

        # ...but are sent to clients that included them.
        with self.verify_action(include_deactivated_groups=True) as events:
            do_update_user_group_name(api_design, "api-deisgn", acting_user=None)
        check_user_group_update("events[0]", events[0], "name")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_default_stream_groups_events(self) -> None:
        """Every mutation of a default stream group (create, add/remove
        streams, rename, change description, delete) sends a single
        default_stream_groups event carrying the new full state.
        """
        streams = [
            get_stream(stream_name, self.user_profile.realm)
            for stream_name in ["Scotland", "Rome", "Denmark"]
        ]

        # Creation.
        with self.verify_action() as events:
            do_create_default_stream_group(
                self.user_profile.realm, "group1", "This is group1", streams
            )
        check_default_stream_groups("events[0]", events[0])

        # Adding a stream to the group.
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
        venice_stream = get_stream("Venice", self.user_profile.realm)
        with self.verify_action() as events:
            do_add_streams_to_default_stream_group(self.user_profile.realm, group, [venice_stream])
        check_default_stream_groups("events[0]", events[0])

        # Removing a stream from the group.
        with self.verify_action() as events:
            do_remove_streams_from_default_stream_group(
                self.user_profile.realm, group, [venice_stream]
            )
        check_default_stream_groups("events[0]", events[0])

        # Changing the description.
        with self.verify_action() as events:
            do_change_default_stream_group_description(
                self.user_profile.realm, group, "New description"
            )
        check_default_stream_groups("events[0]", events[0])

        # Renaming the group.
        with self.verify_action() as events:
            do_change_default_stream_group_name(self.user_profile.realm, group, "New group name")
        check_default_stream_groups("events[0]", events[0])

        # Deleting the group.
        with self.verify_action() as events:
            do_remove_default_stream_group(self.user_profile.realm, group)
        check_default_stream_groups("events[0]", events[0])
|
2017-11-01 18:20:34 +01:00
|
|
|
|
2019-03-01 01:26:57 +01:00
|
|
|
def test_default_stream_group_events_guest(self) -> None:
|
2023-07-31 22:52:35 +02:00
|
|
|
streams = [
|
|
|
|
get_stream(stream_name, self.user_profile.realm)
|
|
|
|
for stream_name in ["Scotland", "Rome", "Denmark"]
|
|
|
|
]
|
2019-03-01 01:26:57 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
do_create_default_stream_group(self.user_profile.realm, "group1", "This is group1", streams)
|
2019-03-01 01:26:57 +01:00
|
|
|
group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
|
|
|
|
|
2021-03-27 05:13:46 +01:00
|
|
|
do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST, acting_user=None)
|
2019-03-01 01:26:57 +01:00
|
|
|
venice_stream = get_stream("Venice", self.user_profile.realm)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=False, num_events=0):
|
|
|
|
do_add_streams_to_default_stream_group(self.user_profile.realm, group, [venice_stream])
|
2019-03-01 01:26:57 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_default_streams_events(self) -> None:
|
2017-01-30 04:23:08 +01:00
|
|
|
stream = get_stream("Scotland", self.user_profile.realm)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_add_default_stream(stream)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_default_streams("events[0]", events[0])
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_remove_default_stream(stream)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_default_streams("events[0]", events[0])
|
2016-05-20 22:08:42 +02:00
|
|
|
|
2019-03-01 01:26:57 +01:00
|
|
|
def test_default_streams_events_guest(self) -> None:
|
2021-03-27 05:13:46 +01:00
|
|
|
do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST, acting_user=None)
|
2019-03-01 01:26:57 +01:00
|
|
|
stream = get_stream("Scotland", self.user_profile.realm)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=False, num_events=0):
|
|
|
|
do_add_default_stream(stream)
|
|
|
|
with self.verify_action(state_change_expected=False, num_events=0):
|
|
|
|
do_remove_default_stream(stream)
|
2019-03-01 01:26:57 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_muted_topics_events(self) -> None:
        """Muting/unmuting a topic sends both the legacy muted_topics event
        and the newer user_topic event; clients registering only for the
        modern event types get just user_topic.
        """
        stream = get_stream("Denmark", self.user_profile.realm)

        # Muting sends two events: muted_topics (legacy) then user_topic.
        with self.verify_action(num_events=2) as events:
            do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.MUTED,
            )
        check_muted_topics("events[0]", events[0])
        check_user_topic("events[1]", events[1])

        # Resetting to INHERIT (i.e. unmuting) sends the same pair.
        with self.verify_action(num_events=2) as events:
            do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.INHERIT,
            )
        check_muted_topics("events[0]", events[0])
        check_user_topic("events[1]", events[1])

        # When the client registers for both event types explicitly, only
        # user_topic is delivered.
        with self.verify_action(event_types=["muted_topics", "user_topic"]) as events:
            do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.MUTED,
            )
        check_user_topic("events[0]", events[0])
|
2017-03-24 05:32:50 +01:00
|
|
|
|
user_topics: Refactor add_topic_mute.
In order to support different types of topic visibility policies,
this renames 'add_topic_mute' to
'set_user_topic_visibility_policy_in_database'
and refactors it to accept a parameter 'visibility_policy'.
Create a corresponding UserTopic row for any visibility policy,
not just muting topics.
When a UserTopic row for (user_profile, stream, topic, recipient_id)
exists already, it updates the row with the new visibility_policy.
In the event of a duplicate request, raises a JsonableError.
i.e., new_visibility_policy == existing_visibility_policy.
There is an increase in the database query count in the message-edit
code path.
Reason:
Earlier, 'add_topic_mute' used 'bulk_create' which either
creates or raises IntegrityError -- 1 query.
Now, 'set_user_topic_visibility_policy' uses get_or_create
-- 2 queries in the case of creating new row.
We can't use the previous approach, because now we have to
handle the case of updating the visibility_policy too.
Also, using bulk_* for a single row is not the correct way.
Co-authored-by: Kartik Srivastava <kaushiksri0908@gmail.com>
Co-authored-by: Prakhar Pratyush <prakhar841301@gmail.com>
2022-09-12 16:39:53 +02:00
|
|
|
def test_unmuted_topics_events(self) -> None:
|
|
|
|
stream = get_stream("Denmark", self.user_profile.realm)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=2) as events:
|
|
|
|
do_set_user_topic_visibility_policy(
|
2023-03-12 16:19:42 +01:00
|
|
|
self.user_profile,
|
|
|
|
stream,
|
|
|
|
"topic",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.UNMUTED,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
user_topics: Refactor add_topic_mute.
In order to support different types of topic visibility policies,
this renames 'add_topic_mute' to
'set_user_topic_visibility_policy_in_database'
and refactors it to accept a parameter 'visibility_policy'.
Create a corresponding UserTopic row for any visibility policy,
not just muting topics.
When a UserTopic row for (user_profile, stream, topic, recipient_id)
exists already, it updates the row with the new visibility_policy.
In the event of a duplicate request, raises a JsonableError.
i.e., new_visibility_policy == existing_visibility_policy.
There is an increase in the database query count in the message-edit
code path.
Reason:
Earlier, 'add_topic_mute' used 'bulk_create' which either
creates or raises IntegrityError -- 1 query.
Now, 'set_user_topic_visibility_policy' uses get_or_create
-- 2 queries in the case of creating new row.
We can't use the previous approach, because now we have to
handle the case of updating the visibility_policy too.
Also, using bulk_* for a single row is not the correct way.
Co-authored-by: Kartik Srivastava <kaushiksri0908@gmail.com>
Co-authored-by: Prakhar Pratyush <prakhar841301@gmail.com>
2022-09-12 16:39:53 +02:00
|
|
|
check_muted_topics("events[0]", events[0])
|
|
|
|
check_user_topic("events[1]", events[1])
|
|
|
|
|
2021-03-27 12:23:32 +01:00
|
|
|
def test_muted_users_events(self) -> None:
|
|
|
|
muted_user = self.example_user("othello")
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=1) as events:
|
|
|
|
do_mute_user(self.user_profile, muted_user)
|
2022-05-13 03:50:40 +02:00
|
|
|
check_muted_users("events[0]", events[0])
|
2021-03-27 12:23:32 +01:00
|
|
|
|
2021-04-08 06:20:43 +02:00
|
|
|
mute_object = get_mute_object(self.user_profile, muted_user)
|
|
|
|
assert mute_object is not None
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_unmute_user(mute_object)
|
2021-03-27 12:23:32 +01:00
|
|
|
check_muted_users("events[0]", events[0])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_change_avatar_fields(self) -> None:
        """Changing a user's avatar source sends a realm_user update event
        with avatar fields; users without access to the modified user get
        no event.
        """
        # Switching to an uploaded avatar: URLs are concrete strings.
        with self.verify_action() as events:
            do_change_avatar_fields(
                self.user_profile, UserProfile.AVATAR_FROM_USER, acting_user=self.user_profile
            )
        check_realm_user_update("events[0]", events[0], "avatar_fields")
        assert isinstance(events[0]["person"]["avatar_url"], str)
        assert isinstance(events[0]["person"]["avatar_url_medium"], str)

        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=self.user_profile,
        )
        # Switching back to gravatar: the event carries None for the URLs.
        with self.verify_action() as events:
            do_change_avatar_fields(
                self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR, acting_user=self.user_profile
            )
        check_realm_user_update("events[0]", events[0], "avatar_fields")
        self.assertEqual(events[0]["person"]["avatar_url"], None)
        self.assertEqual(events[0]["person"]["avatar_url_medium"], None)

        # A user (polonius) without access to cordelia gets no event when
        # cordelia's avatar changes.
        self.set_up_db_for_testing_user_access()
        self.user_profile = self.example_user("polonius")
        cordelia = self.example_user("cordelia")
        with self.verify_action(num_events=0, state_change_expected=False) as events:
            do_change_avatar_fields(
                cordelia, UserProfile.AVATAR_FROM_GRAVATAR, acting_user=cordelia
            )
|
2023-10-11 09:34:26 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_change_full_name(self) -> None:
        """Changing a user's full name sends a realm_user update event and
        writes a RealmAuditLog row; a no-op rename sends neither.
        """
        now = timezone_now()
        with self.verify_action() as events:
            do_change_full_name(self.user_profile, "Sir Hamlet", self.user_profile)
        check_realm_user_update("events[0]", events[0], "full_name")
        # Exactly one audit-log row for this change.
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=AuditLogEventType.USER_FULL_NAME_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )

        # Verify no operation if the value isn't changing.
        with self.verify_action(num_events=0, state_change_expected=False):
            do_change_full_name(self.user_profile, "Sir Hamlet", self.user_profile)
        # Count is unchanged -- the no-op rename added no audit-log row.
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=AuditLogEventType.USER_FULL_NAME_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )

        # A user (polonius) without access to cordelia gets no event when
        # cordelia's name changes.
        self.set_up_db_for_testing_user_access()
        cordelia = self.example_user("cordelia")
        self.user_profile = self.example_user("polonius")
        with self.verify_action(num_events=0, state_change_expected=False):
            do_change_full_name(cordelia, "Cordelia", self.user_profile)
|
2023-10-11 09:34:26 +02:00
|
|
|
|
2023-09-07 00:29:46 +02:00
|
|
|
    def test_change_user_delivery_email_email_address_visibility_admins(self) -> None:
        """With email visibility restricted to admins, changing the delivery
        email sends delivery_email and avatar_fields updates (the avatar
        changes because gravatar is keyed on the email).
        """
        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            acting_user=None,
        )
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        with self.verify_action(num_events=2, client_gravatar=False) as events:
            do_change_user_delivery_email(
                self.user_profile, "newhamlet@zulip.com", acting_user=self.user_profile
            )

        check_realm_user_update("events[0]", events[0], "delivery_email")
        check_realm_user_update("events[1]", events[1], "avatar_fields")
        # With client_gravatar=False, avatar URLs are concrete strings.
        assert isinstance(events[1]["person"]["avatar_url"], str)
        assert isinstance(events[1]["person"]["avatar_url_medium"], str)
|
2018-08-02 08:47:13 +02:00
|
|
|
|
2021-10-05 15:31:11 +02:00
|
|
|
    def test_change_user_delivery_email_email_address_visibility_everyone(self) -> None:
        """With email visibility set to everyone, a delivery-email change
        additionally updates the public "email" field, producing a third
        event; inaccessible users generate no events.
        """
        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        with self.verify_action(num_events=3, client_gravatar=False) as events:
            do_change_user_delivery_email(
                self.user_profile, "newhamlet@zulip.com", acting_user=self.user_profile
            )

        check_realm_user_update("events[0]", events[0], "delivery_email")
        check_realm_user_update("events[1]", events[1], "avatar_fields")
        check_realm_user_update("events[2]", events[2], "email")
        assert isinstance(events[1]["person"]["avatar_url"], str)
        assert isinstance(events[1]["person"]["avatar_url_medium"], str)

        # Reset hamlet's email to original email.
        do_change_user_delivery_email(
            self.user_profile, "hamlet@zulip.com", acting_user=self.user_profile
        )

        # A user (polonius) without access to cordelia gets no event when
        # cordelia's email changes, even with visibility set to everyone.
        self.set_up_db_for_testing_user_access()
        cordelia = self.example_user("cordelia")
        do_change_user_setting(
            cordelia,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )
        self.user_profile = self.example_user("polonius")
        with self.verify_action(num_events=0, state_change_expected=False):
            do_change_user_delivery_email(cordelia, "newcordelia@zulip.com", acting_user=None)
|
2023-10-11 09:34:26 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_change_realm_authentication_methods(self) -> None:
        """Each change to the realm's enabled authentication methods sends
        a realm update_dict event.
        """

        def fake_backends() -> Any:
            # Enable a fixed superset of backends in settings so that every
            # auth_method_dict below is a valid realm-level configuration.
            backends = (
                "zproject.backends.DevAuthBackend",
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.GitHubAuthBackend",
                "zproject.backends.GoogleAuthBackend",
                "zproject.backends.ZulipLDAPAuthBackend",
            )
            return self.settings(AUTHENTICATION_BACKENDS=backends)

        # Test transitions; any new backends should be tested with T/T/T/F/T
        for auth_method_dict in (
            {"Google": True, "Email": True, "GitHub": True, "LDAP": False, "Dev": False},
            {"Google": True, "Email": True, "GitHub": False, "LDAP": False, "Dev": False},
            {"Google": True, "Email": False, "GitHub": False, "LDAP": False, "Dev": False},
            {"Google": True, "Email": False, "GitHub": True, "LDAP": False, "Dev": False},
            {"Google": False, "Email": False, "GitHub": False, "LDAP": False, "Dev": True},
            {"Google": False, "Email": False, "GitHub": True, "LDAP": False, "Dev": True},
            {"Google": False, "Email": True, "GitHub": True, "LDAP": True, "Dev": False},
        ):
            with fake_backends(), self.verify_action() as events:
                do_set_realm_authentication_methods(
                    self.user_profile.realm,
                    auth_method_dict,
                    acting_user=None,
                )

            check_realm_update_dict("events[0]", events[0])
|
2016-11-02 21:51:56 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_pin_stream(self) -> None:
|
2017-03-05 01:30:48 +01:00
|
|
|
stream = get_stream("Denmark", self.user_profile.realm)
|
|
|
|
sub = get_subscription(stream.name, self.user_profile)
|
2021-04-08 02:41:57 +02:00
|
|
|
do_change_subscription_property(
|
|
|
|
self.user_profile, sub, stream, "pin_to_top", False, acting_user=None
|
|
|
|
)
|
2017-02-21 19:35:17 +01:00
|
|
|
for pinned in (True, False):
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_change_subscription_property(
|
2023-04-13 02:05:54 +02:00
|
|
|
self.user_profile,
|
|
|
|
sub,
|
|
|
|
stream,
|
|
|
|
"pin_to_top",
|
|
|
|
pinned,
|
|
|
|
acting_user=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-08-17 14:19:09 +02:00
|
|
|
check_subscription_update(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
property="pin_to_top",
|
|
|
|
value=pinned,
|
|
|
|
)
|
2016-07-01 07:26:09 +02:00
|
|
|
|
2022-08-09 20:37:07 +02:00
|
|
|
def test_mute_and_unmute_stream(self) -> None:
    """Verify the transitional dual-event behavior while the events API
    migrates from the legacy in_home_view property to is_muted.

    Updating either property must emit BOTH an in_home_view event and an
    is_muted event, always in that order, carrying inverted values
    (is_muted is the logical negation of in_home_view).
    """
    stream = get_stream("Denmark", self.user_profile.realm)
    sub = get_subscription(stream.name, self.user_profile)

    # While migrating events API from in_home_view to is_muted:
    # First, test in_home_view sends 2 events: in_home_view and is_muted.
    # (Set in_home_view=False first so the update below is a real state change.)
    do_change_subscription_property(
        self.user_profile, sub, stream, "in_home_view", False, acting_user=None
    )

    with self.verify_action(num_events=2) as events:
        do_change_subscription_property(
            self.user_profile, sub, stream, "in_home_view", True, acting_user=None
        )
    check_subscription_update(
        "events[0]",
        events[0],
        property="in_home_view",
        value=True,
    )
    # The companion is_muted event carries the opposite value.
    check_subscription_update(
        "events[1]",
        events[1],
        property="is_muted",
        value=False,
    )

    # Then, test is_muted also sends both events, in the same order.
    with self.verify_action(num_events=2) as events:
        do_change_subscription_property(
            self.user_profile, sub, stream, "is_muted", True, acting_user=None
        )
    check_subscription_update(
        "events[0]",
        events[0],
        property="in_home_view",
        value=False,
    )
    check_subscription_update(
        "events[1]",
        events[1],
        property="is_muted",
        value=True,
    )
def test_change_stream_notification_settings(self) -> None:
    """Changing a per-stream notification setting emits exactly one
    subscription update event, both with and without the
    notification_settings_null client capability."""
    for setting_name in ["email_notifications"]:
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)

        # First pass: clients advertising notification_settings_null.
        for setting_value in (True, False):
            with self.verify_action(notification_settings_null=True) as events:
                do_change_subscription_property(
                    self.user_profile,
                    sub,
                    stream,
                    setting_name,
                    setting_value,
                    acting_user=None,
                )
            check_subscription_update(
                "events[0]", events[0], property=setting_name, value=setting_value
            )

        # Second pass: default client capabilities.
        for setting_value in (True, False):
            with self.verify_action() as events:
                do_change_subscription_property(
                    self.user_profile,
                    sub,
                    stream,
                    setting_name,
                    setting_value,
                    acting_user=None,
                )
            check_subscription_update(
                "events[0]", events[0], property=setting_name, value=setting_value
            )
def test_change_realm_new_stream_announcements_stream(self) -> None:
    """Setting and then clearing the realm's new-stream announcements
    stream each emit a realm update for new_stream_announcements_stream_id."""
    rome = get_stream("Rome", self.user_profile.realm)

    # Point the setting at a real stream, then clear it (-1 sentinel id).
    for announcement_stream, announcement_stream_id in ((rome, rome.id), (None, -1)):
        with self.verify_action() as events:
            do_set_realm_new_stream_announcements_stream(
                self.user_profile.realm,
                announcement_stream,
                announcement_stream_id,
                acting_user=None,
            )
        check_realm_update("events[0]", events[0], "new_stream_announcements_stream_id")
def test_change_realm_signup_announcements_stream(self) -> None:
    """Setting and then clearing the realm's signup announcements stream
    each emit a realm update for signup_announcements_stream_id."""
    rome = get_stream("Rome", self.user_profile.realm)

    # Point the setting at a real stream, then clear it (-1 sentinel id).
    for announcement_stream, announcement_stream_id in ((rome, rome.id), (None, -1)):
        with self.verify_action() as events:
            do_set_realm_signup_announcements_stream(
                self.user_profile.realm,
                announcement_stream,
                announcement_stream_id,
                acting_user=None,
            )
        check_realm_update("events[0]", events[0], "signup_announcements_stream_id")
def test_change_realm_zulip_update_announcements_stream(self) -> None:
    """Setting and then clearing the realm's Zulip-update announcements
    stream each emit a realm update for zulip_update_announcements_stream_id."""
    rome = get_stream("Rome", self.user_profile.realm)

    # Point the setting at a real stream, then clear it (-1 sentinel id).
    for announcement_stream, announcement_stream_id in ((rome, rome.id), (None, -1)):
        with self.verify_action() as events:
            do_set_realm_zulip_update_announcements_stream(
                self.user_profile.realm,
                announcement_stream,
                announcement_stream_id,
                acting_user=None,
            )
        check_realm_update("events[0]", events[0], "zulip_update_announcements_stream_id")
def test_change_is_admin(self) -> None:
    """Check the event sequence when a user is promoted to realm
    administrator and then demoted back to member.

    Per role change we expect: a realm_user "role" update, a remove/add
    pair of user-group membership events, one more group membership
    event, and — on promotion only — stream create + subscription
    peer_add events for the private stream that becomes accessible
    (stream delete on demotion instead).
    """
    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    # Establish a known baseline role before the promote/demote loop.
    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)

    # A private stream the user is not subscribed to; its visibility
    # changes with the user's admin status (see events[4] checks below).
    self.make_stream("Test private stream", invite_only=True)
    self.subscribe(self.example_user("othello"), "Test private stream")

    for role in [UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_MEMBER]:
        if role == UserProfile.ROLE_REALM_ADMINISTRATOR:
            # Promotion additionally emits a peer_add for the newly
            # visible private stream, hence one extra event.
            num_events = 6
        else:
            num_events = 5

        with self.verify_action(num_events=num_events) as events:
            do_change_user_role(self.user_profile, role, acting_user=None)
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Membership moves between the system groups for old/new roles.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_REALM_ADMINISTRATOR:
            check_user_group_remove_members("events[3]", events[3])
            # Gaining admin access surfaces the private stream.
            check_stream_create("events[4]", events[4])
            check_subscription_peer_add("events[5]", events[5])
        else:
            check_user_group_add_members("events[3]", events[3])
            # Losing admin access hides the private stream again.
            check_stream_delete("events[4]", events[4])
def test_change_is_billing_admin(self) -> None:
    """Toggling is_billing_admin sends one realm_user update carrying
    the new flag value in the person dict."""
    reset_email_visibility_to_everyone_in_zulip_realm()

    # Refresh from the database so we do not pass a stale UserProfile
    # (with an outdated email) into the action function below.
    self.user_profile.refresh_from_db()

    for flag in (True, False):
        with self.verify_action() as events:
            do_change_is_billing_admin(self.user_profile, flag)
        check_realm_user_update("events[0]", events[0], "is_billing_admin")
        self.assertEqual(events[0]["person"]["is_billing_admin"], flag)
def test_change_is_owner(self) -> None:
    """Check the event sequence when a user is promoted to realm owner
    and then demoted back to member.

    Mirrors test_change_is_admin: role update, user-group membership
    remove/add events, plus stream create/peer_add on promotion (the
    private stream becomes accessible) or stream delete on demotion.
    """
    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    # Establish a known baseline role before the promote/demote loop.
    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)

    # A private stream the user is not subscribed to; its visibility
    # changes with the user's owner status (see events[4] checks below).
    self.make_stream("Test private stream", invite_only=True)
    self.subscribe(self.example_user("othello"), "Test private stream")

    for role in [UserProfile.ROLE_REALM_OWNER, UserProfile.ROLE_MEMBER]:
        if role == UserProfile.ROLE_REALM_OWNER:
            # Promotion additionally emits a peer_add for the newly
            # visible private stream, hence one extra event.
            num_events = 6
        else:
            num_events = 5
        with self.verify_action(num_events=num_events) as events:
            do_change_user_role(self.user_profile, role, acting_user=None)
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Membership moves between the system groups for old/new roles.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_REALM_OWNER:
            check_user_group_remove_members("events[3]", events[3])
            # Gaining owner access surfaces the private stream.
            check_stream_create("events[4]", events[4])
            check_subscription_peer_add("events[5]", events[5])
        else:
            check_user_group_add_members("events[3]", events[3])
            # Losing owner access hides the private stream again.
            check_stream_delete("events[4]", events[4])
def test_change_is_moderator(self) -> None:
    """Check events when toggling between moderator and member: a role
    update plus user-group membership remove/add events — 4 events total,
    with no stream visibility changes (unlike the admin/owner tests)."""
    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    # Establish a known baseline role before the promote/demote loop.
    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
    for role in [UserProfile.ROLE_MODERATOR, UserProfile.ROLE_MEMBER]:
        with self.verify_action(num_events=4) as events:
            do_change_user_role(self.user_profile, role, acting_user=None)
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Membership moves between the system groups for old/new roles.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_MODERATOR:
            check_user_group_remove_members("events[3]", events[3])
        else:
            check_user_group_add_members("events[3]", events[3])
def test_change_is_guest(self) -> None:
    """Check the event sequence when a user is demoted to guest and
    promoted back to member.

    Becoming a guest hides streams (stream delete events); regaining
    member status re-surfaces them (stream create + peer_add events).
    """
    # Make Denmark a default stream so stream visibility changes are
    # exercised by the role transitions below.
    stream = Stream.objects.get(name="Denmark")
    do_add_default_stream(stream)

    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    # Establish a known baseline role before the demote/promote loop.
    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
    for role in [UserProfile.ROLE_GUEST, UserProfile.ROLE_MEMBER]:
        if role == UserProfile.ROLE_MEMBER:
            # When changing role from guest to member, peer_add events are also sent
            # to make sure the subscribers info is provided to the clients for the
            # streams added by stream creation event.
            num_events = 7
        else:
            num_events = 5
        with self.verify_action(num_events=num_events) as events:
            do_change_user_role(self.user_profile, role, acting_user=None)
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Membership moves between the system groups for old/new roles.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_GUEST:
            check_user_group_remove_members("events[3]", events[3])
            # Guests lose access to the stream.
            check_stream_delete("events[4]", events[4])
        else:
            check_user_group_add_members("events[3]", events[3])
            # Regained member access re-surfaces the stream plus its
            # subscriber info (two peer_add events).
            check_stream_create("events[4]", events[4])
            check_subscription_peer_add("events[5]", events[5])
            check_subscription_peer_add("events[6]", events[6])
def test_change_user_role_for_restricted_users(self) -> None:
    """Check which events a user with restricted access to other users
    (polonius) receives when another user's (cordelia's) role changes.

    The restricted observer sees only the user-group membership events —
    no realm_user "role" update. A third membership event appears when
    ROLE_MEMBER is involved on either side of the transition.
    """
    self.set_up_db_for_testing_user_access()
    self.user_profile = self.example_user("polonius")

    for role in [
        UserProfile.ROLE_REALM_OWNER,
        UserProfile.ROLE_REALM_ADMINISTRATOR,
        UserProfile.ROLE_MODERATOR,
        UserProfile.ROLE_MEMBER,
        UserProfile.ROLE_GUEST,
    ]:
        cordelia = self.example_user("cordelia")
        old_role = cordelia.role

        num_events = 2
        # Transitions into or out of ROLE_MEMBER produce one extra
        # group membership event (checked as events[2] below).
        if UserProfile.ROLE_MEMBER in [old_role, role]:
            num_events = 3

        with self.verify_action(num_events=num_events) as events:
            do_change_user_role(cordelia, role, acting_user=None)

        # Only group membership events are delivered to this observer.
        check_user_group_remove_members("events[0]", events[0])
        check_user_group_add_members("events[1]", events[1])

        if old_role == UserProfile.ROLE_MEMBER:
            check_user_group_remove_members("events[2]", events[2])
        elif role == UserProfile.ROLE_MEMBER:
            check_user_group_add_members("events[2]", events[2])
def test_change_notification_settings(self) -> None:
    """Exercise every boolean notification setting not covered by a
    dedicated test, verifying the modern user_settings event and, for
    legacy (pre-feature-level-89) settings, the additional
    update_global_notifications event — with and without the
    notification_settings_null client capability."""
    for notification_setting in self.user_profile.notification_setting_types:
        if notification_setting in [
            "notification_sound",
            "desktop_icon_count_display",
            "presence_enabled",
            "realm_name_in_email_notifications_policy",
            "automatically_follow_topics_policy",
            "automatically_unmute_topics_in_muted_streams_policy",
        ]:
            # These settings are tested in their own tests.
            continue

        # Reset to False so the first True update is a real state change.
        do_change_user_setting(
            self.user_profile, notification_setting, False, acting_user=self.user_profile
        )

        num_events = 2
        is_modern_notification_setting = (
            notification_setting in self.user_profile.modern_notification_settings
        )
        if is_modern_notification_setting:
            # The legacy event format is not sent for modern_notification_settings
            # as it exists only for backwards-compatibility with
            # clients that don't support the new user_settings event type.
            # We only send the legacy event for settings added before Feature level 89.
            num_events = 1

        for setting_value in [True, False]:
            with self.verify_action(num_events=num_events) as events:
                do_change_user_setting(
                    self.user_profile,
                    notification_setting,
                    setting_value,
                    acting_user=self.user_profile,
                )
            check_user_settings_update("events[0]", events[0])
            if not is_modern_notification_setting:
                check_update_global_notifications("events[1]", events[1], setting_value)

            # Also test with notification_settings_null=True
            # (re-applying the same value, so no state change is expected).
            with self.verify_action(
                notification_settings_null=True,
                state_change_expected=False,
                num_events=num_events,
            ) as events:
                do_change_user_setting(
                    self.user_profile,
                    notification_setting,
                    setting_value,
                    acting_user=self.user_profile,
                )
            check_user_settings_update("events[0]", events[0])
            if not is_modern_notification_setting:
                check_update_global_notifications("events[1]", events[1], setting_value)
def test_change_presence_enabled(self) -> None:
    """Toggling presence_enabled emits three events: the user_settings
    update, the legacy global-notifications event, and a presence event
    whose status reflects the new setting value."""
    setting_name = "presence_enabled"

    # Disabling presence creates a UserPresence row with last_connected_time
    # just before the flip and last_active_time set to None; the presence API
    # falls back to user_profile.date_joined for a None value (backwards
    # compatibility). Push date_joined into the past so the emitted presence
    # event reports "idle" instead of a spuriously recent "active".
    self.user_profile.date_joined = timezone_now() - timedelta(days=15)
    self.user_profile.save()

    for enabled in (True, False):
        with self.verify_action(num_events=3) as events:
            do_change_user_setting(
                self.user_profile, setting_name, enabled, acting_user=self.user_profile
            )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], enabled)
        check_presence(
            "events[2]",
            events[2],
            has_email=True,
            presence_key="website",
            status="active" if enabled else "idle",
        )
def test_change_notification_sound(self) -> None:
    """Changing the notification sound emits both the modern
    user_settings event and the legacy global-notifications event."""
    setting_name = "notification_sound"
    new_sound = "ding"

    with self.verify_action(num_events=2) as events:
        do_change_user_setting(
            self.user_profile, setting_name, new_sound, acting_user=self.user_profile
        )
    check_user_settings_update("events[0]", events[0])
    check_update_global_notifications("events[1]", events[1], new_sound)
def test_change_desktop_icon_count_display(self) -> None:
    """Each change of desktop_icon_count_display emits the modern
    user_settings event plus the legacy global-notifications event."""
    setting_name = "desktop_icon_count_display"

    for setting_value in (2, 1):
        with self.verify_action(num_events=2) as events:
            do_change_user_setting(
                self.user_profile, setting_name, setting_value, acting_user=self.user_profile
            )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], setting_value)
def test_change_realm_name_in_email_notifications_policy(self) -> None:
    """Each change of realm_name_in_email_notifications_policy emits the
    modern user_settings event plus the legacy global-notifications event."""
    setting_name = "realm_name_in_email_notifications_policy"

    for setting_value in (3, 2):
        with self.verify_action(num_events=2) as events:
            do_change_user_setting(
                self.user_profile, setting_name, setting_value, acting_user=self.user_profile
            )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], setting_value)
def test_change_automatically_follow_topics_policy(self) -> None:
    """Every valid automatically_follow_topics_policy value produces
    exactly one user_settings update event."""
    setting_name = "automatically_follow_topics_policy"

    for policy_value in UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES:
        with self.verify_action(num_events=1) as events:
            do_change_user_setting(
                self.user_profile, setting_name, policy_value, acting_user=self.user_profile
            )
        check_user_settings_update("events[0]", events[0])
def test_change_automatically_unmute_topics_in_muted_streams_policy(self) -> None:
    """Every valid automatically_unmute_topics_in_muted_streams_policy
    value produces exactly one user_settings update event."""
    setting_name = "automatically_unmute_topics_in_muted_streams_policy"

    for policy_value in UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES:
        with self.verify_action(num_events=1) as events:
            do_change_user_setting(
                self.user_profile, setting_name, policy_value, acting_user=self.user_profile
            )
        check_user_settings_update("events[0]", events[0])
def test_realm_update_org_type(self) -> None:
    """Changing the realm's organization type emits a realm/update event
    and is reflected in freshly fetched initial state data."""
    realm = self.user_profile.realm
    business_id = Realm.ORG_TYPES["business"]["id"]
    government_id = Realm.ORG_TYPES["government"]["id"]

    # The realm starts out as a business organization.
    state_data = fetch_initial_state_data(self.user_profile, realm=realm)
    self.assertEqual(state_data["realm_org_type"], business_id)

    with self.verify_action() as events:
        do_change_realm_org_type(realm, government_id, acting_user=self.user_profile)
    check_realm_update("events[0]", events[0], "org_type")

    # Fresh state data reflects the new organization type.
    state_data = fetch_initial_state_data(self.user_profile, realm=realm)
    self.assertEqual(state_data["realm_org_type"], government_id)
def test_realm_update_plan_type(self) -> None:
    """Check the events emitted when downgrading the realm to the
    LIMITED plan: an enable_spectator_access update, a realm settings
    dict update, and the plan_type update itself — plus the resulting
    changes in fetched initial state data."""
    realm = self.user_profile.realm
    # NOTE(review): presumably set so the plan downgrade must also reset
    # this group-based permission (producing events[1] below) — confirm.
    members_group = NamedUserGroup.objects.get(name=SystemGroups.MEMBERS, realm=realm)
    do_change_realm_permission_group_setting(
        realm, "can_access_all_users_group", members_group, acting_user=None
    )

    # Baseline: self-hosted plan, not limited.
    state_data = fetch_initial_state_data(self.user_profile, realm=realm)
    self.assertEqual(state_data["realm_plan_type"], Realm.PLAN_TYPE_SELF_HOSTED)
    self.assertEqual(state_data["zulip_plan_is_not_limited"], True)

    with self.verify_action(num_events=3) as events:
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=self.user_profile)
    check_realm_update("events[0]", events[0], "enable_spectator_access")
    check_realm_update_dict("events[1]", events[1])
    check_realm_update("events[2]", events[2], "plan_type")

    # Fresh state data reflects the downgraded plan.
    state_data = fetch_initial_state_data(self.user_profile, realm=realm)
    self.assertEqual(state_data["realm_plan_type"], Realm.PLAN_TYPE_LIMITED)
    self.assertEqual(state_data["zulip_plan_is_not_limited"], False)
def test_realm_emoji_events(self) -> None:
    """Adding and removing a realm emoji each emit a realm_emoji
    update event."""
    emoji_author = self.example_user("iago")
    with get_test_image_file("img.png") as img_file, self.verify_action() as events:
        check_add_realm_emoji(
            self.user_profile.realm, "my_emoji", emoji_author, img_file, "image/png"
        )

    check_realm_emoji_update("events[0]", events[0])

    with self.verify_action() as events:
        do_remove_realm_emoji(
            self.user_profile.realm, "my_emoji", acting_user=self.user_profile
        )
    check_realm_emoji_update("events[0]", events[0])
def test_realm_filter_events(self) -> None:
    """Verify realm_linkifiers events for add/update/reorder/remove.

    Exercises the full linkifier lifecycle and then repeats the
    add/update/remove actions for a client that does not support URL
    templates (linkifier_url_template=False), where apply_event is
    expected to drop the event without any state change.
    """
    regex = "#(?P<id>[123])"
    url = "https://realm.com/my_realm_filter/{id}"

    # Adding a linkifier sends one realm_linkifiers event with the
    # complete (ordered) list of linkifiers.
    with self.verify_action(num_events=1) as events:
        do_add_linkifier(self.user_profile.realm, regex, url, acting_user=None)
    check_realm_linkifiers("events[0]", events[0])

    # The newly added linkifier appears last in the event payload.
    linkifier_id = events[0]["realm_linkifiers"][-1]["id"]
    self.assertEqual(RealmFilter.objects.get(id=linkifier_id).pattern, regex)

    # Updating the pattern of an existing linkifier.
    regex = "#(?P<id>[0-9]+)"
    with self.verify_action(num_events=1) as events:
        do_update_linkifier(self.user_profile.realm, linkifier_id, regex, url, acting_user=None)
    check_realm_linkifiers("events[0]", events[0])

    # Reordering: move the last linkifier to the front.
    linkifier_ids = list(
        RealmFilter.objects.all().values_list("id", flat=True).order_by("order")
    )
    with self.verify_action(num_events=1) as events:
        check_reorder_linkifiers(
            self.user_profile.realm, [linkifier_ids[-1], *linkifier_ids[:-1]], acting_user=None
        )
    check_realm_linkifiers("events[0]", events[0])

    # Removing a linkifier by pattern.
    with self.verify_action(num_events=1) as events:
        do_remove_linkifier(self.user_profile.realm, regex, acting_user=None)
    check_realm_linkifiers("events[0]", events[0])

    # Redo the checks, but assume that the client does not support URL template.
    # apply_event should drop the event, and no state change should occur.
    regex = "#(?P<id>[123])"

    with self.verify_action(
        num_events=1, linkifier_url_template=False, state_change_expected=False
    ) as events:
        do_add_linkifier(self.user_profile.realm, regex, url, acting_user=None)

    regex = "#(?P<id>[0-9]+)"
    linkifier_id = events[0]["realm_linkifiers"][0]["id"]
    with self.verify_action(
        num_events=1, linkifier_url_template=False, state_change_expected=False
    ) as events:
        do_update_linkifier(self.user_profile.realm, linkifier_id, regex, url, acting_user=None)

    with self.verify_action(
        num_events=1, linkifier_url_template=False, state_change_expected=False
    ) as events:
        do_remove_linkifier(self.user_profile.realm, regex, acting_user=None)
|
2021-04-15 19:51:36 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_realm_domain_events(self) -> None:
    """Check the realm_domains add/change/remove event payloads."""
    # Adding an allowed domain for the realm.
    with self.verify_action() as events:
        do_add_realm_domain(self.user_profile.realm, "zulip.org", False, acting_user=None)

    check_realm_domains_add("events[0]", events[0])
    added = events[0]["realm_domain"]
    self.assertEqual(added["domain"], "zulip.org")
    self.assertEqual(added["allow_subdomains"], False)

    # Toggling allow_subdomains on the existing domain.
    realm_domain = RealmDomain.objects.get(realm=self.user_profile.realm, domain="zulip.org")
    with self.verify_action() as events:
        do_change_realm_domain(realm_domain, True, acting_user=None)

    check_realm_domains_change("events[0]", events[0])
    changed = events[0]["realm_domain"]
    self.assertEqual(changed["domain"], "zulip.org")
    self.assertEqual(changed["allow_subdomains"], True)

    # Removing the domain; the remove event carries the bare domain string.
    with self.verify_action() as events:
        do_remove_realm_domain(realm_domain, acting_user=None)

    check_realm_domains_remove("events[0]", events[0])
    self.assertEqual(events[0]["domain"], "zulip.org")
|
2016-12-26 19:19:02 +01:00
|
|
|
|
2020-10-28 04:00:46 +01:00
|
|
|
def test_realm_playground_events(self) -> None:
    """Verify realm_playgrounds events when adding and removing a playground.

    Both actions send a realm_playgrounds event containing the full
    list of configured playgrounds.
    """
    with self.verify_action() as events:
        check_add_realm_playground(
            self.user_profile.realm,
            acting_user=None,
            name="Python playground",
            pygments_language="Python",
            url_template="https://python.example.com{code}",
        )
    check_realm_playgrounds("events[0]", events[0])

    # Look up the playground we just created so we can remove it.
    last_realm_playground = RealmPlayground.objects.last()
    assert last_realm_playground is not None
    last_id = last_realm_playground.id
    realm_playground = access_playground_by_id(self.user_profile.realm, last_id)
    with self.verify_action() as events:
        do_remove_realm_playground(self.user_profile.realm, realm_playground, acting_user=None)
    check_realm_playgrounds("events[0]", events[0])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_create_bot(self) -> None:
    """Verify the realm_bot "add" events sent on bot creation.

    Covers a generic bot, an outgoing webhook bot, and an embedded
    bot; each creation produces four events, with the realm_bot add
    payload checked as the last event (events[3]).
    """
    with self.verify_action(num_events=4) as events:
        self.create_bot("test")
    check_realm_bot_add("events[3]", events[3])

    with self.verify_action(num_events=4) as events:
        self.create_bot(
            "test_outgoing_webhook",
            full_name="Outgoing Webhook Bot",
            payload_url=orjson.dumps("https://foo.bar.com").decode(),
            interface_type=Service.GENERIC,
            bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
        )
    # The third event is the second call of notify_created_bot, which contains additional
    # data for services (in contrast to the first call).
    check_realm_bot_add("events[3]", events[3])

    with self.verify_action(num_events=4) as events:
        self.create_bot(
            "test_embedded",
            full_name="Embedded Bot",
            service_name="helloworld",
            config_data=orjson.dumps({"foo": "bar"}).decode(),
            bot_type=UserProfile.EMBEDDED_BOT,
        )
    check_realm_bot_add("events[3]", events[3])
|
2018-01-30 19:21:13 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_full_name(self) -> None:
    """Renaming a bot sends a realm_bot update event for full_name."""
    test_bot = self.create_bot("test")
    with self.verify_action(num_events=2) as events:
        do_change_full_name(test_bot, "New Bot Name", self.user_profile)
    # The second event carries the realm_bot full_name update.
    check_realm_bot_update("events[1]", events[1], "full_name")
|
2014-02-26 19:55:29 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_regenerate_bot_api_key(self) -> None:
    """Regenerating a bot's API key sends a realm_bot api_key update."""
    test_bot = self.create_bot("test")
    with self.verify_action() as events:
        do_regenerate_api_key(test_bot, self.user_profile)
    check_realm_bot_update("events[0]", events[0], "api_key")
|
2014-02-26 20:17:19 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_avatar_source(self) -> None:
    """Changing a bot's avatar sends both realm_bot and realm_user events."""
    test_bot = self.create_bot("test")
    with self.verify_action(num_events=2) as events:
        do_change_avatar_fields(
            test_bot, test_bot.AVATAR_FROM_USER, acting_user=self.user_profile
        )
    # First event updates the bot's avatar_url; the second is the
    # corresponding realm_user event.
    check_realm_bot_update("events[0]", events[0], "avatar_url")
    self.assertEqual(events[1]["type"], "realm_user")
|
2014-02-26 21:05:10 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_realm_icon_source(self) -> None:
    """Uploading a realm icon sends a realm update_dict event."""
    with self.verify_action(state_change_expected=True) as events:
        do_change_icon_source(self.user_profile.realm, Realm.ICON_UPLOADED, acting_user=None)
    check_realm_update_dict("events[0]", events[0])
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-12-03 08:13:58 +01:00
|
|
|
def test_change_realm_light_theme_logo_source(self) -> None:
    """Uploading the light-theme logo sends a realm update_dict event."""
    with self.verify_action(state_change_expected=True) as events:
        # night=False selects the light-theme logo.
        do_change_logo_source(
            self.user_profile.realm, Realm.LOGO_UPLOADED, False, acting_user=self.user_profile
        )
    check_realm_update_dict("events[0]", events[0])
|
2019-03-01 15:52:44 +01:00
|
|
|
|
2021-12-03 08:13:58 +01:00
|
|
|
def test_change_realm_dark_theme_logo_source(self) -> None:
    """Uploading the dark-theme logo sends a realm update_dict event."""
    with self.verify_action(state_change_expected=True) as events:
        # night=True selects the dark-theme logo.
        do_change_logo_source(
            self.user_profile.realm, Realm.LOGO_UPLOADED, True, acting_user=self.user_profile
        )
    check_realm_update_dict("events[0]", events[0])
|
2019-03-01 15:52:44 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_all_public_streams(self) -> None:
    """Toggling a bot's default_all_public_streams sends a realm_bot update."""
    test_bot = self.create_bot("test")
    with self.verify_action() as events:
        do_change_default_all_public_streams(test_bot, True, acting_user=None)
    check_realm_bot_update("events[0]", events[0], "default_all_public_streams")
|
2014-02-26 21:15:31 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_sending_stream(self) -> None:
    """Setting and clearing a bot's default sending stream each send
    a realm_bot update event for default_sending_stream."""
    test_bot = self.create_bot("test")
    rome = get_stream("Rome", test_bot.realm)

    # Set the default sending stream to a concrete stream.
    with self.verify_action() as events:
        do_change_default_sending_stream(test_bot, rome, acting_user=None)
    check_realm_bot_update("events[0]", events[0], "default_sending_stream")

    # Clearing it (None) also produces the same update event type.
    with self.verify_action() as events:
        do_change_default_sending_stream(test_bot, None, acting_user=None)
    check_realm_bot_update("events[0]", events[0], "default_sending_stream")
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_events_register_stream(self) -> None:
    """Setting and clearing a bot's default events-register stream each
    send a realm_bot update event for default_events_register_stream."""
    test_bot = self.create_bot("test")
    rome = get_stream("Rome", test_bot.realm)

    # Set the default events-register stream to a concrete stream.
    with self.verify_action() as events:
        do_change_default_events_register_stream(test_bot, rome, acting_user=None)
    check_realm_bot_update("events[0]", events[0], "default_events_register_stream")

    # Clearing it (None) also produces the same update event type.
    with self.verify_action() as events:
        do_change_default_events_register_stream(test_bot, None, acting_user=None)
    check_realm_bot_update("events[0]", events[0], "default_events_register_stream")
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_owner(self) -> None:
    """Verify the events sent when a bot's owner changes.

    Three scenarios, distinguished by whether the acting user keeps,
    loses, or gains access to the bot:
    - transfer visible to the acting user: realm_bot update (owner_id)
      plus realm_user update (bot_owner_id);
    - acting user loses access to the bot: realm_bot delete plus
      realm_user update;
    - acting user gains access to the bot: realm_bot add plus
      realm_user update.
    """
    self.user_profile = self.example_user("iago")
    owner = self.example_user("hamlet")
    bot = self.create_bot("test")
    with self.verify_action(num_events=2) as events:
        do_change_bot_owner(bot, owner, self.user_profile)
    check_realm_bot_update("events[0]", events[0], "owner_id")
    check_realm_user_update("events[1]", events[1], "bot_owner_id")

    self.user_profile = self.example_user("aaron")
    owner = self.example_user("hamlet")
    bot = self.create_bot("test1", full_name="Test1 Testerson")
    with self.verify_action(num_events=2) as events:
        do_change_bot_owner(bot, owner, self.user_profile)
    # The old owner (aaron) loses bot access, so the bot is deleted
    # from their perspective.
    check_realm_bot_delete("events[0]", events[0])
    check_realm_user_update("events[1]", events[1], "bot_owner_id")

    previous_owner = self.example_user("aaron")
    self.user_profile = self.example_user("hamlet")
    bot = self.create_test_bot("test2", previous_owner, full_name="Test2 Testerson")
    with self.verify_action(num_events=2) as events:
        do_change_bot_owner(bot, self.user_profile, previous_owner)
    # The new owner (hamlet) gains bot access, so the bot is added
    # from their perspective.
    check_realm_bot_add("events[0]", events[0])
    check_realm_user_update("events[1]", events[1], "bot_owner_id")
|
2018-03-06 22:32:03 +01:00
|
|
|
|
2022-05-07 08:56:33 +02:00
|
|
|
def test_peer_remove_events_on_changing_bot_owner(self) -> None:
    """When a bot changes owner, the previous owner gets a
    subscription peer_remove event, but only for private streams the
    previous owner cannot otherwise see the bot in.
    """
    previous_owner = self.example_user("aaron")
    self.user_profile = self.example_user("iago")
    bot = self.create_test_bot("test2", previous_owner, full_name="Test2 Testerson")
    private_stream = self.make_stream("private_stream", invite_only=True)
    self.make_stream("public_stream")
    self.subscribe(bot, "private_stream")
    self.subscribe(self.example_user("aaron"), "private_stream")
    self.subscribe(bot, "public_stream")
    self.subscribe(self.example_user("aaron"), "public_stream")

    # A second private stream that the new owner (iago) is subscribed
    # to, so no peer_remove is expected for it.
    self.make_stream("private_stream_test", invite_only=True)
    self.subscribe(self.example_user("iago"), "private_stream_test")
    self.subscribe(bot, "private_stream_test")

    with self.verify_action(num_events=3) as events:
        do_change_bot_owner(bot, self.user_profile, previous_owner)

    check_realm_bot_update("events[0]", events[0], "owner_id")
    check_realm_user_update("events[1]", events[1], "bot_owner_id")
    # peer_remove is sent only for the private stream the new owner
    # is not subscribed to.
    check_subscription_peer_remove("events[2]", events[2])
    self.assertEqual(events[2]["stream_ids"], [private_stream.id])
|
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def test_do_update_outgoing_webhook_service(self) -> None:
    """Updating an outgoing webhook bot's service configuration sends
    a realm_bot update event for the "services" field."""
    self.user_profile = self.example_user("iago")
    bot = self.create_test_bot(
        "test",
        self.user_profile,
        full_name="Test Bot",
        bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
        payload_url=orjson.dumps("http://hostname.domain2.com").decode(),
        interface_type=Service.GENERIC,
    )
    with self.verify_action() as events:
        do_update_outgoing_webhook_service(bot, 2, "http://hostname.domain2.com")
    check_realm_bot_update("events[0]", events[0], "services")
|
2018-01-16 20:34:12 +01:00
|
|
|
|
2022-01-14 22:50:27 +01:00
|
|
|
def test_do_deactivate_bot(self) -> None:
    """Deactivating a bot sends realm_user and realm_bot is_active updates."""
    test_bot = self.create_bot("test")
    with self.verify_action(num_events=2) as events:
        do_deactivate_user(test_bot, acting_user=None)
    check_realm_user_update("events[0]", events[0], "is_active")
    check_realm_bot_update("events[1]", events[1], "is_active")
|
2014-02-26 22:27:51 +01:00
|
|
|
|
2022-01-14 22:50:27 +01:00
|
|
|
def test_do_deactivate_user(self) -> None:
    """Verify events sent when deactivating users, including the
    restricted view for guest users.

    A normal recipient gets a realm_user is_active update; a guest
    gets an event only if they can access the deactivated user, and
    gets a realm_user "remove" event if they thereby lose access.
    """
    user_profile = self.example_user("cordelia")
    with self.verify_action(num_events=1) as events:
        do_deactivate_user(user_profile, acting_user=None)
    check_realm_user_update("events[0]", events[0], "is_active")

    do_reactivate_user(user_profile, acting_user=None)
    self.set_up_db_for_testing_user_access()

    # Test that guest users receive event only
    # if they can access the deactivated user.
    user_profile = self.example_user("cordelia")
    self.user_profile = self.example_user("polonius")
    with self.verify_action(num_events=0, state_change_expected=False) as events:
        do_deactivate_user(user_profile, acting_user=None)

    # shiva is accessible to the guest, so an update event arrives.
    user_profile = self.example_user("shiva")
    with self.verify_action(num_events=1) as events:
        do_deactivate_user(user_profile, acting_user=None)
    check_realm_user_update("events[0]", events[0], "is_active")

    # Guest loses access to deactivated user if the user
    # was not involved in DMs.
    user_profile = self.example_user("hamlet")
    with self.verify_action(num_events=1) as events:
        do_deactivate_user(user_profile, acting_user=None)
    check_realm_user_remove("events[0]", events[0])

    user_profile = self.example_user("aaron")
    # One update event is for a deactivating a bot owned by aaron.
    with self.verify_action(num_events=2) as events:
        do_deactivate_user(user_profile, acting_user=None)
    check_realm_user_update("events[0]", events[0], "is_active")
    check_realm_user_update("events[1]", events[1], "is_active")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_reactivate_user(self) -> None:
    """Verify events when reactivating a (bot) user.

    Checks the realm_bot is_active update plus subscription peer_add
    events for the bot's streams; peer_add for a private stream is
    only sent to subscribers of that stream. The final scenario,
    where the bot's owner has been deactivated, additionally
    reassigns the bot owner and produces peer_remove/stream delete
    events.
    """
    bot = self.create_bot("test")
    self.subscribe(bot, "Denmark")
    self.make_stream("Test private stream", invite_only=True)
    self.subscribe(bot, "Test private stream")
    do_deactivate_user(bot, acting_user=None)
    with self.verify_action(num_events=3) as events:
        do_reactivate_user(bot, acting_user=None)
    check_realm_bot_update("events[1]", events[1], "is_active")
    check_subscription_peer_add("events[2]", events[2])

    # Test 'peer_add' event for private stream is received only if user is subscribed to it.
    do_deactivate_user(bot, acting_user=None)
    self.subscribe(self.example_user("hamlet"), "Test private stream")
    with self.verify_action(num_events=4) as events:
        do_reactivate_user(bot, acting_user=None)
    check_realm_bot_update("events[1]", events[1], "is_active")
    check_subscription_peer_add("events[2]", events[2])
    check_subscription_peer_add("events[3]", events[3])

    # Deactivate both the bot and its owner, so reactivation by iago
    # must also change the bot's owner.
    do_deactivate_user(bot, acting_user=None)
    do_deactivate_user(self.example_user("hamlet"), acting_user=None)

    reset_email_visibility_to_everyone_in_zulip_realm()
    bot.refresh_from_db()

    self.user_profile = self.example_user("iago")
    with self.verify_action(num_events=7) as events:
        do_reactivate_user(bot, acting_user=self.example_user("iago"))
    check_realm_bot_update("events[1]", events[1], "is_active")
    check_realm_bot_update("events[2]", events[2], "owner_id")
    check_realm_user_update("events[3]", events[3], "bot_owner_id")
    check_subscription_peer_remove("events[4]", events[4])
    check_stream_delete("events[5]", events[5])
|
2022-04-21 16:04:52 +02:00
|
|
|
|
2021-03-13 20:00:05 +01:00
|
|
|
def test_do_deactivate_realm(self) -> None:
    """Deactivating a realm sends a realm "deactivated" event."""
    realm = self.user_profile.realm

    # We delete sessions of all active users when a realm is
    # deactivated, and redirect them to a deactivated page in
    # order to inform that realm/organization has been
    # deactivated. state_change_expected is False is kinda
    # correct because were one to somehow compute page_params (as
    # this test does), but that's not actually possible.
    with self.verify_action(state_change_expected=False) as events:
        do_deactivate_realm(
            realm, acting_user=None, deactivation_reason="owner_request", email_owners=False
        )
    check_realm_deactivated("events[0]", events[0])
|
|
|
|
|
2023-12-01 11:52:41 +01:00
|
|
|
def test_do_mark_onboarding_step_as_read(self) -> None:
    """Marking an onboarding step as read sends an onboarding_steps event."""
    with self.verify_action() as events:
        do_mark_onboarding_step_as_read(self.user_profile, "intro_inbox_view_modal")
    check_onboarding_steps("events[0]", events[0])
|
2017-02-15 21:06:07 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_rename_stream(self) -> None:
    """Renaming a stream sends a stream update event (keyed by the old
    name) plus a Notification Bot message announcing the rename.

    Runs twice, with include_streams True and False, to cover both
    register payload shapes.
    """
    for i, include_streams in enumerate([True, False]):
        old_name = f"old name{i}"
        new_name = f"new name{i}"

        stream = self.make_stream(old_name)
        self.subscribe(self.user_profile, stream.name)
        with self.verify_action(num_events=2, include_streams=include_streams) as events:
            do_rename_stream(stream, new_name, self.user_profile)

        # The stream update event still references the old name.
        check_stream_update("events[0]", events[0])
        self.assertEqual(events[0]["name"], old_name)

        # The second event is the notification message about the rename.
        check_message("events[1]", events[1])

        # Expected fields of the Notification Bot message.
        fields = dict(
            sender_email="notification-bot@zulip.com",
            display_recipient=new_name,
            sender_full_name="Notification Bot",
            is_me_message=False,
            type="stream",
            client="Internal",
        )

        fields[TOPIC_NAME] = "channel events"

        msg = events[1]["message"]
        for k, v in fields.items():
            self.assertEqual(msg[k], v)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_deactivate_stream_neversubscribed(self) -> None:
|
2020-12-01 14:13:09 +01:00
|
|
|
for i, include_streams in enumerate([True, False]):
|
|
|
|
stream = self.make_stream(f"stream{i}")
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(include_streams=include_streams) as events:
|
|
|
|
do_deactivate_stream(stream, acting_user=None)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_stream_delete("events[0]", events[0])
|
2023-07-27 16:42:21 +02:00
|
|
|
self.assertIsNone(events[0]["streams"][0]["stream_weekly_traffic"])
|
2016-07-12 23:57:16 +02:00
|
|
|
|
2023-11-14 13:48:35 +01:00
|
|
|
    def test_user_losing_access_on_deactivating_stream(self) -> None:
        """Deactivating a channel can make its subscribers invisible to a
        guest: the guest gets a realm_user remove event for each user it
        can no longer access, unless some other relationship (e.g. a DM)
        keeps that user visible."""
        self.set_up_db_for_testing_user_access()
        polonius = self.example_user("polonius")
        hamlet = self.example_user("hamlet")
        realm = hamlet.realm
        # verify_action opens the event queue for self.user_profile, so
        # act as the guest (polonius) for the rest of the test.
        self.user_profile = self.example_user("polonius")

        stream = get_stream("test_stream1", realm)
        self.assertCountEqual(
            self.users_subscribed_to_stream(stream.name, realm), [hamlet, polonius]
        )

        # Deactivation removes the channel and, for the guest, access to
        # hamlet, who was only visible via this shared subscription.
        with self.verify_action(num_events=2) as events:
            do_deactivate_stream(stream, acting_user=None)
        check_stream_delete("events[0]", events[0])
        check_realm_user_remove("events[1]", events[1])
        self.assertEqual(events[1]["person"]["user_id"], hamlet.id)

        # Test that if the subscribers of deactivated stream are involved in
        # DMs with guest, then the guest does not get "remove" event for them.
        stream = get_stream("test_stream2", self.user_profile.realm)
        shiva = self.example_user("shiva")
        iago = self.example_user("iago")
        self.subscribe(shiva, stream.name)
        self.assertCountEqual(
            self.users_subscribed_to_stream(stream.name, realm), [iago, polonius, shiva]
        )

        # Only iago becomes inaccessible; shiva stays visible, so the
        # single remove event must be for iago.
        with self.verify_action(num_events=2) as events:
            do_deactivate_stream(stream, acting_user=None)
        check_stream_delete("events[0]", events[0])
        check_realm_user_remove("events[1]", events[1])
        self.assertEqual(events[1]["person"]["user_id"], iago.id)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_subscribe_other_user_never_subscribed(self) -> None:
|
2020-12-01 14:13:09 +01:00
|
|
|
for i, include_streams in enumerate([True, False]):
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(num_events=2, include_streams=True) as events:
|
|
|
|
self.subscribe(self.example_user("othello"), f"test_stream{i}")
|
2021-02-12 08:20:45 +01:00
|
|
|
check_subscription_peer_add("events[1]", events[1])
|
2016-07-12 23:57:16 +02:00
|
|
|
|
2020-08-31 18:33:09 +02:00
|
|
|
def test_remove_other_user_never_subscribed(self) -> None:
|
2021-12-24 14:29:40 +01:00
|
|
|
othello = self.example_user("othello")
|
|
|
|
realm = othello.realm
|
|
|
|
self.subscribe(othello, "test_stream")
|
2020-08-31 18:33:09 +02:00
|
|
|
stream = get_stream("test_stream", self.user_profile.realm)
|
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
bulk_remove_subscriptions(realm, [othello], [stream], acting_user=None)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_subscription_peer_remove("events[0]", events[0])
|
2020-08-31 18:33:09 +02:00
|
|
|
|
2018-04-02 00:21:21 +02:00
|
|
|
def test_do_delete_message_stream(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2020-06-11 12:12:12 +02:00
|
|
|
msg_id = self.send_stream_message(hamlet, "Verona")
|
|
|
|
msg_id_2 = self.send_stream_message(hamlet, "Verona")
|
2021-02-12 08:19:30 +01:00
|
|
|
messages = [Message.objects.get(id=msg_id), Message.objects.get(id=msg_id_2)]
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True) as events:
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, messages, acting_user=None)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="stream",
|
|
|
|
num_message_ids=2,
|
|
|
|
is_legacy=False,
|
|
|
|
)
|
2020-06-11 12:12:12 +02:00
|
|
|
|
|
|
|
def test_do_delete_message_stream_legacy(self) -> None:
|
|
|
|
"""
|
|
|
|
Test for legacy method of deleting messages which
|
|
|
|
sends an event per message to delete to the client.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2020-03-07 11:43:05 +01:00
|
|
|
msg_id = self.send_stream_message(hamlet, "Verona")
|
2020-06-11 12:12:12 +02:00
|
|
|
msg_id_2 = self.send_stream_message(hamlet, "Verona")
|
2021-02-12 08:19:30 +01:00
|
|
|
messages = [Message.objects.get(id=msg_id), Message.objects.get(id=msg_id_2)]
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(
|
|
|
|
state_change_expected=True, bulk_message_deletion=False, num_events=2
|
|
|
|
) as events:
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, messages, acting_user=None)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="stream",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=True,
|
|
|
|
)
|
2020-06-11 12:12:12 +02:00
|
|
|
|
2024-06-03 13:49:57 +02:00
|
|
|
def test_do_delete_first_message_in_stream(self) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
self.subscribe(hamlet, "test_stream1")
|
|
|
|
msg_id = self.send_stream_message(hamlet, "test_stream1")
|
|
|
|
msg_id_2 = self.send_stream_message(hamlet, "test_stream1")
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
with self.verify_action(state_change_expected=True, num_events=2) as events:
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, [message], acting_user=None)
|
2024-06-03 13:49:57 +02:00
|
|
|
|
|
|
|
check_stream_update("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["property"], "first_message_id")
|
|
|
|
self.assertEqual(events[0]["value"], msg_id_2)
|
|
|
|
|
|
|
|
check_delete_message(
|
|
|
|
"events[1]",
|
|
|
|
events[1],
|
|
|
|
message_type="stream",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=False,
|
|
|
|
)
|
|
|
|
|
2020-06-11 12:12:12 +02:00
|
|
|
def test_do_delete_message_personal(self) -> None:
|
|
|
|
msg_id = self.send_personal_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.user_profile,
|
|
|
|
"hello",
|
|
|
|
)
|
2017-05-14 21:14:26 +02:00
|
|
|
message = Message.objects.get(id=msg_id)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True) as events:
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, [message], acting_user=None)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="private",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=False,
|
|
|
|
)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2020-06-11 12:12:12 +02:00
|
|
|
def test_do_delete_message_personal_legacy(self) -> None:
|
2018-04-02 00:21:21 +02:00
|
|
|
msg_id = self.send_personal_message(
|
2020-03-07 11:43:05 +01:00
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.user_profile,
|
2018-04-02 00:21:21 +02:00
|
|
|
"hello",
|
|
|
|
)
|
|
|
|
message = Message.objects.get(id=msg_id)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True, bulk_message_deletion=False) as events:
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, [message], acting_user=None)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="private",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=True,
|
|
|
|
)
|
2018-04-02 00:21:21 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_delete_message_no_max_id(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("aaron")
|
2017-05-14 21:14:26 +02:00
|
|
|
# Delete all historical messages for this user
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-05-14 21:14:26 +02:00
|
|
|
UserMessage.objects.filter(user_profile=user_profile).delete()
|
2020-03-07 11:43:05 +01:00
|
|
|
msg_id = self.send_stream_message(user_profile, "Verona")
|
2017-05-14 21:14:26 +02:00
|
|
|
message = Message.objects.get(id=msg_id)
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True):
|
2024-07-17 05:11:28 +02:00
|
|
|
do_delete_messages(self.user_profile.realm, [message], acting_user=None)
|
2024-06-15 07:12:06 +02:00
|
|
|
result = fetch_initial_state_data(user_profile, realm=user_profile.realm)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(result["max_message_id"], -1)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2023-05-09 17:48:52 +02:00
|
|
|
    def test_do_delete_message_with_no_messages(self) -> None:
        # Deleting an empty list of messages is a no-op: no events are
        # generated and client-side state is unchanged.
        with self.verify_action(num_events=0, state_change_expected=False) as events:
            do_delete_messages(self.user_profile.realm, [], acting_user=None)
        self.assertEqual(events, [])
|
|
|
|
|
2018-05-04 22:57:36 +02:00
|
|
|
    def test_add_attachment(self) -> None:
        """Exercise the attachment event lifecycle: upload (attachment
        add), first reference in a message (attachment update), and
        deletion (attachment remove), checking upload_space_used at each
        step."""
        self.login("hamlet")
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        url = None

        def do_upload() -> None:
            # Uploads the file and captures its URL via the enclosing
            # scope, so the outer test can reference it in a message.
            nonlocal url
            result = self.client_post("/json/user_uploads", {"file": fp})

            response_dict = self.assert_json_success(result)
            # Both the modern "url" key and the legacy "uri" alias must be
            # present and equal.
            self.assertIn("uri", response_dict)
            self.assertIn("url", response_dict)
            url = response_dict["url"]
            self.assertEqual(response_dict["uri"], url)
            base = "/user_uploads/"
            self.assertEqual(base, url[: len(base)])

        with self.verify_action(num_events=1, state_change_expected=False) as events:
            do_upload()

        # "zulip!" is 6 bytes, hence upload_space_used == 6.
        check_attachment_add("events[0]", events[0])
        self.assertEqual(events[0]["upload_space_used"], 6)

        # Verify that the DB has the attachment marked as unclaimed
        entry = Attachment.objects.get(file_name="zulip.txt")
        self.assertEqual(entry.is_claimed(), False)

        hamlet = self.example_user("hamlet")
        self.subscribe(hamlet, "Denmark")
        assert url is not None
        body = f"First message ...[zulip.txt](http://{hamlet.realm.host}" + url + ")"
        # Sending a message that links the file claims the attachment,
        # producing an attachment update event alongside the message event.
        with self.verify_action(num_events=2) as events:
            self.send_stream_message(
                self.example_user("hamlet"),
                "Denmark",
                body,
                "test",
                skip_capture_on_commit_callbacks=True,
            )

        check_attachment_update("events[0]", events[0])
        self.assertEqual(events[0]["upload_space_used"], 6)

        # Now remove the attachment
        with self.verify_action(num_events=1, state_change_expected=False) as events:
            self.client_delete(f"/json/attachments/{entry.id}")

        check_attachment_remove("events[0]", events[0])
        self.assertEqual(events[0]["upload_space_used"], 0)
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2019-08-02 00:14:58 +02:00
|
|
|
    def test_notify_realm_export(self) -> None:
        """Check the realm_export events produced by a successful export
        (pending first, then with an export_url) and by deleting the
        export afterwards."""
        do_change_user_role(
            self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
        )
        self.login_user(self.user_profile)

        # Stub out the expensive export itself; a dummy tarball stands in
        # for the real export artifact.
        with mock.patch(
            "zerver.lib.export.do_export_realm",
            return_value=(create_dummy_file("test-export.tar.gz"), dict()),
        ):
            with (
                stdout_suppressed(),
                self.assertLogs(level="INFO") as info_logs,
                self.verify_action(state_change_expected=True, num_events=3) as events,
            ):
                self.client_post("/json/export/realm")
            self.assertTrue("INFO:root:Completed data export for zulip in" in info_logs.output[0])

        # We get two realm_export events for this action, where the first
        # is missing the export_url (because it's pending).
        check_realm_export(
            "events[0]",
            events[0],
            has_export_url=False,
            has_deleted_timestamp=False,
            has_failed_timestamp=False,
        )

        check_realm_export(
            "events[2]",
            events[2],
            has_export_url=True,
            has_deleted_timestamp=False,
            has_failed_timestamp=False,
        )

        # Now we check the deletion of the export.
        export_row = RealmExport.objects.first()
        assert export_row is not None
        export_row_id = export_row.id
        with self.verify_action(state_change_expected=False, num_events=1) as events:
            self.client_delete(f"/json/export/realm/{export_row_id}")

        check_realm_export(
            "events[0]",
            events[0],
            has_export_url=False,
            has_deleted_timestamp=True,
            has_failed_timestamp=False,
        )
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
    def test_notify_realm_export_on_failure(self) -> None:
        """Check the realm_export events when the export raises: a pending
        event followed by one with a failed timestamp."""
        do_change_user_role(
            self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
        )
        self.login_user(self.user_profile)

        # Force the export to raise so the failure path is exercised.
        with (
            mock.patch("zerver.lib.export.do_export_realm", side_effect=Exception("Some failure")),
            self.assertLogs(level="ERROR") as error_log,
        ):
            with (
                stdout_suppressed(),
                self.verify_action(state_change_expected=False, num_events=2) as events,
            ):
                self.client_post("/json/export/realm")

            # Log is of following format: "ERROR:root:Data export for zulip failed after 0.004499673843383789"
            # Where last floating number is time and will vary in each test hence the following assertion is
            # independent of time bit by not matching exact log but only part of it.
            self.assertTrue("ERROR:root:Data export for zulip failed after" in error_log.output[0])
            self.assertTrue("Some failure" in error_log.output[0])

        # We get two events for the export.
        check_realm_export(
            "events[0]",
            events[0],
            has_export_url=False,
            has_deleted_timestamp=False,
            has_failed_timestamp=False,
        )
        check_realm_export(
            "events[1]",
            events[1],
            has_export_url=False,
            has_deleted_timestamp=False,
            has_failed_timestamp=True,
        )
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2019-11-16 09:26:28 +01:00
|
|
|
def test_has_zoom_token(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_set_zoom_token(self.user_profile, {"access_token": "token"})
|
2021-02-12 08:20:45 +01:00
|
|
|
check_has_zoom_token("events[0]", events[0], value=True)
|
2019-11-16 09:26:28 +01:00
|
|
|
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action() as events:
|
|
|
|
do_set_zoom_token(self.user_profile, None)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_has_zoom_token("events[0]", events[0], value=False)
|
2020-06-27 18:06:51 +02:00
|
|
|
|
2024-02-27 15:51:17 +01:00
|
|
|
    def test_restart_event(self) -> None:
        # A server restart notice fans out as a single event and does not
        # change any client-side state.
        with self.verify_action(num_events=1, state_change_expected=False):
            send_restart_events()
|
2024-02-27 15:51:17 +01:00
|
|
|
|
2024-02-10 04:19:08 +01:00
|
|
|
def test_web_reload_client_event(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_is_old=False, num_events=0, state_change_expected=False):
|
|
|
|
send_web_reload_client_events()
|
2024-02-14 20:27:17 +01:00
|
|
|
with self.assertLogs(level="WARNING") as logs:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(client_is_old=True, num_events=1, state_change_expected=False):
|
|
|
|
send_web_reload_client_events()
|
2024-02-14 20:27:17 +01:00
|
|
|
self.assertEqual(
|
|
|
|
logs.output, ["WARNING:root:Got a web_reload_client event during apply_events"]
|
|
|
|
)
|
2021-04-18 11:28:39 +02:00
|
|
|
|
2021-07-24 19:51:25 +02:00
|
|
|
def test_display_setting_event_not_sent(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True, user_settings_object=True) as events:
|
|
|
|
do_change_user_setting(
|
2021-07-24 19:51:25 +02:00
|
|
|
self.user_profile,
|
2023-10-23 09:02:57 +02:00
|
|
|
"web_home_view",
|
2021-07-24 19:51:25 +02:00
|
|
|
"all_messages",
|
2021-09-08 13:25:50 +02:00
|
|
|
acting_user=self.user_profile,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2021-07-24 19:51:25 +02:00
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
|
|
|
|
def test_notification_setting_event_not_sent(self) -> None:
|
2024-04-30 01:07:06 +02:00
|
|
|
with self.verify_action(state_change_expected=True, user_settings_object=True) as events:
|
|
|
|
do_change_user_setting(
|
2021-07-24 19:51:25 +02:00
|
|
|
self.user_profile,
|
|
|
|
"enable_sounds",
|
|
|
|
False,
|
|
|
|
acting_user=self.user_profile,
|
2024-04-30 01:07:06 +02:00
|
|
|
)
|
2021-07-24 19:51:25 +02:00
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-27 18:06:51 +02:00
|
|
|
class RealmPropertyActionTest(BaseAction):
|
|
|
|
    def do_set_realm_property_test(self, name: str) -> None:
        """Drive do_set_realm_property through a sequence of values for
        the realm property ``name``, checking for each transition that a
        single RealmAuditLog row is written and that the realm update
        event has the expected schema."""
        # Boolean properties are toggled through this fixed sequence.
        bool_tests: list[bool] = [True, False, True]
        # Per-property value sequences; each property must have an entry
        # here (or be bool-typed) or the test aborts below.
        test_values: dict[str, Any] = dict(
            default_language=["es", "de", "en"],
            description=["Realm description", "New description"],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=["Zulip", "New Name"],
            waiting_period_threshold=[1000, 2000],
            invite_to_stream_policy=Realm.COMMON_POLICY_TYPES,
            wildcard_mention_policy=Realm.WILDCARD_MENTION_POLICY_TYPES,
            bot_creation_policy=Realm.BOT_CREATION_POLICY_TYPES,
            video_chat_provider=[
                Realm.VIDEO_CHAT_PROVIDERS["jitsi_meet"]["id"],
            ],
            jitsi_server_url=["https://jitsi1.example.com", "https://jitsi2.example.com"],
            giphy_rating=[
                Realm.GIPHY_RATING_OPTIONS["disabled"]["id"],
            ],
            default_code_block_language=["python", "javascript"],
            message_content_delete_limit_seconds=[1000, 1100, 1200],
            invite_to_realm_policy=Realm.INVITE_TO_REALM_POLICY_TYPES,
            move_messages_between_streams_policy=Realm.MOVE_MESSAGES_BETWEEN_STREAMS_POLICY_TYPES,
            add_custom_emoji_policy=Realm.COMMON_POLICY_TYPES,
            edit_topic_policy=Realm.EDIT_TOPIC_POLICY_TYPES,
            message_content_edit_limit_seconds=[1000, 1100, 1200, None],
            move_messages_within_stream_limit_seconds=[1000, 1100, 1200],
            move_messages_between_streams_limit_seconds=[1000, 1100, 1200],
        )

        vals = test_values.get(name)
        property_type = Realm.property_types[name]
        if property_type is bool:
            vals = bool_tests

        if vals is None:
            raise AssertionError(f"No test created for {name}")
        now = timezone_now()
        original_val = getattr(self.user_profile.realm, name)

        # Seed the property with the first value (outside verify_action);
        # an audit-log row is only expected if this actually changed it.
        do_set_realm_property(self.user_profile.realm, name, vals[0], acting_user=self.user_profile)

        if vals[0] != original_val:
            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                ).count(),
                1,
            )
        # Walk the remaining values; old_value is the previous entry.
        for count, val in enumerate(vals[1:]):
            now = timezone_now()
            state_change_expected = True
            old_value = vals[count]
            num_events = 1

            with self.verify_action(
                state_change_expected=state_change_expected, num_events=num_events
            ) as events:
                do_set_realm_property(
                    self.user_profile.realm,
                    name,
                    val,
                    acting_user=self.user_profile,
                )

            # Exactly one audit-log row recording the old/new pair.
            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_value,
                        RealmAuditLog.NEW_VALUE: val,
                        "property": name,
                    },
                ).count(),
                1,
            )

            # These properties are sent as a grouped dict update; all
            # others use the scalar realm update format.
            if name in [
                "allow_message_editing",
                "edit_topic_policy",
                "message_content_edit_limit_seconds",
            ]:
                check_realm_update_dict("events[0]", events[0])
            else:
                check_realm_update("events[0]", events[0], name)
|
2020-06-27 18:06:51 +02:00
|
|
|
|
2023-08-09 15:06:56 +02:00
|
|
|
    def do_set_realm_permission_group_setting_test(self, setting_name: str) -> None:
        """Cycle the group-valued realm permission ``setting_name``
        through every permitted system group, checking the realm update
        event and the RealmAuditLog row for each change."""
        all_system_user_groups = NamedUserGroup.objects.filter(
            realm=self.user_profile.realm,
            is_system_group=True,
        )

        setting_permission_configuration = Realm.REALM_PERMISSION_GROUP_SETTINGS[setting_name]

        default_group_name = setting_permission_configuration.default_group_name
        default_group = all_system_user_groups.get(name=default_group_name)
        old_group_id = default_group.id

        now = timezone_now()

        # Reset the setting to its configured default before cycling.
        do_change_realm_permission_group_setting(
            self.user_profile.realm,
            setting_name,
            default_group,
            acting_user=self.user_profile,
        )

        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )
        for user_group in all_system_user_groups:
            if user_group.name == default_group_name:
                continue

            # Skip groups the setting's configuration does not allow.
            if (
                not setting_permission_configuration.allow_internet_group
                and user_group.name == SystemGroups.EVERYONE_ON_INTERNET
            ):
                continue

            if (
                not setting_permission_configuration.allow_everyone_group
                and user_group.name == SystemGroups.EVERYONE
            ):
                continue

            if (
                not setting_permission_configuration.allow_owners_group
                and user_group.name == SystemGroups.OWNERS
            ):
                continue

            if (
                not setting_permission_configuration.allow_nobody_group
                and user_group.name == SystemGroups.NOBODY
            ):
                continue

            now = timezone_now()
            state_change_expected = True
            num_events = 1
            new_group_id = user_group.id

            with self.verify_action(
                state_change_expected=state_change_expected, num_events=num_events
            ) as events:
                do_change_realm_permission_group_setting(
                    self.user_profile.realm,
                    setting_name,
                    user_group,
                    acting_user=self.user_profile,
                )

            # One audit-log row records the old -> new group ids.
            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_group_id,
                        RealmAuditLog.NEW_VALUE: new_group_id,
                        "property": setting_name,
                    },
                ).count(),
                1,
            )
            check_realm_update_dict("events[0]", events[0])

            # The new group becomes the "old" value for the next iteration.
            old_group_id = new_group_id
|
|
|
|
|
2024-05-23 16:21:25 +02:00
|
|
|
    def do_set_realm_permission_group_setting_to_anonymous_groups_test(
        self, setting_name: str
    ) -> None:
        """Exercise ``setting_name`` with anonymous (non-named) group
        values: default system group -> anonymous group -> modified
        anonymous group -> back to the default, checking the realm update
        event payload and RealmAuditLog extra_data at each step."""
        realm = self.user_profile.realm
        system_user_groups_dict = get_role_based_system_groups_dict(
            realm,
        )

        setting_permission_configuration = Realm.REALM_PERMISSION_GROUP_SETTINGS[setting_name]

        default_group_name = setting_permission_configuration.default_group_name
        default_group = system_user_groups_dict[default_group_name]

        now = timezone_now()

        # Start from the configured default system group.
        do_change_realm_permission_group_setting(
            realm,
            setting_name,
            default_group,
            acting_user=self.user_profile,
        )

        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )

        othello = self.example_user("othello")
        admins_group = system_user_groups_dict[SystemGroups.ADMINISTRATORS]

        # Anonymous group: direct member othello plus the admins subgroup.
        setting_group = self.create_or_update_anonymous_group_for_setting([othello], [admins_group])
        now = timezone_now()

        with self.verify_action(state_change_expected=True, num_events=1) as events:
            do_change_realm_permission_group_setting(
                realm,
                setting_name,
                setting_group,
                acting_user=self.user_profile,
            )

        # Audit log stores the anonymous group as a members/subgroups dict.
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
                extra_data={
                    RealmAuditLog.OLD_VALUE: default_group.id,
                    RealmAuditLog.NEW_VALUE: {
                        "direct_members": [othello.id],
                        "direct_subgroups": [admins_group.id],
                    },
                    "property": setting_name,
                },
            ).count(),
            1,
        )
        check_realm_update_dict("events[0]", events[0])
        self.assertEqual(
            events[0]["data"][setting_name],
            AnonymousSettingGroupDict(
                direct_members=[othello.id], direct_subgroups=[admins_group.id]
            ),
        )

        old_setting_api_value = get_group_setting_value_for_api(setting_group)
        moderators_group = system_user_groups_dict[SystemGroups.MODERATORS]
        # Mutate the same anonymous group to new members/subgroups.
        setting_group = self.create_or_update_anonymous_group_for_setting(
            [self.user_profile], [moderators_group], existing_setting_group=setting_group
        )

        # state_change_expected is False here because the initial state will
        # also have the new setting value due to the setting group already
        # being modified with the new members.
        with self.verify_action(state_change_expected=False, num_events=1) as events:
            do_change_realm_permission_group_setting(
                realm,
                setting_name,
                setting_group,
                old_setting_api_value=old_setting_api_value,
                acting_user=self.user_profile,
            )

        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
                extra_data={
                    RealmAuditLog.OLD_VALUE: {
                        "direct_members": [othello.id],
                        "direct_subgroups": [admins_group.id],
                    },
                    RealmAuditLog.NEW_VALUE: {
                        "direct_members": [self.user_profile.id],
                        "direct_subgroups": [moderators_group.id],
                    },
                    "property": setting_name,
                },
            ).count(),
            1,
        )
        check_realm_update_dict("events[0]", events[0])
        self.assertEqual(
            events[0]["data"][setting_name],
            AnonymousSettingGroupDict(
                direct_members=[self.user_profile.id], direct_subgroups=[moderators_group.id]
            ),
        )

        # Finally, restore the default system group; the event payload is
        # a plain group id again.
        with self.verify_action(state_change_expected=True, num_events=1) as events:
            do_change_realm_permission_group_setting(
                realm,
                setting_name,
                default_group,
                acting_user=self.user_profile,
            )

        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
                extra_data={
                    RealmAuditLog.OLD_VALUE: {
                        "direct_members": [self.user_profile.id],
                        "direct_subgroups": [moderators_group.id],
                    },
                    RealmAuditLog.NEW_VALUE: default_group.id,
                    "property": setting_name,
                },
            ).count(),
            1,
        )
        check_realm_update_dict("events[0]", events[0])
        self.assertEqual(events[0]["data"][setting_name], default_group.id)
|
|
|
|
|
2020-06-27 18:06:51 +02:00
|
|
|
def test_change_realm_property(self) -> None:
|
|
|
|
for prop in Realm.property_types:
|
|
|
|
with self.settings(SEND_DIGEST_EMAILS=True):
|
|
|
|
self.do_set_realm_property_test(prop)
|
2020-06-27 19:04:32 +02:00
|
|
|
|
2023-08-09 15:06:56 +02:00
|
|
|
for prop in Realm.REALM_PERMISSION_GROUP_SETTINGS:
|
|
|
|
with self.settings(SEND_DIGEST_EMAILS=True):
|
|
|
|
self.do_set_realm_permission_group_setting_test(prop)
|
|
|
|
|
2024-05-23 16:21:25 +02:00
|
|
|
for prop in Realm.REALM_PERMISSION_GROUP_SETTINGS_WITH_NEW_API_FORMAT:
|
2024-07-23 15:56:46 +02:00
|
|
|
if Realm.REALM_PERMISSION_GROUP_SETTINGS[prop].require_system_group:
|
|
|
|
# Anonymous system groups aren't relevant when
|
|
|
|
# restricted to system groups.
|
2024-07-01 16:48:16 +02:00
|
|
|
continue
|
2024-05-23 16:21:25 +02:00
|
|
|
with self.settings(SEND_DIGEST_EMAILs=True):
|
|
|
|
self.do_set_realm_permission_group_setting_to_anonymous_groups_test(prop)
|
|
|
|
|
2021-07-21 13:40:46 +02:00
|
|
|
    def do_set_realm_user_default_setting_test(self, name: str) -> None:
        """Verify that changing the realm-level default for user setting
        ``name`` creates exactly one REALM_DEFAULT_USER_SETTINGS_CHANGED
        audit-log row per change and sends a ``realm_user_settings_defaults``
        update event.

        Each setting must have an entry in ``test_values`` below (or be a
        bool, which uses ``bool_tests``); otherwise this raises
        AssertionError so new settings can't silently go untested.
        """
        # For boolean settings we toggle through three values so every
        # transition direction is exercised.
        bool_tests: list[bool] = [True, False, True]
        test_values: dict[str, Any] = dict(
            web_font_size_px=[UserProfile.WEB_FONT_SIZE_PX_COMPACT],
            web_line_height_percent=[UserProfile.WEB_LINE_HEIGHT_PERCENT_COMPACT],
            color_scheme=UserProfile.COLOR_SCHEME_CHOICES,
            web_home_view=["recent_topics", "inbox", "all_messages"],
            emojiset=[emojiset["key"] for emojiset in RealmUserDefault.emojiset_choices()],
            demote_inactive_streams=UserProfile.DEMOTE_STREAMS_CHOICES,
            web_mark_read_on_scroll_policy=UserProfile.WEB_MARK_READ_ON_SCROLL_POLICY_CHOICES,
            web_channel_default_view=UserProfile.WEB_CHANNEL_DEFAULT_VIEW_CHOICES,
            user_list_style=UserProfile.USER_LIST_STYLE_CHOICES,
            web_animate_image_previews=["always", "on_hover", "never"],
            web_stream_unreads_count_display_policy=UserProfile.WEB_STREAM_UNREADS_COUNT_DISPLAY_POLICY_CHOICES,
            desktop_icon_count_display=UserProfile.DESKTOP_ICON_COUNT_DISPLAY_CHOICES,
            notification_sound=["zulip", "ding"],
            email_notifications_batching_period_seconds=[120, 300],
            email_address_visibility=UserProfile.EMAIL_ADDRESS_VISIBILITY_TYPES,
            realm_name_in_email_notifications_policy=UserProfile.REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_CHOICES,
            automatically_follow_topics_policy=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES,
            automatically_unmute_topics_in_muted_streams_policy=UserProfile.AUTOMATICALLY_CHANGE_VISIBILITY_POLICY_CHOICES,
        )

        vals = test_values.get(name)

        property_type = RealmUserDefault.property_types[name]

        if property_type is bool:
            vals = bool_tests

        if vals is None:
            # Force authors of new settings to add explicit test values.
            raise AssertionError(f"No test created for {name}")

        realm_user_default = RealmUserDefault.objects.get(realm=self.user_profile.realm)
        # The first change is applied outside verify_action purely to
        # establish a known starting value; we only check its audit log.
        now = timezone_now()
        do_set_realm_user_default_setting(
            realm_user_default, name, vals[0], acting_user=self.user_profile
        )
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=AuditLogEventType.REALM_DEFAULT_USER_SETTINGS_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )
        # Subsequent changes are made inside verify_action so both the
        # event payload and the audit-log extra_data are validated.
        for count, val in enumerate(vals[1:]):
            now = timezone_now()
            state_change_expected = True
            with self.verify_action(state_change_expected=state_change_expected) as events:
                do_set_realm_user_default_setting(
                    realm_user_default,
                    name,
                    val,
                    acting_user=self.user_profile,
                )

            # vals[count] is the previous element of vals[1:][count].
            old_value = vals[count]
            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=AuditLogEventType.REALM_DEFAULT_USER_SETTINGS_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_value,
                        RealmAuditLog.NEW_VALUE: val,
                        "property": name,
                    },
                ).count(),
                1,
            )
            check_realm_default_update("events[0]", events[0], name)
|
|
|
def test_change_realm_user_default_setting(self) -> None:
|
|
|
|
for prop in RealmUserDefault.property_types:
|
2021-09-17 18:11:37 +02:00
|
|
|
if prop == "default_language":
|
2021-07-21 13:40:46 +02:00
|
|
|
continue
|
|
|
|
self.do_set_realm_user_default_setting_test(prop)
|
|
|
|
|
2023-11-23 22:07:41 +01:00
|
|
|
    def test_do_set_push_notifications_enabled_end_timestamp(self) -> None:
        """Verify the realm update event and audit-log entry produced when
        changing the realm's push_notifications_enabled_end_timestamp.
        """
        realm = self.user_profile.realm

        # Default value of 'push_notifications_enabled_end_timestamp' is None.
        # Verify that no event is sent when the new value is the same as existing value.
        new_timestamp = None
        with self.verify_action(state_change_expected=False, num_events=0):
            do_set_push_notifications_enabled_end_timestamp(
                realm=realm,
                value=new_timestamp,
                acting_user=None,
            )

        old_datetime = timezone_now() - timedelta(days=3)
        old_timestamp = datetime_to_timestamp(old_datetime)
        now = timezone_now()
        timestamp_now = datetime_to_timestamp(now)

        # Seed a non-None old value directly in the DB so the change below
        # is a real transition (old_timestamp -> timestamp_now).
        realm.push_notifications_enabled_end_timestamp = old_datetime
        realm.save(update_fields=["push_notifications_enabled_end_timestamp"])

        with self.verify_action(
            state_change_expected=True,
            num_events=1,
        ) as events:
            do_set_push_notifications_enabled_end_timestamp(
                realm=realm,
                value=timestamp_now,
                acting_user=None,
            )
        self.assertEqual(events[0]["type"], "realm")
        self.assertEqual(events[0]["op"], "update")
        self.assertEqual(events[0]["property"], "push_notifications_enabled_end_timestamp")
        self.assertEqual(events[0]["value"], timestamp_now)

        # The change must also be recorded exactly once in the audit log,
        # with both old and new values in extra_data.
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=realm,
                event_type=AuditLogEventType.REALM_PROPERTY_CHANGED,
                acting_user=None,
                extra_data={
                    RealmAuditLog.OLD_VALUE: old_timestamp,
                    RealmAuditLog.NEW_VALUE: timestamp_now,
                    "property": "push_notifications_enabled_end_timestamp",
                },
            ).count(),
            1,
        )
2020-06-27 19:04:32 +02:00
|
|
|
class UserDisplayActionTest(BaseAction):
    """Tests for events generated by changes to per-user display and
    notification-adjacent settings, plus stream creation event visibility.
    """

    def do_change_user_settings_test(self, setting_name: str) -> None:
        """Test updating each setting in UserProfile.property_types dict."""

        # Explicit value sequences for non-boolean settings; booleans get
        # a toggle sequence computed below.
        test_changes: dict[str, Any] = dict(
            emojiset=["twitter"],
            default_language=["es", "de", "en"],
            web_home_view=["all_messages", "inbox", "recent_topics"],
            demote_inactive_streams=[2, 3, 1],
            web_mark_read_on_scroll_policy=[2, 3, 1],
            web_channel_default_view=[2, 1],
            user_list_style=[1, 2, 3],
            web_animate_image_previews=["always", "on_hover", "never"],
            web_stream_unreads_count_display_policy=[1, 2, 3],
            web_font_size_px=[12, 16, 18],
            web_line_height_percent=[105, 120, 160],
            color_scheme=[2, 3, 1],
            email_address_visibility=[5, 4, 1, 2, 3],
        )

        user_settings_object = True
        num_events = 1

        legacy_setting = setting_name in UserProfile.display_settings_legacy
        if legacy_setting:
            # Two events:`update_display_settings` and `user_settings`.
            # `update_display_settings` is only sent for settings added
            # before feature level 89 which introduced `user_settings`.
            # We send both events so that older clients that do not
            # rely on `user_settings` don't break.
            num_events = 2
            user_settings_object = False

        values = test_changes.get(setting_name)

        property_type = UserProfile.property_types[setting_name]
        if property_type is bool:
            # Start the toggle sequence from the opposite of the current
            # value so every change is a real transition.
            if getattr(self.user_profile, setting_name) is False:
                values = [True, False, True]
            else:
                values = [False, True, False]

        if values is None:
            raise AssertionError(f"No test created for {setting_name}")

        for value in values:
            if setting_name == "email_address_visibility":
                # When "email_address_visibility" setting is changed, there is at least
                # one event with type "user_settings" sent to the modified user itself.
                num_events = 1

                old_value = getattr(self.user_profile, setting_name)
                if UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE in [old_value, value]:
                    # In case when either the old value or new value of setting is
                    # UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE, "email" field of
                    # UserProfile object is updated and thus two additional events, for
                    # changing email and avatar_url field, are sent.
                    num_events = 3

            with self.verify_action(
                num_events=num_events, user_settings_object=user_settings_object
            ) as events:
                do_change_user_setting(
                    self.user_profile,
                    setting_name,
                    value,
                    acting_user=self.user_profile,
                )

            check_user_settings_update("events[0]", events[0])
            if legacy_setting:
                # Only settings added before feature level 89
                # generate this event.
                self.assert_length(events, 2)
                check_update_display_settings("events[1]", events[1])

    def test_change_user_settings(self) -> None:
        """Run do_change_user_settings_test for every non-notification setting."""
        for prop in UserProfile.property_types:
            # Notification settings have a separate test suite, which
            # handles their separate legacy event type.
            if prop not in UserProfile.notification_setting_types:
                self.do_change_user_settings_test(prop)

    def test_set_user_timezone(self) -> None:
        """Changing the timezone sends three events: user_settings,
        the legacy update_display_settings, and a realm_user update
        (timezone is shown to other users).
        """
        values = ["America/Denver", "Pacific/Pago_Pago", "Pacific/Galapagos", ""]
        num_events = 3

        for value in values:
            with self.verify_action(num_events=num_events) as events:
                do_change_user_setting(
                    self.user_profile,
                    "timezone",
                    value,
                    acting_user=self.user_profile,
                )

            check_user_settings_update("events[0]", events[0])
            check_update_display_settings("events[1]", events[1])
            check_realm_user_update("events[2]", events[2], "timezone")

    def test_delivery_email_events_on_changing_email_address_visibility(self) -> None:
        """A moderator should see (or stop seeing) another user's
        delivery_email via realm_user update events as that user's
        email_address_visibility moves between MODERATORS and ADMINS.
        """
        cordelia = self.example_user("cordelia")
        # Make the acting user a moderator so MODERATORS visibility
        # includes them.
        do_change_user_role(self.user_profile, UserProfile.ROLE_MODERATOR, acting_user=None)
        do_change_user_setting(
            cordelia,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
            acting_user=None,
        )

        # Tightening visibility to ADMINS: moderator loses access (None).
        with self.verify_action(user_settings_object=True) as events:
            do_change_user_setting(
                cordelia,
                "email_address_visibility",
                UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
                acting_user=self.user_profile,
            )
        check_realm_user_update("events[0]", events[0], "delivery_email")
        self.assertIsNone(events[0]["person"]["delivery_email"])

        # Loosening back to MODERATORS: moderator regains access.
        with self.verify_action(user_settings_object=True) as events:
            do_change_user_setting(
                cordelia,
                "email_address_visibility",
                UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
                acting_user=self.user_profile,
            )
        check_realm_user_update("events[0]", events[0], "delivery_email")
        self.assertEqual(events[0]["person"]["delivery_email"], cordelia.delivery_email)

    def test_stream_creation_events(self) -> None:
        """Which users receive stream creation events depends on the
        stream's privacy and the receiving user's role.
        """
        with self.verify_action(num_events=2) as events:
            self.subscribe(self.example_user("hamlet"), "Test stream")
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        # Check that guest user does not receive stream creation event of public
        # stream.
        self.user_profile = self.example_user("polonius")
        with self.verify_action(num_events=0, state_change_expected=False) as events:
            self.subscribe(self.example_user("hamlet"), "Test stream 2")

        # Check that guest user receives stream creation event for web-public stream.
        with self.verify_action(num_events=2, state_change_expected=True) as events:
            self.subscribe(
                self.example_user("hamlet"), "Web public test stream", is_web_public=True
            )
        check_stream_create("events[0]", events[0])
        check_subscription_peer_add("events[1]", events[1])

        self.user_profile = self.example_user("hamlet")
        with self.verify_action(num_events=2) as events:
            self.subscribe(self.example_user("hamlet"), "Private test stream", invite_only=True)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        # A non-admin user who is not subscribed to the private stream does not
        # receive stream creation event.
        self.user_profile = self.example_user("othello")
        with self.verify_action(num_events=0, state_change_expected=False) as events:
            self.subscribe(self.example_user("hamlet"), "Private test stream 2", invite_only=True)

        # An admin user who is not subscribed to the private stream also
        # receives stream creation event.
        self.user_profile = self.example_user("iago")
        with self.verify_action(num_events=2) as events:
            self.subscribe(self.example_user("hamlet"), "Private test stream 3", invite_only=True)
        check_stream_create("events[0]", events[0])
        check_subscription_peer_add("events[1]", events[1])
|
2020-06-28 13:20:01 +02:00
|
|
|
class SubscribeActionTest(BaseAction):
|
|
|
|
    def test_subscribe_events(self) -> None:
        # Run the full subscription event flow with subscriber lists
        # included in the fetched state.
        self.do_test_subscribe_events(include_subscribers=True)
|
|
|
|
    def test_subscribe_events_no_include_subscribers(self) -> None:
        # Same flow as test_subscribe_events, but with subscriber lists
        # excluded from the fetched state.
        self.do_test_subscribe_events(include_subscribers=False)
|
|
|
|
    def do_test_subscribe_events(self, include_subscribers: bool) -> None:
        """Walk through the full life cycle of stream subscription events
        (add, peer_add, remove, peer_remove, stream property updates, and
        guest visibility rules), verifying the event payloads and, when
        ``include_subscribers`` is set, the subscriber-list state changes.
        """
        # Subscribe to a totally new stream, so it's just Hamlet on it
        with self.verify_action(
            event_types=["subscription"], include_subscribers=include_subscribers
        ) as events:
            self.subscribe(self.example_user("hamlet"), "test_stream")
        check_subscription_add("events[0]", events[0])

        # Add another user to that totally new stream
        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            self.subscribe(self.example_user("othello"), "test_stream")
        check_subscription_peer_add("events[0]", events[0])

        hamlet = self.example_user("hamlet")
        iago = self.example_user("iago")
        othello = self.example_user("othello")
        realm = othello.realm
        stream = get_stream("test_stream", self.user_profile.realm)

        # Now remove the first user, to test the normal unsubscribe flow and
        # 'peer_remove' event for subscribed streams.
        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_remove_subscriptions(realm, [othello], [stream], acting_user=None)
        check_subscription_peer_remove("events[0]", events[0])

        # Now remove the user himself, to test the 'remove' event flow
        with self.verify_action(
            include_subscribers=include_subscribers, include_streams=False, num_events=1
        ) as events:
            bulk_remove_subscriptions(realm, [hamlet], [stream], acting_user=None)
        check_subscription_remove("events[0]", events[0])
        self.assert_length(events[0]["subscriptions"], 1)
        self.assertEqual(
            events[0]["subscriptions"][0]["name"],
            "test_stream",
        )

        # Subscribe other user to test 'peer_add' event flow for unsubscribed stream.
        with self.verify_action(
            event_types=["subscription"],
            include_subscribers=include_subscribers,
            state_change_expected=include_subscribers,
        ) as events:
            self.subscribe(self.example_user("iago"), "test_stream")
        check_subscription_peer_add("events[0]", events[0])

        # Remove the user to test 'peer_remove' event flow for unsubscribed stream.
        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_remove_subscriptions(realm, [iago], [stream], acting_user=None)
        check_subscription_peer_remove("events[0]", events[0])

        # Now resubscribe a user, to make sure that works on a vacated stream
        with self.verify_action(
            include_subscribers=include_subscribers, include_streams=False, num_events=1
        ) as events:
            self.subscribe(self.example_user("hamlet"), "test_stream")
        check_subscription_add("events[0]", events[0])

        # Description changes produce a stream update plus a notification
        # message in the stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            do_change_stream_description(
                stream, "new description", acting_user=self.example_user("hamlet")
            )
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream web-public
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            do_change_stream_permission(
                stream,
                invite_only=False,
                history_public_to_subscribers=True,
                is_web_public=True,
                acting_user=self.example_user("hamlet"),
            )
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream private
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            do_change_stream_permission(
                stream,
                invite_only=True,
                history_public_to_subscribers=True,
                is_web_public=False,
                acting_user=self.example_user("hamlet"),
            )
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream public
        # From cordelia's perspective (not previously able to see the
        # private stream), this appears as stream creation + peer_add.
        self.user_profile = self.example_user("cordelia")
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            do_change_stream_permission(
                stream,
                invite_only=False,
                history_public_to_subscribers=True,
                is_web_public=False,
                acting_user=self.example_user("hamlet"),
            )
        check_stream_create("events[0]", events[0])
        check_subscription_peer_add("events[1]", events[1])

        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("hamlet"),
        )
        self.subscribe(self.example_user("cordelia"), stream.name)
        self.unsubscribe(self.example_user("cordelia"), stream.name)
        with self.verify_action(
            include_subscribers=include_subscribers, num_events=2, include_streams=False
        ) as events:
            do_change_stream_permission(
                stream,
                invite_only=False,
                history_public_to_subscribers=True,
                is_web_public=False,
                acting_user=self.example_user("hamlet"),
            )

        self.user_profile = self.example_user("hamlet")
        # Update stream stream_post_policy property
        with self.verify_action(include_subscribers=include_subscribers, num_events=3) as events:
            do_change_stream_post_policy(
                stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=self.example_user("hamlet")
            )
        check_stream_update("events[0]", events[0])
        check_message("events[2]", events[2])

        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            do_change_stream_message_retention_days(stream, self.example_user("hamlet"), -1)
        check_stream_update("events[0]", events[0])

        moderators_group = NamedUserGroup.objects.get(
            name=SystemGroups.MODERATORS,
            is_system_group=True,
            realm=self.user_profile.realm,
        )
        with self.verify_action(include_subscribers=include_subscribers, num_events=1) as events:
            do_change_stream_group_based_setting(
                stream,
                "can_remove_subscribers_group",
                moderators_group,
                acting_user=self.example_user("hamlet"),
            )
        check_stream_update("events[0]", events[0])

        # Subscribe to a totally new invite-only stream, so it's just Hamlet on it
        stream = self.make_stream("private", self.user_profile.realm, invite_only=True)
        stream.message_retention_days = 10
        stream.save()

        user_profile = self.example_user("hamlet")
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            bulk_add_subscriptions(user_profile.realm, [stream], [user_profile], acting_user=None)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        self.assertEqual(
            events[0]["streams"][0]["message_retention_days"],
            10,
        )
        self.assertIsNone(events[0]["streams"][0]["stream_weekly_traffic"])

        # Add this user to make sure the stream is not deleted on unsubscribing hamlet.
        self.subscribe(self.example_user("iago"), stream.name)

        # Unsubscribe from invite-only stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            bulk_remove_subscriptions(realm, [hamlet], [stream], acting_user=None)
        check_subscription_remove("events[0]", events[0])
        check_stream_delete("events[1]", events[1])

        stream.invite_only = False
        stream.save()

        # Test events for guest user.
        self.user_profile = self.example_user("polonius")

        # Guest user does not receive peer_add/peer_remove events for unsubscribed
        # public streams.
        with self.verify_action(
            include_subscribers=include_subscribers, num_events=0, state_change_expected=False
        ) as events:
            bulk_add_subscriptions(
                user_profile.realm, [stream], [self.example_user("othello")], acting_user=None
            )

        with self.verify_action(
            include_subscribers=include_subscribers, num_events=0, state_change_expected=False
        ) as events:
            bulk_remove_subscriptions(
                user_profile.realm, [self.example_user("othello")], [stream], acting_user=None
            )

        # Subscribe as a guest to a public stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            bulk_add_subscriptions(
                user_profile.realm, [stream], [self.user_profile], acting_user=None
            )
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_add_subscriptions(
                user_profile.realm, [stream], [self.example_user("othello")], acting_user=None
            )
        check_subscription_peer_add("events[0]", events[0])

        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_remove_subscriptions(
                user_profile.realm, [self.example_user("othello")], [stream], acting_user=None
            )
        check_subscription_peer_remove("events[0]", events[0])

        # Unsubscribe guest from public stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=2) as events:
            bulk_remove_subscriptions(realm, [self.user_profile], [stream], acting_user=None)
        check_subscription_remove("events[0]", events[0])
        check_stream_delete("events[1]", events[1])

        stream = self.make_stream("web-public-stream", self.user_profile.realm, is_web_public=True)
        # Guest user receives peer_add/peer_remove events for unsubscribed
        # web-public streams.
        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_add_subscriptions(
                user_profile.realm, [stream], [self.example_user("othello")], acting_user=None
            )

        with self.verify_action(
            include_subscribers=include_subscribers, state_change_expected=include_subscribers
        ) as events:
            bulk_remove_subscriptions(
                user_profile.realm, [self.example_user("othello")], [stream], acting_user=None
            )

        # Subscribe as a guest to web-public stream. Guest does not receive stream creation
        # event for web-public stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=1) as events:
            bulk_add_subscriptions(
                user_profile.realm, [stream], [self.user_profile], acting_user=None
            )
        check_subscription_add("events[0]", events[0])

        # Unsubscribe as a guest to web-public stream. Guest does not receive stream deletion
        # event for web-public stream.
        with self.verify_action(include_subscribers=include_subscribers, num_events=1) as events:
            bulk_remove_subscriptions(
                user_profile.realm, [self.user_profile], [stream], acting_user=None
            )
        check_subscription_remove("events[0]", events[0])
2023-10-18 05:24:50 +02:00
|
|
|
def test_user_access_events_on_changing_subscriptions(self) -> None:
    """Verify the events sent to a guest with restricted user access when
    other users are bulk-(un)subscribed to a stream the guest can see.

    The guest (polonius) should receive a realm_user add/remove event for
    users who become (in)accessible to them, plus the corresponding
    subscription peer_add/peer_remove events.
    """
    self.set_up_db_for_testing_user_access()
    self.user_profile = self.example_user("polonius")
    realm = self.user_profile.realm
    stream = get_stream("test_stream1", realm)
    othello = self.example_user("othello")
    iago = self.example_user("iago")

    with self.verify_action(num_events=2) as events:
        bulk_add_subscriptions(realm, [stream], [othello, iago], acting_user=None)
    # Othello becomes newly accessible to the guest, so a realm_user add
    # event precedes the peer_add; iago was already accessible.
    check_realm_user_add("events[0]", events[0])
    self.assertEqual(events[0]["person"]["user_id"], othello.id)
    check_subscription_peer_add("events[1]", events[1])
    self.assertEqual(set(events[1]["user_ids"]), {iago.id, othello.id})

    with self.verify_action(num_events=2) as events:
        bulk_remove_subscriptions(realm, [othello, iago], [stream], acting_user=None)
    check_subscription_peer_remove("events[0]", events[0])
    self.assertEqual(set(events[0]["user_ids"]), {iago.id, othello.id})
    # Othello is no longer accessible to the guest after unsubscribing.
    check_realm_user_remove("events[1]", events[1])
    self.assertEqual(events[1]["person"]["user_id"], othello.id)

    # Check the state change works correctly when user_list_incomplete
    # is set to True.
    self.subscribe(othello, "test_stream1")
    with self.verify_action(num_events=2, user_list_incomplete=True) as events:
        bulk_remove_subscriptions(realm, [othello], [stream], acting_user=None)
    check_subscription_peer_remove("events[0]", events[0])
    self.assertEqual(set(events[0]["user_ids"]), {othello.id})
    check_realm_user_remove("events[1]", events[1])
    self.assertEqual(events[1]["person"]["user_id"], othello.id)
|
|
|
|
|
2023-10-18 05:24:50 +02:00
|
|
|
def test_user_access_events_on_changing_subscriptions_for_guests(self) -> None:
    """Verify the events a restricted-access guest receives when the guest
    themselves is bulk-(un)subscribed to a stream.

    Subscribing the guest should deliver stream create + subscription add
    events plus realm_user add events for newly-accessible subscribers;
    unsubscribing reverses these.
    """
    self.set_up_db_for_testing_user_access()
    polonius = self.example_user("polonius")
    othello = self.example_user("othello")
    self.user_profile = polonius
    realm = self.user_profile.realm
    stream = self.subscribe(self.example_user("othello"), "new_stream")
    with self.verify_action(num_events=3) as events:
        bulk_add_subscriptions(
            realm, [stream], [polonius, self.example_user("iago")], acting_user=None
        )
    check_stream_create("events[0]", events[0])
    check_subscription_add("events[1]", events[1])
    # Othello (an existing subscriber) becomes accessible to the guest.
    check_realm_user_add("events[2]", events[2])
    self.assertEqual(events[2]["person"]["user_id"], othello.id)

    with self.verify_action(num_events=3) as events:
        bulk_remove_subscriptions(
            realm, [polonius, self.example_user("iago")], [stream], acting_user=None
        )
    check_subscription_remove("events[0]", events[0])
    check_stream_delete("events[1]", events[1])
    check_realm_user_remove("events[2]", events[2])
    self.assertEqual(events[2]["person"]["user_id"], othello.id)

    # Check the state change works correctly when user_list_incomplete
    # is set to True.
    stream = self.subscribe(self.example_user("othello"), "new_stream")
    self.subscribe(polonius, "new_stream")
    with self.verify_action(num_events=3, user_list_incomplete=True) as events:
        bulk_remove_subscriptions(realm, [polonius], [stream], acting_user=None)
    check_subscription_remove("events[0]", events[0])
    check_stream_delete("events[1]", events[1])
    check_realm_user_remove("events[2]", events[2])
    self.assertEqual(events[2]["person"]["user_id"], othello.id)
|
|
|
|
|
2021-07-24 06:56:56 +02:00
|
|
|
|
|
|
|
class DraftActionTest(BaseAction):
    """Tests for drafts/add, drafts/update and drafts/remove events."""

    def do_enable_drafts_synchronization(self, user_profile: UserProfile) -> None:
        # Draft events are only sent to clients when the user has opted
        # into draft synchronization, so each test enables it first.
        do_change_user_setting(
            user_profile, "enable_drafts_synchronization", True, acting_user=self.user_profile
        )

    def _make_sample_draft(self) -> DraftData:
        # Shared fixture: a minimal draft payload used by the tests below.
        return DraftData(
            type="",
            to=[],
            topic="",
            content="Sample draft content",
            timestamp=1596820995,
        )

    def test_draft_create_event(self) -> None:
        """Creating a draft emits a drafts/add event."""
        self.do_enable_drafts_synchronization(self.user_profile)
        dummy_draft = self._make_sample_draft()
        with self.verify_action() as events:
            do_create_drafts([dummy_draft], self.user_profile)
        check_draft_add("events[0]", events[0])

    def test_draft_edit_event(self) -> None:
        """Editing an existing draft emits a drafts/update event."""
        self.do_enable_drafts_synchronization(self.user_profile)
        dummy_draft = self._make_sample_draft()
        draft_id = do_create_drafts([dummy_draft], self.user_profile)[0].id
        dummy_draft.content = "Some more sample draft content"
        with self.verify_action() as events:
            do_edit_draft(draft_id, dummy_draft, self.user_profile)
        check_draft_update("events[0]", events[0])

    def test_draft_delete_event(self) -> None:
        """Deleting a draft emits a drafts/remove event."""
        self.do_enable_drafts_synchronization(self.user_profile)
        dummy_draft = self._make_sample_draft()
        draft_id = do_create_drafts([dummy_draft], self.user_profile)[0].id
        with self.verify_action() as events:
            do_delete_draft(draft_id, self.user_profile)
        check_draft_remove("events[0]", events[0])
|
2023-04-20 04:40:41 +02:00
|
|
|
|
|
|
|
|
|
|
|
class ScheduledMessagesEventsTest(BaseAction):
|
|
|
|
def test_stream_scheduled_message_create_event(self) -> None:
    """Scheduling a stream message emits a scheduled_messages/add event."""
    # Create stream scheduled message
    with self.verify_action() as events:
        check_schedule_message(
            self.user_profile,
            get_client("website"),
            "stream",
            [self.get_stream_id("Verona")],
            "Test topic",
            "Stream message",
            convert_to_UTC(dateparser("2023-04-19 18:24:56")),
            self.user_profile.realm,
        )
    check_scheduled_message_add("events[0]", events[0])
|
2023-04-20 04:40:41 +02:00
|
|
|
|
|
|
|
def test_create_event_with_existing_scheduled_messages(self) -> None:
    """A new scheduled message still produces a correct add event when the
    user already has other scheduled messages queued."""
    # Create stream scheduled message
    check_schedule_message(
        self.user_profile,
        get_client("website"),
        "stream",
        [self.get_stream_id("Verona")],
        "Test topic",
        "Stream message 1",
        convert_to_UTC(dateparser("2023-04-19 17:24:56")),
        self.user_profile.realm,
    )

    # Check that the new scheduled message gets appended correctly.
    with self.verify_action() as events:
        check_schedule_message(
            self.user_profile,
            get_client("website"),
            "stream",
            [self.get_stream_id("Verona")],
            "Test topic",
            "Stream message 2",
            convert_to_UTC(dateparser("2023-04-19 18:24:56")),
            self.user_profile.realm,
        )
    check_scheduled_message_add("events[0]", events[0])
|
2023-04-20 04:40:41 +02:00
|
|
|
|
|
|
|
def test_private_scheduled_message_create_event(self) -> None:
    """Scheduling a direct message emits a scheduled_messages/add event."""
    # Create direct scheduled message
    with self.verify_action() as events:
        check_schedule_message(
            self.user_profile,
            get_client("website"),
            "private",
            [self.example_user("hamlet").id],
            # Direct messages have no topic.
            None,
            "Direct message",
            convert_to_UTC(dateparser("2023-04-19 18:24:56")),
            self.user_profile.realm,
        )
    check_scheduled_message_add("events[0]", events[0])
|
2023-04-20 04:40:41 +02:00
|
|
|
|
|
|
|
def test_scheduled_message_edit_event(self) -> None:
    """Editing a scheduled message emits a scheduled_messages/update event."""
    scheduled_message_id = check_schedule_message(
        self.user_profile,
        get_client("website"),
        "stream",
        [self.get_stream_id("Verona")],
        "Test topic",
        "Stream message",
        convert_to_UTC(dateparser("2023-04-19 18:24:56")),
        self.user_profile.realm,
    )
    with self.verify_action() as events:
        edit_scheduled_message(
            self.user_profile,
            get_client("website"),
            scheduled_message_id,
            # Recipient type and recipient ids are left unchanged (None).
            None,
            None,
            "Edited test topic",
            "Edited stream message",
            convert_to_UTC(dateparser("2023-04-20 18:24:56")),
            self.user_profile.realm,
        )
    check_scheduled_message_update("events[0]", events[0])
|
2023-04-20 04:40:41 +02:00
|
|
|
|
|
|
|
def test_scheduled_message_delete_event(self) -> None:
    """Deleting a scheduled message emits a scheduled_messages/remove event."""
    scheduled_message_id = check_schedule_message(
        self.user_profile,
        get_client("website"),
        "stream",
        [self.get_stream_id("Verona")],
        "Test topic",
        "Stream message",
        convert_to_UTC(dateparser("2023-04-19 18:24:56")),
        self.user_profile.realm,
    )
    with self.verify_action() as events:
        delete_scheduled_message(self.user_profile, scheduled_message_id)
    check_scheduled_message_remove("events[0]", events[0])
|