2017-11-16 19:54:24 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
|
2017-02-12 01:59:28 +01:00
|
|
|
# high-level documentation on how this system works.
|
2020-09-25 21:53:00 +02:00
|
|
|
#
|
|
|
|
# This module is closely integrated with zerver/lib/event_schema.py
|
|
|
|
# and zerver/lib/data_types.py systems for validating the schemas of
|
|
|
|
# events; it also uses the OpenAPI tools to validate our documentation.
|
2019-04-09 04:07:03 +02:00
|
|
|
import copy
|
2020-06-11 16:03:47 +02:00
|
|
|
import datetime
|
2020-06-11 00:54:34 +02:00
|
|
|
import time
|
|
|
|
from io import StringIO
|
2020-07-08 14:13:16 +02:00
|
|
|
from typing import Any, Callable, Dict, List, Optional, Set
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2016-06-03 08:00:04 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2023-04-20 04:40:41 +02:00
|
|
|
from dateutil.parser import parse as dateparser
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2022-04-14 23:35:09 +02:00
|
|
|
from zerver.actions.alert_words import do_add_alert_words, do_remove_alert_words
|
2022-04-14 23:55:07 +02:00
|
|
|
from zerver.actions.bots import (
|
|
|
|
do_change_bot_owner,
|
|
|
|
do_change_default_all_public_streams,
|
|
|
|
do_change_default_events_register_stream,
|
|
|
|
do_change_default_sending_stream,
|
|
|
|
)
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.actions.create_user import do_create_user, do_reactivate_user
|
2022-04-14 23:46:56 +02:00
|
|
|
from zerver.actions.custom_profile_fields import (
|
2023-07-31 19:39:57 +02:00
|
|
|
check_remove_custom_profile_field_value,
|
2022-04-14 23:46:56 +02:00
|
|
|
do_remove_realm_custom_profile_field,
|
|
|
|
do_update_user_custom_profile_data_if_changed,
|
|
|
|
try_add_realm_custom_profile_field,
|
|
|
|
try_update_realm_custom_profile_field,
|
|
|
|
)
|
2022-04-14 23:34:23 +02:00
|
|
|
from zerver.actions.default_streams import (
|
|
|
|
do_add_default_stream,
|
|
|
|
do_add_streams_to_default_stream_group,
|
|
|
|
do_change_default_stream_group_description,
|
|
|
|
do_change_default_stream_group_name,
|
|
|
|
do_create_default_stream_group,
|
|
|
|
do_remove_default_stream,
|
|
|
|
do_remove_default_stream_group,
|
|
|
|
do_remove_streams_from_default_stream_group,
|
|
|
|
lookup_default_stream_groups,
|
|
|
|
)
|
2022-04-14 23:33:30 +02:00
|
|
|
from zerver.actions.hotspots import do_mark_hotspot_as_read
|
2022-04-14 23:36:07 +02:00
|
|
|
from zerver.actions.invites import (
|
|
|
|
do_create_multiuse_invite_link,
|
|
|
|
do_invite_users,
|
|
|
|
do_revoke_multi_use_invite,
|
|
|
|
do_revoke_user_invite,
|
|
|
|
)
|
2022-07-17 13:00:21 +02:00
|
|
|
from zerver.actions.message_delete import do_delete_messages
|
|
|
|
from zerver.actions.message_edit import do_update_embedded_data, do_update_message
|
2022-04-14 23:54:53 +02:00
|
|
|
from zerver.actions.message_flags import do_update_message_flags
|
2022-04-14 23:55:22 +02:00
|
|
|
from zerver.actions.muted_users import do_mute_user, do_unmute_user
|
2022-09-16 18:05:17 +02:00
|
|
|
from zerver.actions.presence import do_update_user_presence
|
2022-04-14 23:54:01 +02:00
|
|
|
from zerver.actions.reactions import do_add_reaction, do_remove_reaction
|
2022-04-14 23:57:26 +02:00
|
|
|
from zerver.actions.realm_domains import (
|
|
|
|
do_add_realm_domain,
|
|
|
|
do_change_realm_domain,
|
|
|
|
do_remove_realm_domain,
|
|
|
|
)
|
2022-04-14 23:40:49 +02:00
|
|
|
from zerver.actions.realm_emoji import check_add_realm_emoji, do_remove_realm_emoji
|
2022-04-14 23:39:22 +02:00
|
|
|
from zerver.actions.realm_icon import do_change_icon_source
|
2022-04-14 23:32:56 +02:00
|
|
|
from zerver.actions.realm_linkifiers import (
|
2023-08-10 04:09:25 +02:00
|
|
|
check_reorder_linkifiers,
|
2022-04-14 23:32:56 +02:00
|
|
|
do_add_linkifier,
|
|
|
|
do_remove_linkifier,
|
|
|
|
do_update_linkifier,
|
|
|
|
)
|
2022-04-14 23:37:16 +02:00
|
|
|
from zerver.actions.realm_logo import do_change_logo_source
|
2023-07-24 21:14:42 +02:00
|
|
|
from zerver.actions.realm_playgrounds import check_add_realm_playground, do_remove_realm_playground
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import (
|
2022-04-11 19:26:16 +02:00
|
|
|
do_change_realm_org_type,
|
2023-08-09 15:06:56 +02:00
|
|
|
do_change_realm_permission_group_setting,
|
2022-04-14 23:57:15 +02:00
|
|
|
do_change_realm_plan_type,
|
|
|
|
do_deactivate_realm,
|
|
|
|
do_set_realm_authentication_methods,
|
|
|
|
do_set_realm_notifications_stream,
|
|
|
|
do_set_realm_property,
|
|
|
|
do_set_realm_signup_notifications_stream,
|
|
|
|
do_set_realm_user_default_setting,
|
|
|
|
)
|
2023-04-20 04:40:41 +02:00
|
|
|
from zerver.actions.scheduled_messages import (
|
|
|
|
check_schedule_message,
|
|
|
|
delete_scheduled_message,
|
2023-05-16 21:18:09 +02:00
|
|
|
edit_scheduled_message,
|
2023-04-20 04:40:41 +02:00
|
|
|
)
|
2022-04-14 23:51:16 +02:00
|
|
|
from zerver.actions.streams import (
|
|
|
|
bulk_add_subscriptions,
|
|
|
|
bulk_remove_subscriptions,
|
|
|
|
do_change_stream_description,
|
2023-02-17 12:46:14 +01:00
|
|
|
do_change_stream_group_based_setting,
|
2022-04-14 23:51:16 +02:00
|
|
|
do_change_stream_message_retention_days,
|
|
|
|
do_change_stream_permission,
|
|
|
|
do_change_stream_post_policy,
|
|
|
|
do_change_subscription_property,
|
|
|
|
do_deactivate_stream,
|
|
|
|
do_rename_stream,
|
|
|
|
)
|
2022-04-14 23:31:40 +02:00
|
|
|
from zerver.actions.submessage import do_add_submessage
|
2022-04-14 23:31:02 +02:00
|
|
|
from zerver.actions.typing import check_send_typing_notification, do_send_stream_typing_notification
|
2022-04-14 23:30:17 +02:00
|
|
|
from zerver.actions.user_groups import (
|
2022-03-01 07:52:47 +01:00
|
|
|
add_subgroups_to_user_group,
|
2022-04-14 23:30:17 +02:00
|
|
|
bulk_add_members_to_user_group,
|
|
|
|
check_add_user_group,
|
|
|
|
check_delete_user_group,
|
2023-06-15 05:24:23 +02:00
|
|
|
do_change_user_group_permission_setting,
|
2022-04-14 23:30:17 +02:00
|
|
|
do_update_user_group_description,
|
|
|
|
do_update_user_group_name,
|
|
|
|
remove_members_from_user_group,
|
2022-03-01 07:52:47 +01:00
|
|
|
remove_subgroups_from_user_group,
|
2022-04-14 23:30:17 +02:00
|
|
|
)
|
2022-04-14 23:49:26 +02:00
|
|
|
from zerver.actions.user_settings import (
|
|
|
|
do_change_avatar_fields,
|
|
|
|
do_change_full_name,
|
|
|
|
do_change_user_delivery_email,
|
|
|
|
do_change_user_setting,
|
|
|
|
do_regenerate_api_key,
|
|
|
|
)
|
2022-09-16 18:05:17 +02:00
|
|
|
from zerver.actions.user_status import do_update_user_status
|
2023-02-03 13:21:25 +01:00
|
|
|
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
|
2022-04-14 23:48:28 +02:00
|
|
|
from zerver.actions.users import (
|
|
|
|
do_change_user_role,
|
|
|
|
do_deactivate_user,
|
|
|
|
do_make_user_billing_admin,
|
|
|
|
do_update_outgoing_webhook_service,
|
|
|
|
)
|
2022-04-14 23:29:39 +02:00
|
|
|
from zerver.actions.video_calls import do_set_zoom_token
|
2021-07-24 06:56:56 +02:00
|
|
|
from zerver.lib.drafts import do_create_drafts, do_delete_draft, do_edit_draft
|
2020-07-08 12:53:52 +02:00
|
|
|
from zerver.lib.event_schema import (
|
2020-07-18 17:11:41 +02:00
|
|
|
check_alert_words,
|
2020-08-06 13:08:42 +02:00
|
|
|
check_attachment_add,
|
|
|
|
check_attachment_remove,
|
|
|
|
check_attachment_update,
|
2020-07-18 17:02:28 +02:00
|
|
|
check_custom_profile_fields,
|
2020-08-01 14:33:03 +02:00
|
|
|
check_default_stream_groups,
|
2020-08-01 14:36:13 +02:00
|
|
|
check_default_streams,
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message,
|
2020-08-16 17:26:24 +02:00
|
|
|
check_has_zoom_token,
|
2021-07-02 02:13:55 +02:00
|
|
|
check_heartbeat,
|
2020-08-05 19:56:34 +02:00
|
|
|
check_hotspots,
|
2020-07-18 16:33:03 +02:00
|
|
|
check_invites_changed,
|
2020-07-10 16:10:58 +02:00
|
|
|
check_message,
|
2020-08-06 20:31:12 +02:00
|
|
|
check_muted_topics,
|
2021-03-27 12:23:32 +01:00
|
|
|
check_muted_users,
|
2020-08-13 19:29:07 +02:00
|
|
|
check_presence,
|
2020-08-17 15:11:19 +02:00
|
|
|
check_reaction_add,
|
|
|
|
check_reaction_remove,
|
2020-07-08 17:07:29 +02:00
|
|
|
check_realm_bot_add,
|
2020-07-08 21:06:22 +02:00
|
|
|
check_realm_bot_delete,
|
|
|
|
check_realm_bot_remove,
|
2020-07-08 17:47:56 +02:00
|
|
|
check_realm_bot_update,
|
2021-03-13 20:00:05 +01:00
|
|
|
check_realm_deactivated,
|
2021-07-21 13:40:46 +02:00
|
|
|
check_realm_default_update,
|
2020-08-17 16:07:25 +02:00
|
|
|
check_realm_domains_add,
|
|
|
|
check_realm_domains_change,
|
|
|
|
check_realm_domains_remove,
|
2020-08-18 15:16:02 +02:00
|
|
|
check_realm_emoji_update,
|
2020-08-05 23:54:26 +02:00
|
|
|
check_realm_export,
|
2021-03-30 12:51:54 +02:00
|
|
|
check_realm_linkifiers,
|
2020-10-28 04:00:46 +01:00
|
|
|
check_realm_playgrounds,
|
2020-07-08 12:53:52 +02:00
|
|
|
check_realm_update,
|
2020-08-16 14:52:09 +02:00
|
|
|
check_realm_update_dict,
|
2020-08-14 02:14:06 +02:00
|
|
|
check_realm_user_add,
|
2020-08-18 18:38:41 +02:00
|
|
|
check_realm_user_remove,
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
check_realm_user_update,
|
2020-07-08 12:53:52 +02:00
|
|
|
check_stream_create,
|
2020-08-01 14:42:06 +02:00
|
|
|
check_stream_delete,
|
2020-07-08 13:35:37 +02:00
|
|
|
check_stream_update,
|
2020-07-18 16:27:59 +02:00
|
|
|
check_submessage,
|
2020-07-08 14:13:16 +02:00
|
|
|
check_subscription_add,
|
2020-07-08 15:04:35 +02:00
|
|
|
check_subscription_peer_add,
|
|
|
|
check_subscription_peer_remove,
|
2020-07-08 14:20:25 +02:00
|
|
|
check_subscription_remove,
|
2020-08-17 14:19:09 +02:00
|
|
|
check_subscription_update,
|
2020-07-18 16:39:06 +02:00
|
|
|
check_typing_start,
|
2020-08-27 22:10:07 +02:00
|
|
|
check_typing_stop,
|
2020-07-08 15:29:13 +02:00
|
|
|
check_update_display_settings,
|
2020-07-08 15:29:13 +02:00
|
|
|
check_update_global_notifications,
|
2020-07-10 18:35:58 +02:00
|
|
|
check_update_message,
|
2020-08-18 18:08:39 +02:00
|
|
|
check_update_message_flags_add,
|
|
|
|
check_update_message_flags_remove,
|
2020-07-18 17:19:30 +02:00
|
|
|
check_user_group_add,
|
2020-08-14 13:18:52 +02:00
|
|
|
check_user_group_add_members,
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_add_subgroups,
|
2020-08-14 13:38:36 +02:00
|
|
|
check_user_group_remove,
|
2020-08-14 13:34:34 +02:00
|
|
|
check_user_group_remove_members,
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_remove_subgroups,
|
2020-08-14 13:50:55 +02:00
|
|
|
check_user_group_update,
|
2021-07-26 08:35:27 +02:00
|
|
|
check_user_settings_update,
|
2020-07-18 17:15:23 +02:00
|
|
|
check_user_status,
|
2022-02-25 21:48:56 +01:00
|
|
|
check_user_topic,
|
2020-07-08 12:53:52 +02:00
|
|
|
)
|
2021-04-18 11:28:39 +02:00
|
|
|
from zerver.lib.events import (
|
2022-11-17 09:30:48 +01:00
|
|
|
RestartEventError,
|
2021-04-18 11:28:39 +02:00
|
|
|
apply_events,
|
|
|
|
fetch_initial_state_data,
|
|
|
|
post_process_state,
|
|
|
|
)
|
2021-12-29 13:52:27 +01:00
|
|
|
from zerver.lib.mention import MentionBackend, MentionData
|
2020-06-29 13:19:17 +02:00
|
|
|
from zerver.lib.message import render_markdown
|
2023-02-10 14:33:24 +01:00
|
|
|
from zerver.lib.muted_users import get_mute_object
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
create_dummy_file,
|
|
|
|
get_subscription,
|
|
|
|
get_test_image_file,
|
2023-03-01 07:34:25 +01:00
|
|
|
reset_email_visibility_to_everyone_in_zulip_realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
stdout_suppressed,
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
2023-04-20 04:40:41 +02:00
|
|
|
from zerver.lib.timestamp import convert_to_UTC
|
2020-07-10 18:35:58 +02:00
|
|
|
from zerver.lib.topic import TOPIC_NAME
|
2022-07-08 17:17:46 +02:00
|
|
|
from zerver.lib.types import ProfileDataElementUpdateDict
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Attachment,
|
2021-03-26 09:51:43 +01:00
|
|
|
CustomProfileField,
|
2020-06-11 00:54:34 +02:00
|
|
|
Message,
|
|
|
|
MultiuseInvite,
|
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
RealmDomain,
|
2023-08-11 01:10:21 +02:00
|
|
|
RealmFilter,
|
2020-10-28 04:00:46 +01:00
|
|
|
RealmPlayground,
|
2021-07-21 13:40:46 +02:00
|
|
|
RealmUserDefault,
|
2020-06-11 00:54:34 +02:00
|
|
|
Service,
|
|
|
|
Stream,
|
|
|
|
UserGroup,
|
|
|
|
UserMessage,
|
|
|
|
UserPresence,
|
|
|
|
UserProfile,
|
2021-06-22 18:42:31 +02:00
|
|
|
UserStatus,
|
2023-02-03 12:57:43 +01:00
|
|
|
UserTopic,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_client,
|
|
|
|
get_stream,
|
|
|
|
get_user_by_delivery_email,
|
2014-02-04 20:52:02 +01:00
|
|
|
)
|
2020-07-27 16:22:31 +02:00
|
|
|
from zerver.openapi.openapi import validate_against_openapi_schema
|
2021-07-02 02:13:55 +02:00
|
|
|
from zerver.tornado.django_api import send_event
|
2017-10-12 01:37:44 +02:00
|
|
|
from zerver.tornado.event_queue import (
|
|
|
|
allocate_client_descriptor,
|
|
|
|
clear_client_event_queues_for_testing,
|
2021-07-02 02:13:55 +02:00
|
|
|
create_heartbeat_event,
|
2021-04-18 11:28:39 +02:00
|
|
|
send_restart_events,
|
2017-10-12 01:37:44 +02:00
|
|
|
)
|
2020-10-28 04:00:46 +01:00
|
|
|
from zerver.views.realm_playgrounds import access_playground_by_id
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-05-22 23:02:24 +02:00
|
|
|
|
2020-06-27 17:03:37 +02:00
|
|
|
class BaseAction(ZulipTestCase):
|
2020-09-28 21:35:55 +02:00
|
|
|
"""Core class for verifying the apply_event race handling logic as
|
|
|
|
well as the event formatting logic of any function using send_event.
|
|
|
|
|
|
|
|
See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
|
|
|
|
for extensive design details for this testing system.
|
|
|
|
"""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def setUp(self) -> None:
    """Prepare each test case with a default acting user.

    All BaseAction tests operate from the perspective of a single
    user; we use the standard test fixture user "hamlet" for that.
    """
    super().setUp()
    hamlet = self.example_user("hamlet")
    self.user_profile = hamlet
|
2014-02-26 19:55:29 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def verify_action(
|
|
|
|
self,
|
|
|
|
action: Callable[[], object],
|
|
|
|
*,
|
|
|
|
event_types: Optional[List[str]] = None,
|
|
|
|
include_subscribers: bool = True,
|
|
|
|
state_change_expected: bool = True,
|
|
|
|
notification_settings_null: bool = False,
|
|
|
|
client_gravatar: bool = True,
|
|
|
|
user_avatar_url_field_optional: bool = False,
|
|
|
|
slim_presence: bool = False,
|
|
|
|
include_streams: bool = True,
|
|
|
|
num_events: int = 1,
|
|
|
|
bulk_message_deletion: bool = True,
|
2021-04-18 18:12:35 +02:00
|
|
|
stream_typing_notifications: bool = True,
|
2021-07-24 19:51:25 +02:00
|
|
|
user_settings_object: bool = False,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported: bool = True,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising multiple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template: bool = True,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> List[Dict[str, Any]]:
|
|
|
|
"""
|
2017-10-12 01:37:44 +02:00
|
|
|
Make sure we have a clean slate of client descriptors for these tests.
|
|
|
|
If we don't do this, then certain failures will only manifest when you
|
2018-08-10 22:43:58 +02:00
|
|
|
run multiple tests within a single test function.
|
2019-03-01 18:21:31 +01:00
|
|
|
|
|
|
|
See also https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
|
|
|
|
for details on the design of this test system.
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
2017-10-12 01:37:44 +02:00
|
|
|
clear_client_event_queues_for_testing()
|
|
|
|
|
2014-01-28 18:11:08 +01:00
|
|
|
client = allocate_client_descriptor(
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(
|
|
|
|
user_profile_id=self.user_profile.id,
|
|
|
|
realm_id=self.user_profile.realm_id,
|
|
|
|
event_types=event_types,
|
|
|
|
client_type_name="website",
|
|
|
|
apply_markdown=True,
|
|
|
|
client_gravatar=client_gravatar,
|
|
|
|
slim_presence=slim_presence,
|
|
|
|
all_public_streams=False,
|
|
|
|
queue_timeout=600,
|
|
|
|
last_connection_time=time.time(),
|
|
|
|
narrow=[],
|
|
|
|
bulk_message_deletion=bulk_message_deletion,
|
2021-04-18 18:12:35 +02:00
|
|
|
stream_typing_notifications=stream_typing_notifications,
|
2021-07-24 19:51:25 +02:00
|
|
|
user_settings_object=user_settings_object,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising multiple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-01-24 06:34:26 +01:00
|
|
|
)
|
2020-06-10 13:47:08 +02:00
|
|
|
|
2014-01-31 23:23:39 +01:00
|
|
|
# hybrid_state = initial fetch state + re-applying events triggered by our action
|
|
|
|
# normal_state = do action then fetch at the end (the "normal" code path)
|
2017-11-02 20:55:44 +01:00
|
|
|
hybrid_state = fetch_initial_state_data(
|
2021-01-17 17:58:50 +01:00
|
|
|
self.user_profile,
|
|
|
|
event_types=event_types,
|
2019-11-05 21:17:15 +01:00
|
|
|
client_gravatar=client_gravatar,
|
2020-06-13 10:10:05 +02:00
|
|
|
user_avatar_url_field_optional=user_avatar_url_field_optional,
|
2020-02-02 17:29:05 +01:00
|
|
|
slim_presence=slim_presence,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
include_subscribers=include_subscribers,
|
2020-10-14 13:48:24 +02:00
|
|
|
include_streams=include_streams,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising multiple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2017-11-02 20:55:44 +01:00
|
|
|
)
|
2021-05-09 15:49:19 +02:00
|
|
|
|
|
|
|
# We want even those `send_event` calls which have been hooked to
|
|
|
|
# `transaction.on_commit` to execute in tests.
|
2023-04-05 13:36:01 +02:00
|
|
|
# See the comment in `ZulipTestCase.capture_send_event_calls`.
|
2021-05-09 15:49:19 +02:00
|
|
|
with self.captureOnCommitCallbacks(execute=True):
|
|
|
|
action()
|
|
|
|
|
2014-01-31 23:23:39 +01:00
|
|
|
events = client.event_queue.contents()
|
2020-07-27 16:22:31 +02:00
|
|
|
content = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"queue_id": "123.12",
|
2020-08-07 01:09:47 +02:00
|
|
|
# The JSON wrapper helps in converting tuples to lists
|
2020-08-01 01:25:34 +02:00
|
|
|
# as tuples aren't valid JSON structure.
|
2021-02-12 08:20:45 +01:00
|
|
|
"events": orjson.loads(orjson.dumps(events)),
|
|
|
|
"msg": "",
|
|
|
|
"result": "success",
|
2020-07-27 16:22:31 +02:00
|
|
|
}
|
2021-02-12 08:20:45 +01:00
|
|
|
validate_against_openapi_schema(content, "/events", "get", "200", display_brief_error=True)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(events, num_events)
|
2019-04-09 04:07:03 +02:00
|
|
|
initial_state = copy.deepcopy(hybrid_state)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, initial_state, notification_settings_null)
|
2020-08-07 01:09:47 +02:00
|
|
|
before = orjson.dumps(initial_state)
|
2021-01-19 15:52:45 +01:00
|
|
|
apply_events(
|
|
|
|
self.user_profile,
|
|
|
|
state=hybrid_state,
|
|
|
|
events=events,
|
|
|
|
fetch_event_types=None,
|
|
|
|
client_gravatar=client_gravatar,
|
|
|
|
slim_presence=slim_presence,
|
|
|
|
include_subscribers=include_subscribers,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2021-01-19 15:52:45 +01:00
|
|
|
)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, hybrid_state, notification_settings_null)
|
2020-08-07 01:09:47 +02:00
|
|
|
after = orjson.dumps(hybrid_state)
|
2017-02-21 19:35:17 +01:00
|
|
|
|
|
|
|
if state_change_expected:
|
2019-05-09 02:38:29 +02:00
|
|
|
if before == after: # nocoverage
|
2020-08-07 01:09:47 +02:00
|
|
|
print(orjson.dumps(initial_state, option=orjson.OPT_INDENT_2).decode())
|
2019-05-09 02:38:29 +02:00
|
|
|
print(events)
|
2021-02-12 08:19:30 +01:00
|
|
|
raise AssertionError(
|
2021-02-12 08:20:45 +01:00
|
|
|
"Test does not exercise enough code -- events do not change state."
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-02-21 19:35:17 +01:00
|
|
|
else:
|
2019-04-09 04:07:03 +02:00
|
|
|
try:
|
|
|
|
self.match_states(initial_state, copy.deepcopy(hybrid_state), events)
|
|
|
|
except AssertionError: # nocoverage
|
2021-02-12 08:20:45 +01:00
|
|
|
raise AssertionError("Test is invalid--state actually does change here.")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-02 20:55:44 +01:00
|
|
|
normal_state = fetch_initial_state_data(
|
2021-01-17 17:58:50 +01:00
|
|
|
self.user_profile,
|
|
|
|
event_types=event_types,
|
2019-11-05 21:17:15 +01:00
|
|
|
client_gravatar=client_gravatar,
|
2020-06-13 10:10:05 +02:00
|
|
|
user_avatar_url_field_optional=user_avatar_url_field_optional,
|
2020-02-02 17:29:05 +01:00
|
|
|
slim_presence=slim_presence,
|
2019-04-09 04:07:03 +02:00
|
|
|
include_subscribers=include_subscribers,
|
2020-10-14 13:48:24 +02:00
|
|
|
include_streams=include_streams,
|
2022-10-27 19:05:10 +02:00
|
|
|
pronouns_field_type_supported=pronouns_field_type_supported,
|
linkifier: Support URL templates for linkifiers.
This swaps out url_format_string from all of our APIs and replaces it
with url_template. Note that the documentation changes in the following
commits will be squashed with this commit.
We change the "url_format" key to "url_template" for the
realm_linkifiers events in event_schema, along with updating
LinkifierDict. "url_template" is the name chosen to normalize
mixed usages of "url_format_string" and "url_format" throughout
the backend.
The markdown processor is updated to stop handling the format string
interpolation and delegate the task template expansion to the uri_template
library instead.
This change affects many test cases. We mostly just replace "%(name)s"
with "{name}", "url_format_string" with "url_template" to make sure that
they still pass. There are some test cases dedicated for testing "%"
escaping, which aren't relevant anymore and are subject to removal.
But for now we keep most of them as-is, and make sure that "%" is always
escaped since we do not use it for variable substitution any more.
Since url_format_string is not populated anymore, a migration is created
to remove this field entirely, and make url_template non-nullable since
we will always populate it. Note that it is possible to have
url_template being null after migration 0422 and before 0424, but
in practice, url_template will not be None after backfilling and the
backend now is always setting url_template.
With the removal of url_format_string, RealmFilter model will now be cleaned
with URL template checks, and the old checks for escapes are removed.
We also modified RealmFilter.clean to skip the validation when the
url_template is invalid. This avoids raising mulitple ValidationError's
when calling full_clean on a linkifier. But we might eventually want to
have a more centric approach to data validation instead of having
the same validation in both the clean method and the validator.
Fixes #23124.
Signed-off-by: Zixuan James Li <p359101898@gmail.com>
2022-10-05 20:55:31 +02:00
|
|
|
linkifier_url_template=linkifier_url_template,
|
2017-11-02 20:55:44 +01:00
|
|
|
)
|
2019-02-13 10:22:16 +01:00
|
|
|
post_process_state(self.user_profile, normal_state, notification_settings_null)
|
2017-10-06 21:24:56 +02:00
|
|
|
self.match_states(hybrid_state, normal_state, events)
|
2014-02-04 20:52:02 +01:00
|
|
|
return events
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def match_states(
        self, state1: Dict[str, Any], state2: Dict[str, Any], events: List[Dict[str, Any]]
    ) -> None:
        """Assert that two /register-style state dicts are equal after
        normalization.

        On mismatch, print the applied events and a per-key diff (to make
        debugging tractable for developers) and then fail.
        """

        def normalize(state: Dict[str, Any]) -> None:
            # Sort subscriber lists and re-key list-valued sections by a
            # stable identifier so that ordering differences between the
            # fetched state and the event-applied state don't cause
            # false mismatches.
            if "never_subscribed" in state:
                for u in state["never_subscribed"]:
                    if "subscribers" in u:
                        u["subscribers"].sort()
            if "subscriptions" in state:
                for u in state["subscriptions"]:
                    if "subscribers" in u:
                        u["subscribers"].sort()
                state["subscriptions"] = {u["name"]: u for u in state["subscriptions"]}
            if "unsubscribed" in state:
                state["unsubscribed"] = {u["name"]: u for u in state["unsubscribed"]}
            if "realm_bots" in state:
                state["realm_bots"] = {u["email"]: u for u in state["realm_bots"]}
            # Since time is different for every call, just fix the value
            state["server_timestamp"] = 0

        normalize(state1)
        normalize(state2)

        # If this assertion fails, we have unusual problems.
        self.assertEqual(state1.keys(), state2.keys())

        # The far more likely scenario is that some section of
        # our enormous payload does not get updated properly.  We
        # want the diff here to be developer-friendly, hence
        # the somewhat tedious code to provide useful output.
        if state1 != state2:  # nocoverage
            print("\n---States DO NOT MATCH---")
            print("\nEVENTS:\n")

            # Printing out the events is a big help to
            # developers.
            import json

            for event in events:
                print(json.dumps(event, indent=4))

            print("\nMISMATCHES:\n")
            for k in state1:
                if state1[k] != state2[k]:
                    print("\nkey = " + k)
                    try:
                        # assertEqual produces a readable diff for the
                        # single mismatching key.
                        self.assertEqual({k: state1[k]}, {k: state2[k]})
                    except AssertionError as e:
                        print(e)
            print(
                """
NOTE:

    This is an advanced test that verifies how
    we apply events after fetching data.  If you
    do not know how to debug it, you can ask for
    help on chat.
""",
                flush=True,
            )

            raise AssertionError("Mismatching states")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-27 17:03:37 +02:00
|
|
|
class NormalActionsTest(BaseAction):
|
2020-07-05 03:34:30 +02:00
|
|
|
def create_bot(self, email: str, **extras: Any) -> UserProfile:
|
2020-06-27 17:03:37 +02:00
|
|
|
return self.create_test_bot(email, self.user_profile, **extras)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_mentioned_send_message_events(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
2017-07-21 20:31:25 +02:00
|
|
|
|
2017-08-10 10:58:39 +02:00
|
|
|
for i in range(3):
|
2021-02-12 08:20:45 +01:00
|
|
|
content = "mentioning... @**" + user.full_name + "** hello " + str(i)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
lambda: self.send_stream_message(self.example_user("cordelia"), "Verona", content),
|
2023-06-07 19:19:33 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
def test_topic_wildcard_mentioned_send_message_events(self) -> None:
|
|
|
|
for i in range(3):
|
|
|
|
content = "mentioning... @**topic** hello " + str(i)
|
|
|
|
self.verify_action(
|
|
|
|
lambda: self.send_stream_message(self.example_user("cordelia"), "Verona", content),
|
2017-08-10 10:58:39 +02:00
|
|
|
)
|
2017-07-21 20:31:25 +02:00
|
|
|
|
2023-06-03 16:51:38 +02:00
|
|
|
def test_stream_wildcard_mentioned_send_message_events(self) -> None:
|
2019-08-26 05:11:18 +02:00
|
|
|
for i in range(3):
|
2021-02-12 08:20:45 +01:00
|
|
|
content = "mentioning... @**all** hello " + str(i)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
lambda: self.send_stream_message(self.example_user("cordelia"), "Verona", content),
|
2019-08-26 05:11:18 +02:00
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_pm_send_message_events(self) -> None:
        """Verify the events generated by sending, then editing, a direct
        message (content-only edit)."""
        self.verify_action(
            lambda: self.send_personal_message(
                self.example_user("cordelia"), self.example_user("hamlet"), "hola"
            ),
        )

        # Verify direct message editing - content only edit
        pm = Message.objects.order_by("-id")[0]
        content = "new content"
        rendering_result = render_markdown(pm, content)
        prior_mention_user_ids: Set[int] = set()
        mention_backend = MentionBackend(self.user_profile.realm_id)
        mention_data = MentionData(
            mention_backend=mention_backend,
            content=content,
        )

        # NOTE(review): the positional-argument comments below are inferred
        # from parallel do_update_message call sites in this file -- confirm
        # against the actual signature.
        events = self.verify_action(
            lambda: do_update_message(
                self.user_profile,
                pm,
                None,  # presumably: new stream (unchanged)
                None,  # presumably: new topic (unchanged)
                None,  # presumably: propagate_mode (not applicable)
                False,
                False,
                content,
                rendering_result,
                prior_mention_user_ids,
                mention_data,
            ),
            # A content-only edit should not change /register state.
            state_change_expected=False,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=False,
            has_content=True,
            has_topic=False,
            has_new_stream_id=False,
            is_embedded_update_only=False,
        )
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_huddle_send_message_events(self) -> None:
|
2017-05-23 03:02:01 +02:00
|
|
|
huddle = [
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("hamlet"),
|
|
|
|
self.example_user("othello"),
|
2017-05-23 03:02:01 +02:00
|
|
|
]
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
lambda: self.send_huddle_message(self.example_user("cordelia"), huddle, "hola"),
|
2017-05-23 03:02:01 +02:00
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_stream_send_message_events(self) -> None:
        """Verify the events generated by sending a stream message and by
        the various flavors of editing it: content-only, topic-only,
        embedded-data update, topic moved to a different stream, and a
        combined stream+topic move.
        """
        # With client_gravatar=False, the message event includes a
        # computed avatar URL string.
        events = self.verify_action(
            lambda: self.send_stream_message(self.example_user("hamlet"), "Verona", "hello"),
            client_gravatar=False,
        )
        check_message("events[0]", events[0])
        assert isinstance(events[0]["message"]["avatar_url"], str)

        do_change_user_setting(
            self.example_user("hamlet"),
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )

        # With client_gravatar=True (and a visible email), the avatar URL
        # is omitted (None) for clients that compute gravatars themselves.
        events = self.verify_action(
            lambda: self.send_stream_message(self.example_user("hamlet"), "Verona", "hello"),
            client_gravatar=True,
        )
        check_message("events[0]", events[0])
        assert events[0]["message"]["avatar_url"] is None

        # Verify stream message editing - content only
        message = Message.objects.order_by("-id")[0]
        content = "new content"
        rendering_result = render_markdown(message, content)
        prior_mention_user_ids: Set[int] = set()
        mention_backend = MentionBackend(self.user_profile.realm_id)
        mention_data = MentionData(
            mention_backend=mention_backend,
            content=content,
        )

        # NOTE(review): positional-argument comments on do_update_message
        # below are inferred from parallel call sites -- confirm against
        # the actual signature.
        events = self.verify_action(
            lambda: do_update_message(
                self.user_profile,
                message,
                None,  # presumably: new stream (unchanged)
                None,  # presumably: new topic (unchanged)
                None,  # presumably: propagate_mode (not applicable)
                False,
                False,
                content,
                rendering_result,
                prior_mention_user_ids,
                mention_data,
            ),
            state_change_expected=False,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=True,
            has_content=True,
            has_topic=False,
            has_new_stream_id=False,
            is_embedded_update_only=False,
        )

        # Verify stream message editing - topic only
        topic = "new_topic"
        propagate_mode = "change_all"

        events = self.verify_action(
            lambda: do_update_message(
                self.user_profile,
                message,
                None,  # presumably: new stream (unchanged)
                topic,
                propagate_mode,
                False,
                False,
                None,  # no content change
                None,  # no rendering result
                prior_mention_user_ids,
                mention_data,
            ),
            # Topic edits affect /register state (e.g. unread data).
            state_change_expected=True,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=True,
            has_content=False,
            has_topic=True,
            has_new_stream_id=False,
            is_embedded_update_only=False,
        )

        # Verify special case of embedded content update
        content = "embed_content"
        rendering_result = render_markdown(message, content)
        events = self.verify_action(
            lambda: do_update_embedded_data(self.user_profile, message, content, rendering_result),
            state_change_expected=False,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=False,
            has_content=False,
            has_topic=False,
            has_new_stream_id=False,
            is_embedded_update_only=True,
        )

        # Verify move topic to different stream.
        self.subscribe(self.user_profile, "Verona")
        self.subscribe(self.user_profile, "Denmark")
        self.send_stream_message(self.user_profile, "Verona")
        message_id = self.send_stream_message(self.user_profile, "Verona")
        message = Message.objects.get(id=message_id)
        stream = get_stream("Denmark", self.user_profile.realm)
        propagate_mode = "change_all"
        prior_mention_user_ids = set()

        events = self.verify_action(
            lambda: do_update_message(
                self.user_profile,
                message,
                stream,
                None,  # topic unchanged; only the stream moves
                propagate_mode,
                True,
                True,
                None,  # no content change
                None,  # no rendering result
                set(),
                None,  # no mention data needed
            ),
            state_change_expected=True,
            # There are 3 events generated for this action
            # * update_message: For updating existing messages
            # * 2 new message events: Breadcrumb messages in the new and old topics.
            num_events=3,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=True,
            has_content=False,
            has_topic=False,
            has_new_stream_id=True,
            is_embedded_update_only=False,
        )

        # Move both stream and topic, with update_message_flags
        # excluded from event types.
        self.send_stream_message(self.user_profile, "Verona")
        message_id = self.send_stream_message(self.user_profile, "Verona")
        message = Message.objects.get(id=message_id)
        stream = get_stream("Denmark", self.user_profile.realm)
        propagate_mode = "change_all"
        prior_mention_user_ids = set()

        events = self.verify_action(
            lambda: do_update_message(
                self.user_profile,
                message,
                stream,
                "final_topic",
                propagate_mode,
                True,
                True,
                None,  # no content change
                None,  # no rendering result
                set(),
                None,  # no mention data needed
            ),
            state_change_expected=True,
            # Skip "update_message_flags" to exercise the code path
            # where raw_unread_msgs does not exist in the state.
            event_types=["message", "update_message"],
            # There are 3 events generated for this action
            # * update_message: For updating existing messages
            # * 2 new message events: Breadcrumb messages in the new and old topics.
            num_events=3,
        )
        check_update_message(
            "events[0]",
            events[0],
            is_stream_message=True,
            has_content=False,
            has_topic=True,
            has_new_stream_id=True,
            is_embedded_update_only=False,
        )
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_update_message_flags(self) -> None:
|
2017-03-24 03:19:23 +01:00
|
|
|
# Test message flag update events
|
2017-10-28 16:40:28 +02:00
|
|
|
message = self.send_personal_message(
|
2020-03-07 11:43:05 +01:00
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.example_user("hamlet"),
|
2017-10-28 16:40:28 +02:00
|
|
|
"hello",
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2022-03-10 14:30:45 +01:00
|
|
|
lambda: do_update_message_flags(user_profile, "add", "starred", [message]),
|
2018-08-14 23:57:20 +02:00
|
|
|
state_change_expected=True,
|
2017-03-24 03:19:23 +01:00
|
|
|
)
|
2020-08-18 18:08:39 +02:00
|
|
|
check_update_message_flags_add("events[0]", events[0])
|
2020-07-17 09:13:10 +02:00
|
|
|
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2022-03-10 14:30:45 +01:00
|
|
|
lambda: do_update_message_flags(user_profile, "remove", "starred", [message]),
|
2018-08-14 23:57:20 +02:00
|
|
|
state_change_expected=True,
|
2017-03-24 03:19:23 +01:00
|
|
|
)
|
2020-08-18 18:08:39 +02:00
|
|
|
check_update_message_flags_remove("events[0]", events[0])
|
2017-03-24 03:19:23 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_update_read_flag_removes_unread_msg_ids(self) -> None:
    """Verify read/unread flag events across all message recipient types.

    For stream, one-on-one, and huddle (group) messages — both with and
    without a mention of the user — marking a message read and then
    unread should each change the unread-message state and emit the
    expected update_message_flags events.
    """
    user_profile = self.example_user("hamlet")
    mention = "@**" + user_profile.full_name + "**"

    # Run the whole scenario twice: once with plain content and once
    # with a mention, since mentions are tracked separately in the
    # unread-messages data structures.
    for content in ["hello", mention]:
        message = self.send_stream_message(
            self.example_user("cordelia"),
            "Verona",
            content,
        )

        self.verify_action(
            lambda: do_update_message_flags(user_profile, "add", "read", [message]),
            state_change_expected=True,
        )

        events = self.verify_action(
            lambda: do_update_message_flags(user_profile, "remove", "read", [message]),
            state_change_expected=True,
        )
        check_update_message_flags_remove("events[0]", events[0])

        personal_message = self.send_personal_message(
            from_user=user_profile, to_user=self.example_user("cordelia"), content=content
        )
        self.verify_action(
            lambda: do_update_message_flags(user_profile, "add", "read", [personal_message]),
            state_change_expected=True,
        )

        events = self.verify_action(
            lambda: do_update_message_flags(user_profile, "remove", "read", [personal_message]),
            state_change_expected=True,
        )
        check_update_message_flags_remove("events[0]", events[0])

        huddle_message = self.send_huddle_message(
            from_user=self.example_user("cordelia"),
            to_users=[user_profile, self.example_user("othello")],
            content=content,
        )

        self.verify_action(
            lambda: do_update_message_flags(user_profile, "add", "read", [huddle_message]),
            state_change_expected=True,
        )

        events = self.verify_action(
            lambda: do_update_message_flags(user_profile, "remove", "read", [huddle_message]),
            state_change_expected=True,
        )
        check_update_message_flags_remove("events[0]", events[0])
2017-11-05 10:51:25 +01:00
|
|
|
def test_send_message_to_existing_recipient(self) -> None:
    """Verify sending a second message to an already-used recipient.

    The first message creates the recipient-related state; the second
    send exercises the code path where the recipient already exists,
    and should still register as a state change (new message).
    """
    sender = self.example_user("cordelia")
    self.send_stream_message(
        sender,
        "Verona",
        "hello 1",
    )
    self.verify_action(
        lambda: self.send_stream_message(sender, "Verona", "hello 2"),
        state_change_expected=True,
    )
2017-11-05 10:51:25 +01:00
|
|
|
def test_add_reaction(self) -> None:
    """Verify the reaction/add event schema when a reaction is added.

    Reactions are not part of the /register state payload, so no state
    change is expected — only the event itself.
    """
    message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
    message = Message.objects.get(id=message_id)
    events = self.verify_action(
        lambda: do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji"),
        state_change_expected=False,
    )
    check_reaction_add("events[0]", events[0])
2021-07-02 02:13:55 +02:00
|
|
|
def test_heartbeat_event(self) -> None:
    """Verify the heartbeat event schema.

    Heartbeats carry no state, so we send one directly through
    send_event and only check the event's shape.
    """
    events = self.verify_action(
        lambda: send_event(
            self.user_profile.realm,
            create_heartbeat_event(),
            [self.user_profile.id],
        ),
        state_change_expected=False,
    )
    check_heartbeat("events[0]", events[0])
2018-02-12 10:53:36 +01:00
|
|
|
def test_add_submessage(self) -> None:
    """Verify the submessage event schema.

    Submessages (used by widgets like polls) do not affect /register
    state, so only the event shape is validated.
    """
    cordelia = self.example_user("cordelia")
    stream_name = "Verona"
    message_id = self.send_stream_message(
        sender=cordelia,
        stream_name=stream_name,
    )
    events = self.verify_action(
        lambda: do_add_submessage(
            realm=cordelia.realm,
            sender_id=cordelia.id,
            message_id=message_id,
            msg_type="whatever",
            # Submessage content is itself JSON-encoded, hence the
            # quoted string literal.
            content='"stuff"',
        ),
        state_change_expected=False,
    )
    check_submessage("events[0]", events[0])
2017-11-05 10:51:25 +01:00
|
|
|
def test_remove_reaction(self) -> None:
    """Verify the reaction/remove event schema when a reaction is removed.

    A reaction is first added outside verify_action so that only the
    removal's events are captured; reactions are not /register state,
    so no state change is expected.
    """
    message_id = self.send_stream_message(self.example_user("hamlet"), "Verona", "hello")
    message = Message.objects.get(id=message_id)
    do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
    events = self.verify_action(
        lambda: do_remove_reaction(self.user_profile, message, "1f389", "unicode_emoji"),
        state_change_expected=False,
    )
    check_reaction_remove("events[0]", events[0])
2017-12-14 22:22:17 +01:00
|
|
|
def test_invite_user_event(self) -> None:
    """Verify the invites_changed event sent when a user is invited.

    Uses an admin (Iago) as the acting user, since only users with
    permission to see invitations receive invites_changed events.
    """
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Scotland"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    events = self.verify_action(
        lambda: do_invite_users(
            self.user_profile,
            ["foo@zulip.com"],
            streams,
            invite_expires_in_minutes=invite_expires_in_minutes,
        ),
        state_change_expected=False,
    )
    check_invites_changed("events[0]", events[0])
2019-02-15 19:09:25 +01:00
|
|
|
def test_create_multiuse_invite_event(self) -> None:
    """Verify the invites_changed event for creating a multiuse invite link.

    Acting as an admin (Iago) so the invites_changed event is delivered.
    """
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    events = self.verify_action(
        lambda: do_create_multiuse_invite_link(
            self.user_profile,
            PreregistrationUser.INVITE_AS["MEMBER"],
            invite_expires_in_minutes,
            streams,
        ),
        state_change_expected=False,
    )
    check_invites_changed("events[0]", events[0])
2022-01-14 22:54:49 +01:00
|
|
|
def test_deactivate_user_invites_changed_event(self) -> None:
    """Verify that deactivating a user who has outstanding invitations
    produces an invites_changed event (in addition to the deactivation
    event itself, hence num_events=2).
    """
    self.user_profile = self.example_user("iago")
    user_profile = self.example_user("cordelia")
    invite_expires_in_minutes = 2 * 24 * 60
    # Cordelia issues an invitation, so that her deactivation revokes
    # it and triggers invites_changed.
    do_invite_users(
        user_profile,
        ["foo@zulip.com"],
        [],
        invite_expires_in_minutes=invite_expires_in_minutes,
    )

    events = self.verify_action(
        lambda: do_deactivate_user(user_profile, acting_user=None), num_events=2
    )
    check_invites_changed("events[0]", events[0])
2017-12-14 22:22:17 +01:00
|
|
|
def test_revoke_user_invite_event(self) -> None:
    """Verify the invites_changed event when a single-use invite is revoked."""
    # We need to set self.user_profile to be an admin, so that
    # we receive the invites_changed event.
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    do_invite_users(
        self.user_profile,
        ["foo@zulip.com"],
        streams,
        invite_expires_in_minutes=invite_expires_in_minutes,
    )
    prereg_users = PreregistrationUser.objects.filter(
        referred_by__realm=self.user_profile.realm
    )
    events = self.verify_action(
        lambda: do_revoke_user_invite(prereg_users[0]),
        state_change_expected=False,
    )
    check_invites_changed("events[0]", events[0])
|
|
def test_revoke_multiuse_invite_event(self) -> None:
    """Verify the invites_changed event when a multiuse invite link is revoked."""
    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Verona"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    do_create_multiuse_invite_link(
        self.user_profile,
        PreregistrationUser.INVITE_AS["MEMBER"],
        invite_expires_in_minutes,
        streams,
    )

    # Exactly one multiuse invite exists at this point, so .get() with
    # no filter fetches it.
    multiuse_object = MultiuseInvite.objects.get()
    events = self.verify_action(
        lambda: do_revoke_multi_use_invite(multiuse_object),
        state_change_expected=False,
    )
    check_invites_changed("events[0]", events[0])
|
|
def test_invitation_accept_invite_event(self) -> None:
    """Verify the events sent when an invited user accepts their invitation.

    Creating the user from the PreregistrationUser produces a batch of
    events (num_events=7); the second one must be invites_changed,
    reflecting the consumed invitation.
    """
    reset_email_visibility_to_everyone_in_zulip_realm()

    self.user_profile = self.example_user("iago")
    streams = [
        get_stream(stream_name, self.user_profile.realm)
        for stream_name in ["Denmark", "Scotland"]
    ]

    invite_expires_in_minutes = 2 * 24 * 60
    do_invite_users(
        self.user_profile,
        ["foo@zulip.com"],
        streams,
        invite_expires_in_minutes=invite_expires_in_minutes,
    )
    prereg_user = PreregistrationUser.objects.get(email="foo@zulip.com")

    events = self.verify_action(
        lambda: do_create_user(
            "foo@zulip.com",
            "password",
            self.user_profile.realm,
            "full name",
            prereg_user=prereg_user,
            acting_user=None,
        ),
        state_change_expected=True,
        num_events=7,
    )

    check_invites_changed("events[1]", events[1])
2017-11-05 10:51:25 +01:00
|
|
|
def test_typing_events(self) -> None:
    """Verify typing start/stop events for direct-message typing notifications.

    Typing notifications are ephemeral and never part of /register
    state, so state_change_expected is False for both operations.
    """
    events = self.verify_action(
        lambda: check_send_typing_notification(
            self.user_profile, [self.example_user("cordelia").id], "start"
        ),
        state_change_expected=False,
    )
    check_typing_start("events[0]", events[0])

    events = self.verify_action(
        lambda: check_send_typing_notification(
            self.user_profile, [self.example_user("cordelia").id], "stop"
        ),
        state_change_expected=False,
    )
    check_typing_stop("events[0]", events[0])
2020-12-24 21:00:20 +01:00
|
|
|
def test_stream_typing_events(self) -> None:
    """Verify stream typing notifications and the related client capability.

    Clients that register with stream_typing_notifications=False must
    receive no events at all for stream typing start/stop.
    """
    stream = get_stream("Denmark", self.user_profile.realm)
    topic = "streams typing"

    events = self.verify_action(
        lambda: do_send_stream_typing_notification(
            self.user_profile,
            "start",
            stream,
            topic,
        ),
        state_change_expected=False,
    )
    check_typing_start("events[0]", events[0])

    events = self.verify_action(
        lambda: do_send_stream_typing_notification(
            self.user_profile,
            "stop",
            stream,
            topic,
        ),
        state_change_expected=False,
    )
    check_typing_stop("events[0]", events[0])

    # Having client_capability `stream_typing_notification=False`
    # shouldn't produce any events.
    events = self.verify_action(
        lambda: do_send_stream_typing_notification(
            self.user_profile,
            "start",
            stream,
            topic,
        ),
        state_change_expected=False,
        stream_typing_notifications=False,
        num_events=0,
    )
    self.assertEqual(events, [])

    events = self.verify_action(
        lambda: do_send_stream_typing_notification(
            self.user_profile,
            "stop",
            stream,
            topic,
        ),
        state_change_expected=False,
        stream_typing_notifications=False,
        num_events=0,
    )
    self.assertEqual(events, [])
2017-11-05 10:51:25 +01:00
|
|
|
def test_custom_profile_fields_events(self) -> None:
    """Verify custom_profile_fields events for add, update, and remove.

    Each realm-level custom profile field mutation should broadcast a
    custom_profile_fields event carrying the full field list.
    """
    realm = self.user_profile.realm

    events = self.verify_action(
        lambda: try_add_realm_custom_profile_field(
            realm=realm, name="Expertise", field_type=CustomProfileField.LONG_TEXT
        )
    )
    check_custom_profile_fields("events[0]", events[0])

    field = realm.customprofilefield_set.get(realm=realm, name="Biography")
    name = field.name
    hint = "Biography of the user"
    display_in_profile_summary = False

    events = self.verify_action(
        lambda: try_update_realm_custom_profile_field(
            realm, field, name, hint=hint, display_in_profile_summary=display_in_profile_summary
        )
    )
    check_custom_profile_fields("events[0]", events[0])

    events = self.verify_action(lambda: do_remove_realm_custom_profile_field(realm, field))
    check_custom_profile_fields("events[0]", events[0])
2022-10-27 19:05:10 +02:00
|
|
|
def test_pronouns_type_support_in_custom_profile_fields_events(self) -> None:
    """Verify PRONOUNS field type degradation for older clients.

    Clients that declare pronouns_field_type_supported=True receive the
    field with type PRONOUNS; clients that do not should see it
    downgraded to SHORT_TEXT in the event payload.
    """
    realm = self.user_profile.realm
    field = CustomProfileField.objects.get(realm=realm, name="Pronouns")
    name = field.name
    hint = "What pronouns should people use for you?"

    events = self.verify_action(
        lambda: try_update_realm_custom_profile_field(realm, field, name, hint=hint),
        pronouns_field_type_supported=True,
    )
    check_custom_profile_fields("events[0]", events[0])
    [pronouns_field] = (
        field_obj for field_obj in events[0]["fields"] if field_obj["id"] == field.id
    )
    self.assertEqual(pronouns_field["type"], CustomProfileField.PRONOUNS)

    hint = "What pronouns should people use to refer you?"
    events = self.verify_action(
        lambda: try_update_realm_custom_profile_field(realm, field, name, hint=hint),
        pronouns_field_type_supported=False,
    )
    check_custom_profile_fields("events[0]", events[0])
    [pronouns_field] = (
        field_obj for field_obj in events[0]["fields"] if field_obj["id"] == field.id
    )
    self.assertEqual(pronouns_field["type"], CustomProfileField.SHORT_TEXT)
2018-07-09 11:49:08 +02:00
|
|
|
def test_custom_profile_field_data_events(self) -> None:
|
2019-03-07 21:29:16 +01:00
|
|
|
field_id = self.user_profile.realm.customprofilefield_set.get(
|
2021-02-12 08:20:45 +01:00
|
|
|
realm=self.user_profile.realm, name="Biography"
|
2021-02-12 08:19:30 +01:00
|
|
|
).id
|
2022-07-08 17:17:46 +02:00
|
|
|
field: ProfileDataElementUpdateDict = {
|
2018-07-09 11:49:08 +02:00
|
|
|
"id": field_id,
|
|
|
|
"value": "New value",
|
|
|
|
}
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:19:30 +01:00
|
|
|
lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field])
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
events[0]["person"]["custom_profile_field"].keys(), {"id", "value", "rendered_value"}
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
)
|
2018-07-09 11:49:08 +02:00
|
|
|
|
2018-08-09 14:02:32 +02:00
|
|
|
# Test we pass correct stringify value in custom-user-field data event
|
2019-03-07 21:29:16 +01:00
|
|
|
field_id = self.user_profile.realm.customprofilefield_set.get(
|
2021-02-12 08:20:45 +01:00
|
|
|
realm=self.user_profile.realm, name="Mentor"
|
2021-02-12 08:19:30 +01:00
|
|
|
).id
|
2018-08-09 14:02:32 +02:00
|
|
|
field = {
|
|
|
|
"id": field_id,
|
|
|
|
"value": [self.example_user("ZOE").id],
|
|
|
|
}
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:19:30 +01:00
|
|
|
lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field])
|
event_schema: Extract check_realm_user_update.
This a pretty big commit, but I really wanted it
to be atomic.
All realm_user/update events look the same from
the top:
_check_realm_user_update = check_events_dict(
required_keys=[
("type", equals("realm_user")),
("op", equals("update")),
("person", _check_realm_user_person),
]
)
And then we have a bunch of fields for person that
are optional, and we usually only send user_id plus
one other field, with the exception of avatar-related
events:
_check_realm_user_person = check_dict_only(
required_keys=[
# vertical formatting
("user_id", check_int),
],
optional_keys=[
("avatar_source", check_string),
("avatar_url", check_none_or(check_string)),
("avatar_url_medium", check_none_or(check_string)),
("avatar_version", check_int),
("bot_owner_id", check_int),
("custom_profile_field", _check_custom_profile_field),
("delivery_email", check_string),
("full_name", check_string),
("role", check_int_in(UserProfile.ROLE_TYPES)),
("email", check_string),
("user_id", check_int),
("timezone", check_string),
],
)
I would start the code review by just skimming the changes
to event_schema.py, to get the big picture of the complexity
here. Basically the schema is just the combined superset of
all the individual schemas that we remove from test_events.
Then I would read test_events.py.
The simplest diffs are basically of this form:
- schema_checker = check_events_dict([
- ('type', equals('realm_user')),
- ('op', equals('update')),
- ('person', check_dict_only([
- ('role', check_int_in(UserProfile.ROLE_TYPES)),
- ('user_id', check_int),
- ])),
- ])
# ...
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
Instead of a custom schema checker, we use the "superset"
schema checker, but then we pass in the set of fields that we
expect to be there. Note that 'user_id' is always there.
So most of the heavy lifting happens in this new function
in event_schema.py:
def check_realm_user_update(
var_name: str, event: Dict[str, Any], optional_fields: Set[str],
) -> None:
_check_realm_user_update(var_name, event)
keys = set(event["person"].keys()) - {"user_id"}
assert optional_fields == keys
But we still do some more custom checks in test_events.py.
custom profile fields: check keys of custom_profile_field
def test_custom_profile_field_data_events(self) -> None:
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value", "rendered_value"}
+ )
+ check_realm_user_update('events[0]', events[0], {"custom_profile_field"})
+ self.assertEqual(
+ events[0]['person']['custom_profile_field'].keys(),
+ {"id", "value"}
+ )
avatar fields: check more specific types, since the superset
schema has check_none_or(check_string)
def test_change_avatar_fields(self) -> None:
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ assert isinstance(events[0]['person']['avatar_url'], str)
+ assert isinstance(events[0]['person']['avatar_url_medium'], str)
+ check_realm_user_update('events[0]', events[0], avatar_fields)
+ self.assertEqual(events[0]['person']['avatar_url'], None)
+ self.assertEqual(events[0]['person']['avatar_url_medium'], None)
Also note that avatar_fields is a set of four fields that
are set in event_schema.
full name: no extra work!
def test_change_full_name(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'full_name'})
test_change_user_delivery_email_email_address_visibilty_admins:
no extra work for delivery_email
check avatar fields more directly
roles (several examples) -- actually check the specific role
def test_change_realm_authentication_methods(self) -> None:
- schema_checker('events[0]', events[0])
+ check_realm_user_update('events[0]', events[0], {'role'})
+ self.assertEqual(events[0]['person']['role'], role)
bot_owner_id: no extra work!
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
- change_bot_owner_checker_user('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"bot_owner_id"})
timezone: no extra work!
- timezone_schema_checker('events[1]', events[1])
+ check_realm_user_update('events[1]', events[1], {"email", "timezone"})
2020-07-23 16:04:06 +02:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
|
|
|
self.assertEqual(events[0]["person"]["custom_profile_field"].keys(), {"id", "value"})
|
2018-08-09 14:02:32 +02:00
|
|
|
|
2023-07-31 19:39:57 +02:00
|
|
|
# Test event for removing custom profile data
|
|
|
|
events = self.verify_action(
|
|
|
|
lambda: check_remove_custom_profile_field_value(self.user_profile, field_id)
|
|
|
|
)
|
|
|
|
check_realm_user_update("events[0]", events[0], "custom_profile_field")
|
|
|
|
self.assertEqual(events[0]["person"]["custom_profile_field"].keys(), {"id", "value"})
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_presence_events(self) -> None:
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
slim_presence=False,
|
|
|
|
)
|
2020-08-13 19:29:07 +02:00
|
|
|
|
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
|
|
|
)
|
2020-02-02 17:29:05 +01:00
|
|
|
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_presence(
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("cordelia"),
|
2020-06-27 17:32:39 +02:00
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
2020-06-11 16:03:47 +02:00
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
|
|
|
slim_presence=True,
|
|
|
|
)
|
2017-04-25 11:50:30 +02:00
|
|
|
|
2020-08-13 19:29:07 +02:00
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=False,
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
|
|
|
)
|
2020-02-03 17:09:18 +01:00
|
|
|
|
2020-08-13 19:29:07 +02:00
|
|
|
def test_presence_events_multiple_clients(self) -> None:
|
2020-06-11 16:03:47 +02:00
|
|
|
now = timezone_now()
|
|
|
|
initial_presence = now - datetime.timedelta(days=365)
|
|
|
|
UserPresence.objects.create(
|
|
|
|
user_profile=self.user_profile,
|
|
|
|
realm=self.user_profile.realm,
|
|
|
|
last_active_time=initial_presence,
|
|
|
|
last_connected_time=initial_presence,
|
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.api_post(
|
|
|
|
self.user_profile,
|
|
|
|
"/api/v1/users/me/presence",
|
2021-02-12 08:20:45 +01:00
|
|
|
{"status": "idle"},
|
2021-02-12 08:19:30 +01:00
|
|
|
HTTP_USER_AGENT="ZulipAndroid/1.0",
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
|
|
|
lambda: do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("website"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2020-06-11 16:03:47 +02:00
|
|
|
self.verify_action(
|
|
|
|
lambda: do_update_user_presence(
|
|
|
|
self.user_profile,
|
|
|
|
get_client("ZulipAndroid/1.0"),
|
|
|
|
timezone_now(),
|
|
|
|
UserPresence.LEGACY_STATUS_IDLE_INT,
|
|
|
|
),
|
|
|
|
state_change_expected=False,
|
|
|
|
num_events=0,
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_presence(
|
2020-06-11 16:03:47 +02:00
|
|
|
self.user_profile,
|
|
|
|
get_client("ZulipAndroid/1.0"),
|
|
|
|
timezone_now() + datetime.timedelta(seconds=301),
|
|
|
|
UserPresence.LEGACY_STATUS_ACTIVE_INT,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2020-08-13 19:29:07 +02:00
|
|
|
|
|
|
|
check_presence(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_email=True,
|
2020-06-11 16:03:47 +02:00
|
|
|
# We no longer store information about the client and we simply
|
|
|
|
# set the field to 'website' for backwards compatibility.
|
|
|
|
presence_key="website",
|
|
|
|
status="active",
|
2020-08-13 19:29:07 +02:00
|
|
|
)
|
2017-03-24 05:26:32 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_register_events(self) -> None:
|
2021-08-12 12:15:06 +02:00
|
|
|
events = self.verify_action(lambda: self.register("test1@zulip.com", "test1"), num_events=5)
|
|
|
|
self.assert_length(events, 5)
|
2021-04-29 17:22:48 +02:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_realm_user_add("events[1]", events[1])
|
2018-12-06 23:17:46 +01:00
|
|
|
new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
|
2020-03-12 14:17:25 +01:00
|
|
|
self.assertEqual(new_user_profile.delivery_email, "test1@zulip.com")
|
2018-12-06 23:17:46 +01:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_subscription_peer_add("events[4]", events[4])
|
2021-04-29 17:22:48 +02:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_message("events[0]", events[0])
|
2021-04-29 17:22:48 +02:00
|
|
|
self.assertIn(
|
|
|
|
f'data-user-id="{new_user_profile.id}">test1_zulip.com</span> just signed up for Zulip',
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
events[0]["message"]["content"],
|
2021-04-29 17:22:48 +02:00
|
|
|
)
|
|
|
|
|
2022-08-15 15:54:50 +02:00
|
|
|
check_user_group_add_members("events[2]", events[2])
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_user_group_add_members("events[3]", events[3])
|
2021-08-12 12:15:06 +02:00
|
|
|
|
2018-12-06 23:17:46 +01:00
|
|
|
def test_register_events_email_address_visibility(self) -> None:
|
2021-10-26 09:15:16 +02:00
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=self.user_profile.realm)
|
|
|
|
do_set_realm_user_default_setting(
|
|
|
|
realm_user_default,
|
2021-02-12 08:19:30 +01:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_ADMINS,
|
2021-03-01 11:33:24 +01:00
|
|
|
acting_user=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-12-06 23:17:46 +01:00
|
|
|
|
2021-08-12 12:15:06 +02:00
|
|
|
events = self.verify_action(lambda: self.register("test1@zulip.com", "test1"), num_events=5)
|
|
|
|
self.assert_length(events, 5)
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_realm_user_add("events[1]", events[1])
|
2018-12-06 23:17:46 +01:00
|
|
|
new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
|
2020-06-10 06:41:04 +02:00
|
|
|
self.assertEqual(new_user_profile.email, f"user{new_user_profile.id}@zulip.testserver")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_subscription_peer_add("events[4]", events[4])
|
2021-04-29 17:22:48 +02:00
|
|
|
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_message("events[0]", events[0])
|
2021-04-29 17:22:48 +02:00
|
|
|
self.assertIn(
|
|
|
|
f'data-user-id="{new_user_profile.id}">test1_zulip.com</span> just signed up for Zulip',
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
events[0]["message"]["content"],
|
2021-04-29 17:22:48 +02:00
|
|
|
)
|
|
|
|
|
2022-08-15 15:54:50 +02:00
|
|
|
check_user_group_add_members("events[2]", events[2])
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
check_user_group_add_members("events[3]", events[3])
|
2021-08-12 12:15:06 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_alert_words_events(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
events = self.verify_action(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
|
2021-02-12 08:20:45 +01:00
|
|
|
check_alert_words("events[0]", events[0])
|
2014-03-06 17:07:43 +01:00
|
|
|
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:19:30 +01:00
|
|
|
lambda: do_remove_alert_words(self.user_profile, ["alert_word"])
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_alert_words("events[0]", events[0])
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2018-12-18 17:17:08 +01:00
|
|
|
def test_away_events(self) -> None:
|
|
|
|
client = get_client("website")
|
2022-09-22 11:56:58 +02:00
|
|
|
|
2023-04-08 15:52:48 +02:00
|
|
|
# Updating user status to away activates the codepath of disabling
|
|
|
|
# the presence_enabled user setting. Correctly simulating the presence
|
|
|
|
# event status for a typical user requires settings the user's date_joined
|
|
|
|
# further into the past. See test_change_presence_enabled for more details,
|
|
|
|
# since it tests that codepath directly.
|
|
|
|
self.user_profile.date_joined = timezone_now() - datetime.timedelta(days=15)
|
|
|
|
self.user_profile.save()
|
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Set all
|
|
|
|
away_val = True
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_status(
|
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-02-12 08:20:45 +01:00
|
|
|
status_text="out to lunch",
|
2021-06-22 18:42:31 +02:00
|
|
|
emoji_name="car",
|
|
|
|
emoji_code="1f697",
|
|
|
|
reaction_type=UserStatus.UNICODE_EMOJI,
|
2021-02-12 08:19:30 +01:00
|
|
|
client_id=client.id,
|
2022-09-22 11:56:58 +02:00
|
|
|
),
|
|
|
|
num_events=4,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
|
2021-06-22 18:42:31 +02:00
|
|
|
check_user_status(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
{"away", "status_text", "emoji_name", "emoji_code", "reaction_type"},
|
|
|
|
)
|
2022-09-22 11:56:58 +02:00
|
|
|
check_user_settings_update("events[1]", events[1])
|
|
|
|
check_update_global_notifications("events[2]", events[2], not away_val)
|
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
|
|
|
|
|
|
|
# Remove all
|
|
|
|
away_val = False
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_status(
|
2021-06-22 18:42:31 +02:00
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-06-22 18:42:31 +02:00
|
|
|
status_text="",
|
|
|
|
emoji_name="",
|
|
|
|
emoji_code="",
|
|
|
|
reaction_type=UserStatus.UNICODE_EMOJI,
|
|
|
|
client_id=client.id,
|
2022-09-22 11:56:58 +02:00
|
|
|
),
|
|
|
|
num_events=4,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
|
2021-06-22 18:42:31 +02:00
|
|
|
check_user_status(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
{"away", "status_text", "emoji_name", "emoji_code", "reaction_type"},
|
|
|
|
)
|
2022-09-22 11:56:58 +02:00
|
|
|
check_user_settings_update("events[1]", events[1])
|
|
|
|
check_update_global_notifications("events[2]", events[2], not away_val)
|
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Only set away
|
|
|
|
away_val = True
|
2021-01-20 19:53:11 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_status(
|
2021-06-22 18:42:31 +02:00
|
|
|
user_profile=self.user_profile,
|
2022-09-22 11:56:58 +02:00
|
|
|
away=away_val,
|
2021-06-22 18:42:31 +02:00
|
|
|
status_text=None,
|
|
|
|
emoji_name=None,
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
|
|
|
client_id=client.id,
|
2022-09-22 11:56:58 +02:00
|
|
|
),
|
|
|
|
num_events=4,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
|
|
|
check_user_status("events[0]", events[0], {"away"})
|
2022-09-22 11:56:58 +02:00
|
|
|
check_user_settings_update("events[1]", events[1])
|
|
|
|
check_update_global_notifications("events[2]", events[2], not away_val)
|
|
|
|
check_presence(
|
|
|
|
"events[3]",
|
|
|
|
events[3],
|
|
|
|
has_email=True,
|
|
|
|
presence_key="website",
|
|
|
|
status="active" if not away_val else "idle",
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
2022-09-22 11:56:58 +02:00
|
|
|
# Only set status_text
|
2021-01-20 19:53:11 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_status(
|
|
|
|
user_profile=self.user_profile,
|
|
|
|
away=None,
|
|
|
|
status_text="at the beach",
|
2021-06-22 18:42:31 +02:00
|
|
|
emoji_name=None,
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
client_id=client.id,
|
|
|
|
)
|
|
|
|
)
|
2021-01-20 19:53:11 +01:00
|
|
|
|
|
|
|
check_user_status("events[0]", events[0], {"status_text"})
|
2018-12-18 17:17:08 +01:00
|
|
|
|
2017-11-14 07:31:31 +01:00
|
|
|
def test_user_group_events(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
othello = self.example_user("othello")
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: check_add_user_group(
|
2022-11-21 03:37:11 +01:00
|
|
|
self.user_profile.realm, "backend", [othello], "Backend team", acting_user=None
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_add("events[0]", events[0])
|
2017-11-14 07:31:31 +01:00
|
|
|
|
2017-11-14 08:00:18 +01:00
|
|
|
# Test name update
|
2021-02-12 08:20:45 +01:00
|
|
|
backend = UserGroup.objects.get(name="backend")
|
2022-11-21 03:43:48 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_group_name(backend, "backendteam", acting_user=None)
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_update("events[0]", events[0], "name")
|
2017-11-14 08:00:18 +01:00
|
|
|
|
2017-11-14 08:00:53 +01:00
|
|
|
# Test description update
|
|
|
|
description = "Backend team to deal with backend code."
|
2022-11-21 03:45:16 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_update_user_group_description(backend, description, acting_user=None)
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_update("events[0]", events[0], "description")
|
2017-11-14 08:00:53 +01:00
|
|
|
|
2023-06-15 05:24:23 +02:00
|
|
|
# Test can_mention_group setting update
|
|
|
|
moderators_group = UserGroup.objects.get(
|
2023-07-03 09:18:44 +02:00
|
|
|
name="role:moderators", realm=self.user_profile.realm, is_system_group=True
|
2023-06-15 05:24:23 +02:00
|
|
|
)
|
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_change_user_group_permission_setting(
|
|
|
|
backend, "can_mention_group", moderators_group, acting_user=None
|
|
|
|
)
|
|
|
|
)
|
2023-07-14 06:50:33 +02:00
|
|
|
check_user_group_update("events[0]", events[0], "can_mention_group")
|
2023-06-15 05:24:23 +02:00
|
|
|
|
2017-11-14 08:01:39 +01:00
|
|
|
# Test add members
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2022-11-21 03:48:10 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: bulk_add_members_to_user_group(backend, [hamlet.id], acting_user=None)
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_add_members("events[0]", events[0])
|
2017-11-14 08:01:39 +01:00
|
|
|
|
2017-11-14 08:01:50 +01:00
|
|
|
# Test remove members
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2022-11-21 04:06:15 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: remove_members_from_user_group(backend, [hamlet.id], acting_user=None)
|
|
|
|
)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_remove_members("events[0]", events[0])
|
2017-11-14 08:01:50 +01:00
|
|
|
|
2022-12-14 06:45:55 +01:00
|
|
|
api_design = check_add_user_group(
|
|
|
|
hamlet.realm, "api-design", [hamlet], description="API design team", acting_user=None
|
2022-03-01 07:52:47 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
# Test add subgroups
|
2022-11-21 04:09:19 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: add_subgroups_to_user_group(backend, [api_design], acting_user=None)
|
|
|
|
)
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_add_subgroups("events[0]", events[0])
|
|
|
|
|
|
|
|
# Test remove subgroups
|
2022-11-21 04:10:48 +01:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: remove_subgroups_from_user_group(backend, [api_design], acting_user=None)
|
|
|
|
)
|
2022-03-01 07:52:47 +01:00
|
|
|
check_user_group_remove_subgroups("events[0]", events[0])
|
|
|
|
|
2020-08-14 13:38:36 +02:00
|
|
|
# Test remove event
|
user_groups: Make locks required for updating user group memberships.
**Background**
User groups are expected to comply with the DAG constraint for the
many-to-many inter-group membership. The check for this constraint has
to be performed recursively so that we can find all direct and indirect
subgroups of the user group to be added.
This kind of check is vulnerable to phantom reads which is possible at
the default read committed isolation level because we cannot guarantee
that the check is still valid when we are adding the subgroups to the
user group.
**Solution**
To avoid having another transaction concurrently update one of the
to-be-subgroup after the recursive check is done, and before the subgroup
is added, we use SELECT FOR UPDATE to lock the user group rows.
The lock needs to be acquired before a group membership change is about
to occur before any check has been conducted.
Suppose that we are adding subgroup B to supergroup A, the locking protocol
is specified as follows:
1. Acquire a lock for B and all its direct and indirect subgroups.
2. Acquire a lock for A.
For the removal of user groups, we acquire a lock for the user group to
be removed with all its direct and indirect subgroups. This is the special
case A=B, which is still complaint with the protocol.
**Error handling**
We currently rely on Postgres' deadlock detection to abort transactions
and show an error for the users. In the future, we might need some
recovery mechanism or at least better error handling.
**Notes**
An important note is that we need to reuse the recursive CTE query that
finds the direct and indirect subgroups when applying the lock on the
rows. And the lock needs to be acquired the same way for the addition and
removal of direct subgroups.
User membership change (as opposed to user group membership) is not
affected. Read-only queries aren't either. The locks only protect
critical regions where the user group dependency graph might violate
the DAG constraint, where users are not participating.
**Testing**
We implement a transaction test case targeting some typical scenarios
when an internal server error is expected to happen (this means that the
user group view makes the correct decision to abort the transaction when
something goes wrong with locks).
To achieve this, we add a development view intended only for unit tests.
It has a global BARRIER that can be shared across threads, so that we
can synchronize them to consistently reproduce certain potential race
conditions prevented by the database locks.
The transaction test case lanuches pairs of threads initiating possibly
conflicting requests at the same time. The tests are set up such that exactly N
of them are expected to succeed with a certain error message (while we don't
know each one).
**Security notes**
get_recursive_subgroups_for_groups will no longer fetch user groups from
other realms. As a result, trying to add/remove a subgroup from another
realm results in a UserGroup not found error response.
We also implement subgroup-specific checks in has_user_group_access to
keep permission managing in a single place. Do note that the API
currently don't have a way to violate that check because we are only
checking the realm ID now.
2023-06-17 04:39:52 +02:00
|
|
|
events = self.verify_action(lambda: check_delete_user_group(backend, acting_user=othello))
|
2021-02-12 08:20:45 +01:00
|
|
|
check_user_group_remove("events[0]", events[0])
|
2017-11-15 08:09:49 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_default_stream_groups_events(self) -> None:
        """Each mutation of a default stream group emits one default_stream_groups event.

        Walks the full lifecycle: create group, add/remove a stream,
        change description, rename, and delete — verifying the event
        schema after every step.
        """
        streams = [
            get_stream(stream_name, self.user_profile.realm)
            for stream_name in ["Scotland", "Rome", "Denmark"]
        ]

        events = self.verify_action(
            lambda: do_create_default_stream_group(
                self.user_profile.realm, "group1", "This is group1", streams
            )
        )
        check_default_stream_groups("events[0]", events[0])

        # Re-fetch the group object created above so subsequent mutations
        # operate on the persisted row.
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
        venice_stream = get_stream("Venice", self.user_profile.realm)
        events = self.verify_action(
            lambda: do_add_streams_to_default_stream_group(
                self.user_profile.realm, group, [venice_stream]
            )
        )
        check_default_stream_groups("events[0]", events[0])

        events = self.verify_action(
            lambda: do_remove_streams_from_default_stream_group(
                self.user_profile.realm, group, [venice_stream]
            )
        )
        check_default_stream_groups("events[0]", events[0])

        events = self.verify_action(
            lambda: do_change_default_stream_group_description(
                self.user_profile.realm, group, "New description"
            )
        )
        check_default_stream_groups("events[0]", events[0])

        events = self.verify_action(
            lambda: do_change_default_stream_group_name(
                self.user_profile.realm, group, "New group name"
            )
        )
        check_default_stream_groups("events[0]", events[0])

        events = self.verify_action(
            lambda: do_remove_default_stream_group(self.user_profile.realm, group)
        )
        check_default_stream_groups("events[0]", events[0])
    def test_default_stream_group_events_guest(self) -> None:
        """Guests receive no events when a default stream group changes.

        Demoting the test user to a guest and then mutating a default
        stream group must produce zero events and no state change.
        """
        streams = [
            get_stream(stream_name, self.user_profile.realm)
            for stream_name in ["Scotland", "Rome", "Denmark"]
        ]

        do_create_default_stream_group(self.user_profile.realm, "group1", "This is group1", streams)
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]

        # Demote to guest before the mutation we are actually testing.
        do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST, acting_user=None)
        venice_stream = get_stream("Venice", self.user_profile.realm)
        self.verify_action(
            lambda: do_add_streams_to_default_stream_group(
                self.user_profile.realm, group, [venice_stream]
            ),
            state_change_expected=False,
            num_events=0,
        )
def test_default_streams_events(self) -> None:
|
2017-01-30 04:23:08 +01:00
|
|
|
stream = get_stream("Scotland", self.user_profile.realm)
|
2021-02-12 08:19:30 +01:00
|
|
|
events = self.verify_action(lambda: do_add_default_stream(stream))
|
2021-02-12 08:20:45 +01:00
|
|
|
check_default_streams("events[0]", events[0])
|
2021-02-12 08:19:30 +01:00
|
|
|
events = self.verify_action(lambda: do_remove_default_stream(stream))
|
2021-02-12 08:20:45 +01:00
|
|
|
check_default_streams("events[0]", events[0])
|
2016-05-20 22:08:42 +02:00
|
|
|
|
2019-03-01 01:26:57 +01:00
|
|
|
def test_default_streams_events_guest(self) -> None:
|
2021-03-27 05:13:46 +01:00
|
|
|
do_change_user_role(self.user_profile, UserProfile.ROLE_GUEST, acting_user=None)
|
2019-03-01 01:26:57 +01:00
|
|
|
stream = get_stream("Scotland", self.user_profile.realm)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
2021-02-12 08:19:30 +01:00
|
|
|
lambda: do_add_default_stream(stream), state_change_expected=False, num_events=0
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
2021-02-12 08:19:30 +01:00
|
|
|
lambda: do_remove_default_stream(stream), state_change_expected=False, num_events=0
|
|
|
|
)
|
2019-03-01 01:26:57 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_muted_topics_events(self) -> None:
        """Setting a topic visibility policy emits both muted_topics and user_topic events.

        Covers MUTED, then INHERIT (clearing the policy), and finally
        verifies that filtering by event_types drops the muted_topics
        event, leaving only user_topic.
        """
        stream = get_stream("Denmark", self.user_profile.realm)
        events = self.verify_action(
            lambda: do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.MUTED,
            ),
            num_events=2,
        )
        check_muted_topics("events[0]", events[0])
        check_user_topic("events[1]", events[1])

        events = self.verify_action(
            lambda: do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.INHERIT,
            ),
            num_events=2,
        )
        check_muted_topics("events[0]", events[0])
        check_user_topic("events[1]", events[1])

        # With an explicit event_types filter, only the listed types are
        # delivered; here we verify the single user_topic event survives.
        events = self.verify_action(
            lambda: do_set_user_topic_visibility_policy(
                self.user_profile,
                stream,
                "topic",
                visibility_policy=UserTopic.VisibilityPolicy.MUTED,
            ),
            event_types=["muted_topics", "user_topic"],
        )
        check_user_topic("events[0]", events[0])
user_topics: Refactor add_topic_mute.
In order to support different types of topic visibility policies,
this renames 'add_topic_mute' to
'set_user_topic_visibility_policy_in_database'
and refactors it to accept a parameter 'visibility_policy'.
Create a corresponding UserTopic row for any visibility policy,
not just muting topics.
When a UserTopic row for (user_profile, stream, topic, recipient_id)
exists already, it updates the row with the new visibility_policy.
In the event of a duplicate request, raises a JsonableError.
i.e., new_visibility_policy == existing_visibility_policy.
There is an increase in the database query count in the message-edit
code path.
Reason:
Earlier, 'add_topic_mute' used 'bulk_create' which either
creates or raises IntegrityError -- 1 query.
Now, 'set_user_topic_visibility_policy' uses get_or_create
-- 2 queries in the case of creating new row.
We can't use the previous approach, because now we have to
handle the case of updating the visibility_policy too.
Also, using bulk_* for a single row is not the correct way.
Co-authored-by: Kartik Srivastava <kaushiksri0908@gmail.com>
Co-authored-by: Prakhar Pratyush <prakhar841301@gmail.com>
2022-09-12 16:39:53 +02:00
|
|
|
def test_unmuted_topics_events(self) -> None:
|
|
|
|
stream = get_stream("Denmark", self.user_profile.realm)
|
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_set_user_topic_visibility_policy(
|
2023-03-12 16:19:42 +01:00
|
|
|
self.user_profile,
|
|
|
|
stream,
|
|
|
|
"topic",
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.UNMUTED,
|
user_topics: Refactor add_topic_mute.
In order to support different types of topic visibility policies,
this renames 'add_topic_mute' to
'set_user_topic_visibility_policy_in_database'
and refactors it to accept a parameter 'visibility_policy'.
Create a corresponding UserTopic row for any visibility policy,
not just muting topics.
When a UserTopic row for (user_profile, stream, topic, recipient_id)
exists already, it updates the row with the new visibility_policy.
In the event of a duplicate request, raises a JsonableError.
i.e., new_visibility_policy == existing_visibility_policy.
There is an increase in the database query count in the message-edit
code path.
Reason:
Earlier, 'add_topic_mute' used 'bulk_create' which either
creates or raises IntegrityError -- 1 query.
Now, 'set_user_topic_visibility_policy' uses get_or_create
-- 2 queries in the case of creating new row.
We can't use the previous approach, because now we have to
handle the case of updating the visibility_policy too.
Also, using bulk_* for a single row is not the correct way.
Co-authored-by: Kartik Srivastava <kaushiksri0908@gmail.com>
Co-authored-by: Prakhar Pratyush <prakhar841301@gmail.com>
2022-09-12 16:39:53 +02:00
|
|
|
),
|
|
|
|
num_events=2,
|
|
|
|
)
|
|
|
|
check_muted_topics("events[0]", events[0])
|
|
|
|
check_user_topic("events[1]", events[1])
|
|
|
|
|
2021-03-27 12:23:32 +01:00
|
|
|
    def test_muted_users_events(self) -> None:
        """Muting and unmuting a user each emit one muted_users event."""
        muted_user = self.example_user("othello")
        events = self.verify_action(
            lambda: do_mute_user(self.user_profile, muted_user), num_events=1
        )
        check_muted_users("events[0]", events[0])

        mute_object = get_mute_object(self.user_profile, muted_user)
        assert mute_object is not None
        # This is a hack to silence mypy errors which result from it not taking
        # into account type restrictions for nested functions (here, `lambda`).
        # https://github.com/python/mypy/commit/8780d45507ab1efba33568744967674cce7184d1
        mute_object2 = mute_object

        events = self.verify_action(lambda: do_unmute_user(mute_object2))
        check_muted_users("events[0]", events[0])
    def test_change_avatar_fields(self) -> None:
        """Avatar changes emit realm_user updates with avatar URL fields.

        With an uploaded avatar the URLs are strings; after reverting to
        gravatar (with email visibility set to everyone) the event
        carries None for both URLs.
        """
        events = self.verify_action(
            lambda: do_change_avatar_fields(
                self.user_profile, UserProfile.AVATAR_FROM_USER, acting_user=self.user_profile
            ),
        )
        check_realm_user_update("events[0]", events[0], "avatar_fields")
        assert isinstance(events[0]["person"]["avatar_url"], str)
        assert isinstance(events[0]["person"]["avatar_url_medium"], str)

        # Make the email visible so the gravatar case below is exercised
        # with EMAIL_ADDRESS_VISIBILITY_EVERYONE.
        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=self.user_profile,
        )
        events = self.verify_action(
            lambda: do_change_avatar_fields(
                self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR, acting_user=self.user_profile
            ),
        )
        check_realm_user_update("events[0]", events[0], "avatar_fields")
        self.assertEqual(events[0]["person"]["avatar_url"], None)
        self.assertEqual(events[0]["person"]["avatar_url_medium"], None)
def test_change_full_name(self) -> None:
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:20:45 +01:00
|
|
|
lambda: do_change_full_name(self.user_profile, "Sir Hamlet", self.user_profile)
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_user_update("events[0]", events[0], "full_name")
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2023-09-07 00:29:46 +02:00
|
|
|
    def test_change_user_delivery_email_email_address_visibility_admins(self) -> None:
        """With admins-only email visibility, an email change emits 2 events.

        One realm_user update for delivery_email and one for the avatar
        fields (avatar URLs are recomputed when the email changes).
        """
        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            acting_user=None,
        )
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        action = lambda: do_change_user_delivery_email(self.user_profile, "newhamlet@zulip.com")
        events = self.verify_action(action, num_events=2, client_gravatar=False)

        check_realm_user_update("events[0]", events[0], "delivery_email")
        check_realm_user_update("events[1]", events[1], "avatar_fields")
        assert isinstance(events[1]["person"]["avatar_url"], str)
        assert isinstance(events[1]["person"]["avatar_url_medium"], str)
    def test_change_user_delivery_email_email_address_visibility_everyone(self) -> None:
        """With everyone-visible emails, an email change emits 3 events.

        delivery_email, avatar_fields, and additionally the public
        "email" field update (since the address is visible to all).
        """
        do_change_user_setting(
            self.user_profile,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
            acting_user=None,
        )
        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()
        action = lambda: do_change_user_delivery_email(self.user_profile, "newhamlet@zulip.com")
        events = self.verify_action(action, num_events=3, client_gravatar=False)

        check_realm_user_update("events[0]", events[0], "delivery_email")
        check_realm_user_update("events[1]", events[1], "avatar_fields")
        check_realm_user_update("events[2]", events[2], "email")
        assert isinstance(events[1]["person"]["avatar_url"], str)
        assert isinstance(events[1]["person"]["avatar_url_medium"], str)
    def test_change_realm_authentication_methods(self) -> None:
        """Changing realm authentication methods emits a realm update dict event.

        Iterates over several transitions of the per-backend enabled
        flags, with all five known backends enabled at the Django
        settings level so each realm-level combination is meaningful.
        """

        def fake_backends() -> Any:
            # Enable every backend in settings so realm-level toggles are
            # the only variable under test.
            backends = (
                "zproject.backends.DevAuthBackend",
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.GitHubAuthBackend",
                "zproject.backends.GoogleAuthBackend",
                "zproject.backends.ZulipLDAPAuthBackend",
            )
            return self.settings(AUTHENTICATION_BACKENDS=backends)

        # Test transitions; any new backends should be tested with T/T/T/F/T
        for auth_method_dict in (
            {"Google": True, "Email": True, "GitHub": True, "LDAP": False, "Dev": False},
            {"Google": True, "Email": True, "GitHub": False, "LDAP": False, "Dev": False},
            {"Google": True, "Email": False, "GitHub": False, "LDAP": False, "Dev": False},
            {"Google": True, "Email": False, "GitHub": True, "LDAP": False, "Dev": False},
            {"Google": False, "Email": False, "GitHub": False, "LDAP": False, "Dev": True},
            {"Google": False, "Email": False, "GitHub": True, "LDAP": False, "Dev": True},
            {"Google": False, "Email": True, "GitHub": True, "LDAP": True, "Dev": False},
        ):
            with fake_backends():
                events = self.verify_action(
                    lambda: do_set_realm_authentication_methods(
                        self.user_profile.realm, auth_method_dict, acting_user=None
                    )
                )

            check_realm_update_dict("events[0]", events[0])
def test_change_pin_stream(self) -> None:
|
2017-03-05 01:30:48 +01:00
|
|
|
stream = get_stream("Denmark", self.user_profile.realm)
|
|
|
|
sub = get_subscription(stream.name, self.user_profile)
|
2021-04-08 02:41:57 +02:00
|
|
|
do_change_subscription_property(
|
|
|
|
self.user_profile, sub, stream, "pin_to_top", False, acting_user=None
|
|
|
|
)
|
2017-02-21 19:35:17 +01:00
|
|
|
for pinned in (True, False):
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_change_subscription_property(
|
2021-04-08 02:41:57 +02:00
|
|
|
self.user_profile, sub, stream, "pin_to_top", pinned, acting_user=None
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2020-08-17 14:19:09 +02:00
|
|
|
check_subscription_update(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
property="pin_to_top",
|
|
|
|
value=pinned,
|
|
|
|
)
|
2016-07-01 07:26:09 +02:00
|
|
|
|
2022-08-09 20:37:07 +02:00
|
|
|
    def test_mute_and_unmute_stream(self) -> None:
        """Changing either in_home_view or is_muted emits both events.

        During the in_home_view -> is_muted API migration, a change to
        either property must emit two subscription updates — one per
        property name, with inverted values — in a fixed order.
        """
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)

        # While migrating events API from in_home_view to is_muted:
        # First, test in_home_view sends 2 events: in_home_view and is_muted.
        do_change_subscription_property(
            self.user_profile, sub, stream, "in_home_view", False, acting_user=None
        )

        events = self.verify_action(
            lambda: do_change_subscription_property(
                self.user_profile, sub, stream, "in_home_view", True, acting_user=None
            ),
            num_events=2,
        )
        check_subscription_update(
            "events[0]",
            events[0],
            property="in_home_view",
            value=True,
        )
        check_subscription_update(
            "events[1]",
            events[1],
            property="is_muted",
            value=False,
        )

        # Then, test is_muted also sends both events, in the same order.
        events = self.verify_action(
            lambda: do_change_subscription_property(
                self.user_profile, sub, stream, "is_muted", True, acting_user=None
            ),
            num_events=2,
        )
        check_subscription_update(
            "events[0]",
            events[0],
            property="in_home_view",
            value=False,
        )
        check_subscription_update(
            "events[1]",
            events[1],
            property="is_muted",
            value=True,
        )
    def test_change_stream_notification_settings(self) -> None:
        """Per-stream notification setting changes emit subscription updates.

        Each setting is toggled True/False twice: once with
        notification_settings_null enabled, then with the default.
        """
        for setting_name in ["email_notifications"]:
            stream = get_stream("Denmark", self.user_profile.realm)
            sub = get_subscription(stream.name, self.user_profile)

            # First test with notification_settings_null enabled
            for value in (True, False):
                events = self.verify_action(
                    lambda: do_change_subscription_property(
                        self.user_profile, sub, stream, setting_name, value, acting_user=None
                    ),
                    notification_settings_null=True,
                )
                check_subscription_update(
                    "events[0]",
                    events[0],
                    property=setting_name,
                    value=value,
                )

            for value in (True, False):
                events = self.verify_action(
                    lambda: do_change_subscription_property(
                        self.user_profile, sub, stream, setting_name, value, acting_user=None
                    )
                )
                check_subscription_update(
                    "events[0]",
                    events[0],
                    property=setting_name,
                    value=value,
                )
    def test_change_realm_notifications_stream(self) -> None:
        """Setting/unsetting the realm notifications stream emits a realm update.

        Covers both a real stream and clearing the setting (None / -1).
        """
        stream = get_stream("Rome", self.user_profile.realm)

        for notifications_stream, notifications_stream_id in ((stream, stream.id), (None, -1)):
            events = self.verify_action(
                lambda: do_set_realm_notifications_stream(
                    self.user_profile.realm,
                    notifications_stream,
                    notifications_stream_id,
                    acting_user=None,
                )
            )
            check_realm_update("events[0]", events[0], "notifications_stream_id")
    def test_change_realm_signup_notifications_stream(self) -> None:
        """Setting/unsetting the signup notifications stream emits a realm update.

        Covers both a real stream and clearing the setting (None / -1).
        """
        stream = get_stream("Rome", self.user_profile.realm)

        for signup_notifications_stream, signup_notifications_stream_id in (
            (stream, stream.id),
            (None, -1),
        ):
            events = self.verify_action(
                lambda: do_set_realm_signup_notifications_stream(
                    self.user_profile.realm,
                    signup_notifications_stream,
                    signup_notifications_stream_id,
                    acting_user=None,
                )
            )
            check_realm_update("events[0]", events[0], "signup_notifications_stream_id")
    def test_change_is_admin(self) -> None:
        """Promoting to / demoting from administrator emits the expected event set.

        Besides the realm_user role update and system-group membership
        moves, promotion to administrator additionally reveals a private
        stream (stream create + peer add: 6 events), while demotion
        hides it again (stream delete: 5 events).
        """
        reset_email_visibility_to_everyone_in_zulip_realm()

        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()

        do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)

        # A private stream the user is not subscribed to, so role changes
        # affect its visibility.
        self.make_stream("Test private stream", invite_only=True)
        self.subscribe(self.example_user("othello"), "Test private stream")

        for role in [UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_MEMBER]:
            if role == UserProfile.ROLE_REALM_ADMINISTRATOR:
                num_events = 6
            else:
                num_events = 5

            events = self.verify_action(
                lambda: do_change_user_role(self.user_profile, role, acting_user=None),
                num_events=num_events,
            )
            check_realm_user_update("events[0]", events[0], "role")
            self.assertEqual(events[0]["person"]["role"], role)

            check_user_group_remove_members("events[1]", events[1])
            check_user_group_add_members("events[2]", events[2])

            if role == UserProfile.ROLE_REALM_ADMINISTRATOR:
                check_user_group_remove_members("events[3]", events[3])
                check_stream_create("events[4]", events[4])
                check_subscription_peer_add("events[5]", events[5])
            else:
                check_user_group_add_members("events[3]", events[3])
                check_stream_delete("events[4]", events[4])
    def test_change_is_billing_admin(self) -> None:
        """Granting billing-admin emits a realm_user update with is_billing_admin=True."""
        reset_email_visibility_to_everyone_in_zulip_realm()

        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()

        events = self.verify_action(lambda: do_make_user_billing_admin(self.user_profile))
        check_realm_user_update("events[0]", events[0], "is_billing_admin")
        self.assertEqual(events[0]["person"]["is_billing_admin"], True)
    def test_change_is_owner(self) -> None:
        """Promoting to / demoting from realm owner emits the expected event set.

        Mirrors test_change_is_admin: promotion reveals a private stream
        (6 events total), demotion hides it again (5 events).
        """
        reset_email_visibility_to_everyone_in_zulip_realm()

        # Important: We need to refresh from the database here so that
        # we don't have a stale UserProfile object with an old value
        # for email being passed into this next function.
        self.user_profile.refresh_from_db()

        do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)

        # A private stream the user is not subscribed to, so role changes
        # affect its visibility.
        self.make_stream("Test private stream", invite_only=True)
        self.subscribe(self.example_user("othello"), "Test private stream")

        for role in [UserProfile.ROLE_REALM_OWNER, UserProfile.ROLE_MEMBER]:
            if role == UserProfile.ROLE_REALM_OWNER:
                num_events = 6
            else:
                num_events = 5
            events = self.verify_action(
                lambda: do_change_user_role(self.user_profile, role, acting_user=None),
                num_events=num_events,
            )
            check_realm_user_update("events[0]", events[0], "role")
            self.assertEqual(events[0]["person"]["role"], role)

            check_user_group_remove_members("events[1]", events[1])
            check_user_group_add_members("events[2]", events[2])

            if role == UserProfile.ROLE_REALM_OWNER:
                check_user_group_remove_members("events[3]", events[3])
                check_stream_create("events[4]", events[4])
                check_subscription_peer_add("events[5]", events[5])
            else:
                check_user_group_add_members("events[3]", events[3])
                check_stream_delete("events[4]", events[4])
def test_change_is_moderator(self) -> None:
    """Verify the event sequence emitted when toggling a user between
    moderator and member roles."""
    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
    for role in [UserProfile.ROLE_MODERATOR, UserProfile.ROLE_MEMBER]:
        # Bind `role` as a default argument so the lambda does not rely on
        # late binding of the loop variable (flake8-bugbear B023).
        events = self.verify_action(
            lambda role=role: do_change_user_role(self.user_profile, role, acting_user=None),
            num_events=4,
        )
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Role changes move the user between system user groups.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_MODERATOR:
            check_user_group_remove_members("events[3]", events[3])
        else:
            check_user_group_add_members("events[3]", events[3])
|
def test_change_is_guest(self) -> None:
    """Verify the event sequence emitted when toggling a user between
    guest and member roles, including the stream create/delete and
    peer_add events tied to the change in stream access."""
    stream = Stream.objects.get(name="Denmark")
    do_add_default_stream(stream)

    reset_email_visibility_to_everyone_in_zulip_realm()

    # Important: We need to refresh from the database here so that
    # we don't have a stale UserProfile object with an old value
    # for email being passed into this next function.
    self.user_profile.refresh_from_db()

    do_change_user_role(self.user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
    for role in [UserProfile.ROLE_GUEST, UserProfile.ROLE_MEMBER]:
        if role == UserProfile.ROLE_MEMBER:
            # When changing role from guest to member, peer_add events are also sent
            # to make sure the subscribers info is provided to the clients for the
            # streams added by stream creation event.
            num_events = 7
        else:
            num_events = 5
        # Bind `role` as a default argument so the lambda does not rely on
        # late binding of the loop variable (flake8-bugbear B023).
        events = self.verify_action(
            lambda role=role: do_change_user_role(self.user_profile, role, acting_user=None),
            num_events=num_events,
        )
        check_realm_user_update("events[0]", events[0], "role")
        self.assertEqual(events[0]["person"]["role"], role)

        # Role changes move the user between system user groups.
        check_user_group_remove_members("events[1]", events[1])
        check_user_group_add_members("events[2]", events[2])

        if role == UserProfile.ROLE_GUEST:
            check_user_group_remove_members("events[3]", events[3])
            # Becoming a guest loses access to the stream.
            check_stream_delete("events[4]", events[4])
        else:
            check_user_group_add_members("events[3]", events[3])
            # Becoming a member regains access to the stream.
            check_stream_create("events[4]", events[4])
            check_subscription_peer_add("events[5]", events[5])
            check_subscription_peer_add("events[6]", events[6])
|
def test_change_notification_settings(self) -> None:
    """Exercise every generic boolean notification setting, verifying both
    the modern user_settings event and (for legacy settings) the
    backwards-compatible update_global_notifications event."""
    for notification_setting in self.user_profile.notification_setting_types:
        if notification_setting in [
            "notification_sound",
            "desktop_icon_count_display",
            "presence_enabled",
            "realm_name_in_email_notifications_policy",
        ]:
            # These settings are tested in their own tests.
            continue

        do_change_user_setting(
            self.user_profile, notification_setting, False, acting_user=self.user_profile
        )

        num_events = 2
        is_modern_notification_setting = (
            notification_setting in self.user_profile.modern_notification_settings
        )
        if is_modern_notification_setting:
            # The legacy event format is not sent for modern_notification_settings
            # as it exists only for backwards-compatibility with
            # clients that don't support the new user_settings event type.
            # We only send the legacy event for settings added before Feature level 89.
            num_events = 1

        for setting_value in [True, False]:
            # Bind the loop variables as default arguments so the lambdas
            # do not rely on late binding (flake8-bugbear B023).
            events = self.verify_action(
                lambda notification_setting=notification_setting, setting_value=setting_value: do_change_user_setting(
                    self.user_profile,
                    notification_setting,
                    setting_value,
                    acting_user=self.user_profile,
                ),
                num_events=num_events,
            )
            check_user_settings_update("events[0]", events[0])
            if not is_modern_notification_setting:
                check_update_global_notifications("events[1]", events[1], setting_value)

            # Also test with notification_settings_null=True
            events = self.verify_action(
                lambda notification_setting=notification_setting, setting_value=setting_value: do_change_user_setting(
                    self.user_profile,
                    notification_setting,
                    setting_value,
                    acting_user=self.user_profile,
                ),
                notification_settings_null=True,
                state_change_expected=False,
                num_events=num_events,
            )
            check_user_settings_update("events[0]", events[0])
            if not is_modern_notification_setting:
                check_update_global_notifications("events[1]", events[1], setting_value)
|
def test_change_presence_enabled(self) -> None:
    """Toggling presence_enabled should emit user_settings,
    update_global_notifications, and a presence event reflecting the
    user's new effective status."""
    presence_enabled_setting = "presence_enabled"

    # Disabling presence will lead to the creation of a UserPresence object for the user
    # with a last_connected_time slightly preceding the moment of flipping the setting
    # and last_active_time set to None. The presence API defaults to user_profile.date_joined
    # for backwards compatibility when dealing with a None value. Thus for this test to properly
    # check that the presence event emitted will have "idle" status, we need to simulate
    # the (more realistic) scenario where date_joined is further in the past and not super recent.
    self.user_profile.date_joined = timezone_now() - datetime.timedelta(days=15)
    self.user_profile.save()

    for val in [True, False]:
        # Bind `val` as a default argument so the lambda does not rely on
        # late binding of the loop variable (flake8-bugbear B023).
        events = self.verify_action(
            lambda val=val: do_change_user_setting(
                self.user_profile, presence_enabled_setting, val, acting_user=self.user_profile
            ),
            num_events=3,
        )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], val)
        check_presence(
            "events[2]",
            events[2],
            has_email=True,
            presence_key="website",
            status="active" if val else "idle",
        )
|
|
def test_change_notification_sound(self) -> None:
    """Changing the notification sound emits both the modern
    user_settings event and the legacy global-notifications event."""
    setting_name = "notification_sound"

    change_sound = lambda: do_change_user_setting(
        self.user_profile, setting_name, "ding", acting_user=self.user_profile
    )
    events = self.verify_action(change_sound, num_events=2)

    check_user_settings_update("events[0]", events[0])
    check_update_global_notifications("events[1]", events[1], "ding")
|
|
def test_change_desktop_icon_count_display(self) -> None:
    """Changing desktop_icon_count_display (to 2, then back to 1) emits
    both the modern user_settings event and the legacy
    global-notifications event for each change."""
    setting_name = "desktop_icon_count_display"

    # Exercise the same change twice with different values, in the same
    # order as the original back-to-back calls.
    for new_value in (2, 1):
        events = self.verify_action(
            lambda new_value=new_value: do_change_user_setting(
                self.user_profile, setting_name, new_value, acting_user=self.user_profile
            ),
            num_events=2,
        )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], new_value)
|
|
|
def test_change_realm_name_in_email_notifications_policy(self) -> None:
    """Changing realm_name_in_email_notifications_policy (to 3, then 2)
    emits both the modern user_settings event and the legacy
    global-notifications event for each change."""
    setting_name = "realm_name_in_email_notifications_policy"

    # Same two updates as before, just driven by a loop.
    for new_value in (3, 2):
        events = self.verify_action(
            lambda new_value=new_value: do_change_user_setting(
                self.user_profile, setting_name, new_value, acting_user=self.user_profile
            ),
            num_events=2,
        )
        check_user_settings_update("events[0]", events[0])
        check_update_global_notifications("events[1]", events[1], new_value)
|
def test_realm_update_org_type(self) -> None:
    """Changing the realm's organization type should emit a realm update
    event and be reflected in freshly fetched initial state."""
    realm = self.user_profile.realm

    # Baseline: the test realm starts as a "business" org.
    state_data = fetch_initial_state_data(self.user_profile)
    self.assertEqual(state_data["realm_org_type"], Realm.ORG_TYPES["business"]["id"])

    events = self.verify_action(
        lambda: do_change_realm_org_type(
            realm, Realm.ORG_TYPES["government"]["id"], acting_user=self.user_profile
        )
    )
    check_realm_update("events[0]", events[0], "org_type")

    # The new org type must appear in a fresh initial state fetch.
    state_data = fetch_initial_state_data(self.user_profile)
    self.assertEqual(state_data["realm_org_type"], Realm.ORG_TYPES["government"]["id"])
|
def test_realm_update_plan_type(self) -> None:
    """Downgrading the realm plan type to LIMITED should emit realm
    update events (including the implied enable_spectator_access
    change) and flip zulip_plan_is_not_limited in the initial state."""
    realm = self.user_profile.realm

    # Baseline: self-hosted plan, which is not limited.
    state_data = fetch_initial_state_data(self.user_profile)
    self.assertEqual(state_data["realm_plan_type"], Realm.PLAN_TYPE_SELF_HOSTED)
    self.assertEqual(state_data["zulip_plan_is_not_limited"], True)

    events = self.verify_action(
        lambda: do_change_realm_plan_type(
            realm, Realm.PLAN_TYPE_LIMITED, acting_user=self.user_profile
        ),
        # Two events: spectator access is disabled as a side effect of
        # moving to the LIMITED plan, plus the plan_type change itself.
        num_events=2,
    )
    check_realm_update("events[0]", events[0], "enable_spectator_access")
    check_realm_update("events[1]", events[1], "plan_type")

    # The downgrade must be visible in a fresh initial state fetch.
    state_data = fetch_initial_state_data(self.user_profile)
    self.assertEqual(state_data["realm_plan_type"], Realm.PLAN_TYPE_LIMITED)
    self.assertEqual(state_data["zulip_plan_is_not_limited"], False)
|
def test_realm_emoji_events(self) -> None:
    """Adding and removing a realm emoji should each emit a realm_emoji
    update event."""
    author = self.example_user("iago")
    with get_test_image_file("img.png") as img_file:
        events = self.verify_action(
            lambda: check_add_realm_emoji(self.user_profile.realm, "my_emoji", author, img_file)
        )

    check_realm_emoji_update("events[0]", events[0])

    # Removing the same emoji also notifies clients via realm_emoji.
    events = self.verify_action(
        lambda: do_remove_realm_emoji(
            self.user_profile.realm, "my_emoji", acting_user=self.user_profile
        )
    )
    check_realm_emoji_update("events[0]", events[0])
|
def test_realm_filter_events(self) -> None:
    """Exercise the realm_linkifiers event for add, update, reorder, and
    remove operations; then repeat with clients that lack URL-template
    support, for whom apply_event drops the event entirely."""
    regex = "#(?P<id>[123])"
    url = "https://realm.com/my_realm_filter/{id}"

    events = self.verify_action(
        lambda: do_add_linkifier(self.user_profile.realm, regex, url, acting_user=None),
        num_events=1,
    )
    check_realm_linkifiers("events[0]", events[0])

    # The newly added linkifier is the last one in the event payload.
    linkifier_id = events[0]["realm_linkifiers"][-1]["id"]
    self.assertEqual(RealmFilter.objects.get(id=linkifier_id).pattern, regex)

    regex = "#(?P<id>[0-9]+)"
    events = self.verify_action(
        lambda: do_update_linkifier(
            self.user_profile.realm, linkifier_id, regex, url, acting_user=None
        ),
        num_events=1,
    )
    check_realm_linkifiers("events[0]", events[0])

    # Rotate the ordering (move the last linkifier to the front) and
    # verify a single reorder event is emitted.
    linkifier_ids = list(
        RealmFilter.objects.all().values_list("id", flat=True).order_by("order")
    )
    events = self.verify_action(
        lambda: check_reorder_linkifiers(
            self.user_profile.realm, [linkifier_ids[-1], *linkifier_ids[:-1]], acting_user=None
        ),
        num_events=1,
    )
    check_realm_linkifiers("events[0]", events[0])

    events = self.verify_action(
        lambda: do_remove_linkifier(self.user_profile.realm, regex, acting_user=None),
        num_events=1,
    )
    check_realm_linkifiers("events[0]", events[0])

    # Redo the checks, but assume that the client does not support URL template.
    # apply_event should drop the event, and no state change should occur.
    regex = "#(?P<id>[123])"

    events = self.verify_action(
        lambda: do_add_linkifier(self.user_profile.realm, regex, url, acting_user=None),
        num_events=1,
        linkifier_url_template=False,
        state_change_expected=False,
    )

    regex = "#(?P<id>[0-9]+)"
    linkifier_id = events[0]["realm_linkifiers"][0]["id"]
    events = self.verify_action(
        lambda: do_update_linkifier(
            self.user_profile.realm, linkifier_id, regex, url, acting_user=None
        ),
        num_events=1,
        linkifier_url_template=False,
        state_change_expected=False,
    )

    events = self.verify_action(
        lambda: do_remove_linkifier(self.user_profile.realm, regex, acting_user=None),
        num_events=1,
        linkifier_url_template=False,
        state_change_expected=False,
    )
|
def test_realm_domain_events(self) -> None:
    """Adding, changing, and removing a realm domain should each emit
    the corresponding realm_domains event."""
    events = self.verify_action(
        lambda: do_add_realm_domain(
            self.user_profile.realm, "zulip.org", False, acting_user=None
        )
    )

    check_realm_domains_add("events[0]", events[0])
    self.assertEqual(events[0]["realm_domain"]["domain"], "zulip.org")
    self.assertEqual(events[0]["realm_domain"]["allow_subdomains"], False)

    # Flipping allow_subdomains on the existing domain emits a change event.
    test_domain = RealmDomain.objects.get(realm=self.user_profile.realm, domain="zulip.org")
    events = self.verify_action(
        lambda: do_change_realm_domain(test_domain, True, acting_user=None)
    )

    check_realm_domains_change("events[0]", events[0])
    self.assertEqual(events[0]["realm_domain"]["domain"], "zulip.org")
    self.assertEqual(events[0]["realm_domain"]["allow_subdomains"], True)

    events = self.verify_action(lambda: do_remove_realm_domain(test_domain, acting_user=None))

    check_realm_domains_remove("events[0]", events[0])
    self.assertEqual(events[0]["domain"], "zulip.org")
|
def test_realm_playground_events(self) -> None:
    """Adding and removing a code playground should each emit a
    realm_playgrounds event."""
    events = self.verify_action(
        lambda: check_add_realm_playground(
            self.user_profile.realm,
            acting_user=None,
            name="Python playground",
            pygments_language="Python",
            url_template="https://python.example.com{code}",
        )
    )
    check_realm_playgrounds("events[0]", events[0])

    # Fetch the playground we just created so we can remove it.
    last_realm_playground = RealmPlayground.objects.last()
    assert last_realm_playground is not None
    last_id = last_realm_playground.id
    realm_playground = access_playground_by_id(self.user_profile.realm, last_id)
    events = self.verify_action(
        lambda: do_remove_realm_playground(
            self.user_profile.realm, realm_playground, acting_user=None
        )
    )
    check_realm_playgrounds("events[0]", events[0])
|
def test_create_bot(self) -> None:
    """Creating generic, outgoing-webhook, and embedded bots should each
    produce a realm_bot add event (the last of four events)."""
    action = lambda: self.create_bot("test")
    events = self.verify_action(action, num_events=4)
    check_realm_bot_add("events[3]", events[3])

    action = lambda: self.create_bot(
        "test_outgoing_webhook",
        full_name="Outgoing Webhook Bot",
        payload_url=orjson.dumps("https://foo.bar.com").decode(),
        interface_type=Service.GENERIC,
        bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
    )
    events = self.verify_action(action, num_events=4)
    # The third event is the second call of notify_created_bot, which contains additional
    # data for services (in contrast to the first call).
    check_realm_bot_add("events[3]", events[3])

    action = lambda: self.create_bot(
        "test_embedded",
        full_name="Embedded Bot",
        service_name="helloworld",
        config_data=orjson.dumps({"foo": "bar"}).decode(),
        bot_type=UserProfile.EMBEDDED_BOT,
    )
    events = self.verify_action(action, num_events=4)
    check_realm_bot_add("events[3]", events[3])
|
def test_change_bot_full_name(self) -> None:
    """Renaming a bot emits two events; the second is the realm_bot
    full_name update."""
    test_bot = self.create_bot("test")
    events = self.verify_action(
        lambda: do_change_full_name(test_bot, "New Bot Name", self.user_profile),
        num_events=2,
    )
    check_realm_bot_update("events[1]", events[1], "full_name")
|
def test_regenerate_bot_api_key(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2017-04-06 12:27:58 +02:00
|
|
|
action = lambda: do_regenerate_api_key(bot, self.user_profile)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "api_key")
|
2014-02-26 20:17:19 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_avatar_source(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2021-02-12 08:19:30 +01:00
|
|
|
action = lambda: do_change_avatar_fields(
|
|
|
|
bot, bot.AVATAR_FROM_USER, acting_user=self.user_profile
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, num_events=2)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "avatar_url")
|
|
|
|
self.assertEqual(events[1]["type"], "realm_user")
|
2014-02-26 21:05:10 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_realm_icon_source(self) -> None:
|
2021-04-08 10:42:55 +02:00
|
|
|
action = lambda: do_change_icon_source(
|
|
|
|
self.user_profile.realm, Realm.ICON_UPLOADED, acting_user=None
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, state_change_expected=True)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_update_dict("events[0]", events[0])
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-12-03 08:13:58 +01:00
|
|
|
def test_change_realm_light_theme_logo_source(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
action = lambda: do_change_logo_source(
|
|
|
|
self.user_profile.realm, Realm.LOGO_UPLOADED, False, acting_user=self.user_profile
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, state_change_expected=True)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_update_dict("events[0]", events[0])
|
2019-03-01 15:52:44 +01:00
|
|
|
|
2021-12-03 08:13:58 +01:00
|
|
|
def test_change_realm_dark_theme_logo_source(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
action = lambda: do_change_logo_source(
|
|
|
|
self.user_profile.realm, Realm.LOGO_UPLOADED, True, acting_user=self.user_profile
|
|
|
|
)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, state_change_expected=True)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_update_dict("events[0]", events[0])
|
2019-03-01 15:52:44 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_all_public_streams(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2021-04-08 10:58:47 +02:00
|
|
|
action = lambda: do_change_default_all_public_streams(bot, True, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "default_all_public_streams")
|
2014-02-26 21:15:31 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_sending_stream(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2017-02-21 19:35:17 +01:00
|
|
|
stream = get_stream("Rome", bot.realm)
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2021-04-08 10:50:37 +02:00
|
|
|
action = lambda: do_change_default_sending_stream(bot, stream, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "default_sending_stream")
|
2014-02-26 21:23:18 +01:00
|
|
|
|
2021-04-08 10:50:37 +02:00
|
|
|
action = lambda: do_change_default_sending_stream(bot, None, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "default_sending_stream")
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_default_events_register_stream(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2017-02-21 19:35:17 +01:00
|
|
|
stream = get_stream("Rome", bot.realm)
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2021-04-08 10:55:24 +02:00
|
|
|
action = lambda: do_change_default_events_register_stream(bot, stream, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "default_events_register_stream")
|
2017-02-24 06:36:54 +01:00
|
|
|
|
2021-04-08 10:55:24 +02:00
|
|
|
action = lambda: do_change_default_events_register_stream(bot, None, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "default_events_register_stream")
|
2017-03-24 03:04:13 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_change_bot_owner(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.user_profile = self.example_user("iago")
|
|
|
|
owner = self.example_user("hamlet")
|
|
|
|
bot = self.create_bot("test")
|
2017-03-31 17:27:08 +02:00
|
|
|
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, num_events=2)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "owner_id")
|
|
|
|
check_realm_user_update("events[1]", events[1], "bot_owner_id")
|
2014-02-26 21:34:12 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.user_profile = self.example_user("aaron")
|
|
|
|
owner = self.example_user("hamlet")
|
|
|
|
bot = self.create_bot("test1", full_name="Test1 Testerson")
|
2018-03-06 22:32:03 +01:00
|
|
|
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, num_events=2)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_delete("events[0]", events[0])
|
|
|
|
check_realm_user_update("events[1]", events[1], "bot_owner_id")
|
2018-03-06 22:32:03 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
previous_owner = self.example_user("aaron")
|
|
|
|
self.user_profile = self.example_user("hamlet")
|
|
|
|
bot = self.create_test_bot("test2", previous_owner, full_name="Test2 Testerson")
|
2018-03-06 22:32:03 +01:00
|
|
|
action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, num_events=2)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_add("events[0]", events[0])
|
|
|
|
check_realm_user_update("events[1]", events[1], "bot_owner_id")
|
2018-03-06 22:32:03 +01:00
|
|
|
|
2022-05-07 08:56:33 +02:00
|
|
|
def test_peer_remove_events_on_changing_bot_owner(self) -> None:
|
|
|
|
previous_owner = self.example_user("aaron")
|
|
|
|
self.user_profile = self.example_user("iago")
|
|
|
|
bot = self.create_test_bot("test2", previous_owner, full_name="Test2 Testerson")
|
|
|
|
private_stream = self.make_stream("private_stream", invite_only=True)
|
|
|
|
self.make_stream("public_stream")
|
|
|
|
self.subscribe(bot, "private_stream")
|
|
|
|
self.subscribe(self.example_user("aaron"), "private_stream")
|
|
|
|
self.subscribe(bot, "public_stream")
|
|
|
|
self.subscribe(self.example_user("aaron"), "public_stream")
|
|
|
|
|
|
|
|
self.make_stream("private_stream_test", invite_only=True)
|
|
|
|
self.subscribe(self.example_user("iago"), "private_stream_test")
|
|
|
|
self.subscribe(bot, "private_stream_test")
|
|
|
|
|
|
|
|
action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
|
|
|
|
events = self.verify_action(action, num_events=3)
|
|
|
|
|
|
|
|
check_realm_bot_update("events[0]", events[0], "owner_id")
|
|
|
|
check_realm_user_update("events[1]", events[1], "bot_owner_id")
|
|
|
|
check_subscription_peer_remove("events[2]", events[2])
|
|
|
|
self.assertEqual(events[2]["stream_ids"], [private_stream.id])
|
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def test_do_update_outgoing_webhook_service(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.user_profile = self.example_user("iago")
|
2021-02-12 08:19:30 +01:00
|
|
|
bot = self.create_test_bot(
|
2021-02-12 08:20:45 +01:00
|
|
|
"test",
|
2021-02-12 08:19:30 +01:00
|
|
|
self.user_profile,
|
2021-02-12 08:20:45 +01:00
|
|
|
full_name="Test Bot",
|
2021-02-12 08:19:30 +01:00
|
|
|
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
|
2021-02-12 08:20:45 +01:00
|
|
|
payload_url=orjson.dumps("http://hostname.domain2.com").decode(),
|
2021-02-12 08:19:30 +01:00
|
|
|
interface_type=Service.GENERIC,
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
action = lambda: do_update_outgoing_webhook_service(bot, 2, "http://hostname.domain2.com")
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_update("events[0]", events[0], "services")
|
2018-01-16 20:34:12 +01:00
|
|
|
|
2022-01-14 22:50:27 +01:00
|
|
|
def test_do_deactivate_bot(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2021-03-27 06:02:12 +01:00
|
|
|
action = lambda: do_deactivate_user(bot, acting_user=None)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(action, num_events=2)
|
2020-08-18 18:38:41 +02:00
|
|
|
check_realm_user_remove("events[0]", events[0])
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_remove("events[1]", events[1])
|
2014-02-26 22:27:51 +01:00
|
|
|
|
2022-01-14 22:50:27 +01:00
|
|
|
def test_do_deactivate_user(self) -> None:
|
|
|
|
user_profile = self.example_user("cordelia")
|
|
|
|
action = lambda: do_deactivate_user(user_profile, acting_user=None)
|
|
|
|
events = self.verify_action(action, num_events=1)
|
|
|
|
check_realm_user_remove("events[0]", events[0])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_reactivate_user(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
bot = self.create_bot("test")
|
2022-01-12 15:30:05 +01:00
|
|
|
self.subscribe(bot, "Denmark")
|
|
|
|
self.make_stream("Test private stream", invite_only=True)
|
|
|
|
self.subscribe(bot, "Test private stream")
|
2021-03-27 06:02:12 +01:00
|
|
|
do_deactivate_user(bot, acting_user=None)
|
2021-03-27 05:42:18 +01:00
|
|
|
action = lambda: do_reactivate_user(bot, acting_user=None)
|
2022-01-12 15:30:05 +01:00
|
|
|
events = self.verify_action(action, num_events=3)
|
|
|
|
check_realm_bot_add("events[1]", events[1])
|
|
|
|
check_subscription_peer_add("events[2]", events[2])
|
|
|
|
|
|
|
|
# Test 'peer_add' event for private stream is received only if user is subscribed to it.
|
|
|
|
do_deactivate_user(bot, acting_user=None)
|
|
|
|
self.subscribe(self.example_user("hamlet"), "Test private stream")
|
|
|
|
action = lambda: do_reactivate_user(bot, acting_user=None)
|
|
|
|
events = self.verify_action(action, num_events=4)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_realm_bot_add("events[1]", events[1])
|
2022-01-12 15:30:05 +01:00
|
|
|
check_subscription_peer_add("events[2]", events[2])
|
|
|
|
check_subscription_peer_add("events[3]", events[3])
|
2017-01-24 01:48:35 +01:00
|
|
|
|
2022-04-21 16:04:52 +02:00
|
|
|
do_deactivate_user(bot, acting_user=None)
|
|
|
|
do_deactivate_user(self.example_user("hamlet"), acting_user=None)
|
|
|
|
|
|
|
|
reset_email_visibility_to_everyone_in_zulip_realm()
|
|
|
|
bot.refresh_from_db()
|
|
|
|
|
|
|
|
self.user_profile = self.example_user("iago")
|
|
|
|
action = lambda: do_reactivate_user(bot, acting_user=self.example_user("iago"))
|
2022-05-16 09:13:41 +02:00
|
|
|
events = self.verify_action(action, num_events=7)
|
2022-04-21 16:04:52 +02:00
|
|
|
check_realm_bot_add("events[1]", events[1])
|
|
|
|
check_realm_bot_update("events[2]", events[2], "owner_id")
|
|
|
|
check_realm_user_update("events[3]", events[3], "bot_owner_id")
|
2022-05-16 09:13:41 +02:00
|
|
|
check_subscription_peer_remove("events[4]", events[4])
|
|
|
|
check_stream_delete("events[5]", events[5])
|
2022-04-21 16:04:52 +02:00
|
|
|
|
2021-03-13 20:00:05 +01:00
|
|
|
def test_do_deactivate_realm(self) -> None:
|
|
|
|
realm = self.user_profile.realm
|
2021-04-02 17:11:25 +02:00
|
|
|
action = lambda: do_deactivate_realm(realm, acting_user=None)
|
2021-03-13 20:00:05 +01:00
|
|
|
|
|
|
|
# We delete sessions of all active users when a realm is
|
|
|
|
# deactivated, and redirect them to a deactivated page in
|
|
|
|
# order to inform that realm/organization has been
|
|
|
|
# deactivated. state_change_expected is False is kinda
|
|
|
|
# correct because were one to somehow compute page_params (as
|
|
|
|
# this test does), but that's not actually possible.
|
|
|
|
events = self.verify_action(action, state_change_expected=False)
|
|
|
|
check_realm_deactivated("events[0]", events[0])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_mark_hotspot_as_read(self) -> None:
|
2017-10-12 17:13:02 +02:00
|
|
|
self.user_profile.tutorial_status = UserProfile.TUTORIAL_WAITING
|
2021-02-12 08:20:45 +01:00
|
|
|
self.user_profile.save(update_fields=["tutorial_status"])
|
2017-10-12 17:13:02 +02:00
|
|
|
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-12-07 15:43:26 +01:00
|
|
|
lambda: do_mark_hotspot_as_read(self.user_profile, "intro_streams")
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_hotspots("events[0]", events[0])
|
2017-02-15 21:06:07 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_rename_stream(self) -> None:
|
2020-12-01 14:13:09 +01:00
|
|
|
for i, include_streams in enumerate([True, False]):
|
2021-02-12 08:20:45 +01:00
|
|
|
old_name = f"old name{i}"
|
|
|
|
new_name = f"new name{i}"
|
2020-12-01 14:13:09 +01:00
|
|
|
|
|
|
|
stream = self.make_stream(old_name)
|
|
|
|
self.subscribe(self.user_profile, stream.name)
|
|
|
|
action = lambda: do_rename_stream(stream, new_name, self.user_profile)
|
|
|
|
events = self.verify_action(action, num_events=3, include_streams=include_streams)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
check_stream_update("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["name"], old_name)
|
2020-12-01 14:13:09 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
check_stream_update("events[1]", events[1])
|
|
|
|
self.assertEqual(events[1]["name"], old_name)
|
2020-12-01 14:13:09 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
check_message("events[2]", events[2])
|
2020-12-01 14:13:09 +01:00
|
|
|
|
|
|
|
fields = dict(
|
2021-02-12 08:20:45 +01:00
|
|
|
sender_email="notification-bot@zulip.com",
|
2020-12-01 14:13:09 +01:00
|
|
|
display_recipient=new_name,
|
2021-02-12 08:20:45 +01:00
|
|
|
sender_full_name="Notification Bot",
|
2020-12-01 14:13:09 +01:00
|
|
|
is_me_message=False,
|
2021-02-12 08:20:45 +01:00
|
|
|
type="stream",
|
|
|
|
client="Internal",
|
2020-12-01 14:13:09 +01:00
|
|
|
)
|
2020-07-10 16:10:58 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
fields[TOPIC_NAME] = "stream events"
|
2020-07-10 16:10:58 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
msg = events[2]["message"]
|
2020-12-01 14:13:09 +01:00
|
|
|
for k, v in fields.items():
|
|
|
|
self.assertEqual(msg[k], v)
|
2014-01-31 23:23:39 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_deactivate_stream_neversubscribed(self) -> None:
|
2020-12-01 14:13:09 +01:00
|
|
|
for i, include_streams in enumerate([True, False]):
|
|
|
|
stream = self.make_stream(f"stream{i}")
|
2021-04-02 17:49:36 +02:00
|
|
|
action = lambda: do_deactivate_stream(stream, acting_user=None)
|
2020-12-01 14:13:09 +01:00
|
|
|
events = self.verify_action(action, include_streams=include_streams)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_stream_delete("events[0]", events[0])
|
2023-07-27 16:42:21 +02:00
|
|
|
self.assertIsNone(events[0]["streams"][0]["stream_weekly_traffic"])
|
2016-07-12 23:57:16 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_subscribe_other_user_never_subscribed(self) -> None:
|
2020-12-01 14:13:09 +01:00
|
|
|
for i, include_streams in enumerate([True, False]):
|
|
|
|
action = lambda: self.subscribe(self.example_user("othello"), f"test_stream{i}")
|
|
|
|
events = self.verify_action(action, num_events=2, include_streams=True)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_subscription_peer_add("events[1]", events[1])
|
2016-07-12 23:57:16 +02:00
|
|
|
|
2020-08-31 18:33:09 +02:00
|
|
|
def test_remove_other_user_never_subscribed(self) -> None:
|
2021-12-24 14:29:40 +01:00
|
|
|
othello = self.example_user("othello")
|
|
|
|
realm = othello.realm
|
|
|
|
self.subscribe(othello, "test_stream")
|
2020-08-31 18:33:09 +02:00
|
|
|
stream = get_stream("test_stream", self.user_profile.realm)
|
|
|
|
|
2021-12-24 14:29:40 +01:00
|
|
|
action = lambda: bulk_remove_subscriptions(realm, [othello], [stream], acting_user=None)
|
2020-08-31 18:33:09 +02:00
|
|
|
events = self.verify_action(action)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_subscription_peer_remove("events[0]", events[0])
|
2020-08-31 18:33:09 +02:00
|
|
|
|
2018-04-02 00:21:21 +02:00
|
|
|
def test_do_delete_message_stream(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2020-06-11 12:12:12 +02:00
|
|
|
msg_id = self.send_stream_message(hamlet, "Verona")
|
|
|
|
msg_id_2 = self.send_stream_message(hamlet, "Verona")
|
2021-02-12 08:19:30 +01:00
|
|
|
messages = [Message.objects.get(id=msg_id), Message.objects.get(id=msg_id_2)]
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2020-06-11 12:12:12 +02:00
|
|
|
lambda: do_delete_messages(self.user_profile.realm, messages),
|
|
|
|
state_change_expected=True,
|
|
|
|
)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="stream",
|
|
|
|
num_message_ids=2,
|
|
|
|
is_legacy=False,
|
|
|
|
)
|
2020-06-11 12:12:12 +02:00
|
|
|
|
|
|
|
def test_do_delete_message_stream_legacy(self) -> None:
|
|
|
|
"""
|
|
|
|
Test for legacy method of deleting messages which
|
|
|
|
sends an event per message to delete to the client.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2020-03-07 11:43:05 +01:00
|
|
|
msg_id = self.send_stream_message(hamlet, "Verona")
|
2020-06-11 12:12:12 +02:00
|
|
|
msg_id_2 = self.send_stream_message(hamlet, "Verona")
|
2021-02-12 08:19:30 +01:00
|
|
|
messages = [Message.objects.get(id=msg_id), Message.objects.get(id=msg_id_2)]
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2020-06-11 12:12:12 +02:00
|
|
|
lambda: do_delete_messages(self.user_profile.realm, messages),
|
2021-02-12 08:19:30 +01:00
|
|
|
state_change_expected=True,
|
|
|
|
bulk_message_deletion=False,
|
|
|
|
num_events=2,
|
2020-06-11 12:12:12 +02:00
|
|
|
)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="stream",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=True,
|
|
|
|
)
|
2020-06-11 12:12:12 +02:00
|
|
|
|
|
|
|
def test_do_delete_message_personal(self) -> None:
|
|
|
|
msg_id = self.send_personal_message(
|
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.user_profile,
|
|
|
|
"hello",
|
|
|
|
)
|
2017-05-14 21:14:26 +02:00
|
|
|
message = Message.objects.get(id=msg_id)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2019-11-12 21:20:31 +01:00
|
|
|
lambda: do_delete_messages(self.user_profile.realm, [message]),
|
2017-05-14 21:14:26 +02:00
|
|
|
state_change_expected=True,
|
|
|
|
)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="private",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=False,
|
|
|
|
)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2020-06-11 12:12:12 +02:00
|
|
|
def test_do_delete_message_personal_legacy(self) -> None:
|
2018-04-02 00:21:21 +02:00
|
|
|
msg_id = self.send_personal_message(
|
2020-03-07 11:43:05 +01:00
|
|
|
self.example_user("cordelia"),
|
|
|
|
self.user_profile,
|
2018-04-02 00:21:21 +02:00
|
|
|
"hello",
|
|
|
|
)
|
|
|
|
message = Message.objects.get(id=msg_id)
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2019-11-12 21:20:31 +01:00
|
|
|
lambda: do_delete_messages(self.user_profile.realm, [message]),
|
2021-02-12 08:19:30 +01:00
|
|
|
state_change_expected=True,
|
|
|
|
bulk_message_deletion=False,
|
2018-04-02 00:21:21 +02:00
|
|
|
)
|
2020-08-14 15:12:27 +02:00
|
|
|
check_delete_message(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
message_type="private",
|
|
|
|
num_message_ids=1,
|
|
|
|
is_legacy=True,
|
|
|
|
)
|
2018-04-02 00:21:21 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_do_delete_message_no_max_id(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("aaron")
|
2017-05-14 21:14:26 +02:00
|
|
|
# Delete all historical messages for this user
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-05-14 21:14:26 +02:00
|
|
|
UserMessage.objects.filter(user_profile=user_profile).delete()
|
2020-03-07 11:43:05 +01:00
|
|
|
msg_id = self.send_stream_message(user_profile, "Verona")
|
2017-05-14 21:14:26 +02:00
|
|
|
message = Message.objects.get(id=msg_id)
|
2020-06-27 17:32:39 +02:00
|
|
|
self.verify_action(
|
2019-11-12 21:20:31 +01:00
|
|
|
lambda: do_delete_messages(self.user_profile.realm, [message]),
|
2017-05-14 21:14:26 +02:00
|
|
|
state_change_expected=True,
|
|
|
|
)
|
2021-01-17 17:58:50 +01:00
|
|
|
result = fetch_initial_state_data(user_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(result["max_message_id"], -1)
|
2017-05-14 21:14:26 +02:00
|
|
|
|
2023-05-09 17:48:52 +02:00
|
|
|
def test_do_delete_message_with_no_messages(self) -> None:
|
|
|
|
events = self.verify_action(
|
|
|
|
lambda: do_delete_messages(self.user_profile.realm, []),
|
|
|
|
num_events=0,
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
|
|
|
self.assertEqual(events, [])
|
|
|
|
|
2018-05-04 22:57:36 +02:00
|
|
|
def test_add_attachment(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2018-05-04 22:57:36 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2023-04-08 07:01:50 +02:00
|
|
|
url = None
|
2018-05-04 22:57:36 +02:00
|
|
|
|
|
|
|
def do_upload() -> None:
|
2023-04-08 07:01:50 +02:00
|
|
|
nonlocal url
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertIn("uri", response_dict)
|
2023-04-08 07:01:50 +02:00
|
|
|
url = response_dict["uri"]
|
2021-02-12 08:20:45 +01:00
|
|
|
base = "/user_uploads/"
|
2023-04-08 07:01:50 +02:00
|
|
|
self.assertEqual(base, url[: len(base)])
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
events = self.verify_action(lambda: do_upload(), num_events=1, state_change_expected=False)
|
2020-08-06 13:08:42 +02:00
|
|
|
|
|
|
|
check_attachment_add("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["upload_space_used"], 6)
|
2018-05-04 22:57:36 +02:00
|
|
|
|
|
|
|
# Verify that the DB has the attachment marked as unclaimed
|
2021-02-12 08:20:45 +01:00
|
|
|
entry = Attachment.objects.get(file_name="zulip.txt")
|
2018-05-04 22:57:36 +02:00
|
|
|
self.assertEqual(entry.is_claimed(), False)
|
|
|
|
|
2019-12-13 03:56:59 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
self.subscribe(hamlet, "Denmark")
|
2023-04-08 07:01:50 +02:00
|
|
|
assert url is not None
|
|
|
|
body = f"First message ...[zulip.txt](http://{hamlet.realm.host}" + url + ")"
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
Revert "create_user: Use transaction.atomic decorator for do_create_user."
This reverts commit 851d68e0fc364d649175533c286c179cf38f89d6.
That commit widened how long the transaction is open, which made it
much more likely that after the user was created in the transaction,
and the memcached caches were flushed, some other request will fill
the `get_realm_user_dicts` cache with data which did not include the
new user (because it had not been committed yet).
If a user creation request lost this race, the user would, upon first
request to `/`, get a blank page and a Javascript error:
Unknown user_id in get_by_user_id: 12345
...where 12345 was their own user-id. This error would persist until
the cache expired (in 7 days) or something else expunged it.
Reverting this does not prevent the race, as the post_save hook's call
to flush_user_profile is still in a transaction (and has been since
168f241ff0a5), and thus leaves the potential race window open.
However, it much shortens the potential window of opportunity, and is
a reasonable short-term stopgap.
2023-02-18 02:44:51 +01:00
|
|
|
lambda: self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test"),
|
2021-02-12 08:19:30 +01:00
|
|
|
num_events=2,
|
|
|
|
)
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2020-08-06 13:08:42 +02:00
|
|
|
check_attachment_update("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["upload_space_used"], 6)
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2020-08-06 13:08:42 +02:00
|
|
|
# Now remove the attachment
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2020-06-10 06:41:04 +02:00
|
|
|
lambda: self.client_delete(f"/json/attachments/{entry.id}"),
|
2021-02-12 08:19:30 +01:00
|
|
|
num_events=1,
|
|
|
|
state_change_expected=False,
|
|
|
|
)
|
2020-08-06 13:08:42 +02:00
|
|
|
|
|
|
|
check_attachment_remove("events[0]", events[0])
|
|
|
|
self.assertEqual(events[0]["upload_space_used"], 0)
|
2018-05-04 22:57:36 +02:00
|
|
|
|
2019-08-02 00:14:58 +02:00
|
|
|
def test_notify_realm_export(self) -> None:
|
2021-03-27 05:13:46 +01:00
|
|
|
do_change_user_role(
|
|
|
|
self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
|
|
|
|
)
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(self.user_profile)
|
2019-08-07 21:49:54 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
with mock.patch(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zerver.lib.export.do_export_realm",
|
|
|
|
return_value=create_dummy_file("test-export.tar.gz"),
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2021-02-12 08:20:45 +01:00
|
|
|
with stdout_suppressed(), self.assertLogs(level="INFO") as info_logs:
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:20:45 +01:00
|
|
|
lambda: self.client_post("/json/export/realm"),
|
2021-02-12 08:19:30 +01:00
|
|
|
state_change_expected=True,
|
|
|
|
num_events=3,
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertTrue("INFO:root:Completed data export for zulip in" in info_logs.output[0])
|
2019-08-07 21:49:54 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
# We get two realm_export events for this action, where the first
|
|
|
|
# is missing the export_url (because it's pending).
|
|
|
|
check_realm_export(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_export_url=False,
|
|
|
|
has_deleted_timestamp=False,
|
|
|
|
has_failed_timestamp=False,
|
|
|
|
)
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
check_realm_export(
|
|
|
|
"events[2]",
|
|
|
|
events[2],
|
|
|
|
has_export_url=True,
|
|
|
|
has_deleted_timestamp=False,
|
|
|
|
has_failed_timestamp=False,
|
|
|
|
)
|
2019-03-27 00:57:33 +01:00
|
|
|
|
2019-08-01 19:59:36 +02:00
|
|
|
# Now we check the deletion of the export.
|
|
|
|
audit_log_entry = RealmAuditLog.objects.filter(
|
2021-02-12 08:19:30 +01:00
|
|
|
event_type=RealmAuditLog.REALM_EXPORTED
|
|
|
|
).first()
|
2021-07-24 16:56:39 +02:00
|
|
|
assert audit_log_entry is not None
|
2021-07-24 16:56:39 +02:00
|
|
|
audit_log_entry_id = audit_log_entry.id
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-07-24 16:56:39 +02:00
|
|
|
lambda: self.client_delete(f"/json/export/realm/{audit_log_entry_id}"),
|
2021-02-12 08:19:30 +01:00
|
|
|
state_change_expected=False,
|
|
|
|
num_events=1,
|
|
|
|
)
|
2019-08-01 19:59:36 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
check_realm_export(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_export_url=False,
|
|
|
|
has_deleted_timestamp=True,
|
|
|
|
has_failed_timestamp=False,
|
|
|
|
)
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
def test_notify_realm_export_on_failure(self) -> None:
|
2021-03-27 05:13:46 +01:00
|
|
|
do_change_user_role(
|
|
|
|
self.user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
|
|
|
|
)
|
2020-04-16 23:00:24 +02:00
|
|
|
self.login_user(self.user_profile)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
with mock.patch(
|
2022-04-28 19:52:34 +02:00
|
|
|
"zerver.lib.export.do_export_realm", side_effect=Exception("Some failure")
|
2021-02-12 08:19:30 +01:00
|
|
|
), self.assertLogs(level="ERROR") as error_log:
|
2020-04-16 23:00:24 +02:00
|
|
|
with stdout_suppressed():
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:20:45 +01:00
|
|
|
lambda: self.client_post("/json/export/realm"),
|
2021-02-12 08:19:30 +01:00
|
|
|
state_change_expected=False,
|
|
|
|
num_events=2,
|
|
|
|
)
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2020-07-19 16:09:17 +02:00
|
|
|
# Log is of following format: "ERROR:root:Data export for zulip failed after 0.004499673843383789"
|
|
|
|
# Where last floating number is time and will vary in each test hence the following assertion is
|
|
|
|
# independent of time bit by not matching exact log but only part of it.
|
|
|
|
self.assertTrue("ERROR:root:Data export for zulip failed after" in error_log.output[0])
|
2022-04-28 19:52:34 +02:00
|
|
|
self.assertTrue("Some failure" in error_log.output[0])
|
2020-07-19 16:09:17 +02:00
|
|
|
|
2020-08-05 23:54:26 +02:00
|
|
|
# We get two events for the export.
|
|
|
|
check_realm_export(
|
|
|
|
"events[0]",
|
|
|
|
events[0],
|
|
|
|
has_export_url=False,
|
|
|
|
has_deleted_timestamp=False,
|
|
|
|
has_failed_timestamp=False,
|
|
|
|
)
|
|
|
|
check_realm_export(
|
|
|
|
"events[1]",
|
|
|
|
events[1],
|
|
|
|
has_export_url=False,
|
|
|
|
has_deleted_timestamp=False,
|
|
|
|
has_failed_timestamp=True,
|
|
|
|
)
|
2020-04-16 23:00:24 +02:00
|
|
|
|
2019-11-16 09:26:28 +01:00
|
|
|
def test_has_zoom_token(self) -> None:
|
2020-06-27 17:32:39 +02:00
|
|
|
events = self.verify_action(
|
2021-02-12 08:20:45 +01:00
|
|
|
lambda: do_set_zoom_token(self.user_profile, {"access_token": "token"}),
|
2019-11-16 09:26:28 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
check_has_zoom_token("events[0]", events[0], value=True)
|
2019-11-16 09:26:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
events = self.verify_action(lambda: do_set_zoom_token(self.user_profile, None))
|
2021-02-12 08:20:45 +01:00
|
|
|
check_has_zoom_token("events[0]", events[0], value=False)
|
2020-06-27 18:06:51 +02:00
|
|
|
|
2021-04-18 11:28:39 +02:00
|
|
|
def test_restart_event(self) -> None:
|
2022-11-17 09:30:48 +01:00
|
|
|
with self.assertRaises(RestartEventError):
|
2021-04-18 11:28:39 +02:00
|
|
|
self.verify_action(lambda: send_restart_events(immediate=True))
|
|
|
|
|
2021-07-24 19:51:25 +02:00
|
|
|
def test_display_setting_event_not_sent(self) -> None:
|
|
|
|
events = self.verify_action(
|
2021-08-13 16:18:53 +02:00
|
|
|
lambda: do_change_user_setting(
|
2021-07-24 19:51:25 +02:00
|
|
|
self.user_profile,
|
|
|
|
"default_view",
|
|
|
|
"all_messages",
|
2021-09-08 13:25:50 +02:00
|
|
|
acting_user=self.user_profile,
|
2021-07-24 19:51:25 +02:00
|
|
|
),
|
|
|
|
state_change_expected=True,
|
|
|
|
user_settings_object=True,
|
|
|
|
)
|
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
|
|
|
|
def test_notification_setting_event_not_sent(self) -> None:
|
|
|
|
events = self.verify_action(
|
2021-09-08 15:36:08 +02:00
|
|
|
lambda: do_change_user_setting(
|
2021-07-24 19:51:25 +02:00
|
|
|
self.user_profile,
|
|
|
|
"enable_sounds",
|
|
|
|
False,
|
|
|
|
acting_user=self.user_profile,
|
|
|
|
),
|
|
|
|
state_change_expected=True,
|
|
|
|
user_settings_object=True,
|
|
|
|
)
|
|
|
|
check_user_settings_update("events[0]", events[0])
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-27 18:06:51 +02:00
|
|
|
class RealmPropertyActionTest(BaseAction):
    """Data-driven tests that every realm-level property and realm-level user
    default setting produces the expected event and RealmAuditLog entry when
    changed."""

    def do_set_realm_property_test(self, name: str) -> None:
        """Cycle the realm property `name` through a list of test values,
        checking the emitted event schema and the audit-log row for each
        change.

        Raises AssertionError if no test values are declared for `name`, so
        newly added properties cannot silently go untested.
        """
        bool_tests: List[bool] = [True, False, True]
        # Per-property value sequences; booleans are handled generically below.
        test_values: Dict[str, Any] = dict(
            default_language=["es", "de", "en"],
            description=["Realm description", "New description"],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=["Zulip", "New Name"],
            waiting_period_threshold=[1000, 2000],
            create_public_stream_policy=Realm.COMMON_POLICY_TYPES,
            create_private_stream_policy=Realm.COMMON_POLICY_TYPES,
            create_web_public_stream_policy=Realm.CREATE_WEB_PUBLIC_STREAM_POLICY_TYPES,
            invite_to_stream_policy=Realm.COMMON_POLICY_TYPES,
            private_message_policy=Realm.PRIVATE_MESSAGE_POLICY_TYPES,
            user_group_edit_policy=Realm.COMMON_POLICY_TYPES,
            wildcard_mention_policy=Realm.WILDCARD_MENTION_POLICY_TYPES,
            bot_creation_policy=Realm.BOT_CREATION_POLICY_TYPES,
            video_chat_provider=[
                Realm.VIDEO_CHAT_PROVIDERS["jitsi_meet"]["id"],
            ],
            giphy_rating=[
                Realm.GIPHY_RATING_OPTIONS["disabled"]["id"],
            ],
            default_code_block_language=["python", "javascript"],
            message_content_delete_limit_seconds=[1000, 1100, 1200],
            invite_to_realm_policy=Realm.INVITE_TO_REALM_POLICY_TYPES,
            move_messages_between_streams_policy=Realm.MOVE_MESSAGES_BETWEEN_STREAMS_POLICY_TYPES,
            add_custom_emoji_policy=Realm.COMMON_POLICY_TYPES,
            delete_own_message_policy=Realm.COMMON_MESSAGE_POLICY_TYPES,
            edit_topic_policy=Realm.COMMON_MESSAGE_POLICY_TYPES,
            message_content_edit_limit_seconds=[1000, 1100, 1200, None],
            move_messages_within_stream_limit_seconds=[1000, 1100, 1200],
            move_messages_between_streams_limit_seconds=[1000, 1100, 1200],
        )

        vals = test_values.get(name)
        property_type = Realm.property_types[name]
        if property_type is bool:
            vals = bool_tests

        if vals is None:
            raise AssertionError(f"No test created for {name}")
        now = timezone_now()
        # Seed a known baseline value (vals[0]) outside verify_action, so each
        # iteration below performs a genuine change.
        do_set_realm_property(self.user_profile.realm, name, vals[0], acting_user=self.user_profile)
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )
        # Iterate the remaining values; vals[count] is the value set by the
        # previous step, i.e. the expected audit-log OLD_VALUE.
        for count, val in enumerate(vals[1:]):
            now = timezone_now()
            state_change_expected = True
            old_value = vals[count]
            num_events = 1

            events = self.verify_action(
                lambda: do_set_realm_property(
                    self.user_profile.realm, name, val, acting_user=self.user_profile
                ),
                state_change_expected=state_change_expected,
                num_events=num_events,
            )

            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_value,
                        RealmAuditLog.NEW_VALUE: val,
                        "property": name,
                    },
                ).count(),
                1,
            )

            # These properties are grouped into a dict-style realm/update_dict
            # event; all others use the flat realm/update event.
            if name in [
                "allow_message_editing",
                "edit_topic_policy",
                "message_content_edit_limit_seconds",
            ]:
                check_realm_update_dict("events[0]", events[0])
            else:
                check_realm_update("events[0]", events[0], name)

    def do_set_realm_permission_group_setting_test(self, setting_name: str) -> None:
        """Cycle a realm group-based permission setting through every system
        user group, validating the event and audit-log row for each change."""
        all_system_user_groups = UserGroup.objects.filter(
            realm=self.user_profile.realm,
            is_system_group=True,
        )

        setting_permission_configuration = Realm.REALM_PERMISSION_GROUP_SETTINGS[setting_name]

        default_group_name = setting_permission_configuration.default_group_name
        default_group = all_system_user_groups.get(name=default_group_name)
        old_group_id = default_group.id

        now = timezone_now()

        # Seed the setting's documented default group as the baseline value.
        do_change_realm_permission_group_setting(
            self.user_profile.realm,
            setting_name,
            default_group,
            acting_user=self.user_profile,
        )

        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )
        for user_group in all_system_user_groups:
            # Skip the baseline group; setting it again would be a no-op.
            if user_group.name == default_group_name:
                continue

            now = timezone_now()
            state_change_expected = True
            num_events = 1
            new_group_id = user_group.id

            events = self.verify_action(
                lambda: do_change_realm_permission_group_setting(
                    self.user_profile.realm,
                    setting_name,
                    user_group,
                    acting_user=self.user_profile,
                ),
                state_change_expected=state_change_expected,
                num_events=num_events,
            )

            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_group_id,
                        RealmAuditLog.NEW_VALUE: new_group_id,
                        "property": setting_name,
                    },
                ).count(),
                1,
            )
            check_realm_update_dict("events[0]", events[0])

            # The group just set becomes the expected OLD_VALUE next round.
            old_group_id = new_group_id

    def test_change_realm_property(self) -> None:
        """Exercise every realm property and group-permission setting."""
        for prop in Realm.property_types:
            with self.settings(SEND_DIGEST_EMAILS=True):
                self.do_set_realm_property_test(prop)

        for prop in Realm.REALM_PERMISSION_GROUP_SETTINGS:
            with self.settings(SEND_DIGEST_EMAILS=True):
                self.do_set_realm_permission_group_setting_test(prop)

    def do_set_realm_user_default_setting_test(self, name: str) -> None:
        """Cycle the realm-level user default setting `name` through test
        values, checking the realm_user_settings_defaults event and audit log
        for each change."""
        bool_tests: List[bool] = [True, False, True]
        test_values: Dict[str, Any] = dict(
            color_scheme=UserProfile.COLOR_SCHEME_CHOICES,
            default_view=["recent_topics", "all_messages"],
            emojiset=[emojiset["key"] for emojiset in RealmUserDefault.emojiset_choices()],
            demote_inactive_streams=UserProfile.DEMOTE_STREAMS_CHOICES,
            web_mark_read_on_scroll_policy=UserProfile.WEB_MARK_READ_ON_SCROLL_POLICY_CHOICES,
            user_list_style=UserProfile.USER_LIST_STYLE_CHOICES,
            desktop_icon_count_display=[1, 2, 3],
            notification_sound=["zulip", "ding"],
            email_notifications_batching_period_seconds=[120, 300],
            email_address_visibility=UserProfile.EMAIL_ADDRESS_VISIBILITY_TYPES,
            realm_name_in_email_notifications_policy=UserProfile.REALM_NAME_IN_EMAIL_NOTIFICATIONS_POLICY_CHOICES,
        )

        vals = test_values.get(name)
        property_type = RealmUserDefault.property_types[name]

        if property_type is bool:
            vals = bool_tests

        if vals is None:
            raise AssertionError(f"No test created for {name}")

        realm_user_default = RealmUserDefault.objects.get(realm=self.user_profile.realm)
        now = timezone_now()
        # Seed vals[0] as the baseline so the loop performs real changes.
        do_set_realm_user_default_setting(
            realm_user_default, name, vals[0], acting_user=self.user_profile
        )
        self.assertEqual(
            RealmAuditLog.objects.filter(
                realm=self.user_profile.realm,
                event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED,
                event_time__gte=now,
                acting_user=self.user_profile,
            ).count(),
            1,
        )
        for count, val in enumerate(vals[1:]):
            now = timezone_now()
            state_change_expected = True
            events = self.verify_action(
                lambda: do_set_realm_user_default_setting(
                    realm_user_default, name, val, acting_user=self.user_profile
                ),
                state_change_expected=state_change_expected,
            )

            # vals[count] is the previous iteration's value (OLD_VALUE).
            old_value = vals[count]
            self.assertEqual(
                RealmAuditLog.objects.filter(
                    realm=self.user_profile.realm,
                    event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED,
                    event_time__gte=now,
                    acting_user=self.user_profile,
                    extra_data={
                        RealmAuditLog.OLD_VALUE: old_value,
                        RealmAuditLog.NEW_VALUE: val,
                        "property": name,
                    },
                ).count(),
                1,
            )
            check_realm_default_update("events[0]", events[0], name)

    def test_change_realm_user_default_setting(self) -> None:
        """Exercise every realm-level user default setting except
        default_language, which is skipped here."""
        for prop in RealmUserDefault.property_types:
            if prop == "default_language":
                continue
            self.do_set_realm_user_default_setting_test(prop)
|
2020-06-27 19:04:32 +02:00
|
|
|
class UserDisplayActionTest(BaseAction):
    """Tests for the events emitted when a user's own display/notification
    settings change, including the legacy pre-feature-level-89 event types."""

    def do_change_user_settings_test(self, setting_name: str) -> None:
        """Test updating each setting in UserProfile.property_types dict."""

        # Per-setting value sequences; booleans are handled generically below.
        test_changes: Dict[str, Any] = dict(
            emojiset=["twitter"],
            default_language=["es", "de", "en"],
            default_view=["all_messages", "recent_topics"],
            demote_inactive_streams=[2, 3, 1],
            web_mark_read_on_scroll_policy=[2, 3, 1],
            user_list_style=[1, 2, 3],
            color_scheme=[2, 3, 1],
            email_address_visibility=[5, 4, 1, 2, 3],
        )

        user_settings_object = True
        num_events = 1

        legacy_setting = setting_name in UserProfile.display_settings_legacy
        if legacy_setting:
            # Two events:`update_display_settings` and `user_settings`.
            # `update_display_settings` is only sent for settings added
            # before feature level 89 which introduced `user_settings`.
            # We send both events so that older clients that do not
            # rely on `user_settings` don't break.
            num_events = 2
            user_settings_object = False

        values = test_changes.get(setting_name)

        property_type = UserProfile.property_types[setting_name]
        if property_type is bool:
            # Start from the opposite of the current value so every change
            # is a genuine toggle.
            if getattr(self.user_profile, setting_name) is False:
                values = [True, False, True]
            else:
                values = [False, True, False]

        if values is None:
            raise AssertionError(f"No test created for {setting_name}")

        for value in values:
            if setting_name == "email_address_visibility":
                # When "email_address_visibility" setting is changed, there is at least
                # one event with type "user_settings" sent to the modified user itself.
                num_events = 1

                old_value = getattr(self.user_profile, setting_name)
                if UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE in [old_value, value]:
                    # In case when either the old value or new value of setting is
                    # UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE, "email" field of
                    # UserProfile object is updated and thus two additional events, for
                    # changing email and avatar_url field, are sent.
                    num_events = 3

            events = self.verify_action(
                lambda: do_change_user_setting(
                    self.user_profile, setting_name, value, acting_user=self.user_profile
                ),
                num_events=num_events,
                user_settings_object=user_settings_object,
            )

            check_user_settings_update("events[0]", events[0])
            if legacy_setting:
                # Only settings added before feature level 89
                # generate this event.
                self.assert_length(events, 2)
                check_update_display_settings("events[1]", events[1])

    def test_change_user_settings(self) -> None:
        """Exercise every user setting except the legacy notification ones."""
        for prop in UserProfile.property_types:
            # Notification settings have a separate test suite, which
            # handles their separate legacy event type.
            if prop not in UserProfile.notification_settings_legacy:
                self.do_change_user_settings_test(prop)

    def test_set_user_timezone(self) -> None:
        """Changing the timezone emits user_settings, the legacy display
        event, and a realm_user update (timezone is public)."""
        values = ["America/Denver", "Pacific/Pago_Pago", "Pacific/Galapagos", ""]
        num_events = 3

        for value in values:
            events = self.verify_action(
                lambda: do_change_user_setting(
                    self.user_profile, "timezone", value, acting_user=self.user_profile
                ),
                num_events=num_events,
            )

            check_user_settings_update("events[0]", events[0])
            check_update_display_settings("events[1]", events[1])
            check_realm_user_update("events[2]", events[2], "timezone")

    def test_delivery_email_events_on_changing_email_address_visibility(self) -> None:
        """A moderator should see Cordelia's delivery_email become None when
        visibility tightens to admins-only, and reappear when it loosens back
        to moderators."""
        cordelia = self.example_user("cordelia")
        do_change_user_role(self.user_profile, UserProfile.ROLE_MODERATOR, acting_user=None)
        do_change_user_setting(
            cordelia,
            "email_address_visibility",
            UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
            acting_user=None,
        )

        events = self.verify_action(
            lambda: do_change_user_setting(
                cordelia,
                "email_address_visibility",
                UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
                acting_user=self.user_profile,
            ),
            user_settings_object=True,
        )
        check_realm_user_update("events[0]", events[0], "delivery_email")
        # Moderator may no longer see the address.
        self.assertIsNone(events[0]["person"]["delivery_email"])

        events = self.verify_action(
            lambda: do_change_user_setting(
                cordelia,
                "email_address_visibility",
                UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
                acting_user=self.user_profile,
            ),
            user_settings_object=True,
        )
        check_realm_user_update("events[0]", events[0], "delivery_email")
        # Visibility widened again; the real address is included.
        self.assertEqual(events[0]["person"]["delivery_email"], cordelia.delivery_email)

    def test_stream_creation_events(self) -> None:
        """Who receives stream create/subscription events for public and
        private streams, including guest and admin perspectives."""
        action = lambda: self.subscribe(self.example_user("hamlet"), "Test stream")
        events = self.verify_action(action, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        # Check that guest user does not receive stream creation itself of public
        # stream.
        self.user_profile = self.example_user("polonius")
        action = lambda: self.subscribe(self.example_user("hamlet"), "Test stream 2")
        events = self.verify_action(action, num_events=0, state_change_expected=False)

        self.user_profile = self.example_user("hamlet")
        action = lambda: self.subscribe(
            self.example_user("hamlet"), "Private test stream", invite_only=True
        )
        events = self.verify_action(action, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        # A non-admin user who is not subscribed to the private stream does not
        # receive stream creation event.
        self.user_profile = self.example_user("othello")
        action = lambda: self.subscribe(
            self.example_user("hamlet"), "Private test stream 2", invite_only=True
        )
        events = self.verify_action(action, num_events=0, state_change_expected=False)

        # An admin user who is not subscribed to the private stream also
        # receives stream creation event.
        action = lambda: self.subscribe(
            self.example_user("hamlet"), "Private test stream 3", invite_only=True
        )
        self.user_profile = self.example_user("iago")
        events = self.verify_action(action, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_peer_add("events[1]", events[1])
2020-06-28 13:20:01 +02:00
|
|
|
class SubscribeActionTest(BaseAction):
    """End-to-end scenario test for subscription lifecycle events, run both
    with and without subscriber lists included in the register payload."""

    def test_subscribe_events(self) -> None:
        self.do_test_subscribe_events(include_subscribers=True)

    def test_subscribe_events_no_include_subscribers(self) -> None:
        self.do_test_subscribe_events(include_subscribers=False)

    def do_test_subscribe_events(self, include_subscribers: bool) -> None:
        """Walk one stream through subscribe/unsubscribe/permission changes,
        checking each emitted event.  Steps are order-dependent: each builds
        on the stream state left by the previous one."""
        # Subscribe to a totally new stream, so it's just Hamlet on it
        action: Callable[[], object] = lambda: self.subscribe(
            self.example_user("hamlet"), "test_stream"
        )
        events = self.verify_action(
            action, event_types=["subscription"], include_subscribers=include_subscribers
        )
        check_subscription_add("events[0]", events[0])

        # Add another user to that totally new stream
        action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
        events = self.verify_action(
            action,
            include_subscribers=include_subscribers,
            state_change_expected=include_subscribers,
        )
        check_subscription_peer_add("events[0]", events[0])

        hamlet = self.example_user("hamlet")
        iago = self.example_user("iago")
        othello = self.example_user("othello")
        realm = othello.realm
        stream = get_stream("test_stream", self.user_profile.realm)

        # Now remove the first user, to test the normal unsubscribe flow and
        # 'peer_remove' event for subscribed streams.
        action = lambda: bulk_remove_subscriptions(realm, [othello], [stream], acting_user=None)
        events = self.verify_action(
            action,
            include_subscribers=include_subscribers,
            state_change_expected=include_subscribers,
        )
        check_subscription_peer_remove("events[0]", events[0])

        # Now remove the user himself, to test the 'remove' event flow
        action = lambda: bulk_remove_subscriptions(realm, [hamlet], [stream], acting_user=None)
        events = self.verify_action(
            action, include_subscribers=include_subscribers, include_streams=False, num_events=1
        )
        check_subscription_remove("events[0]", events[0])
        self.assert_length(events[0]["subscriptions"], 1)
        self.assertEqual(
            events[0]["subscriptions"][0]["name"],
            "test_stream",
        )

        # Subscribe other user to test 'peer_add' event flow for unsubscribed stream.
        action = lambda: self.subscribe(self.example_user("iago"), "test_stream")
        events = self.verify_action(
            action,
            event_types=["subscription"],
            include_subscribers=include_subscribers,
            state_change_expected=include_subscribers,
        )
        check_subscription_peer_add("events[0]", events[0])

        # Remove the user to test 'peer_remove' event flow for unsubscribed stream.
        action = lambda: bulk_remove_subscriptions(realm, [iago], [stream], acting_user=None)
        events = self.verify_action(
            action,
            include_subscribers=include_subscribers,
            state_change_expected=include_subscribers,
        )
        check_subscription_peer_remove("events[0]", events[0])

        # Now resubscribe a user, to make sure that works on a vacated stream
        action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
        events = self.verify_action(
            action, include_subscribers=include_subscribers, include_streams=False, num_events=1
        )
        check_subscription_add("events[0]", events[0])

        # Changing the description also sends a notification message (event 1).
        action = lambda: do_change_stream_description(
            stream, "new description", acting_user=self.example_user("hamlet")
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream web-public
        action = lambda: do_change_stream_permission(
            stream,
            invite_only=False,
            history_public_to_subscribers=True,
            is_web_public=True,
            acting_user=self.example_user("hamlet"),
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream private
        action = lambda: do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("hamlet"),
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_update("events[0]", events[0])
        check_message("events[1]", events[1])

        # Update stream privacy - make stream public
        # Cordelia is not subscribed, so going public surfaces the stream to
        # her as a create + peer_add pair rather than an update.
        self.user_profile = self.example_user("cordelia")
        action = lambda: do_change_stream_permission(
            stream,
            invite_only=False,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("hamlet"),
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_peer_add("events[1]", events[1])

        # Reset to private, then have Cordelia subscribe and unsubscribe so
        # she has seen the stream before it goes public again.
        do_change_stream_permission(
            stream,
            invite_only=True,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("hamlet"),
        )
        self.subscribe(self.example_user("cordelia"), stream.name)
        self.unsubscribe(self.example_user("cordelia"), stream.name)
        action = lambda: do_change_stream_permission(
            stream,
            invite_only=False,
            history_public_to_subscribers=True,
            is_web_public=False,
            acting_user=self.example_user("hamlet"),
        )
        events = self.verify_action(
            action, include_subscribers=include_subscribers, num_events=2, include_streams=False
        )

        self.user_profile = self.example_user("hamlet")
        # Update stream stream_post_policy property
        action = lambda: do_change_stream_post_policy(
            stream, Stream.STREAM_POST_POLICY_ADMINS, acting_user=self.example_user("hamlet")
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=3)
        check_stream_update("events[0]", events[0])
        check_message("events[2]", events[2])

        action = lambda: do_change_stream_message_retention_days(
            stream, self.example_user("hamlet"), -1
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_update("events[0]", events[0])

        # Changing a group-based stream setting emits a single stream update.
        moderators_group = UserGroup.objects.get(
            name=UserGroup.MODERATORS_GROUP_NAME,
            is_system_group=True,
            realm=self.user_profile.realm,
        )
        action = lambda: do_change_stream_group_based_setting(
            stream,
            "can_remove_subscribers_group",
            moderators_group,
            acting_user=self.example_user("hamlet"),
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=1)
        check_stream_update("events[0]", events[0])

        # Subscribe to a totally new invite-only stream, so it's just Hamlet on it
        stream = self.make_stream("private", self.user_profile.realm, invite_only=True)
        stream.message_retention_days = 10
        stream.save()

        user_profile = self.example_user("hamlet")
        action = lambda: bulk_add_subscriptions(
            user_profile.realm, [stream], [user_profile], acting_user=None
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])

        # The create event carries the stream's retention setting; weekly
        # traffic is None for a brand-new stream.
        self.assertEqual(
            events[0]["streams"][0]["message_retention_days"],
            10,
        )
        self.assertIsNone(events[0]["streams"][0]["stream_weekly_traffic"])

        stream.invite_only = False
        stream.save()

        # Subscribe as a guest to a public stream.
        self.user_profile = self.example_user("polonius")
        action = lambda: bulk_add_subscriptions(
            user_profile.realm, [stream], [self.user_profile], acting_user=None
        )
        events = self.verify_action(action, include_subscribers=include_subscribers, num_events=2)
        check_stream_create("events[0]", events[0])
        check_subscription_add("events[1]", events[1])
|
|
|
|
class DraftActionTest(BaseAction):
    """Tests for draft create/edit/delete events.

    Drafts only generate events once the user has opted in to drafts
    synchronization, so each test enables that setting first.
    """

    def do_enable_drafts_synchronization(self, user_profile: UserProfile) -> None:
        """Opt `user_profile` in to drafts synchronization; without this no
        draft events are sent."""
        do_change_user_setting(
            user_profile, "enable_drafts_synchronization", True, acting_user=self.user_profile
        )

    @staticmethod
    def _sample_draft() -> Dict[str, Any]:
        """Return a fresh sample draft payload.

        A new dict is returned on every call so tests can mutate their copy
        (e.g. editing "content") without affecting each other.
        """
        return {
            "type": "draft",
            "to": "",
            "topic": "",
            "content": "Sample draft content",
            "timestamp": 1596820995,
        }

    def test_draft_create_event(self) -> None:
        self.do_enable_drafts_synchronization(self.user_profile)
        dummy_draft = self._sample_draft()
        action = lambda: do_create_drafts([dummy_draft], self.user_profile)
        self.verify_action(action)

    def test_draft_edit_event(self) -> None:
        self.do_enable_drafts_synchronization(self.user_profile)
        dummy_draft = self._sample_draft()
        draft_id = do_create_drafts([dummy_draft], self.user_profile)[0].id
        dummy_draft["content"] = "Some more sample draft content"
        action = lambda: do_edit_draft(draft_id, dummy_draft, self.user_profile)
        self.verify_action(action)

    def test_draft_delete_event(self) -> None:
        self.do_enable_drafts_synchronization(self.user_profile)
        draft_id = do_create_drafts([self._sample_draft()], self.user_profile)[0].id
        action = lambda: do_delete_draft(draft_id, self.user_profile)
        self.verify_action(action)
|
|
|
|
|
|
|
class ScheduledMessagesEventsTest(BaseAction):
    """Verify the events sent when scheduled messages are created,
    edited, and deleted."""

    def schedule_stream_message(self, content: str, deliver_at: str) -> int:
        """Schedule a stream message to Verona and return its scheduled message id.

        Helper factored out of the tests below, which previously repeated
        this long check_schedule_message call verbatim; only the message
        content and delivery time vary between tests.
        """
        return check_schedule_message(
            self.user_profile,
            get_client("website"),
            "stream",
            [self.get_stream_id("Verona")],
            "Test topic",
            content,
            convert_to_UTC(dateparser(deliver_at)),
            self.user_profile.realm,
        )

    def test_stream_scheduled_message_create_event(self) -> None:
        # Create stream scheduled message
        action = lambda: self.schedule_stream_message("Stream message", "2023-04-19 18:24:56")
        self.verify_action(action)

    def test_create_event_with_existing_scheduled_messages(self) -> None:
        # Create stream scheduled message
        self.schedule_stream_message("Stream message 1", "2023-04-19 17:24:56")

        # Check that the new scheduled message gets appended correctly.
        action = lambda: self.schedule_stream_message("Stream message 2", "2023-04-19 18:24:56")
        self.verify_action(action)

    def test_private_scheduled_message_create_event(self) -> None:
        # Create direct scheduled message; direct messages take recipient
        # user ids and no topic, so the stream helper does not apply here.
        action = lambda: check_schedule_message(
            self.user_profile,
            get_client("website"),
            "private",
            [self.example_user("hamlet").id],
            None,
            "Direct message",
            convert_to_UTC(dateparser("2023-04-19 18:24:56")),
            self.user_profile.realm,
        )
        self.verify_action(action)

    def test_scheduled_message_edit_event(self) -> None:
        scheduled_message_id = self.schedule_stream_message(
            "Stream message", "2023-04-19 18:24:56"
        )
        # Passing None for the recipient type/ids keeps them unchanged;
        # the topic, content, and delivery time are all edited.
        action = lambda: edit_scheduled_message(
            self.user_profile,
            get_client("website"),
            scheduled_message_id,
            None,
            None,
            "Edited test topic",
            "Edited stream message",
            convert_to_UTC(dateparser("2023-04-20 18:24:56")),
            self.user_profile.realm,
        )
        self.verify_action(action)

    def test_scheduled_message_delete_event(self) -> None:
        scheduled_message_id = self.schedule_stream_message(
            "Stream message", "2023-04-19 18:24:56"
        )
        action = lambda: delete_scheduled_message(self.user_profile, scheduled_message_id)
        self.verify_action(action)