2021-06-08 02:45:49 +02:00
import asyncio
2020-06-11 00:54:34 +02:00
import base64
2023-12-15 16:01:04 +01:00
import logging
2020-06-11 00:54:34 +02:00
import uuid
2024-07-12 02:30:25 +02:00
from collections . abc import Iterator , Mapping
2020-06-11 00:54:34 +02:00
from contextlib import contextmanager
2023-11-19 19:45:19 +01:00
from datetime import datetime , timedelta , timezone
2024-07-12 02:30:25 +02:00
from typing import Any
2020-08-27 22:46:39 +02:00
from unittest import mock , skipUnless
2016-08-03 11:11:25 +02:00
2022-02-08 08:42:25 +01:00
import aioapns
2024-06-13 14:38:58 +02:00
import firebase_admin . messaging as firebase_messaging
2020-08-07 01:09:47 +02:00
import orjson
2021-06-11 22:51:27 +02:00
import responses
2023-09-18 17:07:34 +02:00
import time_machine
2016-08-03 11:11:25 +02:00
from django . conf import settings
2019-04-23 22:32:12 +02:00
from django . db import transaction
2022-02-23 20:25:30 +01:00
from django . db . models import F , Q
2021-08-15 18:35:37 +02:00
from django . http . response import ResponseHeaders
2020-06-11 00:54:34 +02:00
from django . test import override_settings
2018-05-04 01:40:46 +02:00
from django . utils . crypto import get_random_string
2020-06-11 00:54:34 +02:00
from django . utils . timezone import now
2024-05-23 20:31:31 +02:00
from dns . resolver import NoAnswer as DNSNoAnswer
2024-06-13 14:38:58 +02:00
from firebase_admin import exceptions as firebase_exceptions
2021-06-11 22:51:27 +02:00
from requests . exceptions import ConnectionError
from requests . models import PreparedRequest
2023-10-12 19:43:45 +02:00
from typing_extensions import override
2016-08-03 11:11:25 +02:00
2019-01-31 00:39:02 +01:00
from analytics . lib . counts import CountStat , LoggingCountStat
2024-02-26 21:14:52 +01:00
from analytics . models import InstallationCount , RealmCount , UserCount
2024-02-29 08:30:13 +01:00
from corporate . lib . stripe import RemoteRealmBillingSession
2023-11-29 17:00:19 +01:00
from corporate . models import CustomerPlan
2023-12-09 13:29:59 +01:00
from version import ZULIP_VERSION
2024-02-24 00:58:58 +01:00
from zerver . actions . create_realm import do_create_realm
2022-07-17 13:00:21 +02:00
from zerver . actions . message_delete import do_delete_messages
2022-04-14 23:54:53 +02:00
from zerver . actions . message_flags import do_mark_stream_messages_as_read , do_update_message_flags
2023-11-30 00:20:42 +01:00
from zerver . actions . realm_settings import (
do_change_realm_org_type ,
do_deactivate_realm ,
do_set_realm_authentication_methods ,
)
2023-12-08 20:53:31 +01:00
from zerver . actions . user_groups import add_subgroups_to_user_group , check_add_user_group
2023-07-05 11:59:56 +02:00
from zerver . actions . user_settings import do_change_user_setting , do_regenerate_api_key
from zerver . actions . user_topics import do_set_user_topic_visibility_policy
2024-03-18 01:18:53 +01:00
from zerver . lib import redis_utils
2023-11-06 10:41:08 +01:00
from zerver . lib . avatar import absolute_avatar_url , get_avatar_for_inaccessible_user
2021-07-16 22:11:10 +02:00
from zerver . lib . exceptions import JsonableError
2019-02-08 23:09:20 +01:00
from zerver . lib . push_notifications import (
2021-06-08 02:45:49 +02:00
APNsContext ,
2020-06-11 00:54:34 +02:00
DeviceToken ,
2023-10-08 00:43:41 +02:00
InvalidRemotePushDeviceTokenError ,
2023-01-02 20:50:23 +01:00
UserPushIdentityCompat ,
2019-02-08 23:09:20 +01:00
b64_to_hex ,
2020-06-02 18:09:26 +02:00
get_apns_badge_count ,
2020-07-16 07:05:02 +02:00
get_apns_badge_count_future ,
2021-06-08 02:45:49 +02:00
get_apns_context ,
2023-10-05 13:53:09 +02:00
get_base_payload ,
2019-02-14 00:54:56 +01:00
get_message_payload_apns ,
get_message_payload_gcm ,
2019-02-08 23:09:20 +01:00
get_mobile_push_content ,
handle_push_notification ,
handle_remove_push_notification ,
hex_to_b64 ,
modernize_apns_payload ,
2024-06-13 21:24:17 +02:00
parse_fcm_options ,
2019-02-08 23:09:20 +01:00
send_android_push_notification_to_user ,
send_apple_push_notification ,
send_notifications_to_bouncer ,
)
2020-06-11 00:54:34 +02:00
from zerver . lib . remote_server import (
2024-03-18 01:18:53 +01:00
PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY ,
2023-12-09 00:09:01 +01:00
AnalyticsRequest ,
2022-11-17 09:30:48 +01:00
PushNotificationBouncerError ,
2020-06-11 00:54:34 +02:00
PushNotificationBouncerRetryLaterError ,
2023-12-07 21:02:35 +01:00
PushNotificationBouncerServerError ,
2020-06-11 00:54:34 +02:00
build_analytics_data ,
2023-11-16 15:25:58 +01:00
get_realms_info_for_push_bouncer ,
2024-03-18 01:18:53 +01:00
record_push_notifications_recently_working ,
redis_client ,
2023-12-11 14:24:13 +01:00
send_server_data_to_push_bouncer ,
2021-07-16 22:11:10 +02:00
send_to_push_bouncer ,
2020-06-11 00:54:34 +02:00
)
2021-08-14 03:18:13 +02:00
from zerver . lib . response import json_response_from_error
2023-11-26 22:42:43 +01:00
from zerver . lib . test_classes import BouncerTestCase , ZulipTestCase
2023-11-06 10:41:08 +01:00
from zerver . lib . test_helpers import (
2024-07-16 22:52:01 +02:00
activate_push_notification_service ,
2023-11-06 10:41:08 +01:00
mock_queue_publish ,
reset_email_visibility_to_everyone_in_zulip_realm ,
)
2023-09-29 20:04:09 +02:00
from zerver . lib . timestamp import datetime_to_timestamp
2024-07-16 22:52:01 +02:00
from zerver . lib . types import AnalyticsDataUploadLevel
2023-06-02 17:33:05 +02:00
from zerver . lib . user_counts import realm_user_count_by_role
2020-06-11 00:54:34 +02:00
from zerver . models import (
Message ,
PushDeviceToken ,
2023-10-30 23:50:53 +01:00
Realm ,
2020-06-11 00:54:34 +02:00
RealmAuditLog ,
Recipient ,
Stream ,
Subscription ,
UserMessage ,
2023-12-12 17:15:57 +01:00
UserProfile ,
2023-07-05 11:59:56 +02:00
UserTopic ,
2020-06-11 00:54:34 +02:00
)
2023-12-15 04:33:19 +01:00
from zerver . models . clients import get_client
2024-08-30 18:15:41 +02:00
from zerver . models . realm_audit_logs import AuditLogEventType
2023-12-15 02:14:24 +01:00
from zerver . models . realms import get_realm
2023-12-15 20:21:59 +01:00
from zerver . models . scheduled_jobs import NotificationTriggers
2023-12-15 03:57:04 +01:00
from zerver . models . streams import get_stream
2024-01-05 15:02:55 +01:00
from zilencer . lib . remote_counts import MissingDataError
2022-01-12 23:45:01 +01:00
from zilencer . models import RemoteZulipServerAuditLog
2023-12-12 00:06:37 +01:00
from zilencer . views import DevicesToCleanUpDict
2020-08-27 22:46:39 +02:00
if settings . ZILENCER_ENABLED :
from zilencer . models import (
RemoteInstallationCount ,
RemotePushDeviceToken ,
2023-10-30 23:50:53 +01:00
RemoteRealm ,
2020-08-27 22:46:39 +02:00
RemoteRealmAuditLog ,
RemoteRealmCount ,
RemoteZulipServer ,
)
2023-12-01 22:57:34 +01:00
from zilencer . views import update_remote_realm_data_for_server
2016-10-27 23:55:31 +02:00
2021-02-12 08:19:30 +01:00
2023-10-05 13:53:09 +02:00
class SendTestPushNotificationEndpointTest(BouncerTestCase):
    """Tests for the /mobile_push/test_notification endpoint, covering both
    bouncer-less servers and the full server -> bouncer flow."""

    @activate_push_notification_service()
    @responses.activate
    def test_send_test_push_notification_api_invalid_token(self) -> None:
        # Case 1: the mobile device isn't registered with its own server
        # and makes a request to this API anyway.
        user = self.example_user("cordelia")
        response = self.api_post(
            user, "/api/v1/mobile_push/test_notification", {"token": "invalid"}, subdomain="zulip"
        )
        self.assert_json_error(response, "Device not recognized")
        self.assertEqual(orjson.loads(response.content)["code"], "INVALID_PUSH_DEVICE_TOKEN")

        # Case 2: what the server receives from the bouncer's
        # /test_notification endpoint for an unknown token.
        payload = {
            "realm_uuid": str(user.realm.uuid),
            "user_uuid": str(user.uuid),
            "user_id": user.id,
            "token": "invalid",
            "token_kind": PushDeviceToken.FCM,
            "base_payload": get_base_payload(user),
        }
        response = self.uuid_post(
            self.server_uuid,
            "/api/v1/remotes/push/test_notification",
            payload,
            subdomain="",
            content_type="application/json",
        )
        self.assert_json_error(response, "Device not recognized by the push bouncer")
        self.assertEqual(orjson.loads(response.content)["code"], "INVALID_REMOTE_PUSH_DEVICE_TOKEN")

        # Case 3: the full scenario — the device is registered with its
        # server, but the server failed to register it with the bouncer.
        token = "111222"
        token_kind = PushDeviceToken.FCM
        # Create only a PushDeviceToken (no RemotePushDeviceToken) to
        # simulate the missing registration on the bouncer.
        PushDeviceToken.objects.create(user=user, token=token, kind=token_kind)

        # As verified above, this is the bouncer's response in this kind of
        # case; simulate it with a response mock.
        error_response = json_response_from_error(InvalidRemotePushDeviceTokenError())
        responses.add(
            responses.POST,
            f"{settings.ZULIP_SERVICES_URL}/api/v1/remotes/push/test_notification",
            body=error_response.content,
            status=error_response.status_code,
        )

        response = self.api_post(
            user, "/api/v1/mobile_push/test_notification", {"token": token}, subdomain="zulip"
        )
        self.assert_json_error(response, "Device not recognized by the push bouncer")
        self.assertEqual(orjson.loads(response.content)["code"], "INVALID_REMOTE_PUSH_DEVICE_TOKEN")

    def test_send_test_push_notification_api_no_bouncer_config(self) -> None:
        """
        Tests the endpoint on a server that doesn't use the bouncer, due to having its
        own ability to send push notifications to devices directly.
        """
        user = self.example_user("cordelia")

        android_token = "111222"
        android_token_kind = PushDeviceToken.FCM
        apple_token = "111223"
        apple_token_kind = PushDeviceToken.APNS
        android_device = PushDeviceToken.objects.create(
            user=user, token=android_token, kind=android_token_kind
        )
        apple_device = PushDeviceToken.objects.create(
            user=user, token=apple_token, kind=apple_token_kind
        )

        endpoint = "/api/v1/mobile_push/test_notification"
        time_now = now()

        # Three scenarios, in order:
        # 1. An android device token is submitted.
        # 2. An apple device token is submitted.
        # 3. No token is submitted, so both devices should be notified.
        with (
            mock.patch(
                "zerver.lib.push_notifications.send_android_push_notification"
            ) as android_push_mock,
            time_machine.travel(time_now, tick=False),
        ):
            response = self.api_post(user, endpoint, {"token": android_token}, subdomain="zulip")

            expected_android_payload = {
                "server": "testserver",
                "realm_id": user.realm_id,
                "realm_name": "Zulip Dev",
                "realm_uri": "http://zulip.testserver",
                "realm_url": "http://zulip.testserver",
                "user_id": user.id,
                "event": "test",
                "time": datetime_to_timestamp(time_now),
            }
            expected_gcm_options = {"priority": "high"}
            android_push_mock.assert_called_once_with(
                UserPushIdentityCompat(user_id=user.id, user_uuid=str(user.uuid)),
                [android_device],
                expected_android_payload,
                expected_gcm_options,
                remote=None,
            )
            self.assert_json_success(response)

        with (
            mock.patch(
                "zerver.lib.push_notifications.send_apple_push_notification"
            ) as apple_push_mock,
            time_machine.travel(time_now, tick=False),
        ):
            response = self.api_post(user, endpoint, {"token": apple_token}, subdomain="zulip")

            expected_apple_payload = {
                "alert": {
                    "title": "Test notification",
                    "body": "This is a test notification from Zulip Dev (http://zulip.testserver).",
                },
                "sound": "default",
                "custom": {
                    "zulip": {
                        "server": "testserver",
                        "realm_id": user.realm_id,
                        "realm_name": "Zulip Dev",
                        "realm_uri": "http://zulip.testserver",
                        "realm_url": "http://zulip.testserver",
                        "user_id": user.id,
                        "event": "test",
                    }
                },
            }
            apple_push_mock.assert_called_once_with(
                UserPushIdentityCompat(user_id=user.id, user_uuid=str(user.uuid)),
                [apple_device],
                expected_apple_payload,
                remote=None,
            )
            self.assert_json_success(response)

        # Test without submitting a token value. Both devices should get notified.
        with (
            mock.patch(
                "zerver.lib.push_notifications.send_apple_push_notification"
            ) as apple_push_mock,
            mock.patch(
                "zerver.lib.push_notifications.send_android_push_notification"
            ) as android_push_mock,
            time_machine.travel(time_now, tick=False),
        ):
            response = self.api_post(user, endpoint, subdomain="zulip")

            android_push_mock.assert_called_once_with(
                UserPushIdentityCompat(user_id=user.id, user_uuid=str(user.uuid)),
                [android_device],
                expected_android_payload,
                expected_gcm_options,
                remote=None,
            )
            apple_push_mock.assert_called_once_with(
                UserPushIdentityCompat(user_id=user.id, user_uuid=str(user.uuid)),
                [apple_device],
                expected_apple_payload,
                remote=None,
            )
            self.assert_json_success(response)

    @activate_push_notification_service()
    @responses.activate
    def test_send_test_push_notification_api_with_bouncer_config(self) -> None:
        """
        Tests the endpoint on a server that uses the bouncer. This will simulate the
        end-to-end flow:
        1. First we simulate a request from the mobile device to the remote server's
           endpoint for a test notification.
        2. As a result, the remote server makes a request to the bouncer to send that
           notification.

        We verify that the appropriate function for sending the notification to the
        device is called on the bouncer as the ultimate result of the flow.
        """
        self.add_mock_response()

        user = self.example_user("cordelia")
        server = self.server
        remote_realm = RemoteRealm.objects.get(server=server, uuid=user.realm.uuid)

        token = "111222"
        token_kind = PushDeviceToken.FCM
        PushDeviceToken.objects.create(user=user, token=token, kind=token_kind)
        remote_device = RemotePushDeviceToken.objects.create(
            server=server, user_uuid=str(user.uuid), token=token, kind=token_kind
        )

        endpoint = "/api/v1/mobile_push/test_notification"
        time_now = now()
        with (
            mock.patch(
                "zerver.lib.push_notifications.send_android_push_notification"
            ) as android_push_mock,
            time_machine.travel(time_now, tick=False),
        ):
            response = self.api_post(user, endpoint, {"token": token}, subdomain="zulip")

            expected_payload = {
                "server": "testserver",
                "realm_id": user.realm_id,
                "realm_name": "Zulip Dev",
                "realm_uri": "http://zulip.testserver",
                "realm_url": "http://zulip.testserver",
                "user_id": user.id,
                "event": "test",
                "time": datetime_to_timestamp(time_now),
            }
            expected_gcm_options = {"priority": "high"}
            user_identity = UserPushIdentityCompat(user_id=user.id, user_uuid=str(user.uuid))
            android_push_mock.assert_called_once_with(
                user_identity,
                [remote_device],
                expected_payload,
                expected_gcm_options,
                remote=server,
            )
            self.assert_json_success(response)

        # The bouncer records when the remote realm last made a request.
        remote_realm.refresh_from_db()
        self.assertEqual(remote_realm.last_request_datetime, time_now)
2017-05-16 08:05:31 +02:00
class PushBouncerNotificationTest(BouncerTestCase):
    # The push bouncer API lives on the root ('') subdomain.
    DEFAULT_SUBDOMAIN = ""

    def test_unregister_remote_push_user_params(self) -> None:
        """Validate the parameter checks and auth failures of the
        /remotes/push/unregister endpoint."""
        token = "111222"
        token_kind = PushDeviceToken.FCM

        endpoint = "/api/v1/remotes/push/unregister"
        response = self.uuid_post(self.server_uuid, endpoint, {"token_kind": token_kind})
        self.assert_json_error(response, "Missing 'token' argument")
        response = self.uuid_post(self.server_uuid, endpoint, {"token": token})
        self.assert_json_error(response, "Missing 'token_kind' argument")

        # We need the root ('') subdomain to be in use for this next
        # test, since the push bouncer API is only available there:
        hamlet = self.example_user("hamlet")
        realm = get_realm("zulip")
        realm.string_id = ""
        realm.save()

        response = self.api_post(
            hamlet,
            endpoint,
            dict(user_id=15, token=token, token_kind=token_kind),
            subdomain="",
        )
        self.assert_json_error(response, "Must validate with valid Zulip server API key")

        # Requests from deactivated remote servers are rejected.
        self.server.deactivated = True
        self.server.save()
        response = self.uuid_post(
            self.server_uuid, endpoint, self.get_generic_payload("unregister")
        )
        self.assert_json_error_contains(
            response,
            "The mobile push notification service registration for your server has been deactivated",
            401,
        )
2022-02-08 00:13:33 +01:00
def test_register_remote_push_user_params(self) -> None:
    """Validate the parameter checks and the various authentication failure
    modes of the /remotes/push/register endpoint."""
    token = "111222"
    user_id = 11
    token_kind = PushDeviceToken.FCM

    endpoint = "/api/v1/remotes/push/register"

    # Each required argument missing in turn:
    response = self.uuid_post(
        self.server_uuid, endpoint, {"user_id": user_id, "token_kind": token_kind}
    )
    self.assert_json_error(response, "Missing 'token' argument")
    response = self.uuid_post(self.server_uuid, endpoint, {"user_id": user_id, "token": token})
    self.assert_json_error(response, "Missing 'token_kind' argument")
    response = self.uuid_post(
        self.server_uuid, endpoint, {"token": token, "token_kind": token_kind}
    )
    self.assert_json_error(response, "Missing user_id or user_uuid")
    # An unknown token_kind value:
    response = self.uuid_post(
        self.server_uuid, endpoint, {"user_id": user_id, "token": token, "token_kind": 17}
    )
    self.assert_json_error(response, "Invalid token type")

    hamlet = self.example_user("hamlet")

    # We need the root ('') subdomain to be in use for this next
    # test, since the push bouncer API is only available there:
    realm = get_realm("zulip")
    realm.string_id = ""
    realm.save()

    # A regular user's API key is not a valid server credential.
    response = self.api_post(
        hamlet,
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
    )
    self.assert_json_error(response, "Must validate with valid Zulip server API key")

    # The bouncer endpoints reject non-root subdomains.
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
        subdomain="zulip",
    )
    self.assert_json_error(
        response, "Invalid subdomain for push notifications bouncer", status_code=401
    )

    # We do a bit of hackery here to the API_KEYS cache just to
    # make the code simple for sending an incorrect API key.
    self.API_KEYS[self.server_uuid] = "invalid"
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
    )
    self.assert_json_error(
        response,
        "Zulip server auth failure: key does not match role 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe",
        status_code=401,
    )

    del self.API_KEYS[self.server_uuid]

    # A uuid the bouncer has never seen:
    self.API_KEYS["invalid_uuid"] = "invalid"
    response = self.uuid_post(
        "invalid_uuid",
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
        subdomain="zulip",
    )
    self.assert_json_error(
        response,
        "Zulip server auth failure: invalid_uuid is not registered -- did you run `manage.py register_server`?",
        status_code=401,
    )
    del self.API_KEYS["invalid_uuid"]

    # A well-formed but unregistered uuid, authenticated via a raw
    # Basic auth header rather than the test-helper cache:
    credentials_uuid = str(uuid.uuid4())
    credentials = "{}:{}".format(credentials_uuid, "invalid")
    api_auth = "Basic " + base64.b64encode(credentials.encode()).decode()
    response = self.client_post(
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
        HTTP_AUTHORIZATION=api_auth,
    )
    self.assert_json_error(
        response,
        f"Zulip server auth failure: {credentials_uuid} is not registered -- did you run `manage.py register_server`?",
        status_code=401,
    )

    # Requests from deactivated remote servers are rejected.
    self.server.deactivated = True
    self.server.save()
    response = self.uuid_post(self.server_uuid, endpoint, self.get_generic_payload("register"))
    self.assert_json_error_contains(
        response,
        "The mobile push notification service registration for your server has been deactivated",
        401,
    )
2023-11-06 22:18:52 +01:00
def test_register_require_ios_app_id(self) -> None:
    """APNS registrations must supply ios_app_id; FCM ones need not."""
    endpoint = "/api/v1/remotes/push/register"
    args = {"user_id": 11, "token": "1122"}

    # APNS without an ios_app_id is rejected...
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {**args, "token_kind": PushDeviceToken.APNS},
    )
    self.assert_json_error(response, "Missing ios_app_id")

    # ...and accepted once one is provided.
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {**args, "token_kind": PushDeviceToken.APNS, "ios_app_id": "example.app"},
    )
    self.assert_json_success(response)

    # FCM registrations don't require an ios_app_id at all.
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {**args, "token_kind": PushDeviceToken.FCM},
    )
    self.assert_json_success(response)
2023-11-07 23:13:39 +01:00
def test_register_validate_ios_app_id(self) -> None:
    """The ios_app_id value must match the expected bundle-id format."""
    endpoint = "/api/v1/remotes/push/register"
    args = {
        "user_id": 11,
        "token": "1122",
        "token_kind": PushDeviceToken.APNS,
        # An injection-looking value, to exercise the format validator.
        "ios_app_id": "'; tables --",
    }

    response = self.uuid_post(self.server_uuid, endpoint, args)
    self.assert_json_error(response, "ios_app_id has invalid format")

    # A conventional reverse-DNS bundle id is accepted.
    args["ios_app_id"] = "com.zulip.apple"
    response = self.uuid_post(self.server_uuid, endpoint, args)
    self.assert_json_success(response)
2023-04-10 00:23:59 +02:00
def test_register_device_deduplication(self) -> None:
    """Re-registering the same user+device with a uuid replaces the older
    legacy user_id registration instead of duplicating it."""
    hamlet = self.example_user("hamlet")
    token = "111222"
    user_id = hamlet.id
    user_uuid = str(hamlet.uuid)
    token_kind = PushDeviceToken.FCM

    endpoint = "/api/v1/remotes/push/register"

    # First we create a legacy user_id registration.
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {"user_id": user_id, "token_kind": token_kind, "token": token},
    )
    self.assert_json_success(response)

    registrations = list(RemotePushDeviceToken.objects.filter(token=token))
    self.assert_length(registrations, 1)
    self.assertEqual(registrations[0].user_id, user_id)
    self.assertEqual(registrations[0].user_uuid, None)

    # Register same user+device with uuid now. The old registration should be deleted
    # to avoid duplication.
    response = self.uuid_post(
        self.server_uuid,
        endpoint,
        {"user_id": user_id, "user_uuid": user_uuid, "token_kind": token_kind, "token": token},
    )
    registrations = list(RemotePushDeviceToken.objects.filter(token=token))
    self.assert_length(registrations, 1)
    self.assertEqual(registrations[0].user_id, None)
    self.assertEqual(str(registrations[0].user_uuid), user_uuid)
2017-11-05 10:51:25 +01:00
def test_remote_push_user_endpoints(self) -> None:
    """Smoke-test register/unregister happy paths and oversized tokens."""
    endpoints = [
        ("/api/v1/remotes/push/register", "register"),
        ("/api/v1/remotes/push/unregister", "unregister"),
    ]
    for endpoint, method in endpoints:
        payload = self.get_generic_payload(method)

        # A well-formed payload succeeds.
        response = self.uuid_post(self.server_uuid, endpoint, payload)
        self.assert_json_success(response)

        # register creates exactly one row; unregister leaves none.
        remote_tokens = RemotePushDeviceToken.objects.filter(token=payload["token"])
        expected_count = 1 if method == "register" else 0
        self.assert_length(remote_tokens, expected_count)

        # Try adding/removing tokens that are too big...
        broken_token = "x" * 5000  # too big
        payload["token"] = broken_token
        response = self.uuid_post(self.server_uuid, endpoint, payload)
        self.assert_json_error(response, "Empty or invalid length token")
2023-12-22 02:22:48 +01:00
def test_send_notification_endpoint_sets_remote_realm_for_devices ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
server = self . server
remote_realm = RemoteRealm . objects . get ( server = server , uuid = hamlet . realm . uuid )
android_token = RemotePushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = RemotePushDeviceToken . FCM ,
2023-12-22 02:22:48 +01:00
token = hex_to_b64 ( " aaaa " ) ,
user_uuid = hamlet . uuid ,
server = server ,
)
apple_token = RemotePushDeviceToken . objects . create (
kind = RemotePushDeviceToken . APNS ,
token = hex_to_b64 ( " bbbb " ) ,
user_uuid = hamlet . uuid ,
server = server ,
)
payload = {
" user_id " : hamlet . id ,
" user_uuid " : str ( hamlet . uuid ) ,
" realm_uuid " : str ( hamlet . realm . uuid ) ,
" gcm_payload " : { } ,
" apns_payload " : { } ,
" gcm_options " : { } ,
}
2024-07-12 02:30:32 +02:00
with (
mock . patch ( " zilencer.views.send_android_push_notification " , return_value = 1 ) ,
mock . patch ( " zilencer.views.send_apple_push_notification " , return_value = 1 ) ,
mock . patch (
" corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses " ,
return_value = 10 ,
) ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) ,
) :
2023-12-22 02:22:48 +01:00
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
self . assert_json_success ( result )
android_token . refresh_from_db ( )
apple_token . refresh_from_db ( )
self . assertEqual ( android_token . remote_realm , remote_realm )
self . assertEqual ( apple_token . remote_realm , remote_realm )
2021-09-28 14:17:16 +02:00
def test_send_notification_endpoint ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
2023-12-22 02:29:00 +01:00
server = self . server
2023-12-25 03:01:58 +01:00
remote_realm = RemoteRealm . objects . get ( server = server , uuid = hamlet . realm . uuid )
2021-09-28 14:17:16 +02:00
token = " aaaa "
2021-11-09 01:32:19 +01:00
android_tokens = [ ]
2023-04-10 00:55:16 +02:00
uuid_android_tokens = [ ]
2021-09-28 14:17:16 +02:00
for i in [ " aa " , " bb " ] :
2021-11-09 01:32:19 +01:00
android_tokens . append (
RemotePushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = RemotePushDeviceToken . FCM ,
2021-11-09 01:32:19 +01:00
token = hex_to_b64 ( token + i ) ,
user_id = hamlet . id ,
server = server ,
)
2021-09-28 14:17:16 +02:00
)
2023-04-10 00:55:16 +02:00
# Create a duplicate, newer uuid-based registration for the same user to verify
# the bouncer will handle that correctly, without triggering a duplicate notification,
# and will delete the old, legacy registration.
uuid_android_tokens . append (
RemotePushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = RemotePushDeviceToken . FCM ,
2023-04-10 00:55:16 +02:00
token = hex_to_b64 ( token + i ) ,
user_uuid = str ( hamlet . uuid ) ,
server = server ,
)
)
2021-11-09 01:32:19 +01:00
apple_token = RemotePushDeviceToken . objects . create (
2021-09-28 14:17:16 +02:00
kind = RemotePushDeviceToken . APNS ,
token = hex_to_b64 ( token ) ,
user_id = hamlet . id ,
server = server ,
)
2021-11-09 01:32:19 +01:00
many_ids = " , " . join ( str ( i ) for i in range ( 1 , 250 ) )
2021-09-28 14:17:16 +02:00
payload = {
" user_id " : hamlet . id ,
2023-04-10 00:55:16 +02:00
" user_uuid " : str ( hamlet . uuid ) ,
2023-12-11 22:21:48 +01:00
" realm_uuid " : str ( hamlet . realm . uuid ) ,
2021-11-09 01:32:19 +01:00
" gcm_payload " : { " event " : " remove " , " zulip_message_ids " : many_ids } ,
2022-01-03 23:06:42 +01:00
" apns_payload " : {
" badge " : 0 ,
" custom " : { " zulip " : { " event " : " remove " , " zulip_message_ids " : many_ids } } ,
} ,
2021-09-28 14:17:16 +02:00
" gcm_options " : { } ,
}
2023-12-25 03:01:58 +01:00
time_sent = now ( )
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zilencer.views.send_android_push_notification " , return_value = 2
) as android_push ,
mock . patch ( " zilencer.views.send_apple_push_notification " , return_value = 1 ) as apple_push ,
mock . patch (
" corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses " ,
return_value = 10 ,
) ,
time_machine . travel ( time_sent , tick = False ) ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) as logger ,
) :
2021-09-28 14:17:16 +02:00
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
2022-06-07 01:37:01 +02:00
data = self . assert_json_success ( result )
2021-09-28 14:17:16 +02:00
self . assertEqual (
2023-12-12 00:06:37 +01:00
{
" result " : " success " ,
" msg " : " " ,
" total_android_devices " : 2 ,
" total_apple_devices " : 1 ,
" deleted_devices " : { " android_devices " : [ ] , " apple_devices " : [ ] } ,
2023-12-11 22:21:48 +01:00
" realm " : { " can_push " : True , " expected_end_timestamp " : None } ,
2023-12-12 00:06:37 +01:00
} ,
2021-09-28 14:17:16 +02:00
data ,
)
2021-10-20 01:16:18 +02:00
self . assertEqual (
logger . output ,
[
" INFO:zilencer.views: "
2023-06-06 22:07:28 +02:00
f " Deduplicating push registrations for server id: { server . id } user id: { hamlet . id } uuid: { hamlet . uuid } and tokens: { sorted ( t . token for t in android_tokens ) } " ,
2023-04-10 00:55:16 +02:00
" INFO:zilencer.views: "
2023-06-06 22:07:28 +02:00
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { hamlet . id } ><uuid: { hamlet . uuid } >: "
2023-04-10 00:55:16 +02:00
" 2 via FCM devices, 1 via APNs devices " ,
2021-10-20 01:16:18 +02:00
] ,
)
2022-02-23 20:25:30 +01:00
2023-04-10 00:55:16 +02:00
user_identity = UserPushIdentityCompat ( user_id = hamlet . id , user_uuid = str ( hamlet . uuid ) )
2021-11-09 01:32:19 +01:00
apple_push . assert_called_once_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2021-11-09 01:32:19 +01:00
[ apple_token ] ,
2022-01-03 23:06:42 +01:00
{
" badge " : 0 ,
" custom " : {
" zulip " : {
" event " : " remove " ,
" zulip_message_ids " : " , " . join ( str ( i ) for i in range ( 50 , 250 ) ) ,
}
} ,
} ,
2021-11-09 01:32:19 +01:00
remote = server ,
)
android_push . assert_called_once_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2024-05-30 20:52:29 +02:00
uuid_android_tokens ,
2021-11-09 01:32:19 +01:00
{ " event " : " remove " , " zulip_message_ids " : " , " . join ( str ( i ) for i in range ( 50 , 250 ) ) } ,
{ } ,
remote = server ,
)
2021-09-28 14:17:16 +02:00
2023-12-25 03:01:58 +01:00
remote_realm . refresh_from_db ( )
server . refresh_from_db ( )
self . assertEqual ( remote_realm . last_request_datetime , time_sent )
self . assertEqual ( server . last_request_datetime , time_sent )
2023-12-17 06:59:16 +01:00
def test_send_notification_endpoint_on_free_plans ( self ) - > None :
2023-12-12 17:15:57 +01:00
hamlet = self . example_user ( " hamlet " )
2023-12-22 02:29:00 +01:00
remote_server = self . server
2023-12-12 17:15:57 +01:00
RemotePushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = RemotePushDeviceToken . FCM ,
2023-12-12 17:15:57 +01:00
token = hex_to_b64 ( " aaaaaa " ) ,
user_id = hamlet . id ,
server = remote_server ,
)
current_time = now ( )
message = Message (
sender = hamlet ,
recipient = self . example_user ( " othello " ) . recipient ,
realm_id = hamlet . realm_id ,
content = " This is test content " ,
rendered_content = " This is test content " ,
date_sent = current_time ,
sending_client = get_client ( " test " ) ,
)
message . save ( )
# Test old zulip server case.
self . assertIsNone ( remote_server . last_api_feature_level )
old_apns_payload = {
" alert " : {
" title " : " King Hamlet " ,
" subtitle " : " " ,
" body " : message . content ,
} ,
" badge " : 0 ,
" sound " : " default " ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " private " ,
" sender_email " : hamlet . email ,
" sender_id " : hamlet . id ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : hamlet . realm . id ,
2024-05-06 15:27:22 +02:00
" realm_uri " : hamlet . realm . url ,
2024-05-07 06:08:52 +02:00
" realm_url " : hamlet . realm . url ,
2023-12-12 17:15:57 +01:00
" user_id " : self . example_user ( " othello " ) . id ,
}
} ,
}
old_gcm_payload = {
" user_id " : self . example_user ( " othello " ) . id ,
" event " : " message " ,
" alert " : " New private message from King Hamlet " ,
" zulip_message_id " : message . id ,
" time " : datetime_to_timestamp ( message . date_sent ) ,
" content " : message . content ,
" content_truncated " : False ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : hamlet . realm . id ,
2024-05-06 15:27:22 +02:00
" realm_uri " : hamlet . realm . url ,
2024-05-07 06:08:52 +02:00
" realm_url " : hamlet . realm . url ,
2023-12-12 17:15:57 +01:00
" sender_id " : hamlet . id ,
" sender_email " : hamlet . email ,
" sender_full_name " : " King Hamlet " ,
" sender_avatar_url " : absolute_avatar_url ( message . sender ) ,
" recipient_type " : " private " ,
}
payload = {
" user_id " : hamlet . id ,
" gcm_payload " : old_gcm_payload ,
" apns_payload " : old_apns_payload ,
" gcm_options " : { " priority " : " high " } ,
}
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
self . assertEqual ( orjson . loads ( result . content ) [ " code " ] , " INVALID_ZULIP_SERVER " )
remote_server . last_api_feature_level = 235
remote_server . save ( )
gcm_payload , gcm_options = get_message_payload_gcm ( hamlet , message )
apns_payload = get_message_payload_apns (
hamlet , message , NotificationTriggers . DIRECT_MESSAGE
)
payload = {
" user_id " : hamlet . id ,
" user_uuid " : str ( hamlet . uuid ) ,
" gcm_payload " : gcm_payload ,
" apns_payload " : apns_payload ,
" gcm_options " : gcm_options ,
}
# Test the case when there is no data about users.
self . assertIsNone ( remote_server . last_audit_log_update )
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
2024-02-06 23:02:01 +01:00
self . assert_json_error (
result ,
" Your plan doesn ' t allow sending push notifications. Reason provided by the server: Missing data " ,
)
self . assertEqual ( orjson . loads ( result . content ) [ " code " ] , " PUSH_NOTIFICATIONS_DISALLOWED " )
2023-12-12 17:15:57 +01:00
human_counts = {
str ( UserProfile . ROLE_REALM_ADMINISTRATOR ) : 1 ,
str ( UserProfile . ROLE_REALM_OWNER ) : 1 ,
str ( UserProfile . ROLE_MODERATOR ) : 0 ,
str ( UserProfile . ROLE_MEMBER ) : 7 ,
str ( UserProfile . ROLE_GUEST ) : 2 ,
}
RemoteRealmAuditLog . objects . create (
server = remote_server ,
2024-08-30 18:15:41 +02:00
event_type = AuditLogEventType . USER_CREATED ,
2023-12-12 17:15:57 +01:00
event_time = current_time - timedelta ( minutes = 10 ) ,
extra_data = { RealmAuditLog . ROLE_COUNT : { RealmAuditLog . ROLE_COUNT_HUMANS : human_counts } } ,
)
remote_server . last_audit_log_update = current_time - timedelta ( minutes = 10 )
remote_server . save ( )
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
2024-02-06 23:02:01 +01:00
self . assert_json_error (
result ,
2024-02-16 19:20:02 +01:00
" Your plan doesn ' t allow sending push notifications. Reason provided by the server: Push notifications access with 10+ users requires signing up for a plan. https://zulip.com/plans/ " ,
2024-02-06 23:02:01 +01:00
)
self . assertEqual ( orjson . loads ( result . content ) [ " code " ] , " PUSH_NOTIFICATIONS_DISALLOWED " )
2023-12-12 17:15:57 +01:00
2023-12-17 06:59:16 +01:00
# Check that sponsored realms are allowed to send push notifications.
remote_server . plan_type = RemoteRealm . PLAN_TYPE_COMMUNITY
remote_server . save ( )
with self . assertLogs ( " zilencer.views " , level = " INFO " ) as logger :
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
data = self . assert_json_success ( result )
self . assertEqual (
{
" result " : " success " ,
" msg " : " " ,
" realm " : None ,
" total_android_devices " : 1 ,
" total_apple_devices " : 0 ,
" deleted_devices " : { " android_devices " : [ ] , " apple_devices " : [ ] } ,
} ,
data ,
)
self . assertIn (
" INFO:zilencer.views: "
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { hamlet . id } ><uuid: { hamlet . uuid } >: "
" 1 via FCM devices, 0 via APNs devices " ,
logger . output ,
)
# Reset the plan_type to test remaining cases.
remote_server . plan_type = RemoteRealm . PLAN_TYPE_SELF_MANAGED
remote_server . save ( )
2023-12-12 17:15:57 +01:00
human_counts = {
str ( UserProfile . ROLE_REALM_ADMINISTRATOR ) : 1 ,
str ( UserProfile . ROLE_REALM_OWNER ) : 1 ,
str ( UserProfile . ROLE_MODERATOR ) : 0 ,
str ( UserProfile . ROLE_MEMBER ) : 6 ,
str ( UserProfile . ROLE_GUEST ) : 2 ,
}
RemoteRealmAuditLog . objects . create (
server = remote_server ,
2024-08-30 18:15:41 +02:00
event_type = AuditLogEventType . USER_DEACTIVATED ,
2023-12-12 17:15:57 +01:00
event_time = current_time - timedelta ( minutes = 8 ) ,
extra_data = { RealmAuditLog . ROLE_COUNT : { RealmAuditLog . ROLE_COUNT_HUMANS : human_counts } } ,
)
remote_server . last_audit_log_update = current_time - timedelta ( minutes = 8 )
remote_server . save ( )
with self . assertLogs ( " zilencer.views " , level = " INFO " ) as logger :
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
data = self . assert_json_success ( result )
self . assertEqual (
{
" result " : " success " ,
" msg " : " " ,
" realm " : None ,
" total_android_devices " : 1 ,
" total_apple_devices " : 0 ,
" deleted_devices " : { " android_devices " : [ ] , " apple_devices " : [ ] } ,
} ,
data ,
)
self . assertIn (
" INFO:zilencer.views: "
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { hamlet . id } ><uuid: { hamlet . uuid } >: "
" 1 via FCM devices, 0 via APNs devices " ,
logger . output ,
)
2023-09-29 20:04:09 +02:00
def test_subsecond_timestamp_format ( self ) - > None :
2023-09-18 17:07:34 +02:00
hamlet = self . example_user ( " hamlet " )
RemotePushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = RemotePushDeviceToken . FCM ,
2023-09-18 17:07:34 +02:00
token = hex_to_b64 ( " aaaaaa " ) ,
user_id = hamlet . id ,
2023-12-22 02:29:00 +01:00
server = self . server ,
2023-09-18 17:07:34 +02:00
)
2023-09-29 20:04:09 +02:00
time_sent = now ( ) . replace ( microsecond = 234000 )
with time_machine . travel ( time_sent , tick = False ) :
2023-09-18 17:07:34 +02:00
message = Message (
sender = hamlet ,
recipient = self . example_user ( " othello " ) . recipient ,
realm_id = hamlet . realm_id ,
content = " This is test content " ,
rendered_content = " This is test content " ,
date_sent = now ( ) ,
sending_client = get_client ( " test " ) ,
)
message . set_topic_name ( " Test topic " )
message . save ( )
gcm_payload , gcm_options = get_message_payload_gcm ( hamlet , message )
apns_payload = get_message_payload_apns (
hamlet , message , NotificationTriggers . DIRECT_MESSAGE
)
2023-09-29 20:04:09 +02:00
# Reconfigure like recent versions, which had subsecond-granularity
# timestamps.
2023-09-18 17:07:34 +02:00
self . assertIsNotNone ( gcm_payload . get ( " time " ) )
2023-09-29 20:04:09 +02:00
gcm_payload [ " time " ] = float ( gcm_payload [ " time " ] + 0.234 )
self . assertEqual ( gcm_payload [ " time " ] , time_sent . timestamp ( ) )
2023-09-18 17:07:34 +02:00
self . assertIsNotNone ( apns_payload [ " custom " ] [ " zulip " ] . get ( " time " ) )
2023-09-29 20:04:09 +02:00
apns_payload [ " custom " ] [ " zulip " ] [ " time " ] = gcm_payload [ " time " ]
2023-09-18 17:07:34 +02:00
payload = {
" user_id " : hamlet . id ,
" user_uuid " : str ( hamlet . uuid ) ,
" gcm_payload " : gcm_payload ,
" apns_payload " : apns_payload ,
" gcm_options " : gcm_options ,
}
2023-11-19 19:45:19 +01:00
time_received = time_sent + timedelta ( seconds = 1 , milliseconds = 234 )
2024-07-12 02:30:32 +02:00
with (
time_machine . travel ( time_received , tick = False ) ,
mock . patch ( " zilencer.views.send_android_push_notification " , return_value = 1 ) ,
mock . patch ( " zilencer.views.send_apple_push_notification " , return_value = 1 ) ,
mock . patch (
" corporate.lib.stripe.RemoteServerBillingSession.current_count_for_billed_licenses " ,
return_value = 10 ,
) ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) as logger ,
) :
2023-09-18 17:07:34 +02:00
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
self . assert_json_success ( result )
self . assertEqual (
logger . output [ 0 ] ,
" INFO:zilencer.views: "
f " Remote queuing latency for 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { hamlet . id } ><uuid: { hamlet . uuid } > "
2023-09-29 20:04:09 +02:00
" is 1.234 seconds " ,
2023-09-18 17:07:34 +02:00
)
2019-11-19 03:12:54 +01:00
def test_remote_push_unregister_all ( self ) - > None :
2021-02-12 08:20:45 +01:00
payload = self . get_generic_payload ( " register " )
2019-11-19 03:12:54 +01:00
# Verify correct results are success
2021-02-12 08:20:45 +01:00
result = self . uuid_post ( self . server_uuid , " /api/v1/remotes/push/register " , payload )
2019-11-19 03:12:54 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
remote_tokens = RemotePushDeviceToken . objects . filter ( token = payload [ " token " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( remote_tokens , 1 )
2021-02-12 08:19:30 +01:00
result = self . uuid_post (
2021-02-12 08:20:45 +01:00
self . server_uuid , " /api/v1/remotes/push/unregister/all " , dict ( user_id = 10 )
2021-02-12 08:19:30 +01:00
)
2019-11-19 03:12:54 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
remote_tokens = RemotePushDeviceToken . objects . filter ( token = payload [ " token " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( remote_tokens , 0 )
2019-11-19 03:12:54 +01:00
2017-11-05 10:51:25 +01:00
def test_invalid_apns_token ( self ) - > None :
2017-07-07 18:18:37 +02:00
endpoints = [
2021-02-12 08:20:45 +01:00
( " /api/v1/remotes/push/register " , " apple-token " ) ,
2017-07-07 18:18:37 +02:00
]
for endpoint , method in endpoints :
payload = {
2021-02-12 08:20:45 +01:00
" user_id " : 10 ,
" token " : " xyz uses non-hex characters " ,
" token_kind " : PushDeviceToken . APNS ,
2017-07-07 18:18:37 +02:00
}
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , payload )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Invalid APNS token " )
2017-07-07 18:18:37 +02:00
2023-11-23 22:07:41 +01:00
def test_initialize_push_notifications ( self ) - > None :
realm = get_realm ( " zulip " )
realm . push_notifications_enabled = False
realm . save ( )
from zerver . lib . push_notifications import initialize_push_notifications
with mock . patch (
" zerver.lib.push_notifications.sends_notifications_directly " , return_value = True
) :
initialize_push_notifications ( )
realm = get_realm ( " zulip " )
self . assertTrue ( realm . push_notifications_enabled )
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = False
) ,
self . assertLogs ( " zerver.lib.push_notifications " , level = " WARNING " ) as warn_log ,
) :
2023-11-23 22:07:41 +01:00
initialize_push_notifications ( )
not_configured_warn_log = (
" WARNING:zerver.lib.push_notifications: "
" Mobile push notifications are not configured. \n "
" See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html "
)
realm = get_realm ( " zulip " )
self . assertFalse ( realm . push_notifications_enabled )
self . assertEqual (
warn_log . output [ 0 ] ,
not_configured_warn_log ,
)
2024-07-14 20:30:42 +02:00
with (
2024-07-16 22:52:01 +02:00
activate_push_notification_service ( ) ,
2024-07-14 20:30:42 +02:00
mock . patch ( " zerver.lib.remote_server.send_to_push_bouncer " ) as m ,
2023-11-23 22:07:41 +01:00
) :
2024-07-14 20:30:42 +02:00
post_response = {
" realms " : { realm . uuid : { " can_push " : True , " expected_end_timestamp " : None } }
}
get_response = {
" last_realm_count_id " : 0 ,
" last_installation_count_id " : 0 ,
" last_realmauditlog_id " : 0 ,
}
2023-11-23 22:07:41 +01:00
2024-07-14 20:30:42 +02:00
def mock_send_to_push_bouncer_response ( method : str , * args : Any ) - > dict [ str , Any ] :
if method == " POST " :
return post_response
return get_response
2023-12-09 13:29:59 +01:00
2024-07-14 20:30:42 +02:00
m . side_effect = mock_send_to_push_bouncer_response
2023-11-23 22:07:41 +01:00
2024-07-14 20:30:42 +02:00
initialize_push_notifications ( )
2023-11-23 22:07:41 +01:00
2024-07-14 20:30:42 +02:00
realm = get_realm ( " zulip " )
self . assertTrue ( realm . push_notifications_enabled )
self . assertEqual ( realm . push_notifications_enabled_end_timestamp , None )
2023-11-23 22:07:41 +01:00
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2023-12-01 22:57:34 +01:00
@responses.activate
def test_register_token_realm_uuid_belongs_to_different_server ( self ) - > None :
self . add_mock_response ( )
user = self . example_user ( " cordelia " )
self . login_user ( user )
# Create a simulated second server. We will take user's RemoteRealm registration
# and change its server to this second server. This means that when the bouncer
# is processing the token registration request, it will find a RemoteRealm matching
# the realm_uuid in the request, but that RemoteRealm will be registered to a
# different server than the one making the request (self.server).
2023-12-10 11:59:28 +01:00
# This will make it log a warning, raise an exception when trying to get
# remote realm via get_remote_realm_helper and thus, not register the token.
2023-12-01 22:57:34 +01:00
second_server = RemoteZulipServer . objects . create (
uuid = uuid . uuid4 ( ) ,
api_key = " magic_secret_api_key2 " ,
hostname = " demo2.example.com " ,
last_updated = now ( ) ,
)
remote_realm = RemoteRealm . objects . get ( server = self . server , uuid = user . realm . uuid )
remote_realm . server = second_server
remote_realm . save ( )
endpoint = " /json/users/me/apns_device_token "
token = " apple-tokenaz "
with self . assertLogs ( " zilencer.views " , level = " WARN " ) as warn_log :
result = self . client_post (
endpoint , { " token " : token , " appid " : " org.zulip.Zulip " } , subdomain = " zulip "
)
2023-12-10 11:59:28 +01:00
self . assert_json_error_contains (
result ,
" Your organization is registered to a different Zulip server. Please contact Zulip support " ,
)
2023-12-01 22:57:34 +01:00
self . assertEqual (
warn_log . output ,
[
" WARNING:zilencer.views:/api/v1/remotes/push/register: "
f " Realm { remote_realm . uuid !s} exists, but not registered to server { self . server . id } "
] ,
)
2023-12-10 11:59:28 +01:00
self . assert_length ( RemotePushDeviceToken . objects . filter ( token = token ) , 0 )
2023-12-01 22:57:34 +01:00
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
def test_push_bouncer_api ( self ) - > None :
2016-10-27 23:55:31 +02:00
""" This is a variant of the below test_push_api, but using the full
push notification bouncer flow
"""
2021-06-11 22:51:27 +02:00
self . add_mock_response ( )
2021-02-12 08:20:45 +01:00
user = self . example_user ( " cordelia " )
2020-03-06 18:40:46 +01:00
self . login_user ( user )
2023-12-22 02:29:00 +01:00
server = self . server
2016-10-27 23:55:31 +02:00
2024-07-12 02:30:17 +02:00
endpoints : list [ tuple [ str , str , int , Mapping [ str , str ] ] ] = [
2023-11-06 21:06:55 +01:00
(
" /json/users/me/apns_device_token " ,
" apple-tokenaz " ,
RemotePushDeviceToken . APNS ,
{ " appid " : " org.zulip.Zulip " } ,
) ,
2024-06-13 20:53:09 +02:00
( " /json/users/me/android_gcm_reg_id " , " android-token " , RemotePushDeviceToken . FCM , { } ) ,
2016-10-27 23:55:31 +02:00
]
# Test error handling
2023-11-06 21:06:55 +01:00
for endpoint , token , kind , appid in endpoints :
2016-10-27 23:55:31 +02:00
# Try adding/removing tokens that are too big...
2017-07-07 18:18:37 +02:00
broken_token = " a " * 5000 # too big
2023-11-06 21:06:55 +01:00
result = self . client_post ( endpoint , { " token " : broken_token , * * appid } , subdomain = " zulip " )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Empty or invalid length token " )
2016-10-27 23:55:31 +02:00
2023-11-06 21:01:21 +01:00
result = self . client_delete ( endpoint , { " token " : broken_token } , subdomain = " zulip " )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Empty or invalid length token " )
2016-10-27 23:55:31 +02:00
2023-11-07 23:12:39 +01:00
# Try adding with missing or invalid appid...
2023-11-06 20:05:30 +01:00
if appid :
result = self . client_post ( endpoint , { " token " : token } , subdomain = " zulip " )
self . assert_json_error ( result , " Missing ' appid ' argument " )
2023-11-07 23:12:39 +01:00
result = self . client_post (
endpoint , { " token " : token , " appid " : " ' ; tables -- " } , subdomain = " zulip "
)
2024-04-20 11:47:33 +02:00
self . assert_json_error ( result , " appid has invalid format " )
2023-11-07 23:12:39 +01:00
2016-10-27 23:55:31 +02:00
# Try to remove a non-existent token...
2023-11-06 21:01:21 +01:00
result = self . client_delete ( endpoint , { " token " : " abcd1234 " } , subdomain = " zulip " )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Token does not exist " )
2016-10-27 23:55:31 +02:00
2024-07-16 22:52:01 +02:00
assert settings . ZULIP_SERVICES_URL is not None
URL = settings . ZULIP_SERVICES_URL + " /api/v1/remotes/push/register "
2021-06-11 22:51:27 +02:00
with responses . RequestsMock ( ) as resp , self . assertLogs ( level = " ERROR " ) as error_log :
resp . add ( responses . POST , URL , body = ConnectionError ( ) , status = 502 )
2023-10-04 20:45:34 +02:00
with self . assertRaisesRegex (
PushNotificationBouncerRetryLaterError ,
r " ^ConnectionError while trying to connect to push notification bouncer$ " ,
) :
2023-11-06 21:06:55 +01:00
self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
2022-08-31 19:45:49 +02:00
self . assertIn (
f " ERROR:django.request:Bad Gateway: { endpoint } \n Traceback " ,
error_log . output [ 0 ] ,
2021-02-12 08:19:30 +01:00
)
2021-06-11 22:51:27 +02:00
with responses . RequestsMock ( ) as resp , self . assertLogs ( level = " WARNING " ) as warn_log :
resp . add ( responses . POST , URL , body = orjson . dumps ( { " msg " : " error " } ) , status = 500 )
2023-10-04 20:45:34 +02:00
with self . assertRaisesRegex (
2023-12-07 21:02:35 +01:00
PushNotificationBouncerServerError ,
2023-10-04 20:45:34 +02:00
r " Received 500 from push notification bouncer$ " ,
) :
2023-11-06 21:06:55 +01:00
self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
2021-02-12 08:19:30 +01:00
self . assertEqual (
2022-08-31 19:45:49 +02:00
warn_log . output [ 0 ] ,
" WARNING:root:Received 500 from push notification bouncer " ,
)
self . assertIn (
f " ERROR:django.request:Bad Gateway: { endpoint } \n Traceback " , warn_log . output [ 1 ]
2021-02-12 08:19:30 +01:00
)
2019-12-03 20:19:38 +01:00
2016-10-27 23:55:31 +02:00
# Add tokens
2023-11-06 21:06:55 +01:00
for endpoint , token , kind , appid in endpoints :
2023-12-01 22:57:34 +01:00
# First register a token without having a RemoteRealm registration:
RemoteRealm . objects . all ( ) . delete ( )
with self . assertLogs ( " zilencer.views " , level = " INFO " ) as info_log :
result = self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
self . assert_json_success ( result )
self . assertIn (
" INFO:zilencer.views:/api/v1/remotes/push/register: Received request for "
f " unknown realm { user . realm . uuid !s} , server { server . id } , "
f " user { user . uuid !s} " ,
info_log . output ,
)
# The registration succeeded, but RemotePushDeviceToken doesn't have remote_realm set:
tokens = list (
RemotePushDeviceToken . objects . filter (
user_uuid = user . uuid , token = token , server = server
)
)
self . assert_length ( tokens , 1 )
self . assertEqual ( tokens [ 0 ] . kind , kind )
self . assertEqual ( tokens [ 0 ] . user_uuid , user . uuid )
# Delete it to clean up.
RemotePushDeviceToken . objects . filter (
user_uuid = user . uuid , token = token , server = server
) . delete ( )
# Create the expected RemoteRealm registration and proceed with testing with a
# normal setup.
update_remote_realm_data_for_server ( self . server , get_realms_info_for_push_bouncer ( ) )
2023-12-25 03:01:58 +01:00
time_sent = now ( )
with time_machine . travel ( time_sent , tick = False ) :
result = self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
self . assert_json_success ( result )
2016-10-27 23:55:31 +02:00
2023-12-25 03:01:58 +01:00
# Test that we can push more times
result = self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
self . assert_json_success ( result )
2016-10-27 23:55:31 +02:00
2021-02-12 08:19:30 +01:00
tokens = list (
2022-02-23 20:27:39 +01:00
RemotePushDeviceToken . objects . filter (
user_uuid = user . uuid , token = token , server = server
)
2021-02-12 08:19:30 +01:00
)
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 1 )
2023-11-06 22:02:06 +01:00
self . assertEqual ( tokens [ 0 ] . kind , kind )
2023-12-01 22:57:34 +01:00
# These new registrations have .remote_realm set properly.
assert tokens [ 0 ] . remote_realm is not None
2023-12-25 03:01:58 +01:00
remote_realm = tokens [ 0 ] . remote_realm
self . assertEqual ( remote_realm . uuid , user . realm . uuid )
2023-11-06 22:02:06 +01:00
self . assertEqual ( tokens [ 0 ] . ios_app_id , appid . get ( " appid " ) )
2016-10-27 23:55:31 +02:00
2023-12-25 03:01:58 +01:00
# Both RemoteRealm and RemoteZulipServer should have last_request_datetime
# updated.
self . assertEqual ( remote_realm . last_request_datetime , time_sent )
server . refresh_from_db ( )
self . assertEqual ( server . last_request_datetime , time_sent )
2016-10-27 23:55:31 +02:00
# User should have tokens for both devices now.
2022-02-23 20:27:39 +01:00
tokens = list ( RemotePushDeviceToken . objects . filter ( user_uuid = user . uuid , server = server ) )
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 2 )
2016-10-27 23:55:31 +02:00
# Remove tokens
2024-07-14 21:06:04 +02:00
time_sent + = timedelta ( minutes = 1 )
2023-11-06 21:06:55 +01:00
for endpoint , token , kind , appid in endpoints :
2023-12-25 23:10:35 +01:00
with time_machine . travel ( time_sent , tick = False ) :
result = self . client_delete ( endpoint , { " token " : token } , subdomain = " zulip " )
2016-10-27 23:55:31 +02:00
self . assert_json_success ( result )
2021-02-12 08:19:30 +01:00
tokens = list (
2022-02-23 20:27:39 +01:00
RemotePushDeviceToken . objects . filter (
user_uuid = user . uuid , token = token , server = server
)
2021-02-12 08:19:30 +01:00
)
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 0 )
2016-10-27 23:55:31 +02:00
2023-12-25 23:10:35 +01:00
remote_realm . refresh_from_db ( )
self . assertEqual ( remote_realm . last_request_datetime , time_sent )
2019-11-19 03:12:54 +01:00
# Re-add copies of those tokens
2023-11-06 21:06:55 +01:00
for endpoint , token , kind , appid in endpoints :
result = self . client_post ( endpoint , { " token " : token , * * appid } , subdomain = " zulip " )
2019-11-19 03:12:54 +01:00
self . assert_json_success ( result )
2022-02-23 20:27:39 +01:00
tokens = list ( RemotePushDeviceToken . objects . filter ( user_uuid = user . uuid , server = server ) )
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 2 )
2019-11-19 03:12:54 +01:00
2019-12-03 20:19:38 +01:00
# Now we want to remove them using the bouncer after an API key change.
# First we test error handling in case of issues with the bouncer:
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.worker.deferred_work.clear_push_device_tokens " ,
side_effect = PushNotificationBouncerRetryLaterError ( " test " ) ,
) ,
mock . patch ( " zerver.worker.deferred_work.retry_event " ) as mock_retry ,
) :
2024-08-22 09:31:47 +02:00
with self . captureOnCommitCallbacks ( execute = True ) :
do_regenerate_api_key ( user , user )
2019-12-03 20:19:38 +01:00
mock_retry . assert_called ( )
# We didn't manage to communicate with the bouncer, to the tokens are still there:
2022-02-23 20:27:39 +01:00
tokens = list ( RemotePushDeviceToken . objects . filter ( user_uuid = user . uuid , server = server ) )
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 2 )
2019-12-03 20:19:38 +01:00
2020-03-28 01:25:56 +01:00
# Now we successfully remove them:
2024-07-14 21:06:04 +02:00
time_sent + = timedelta ( minutes = 1 )
2024-08-22 09:31:47 +02:00
with (
time_machine . travel ( time_sent , tick = False ) ,
self . captureOnCommitCallbacks ( execute = True ) ,
) :
2023-12-25 23:10:35 +01:00
do_regenerate_api_key ( user , user )
2022-02-23 20:27:39 +01:00
tokens = list ( RemotePushDeviceToken . objects . filter ( user_uuid = user . uuid , server = server ) )
2021-05-17 05:41:32 +02:00
self . assert_length ( tokens , 0 )
2019-11-19 03:12:54 +01:00
2023-12-25 23:10:35 +01:00
remote_realm . refresh_from_db ( )
self . assertEqual ( remote_realm . last_request_datetime , time_sent )
2021-02-12 08:19:30 +01:00
2019-01-31 00:39:02 +01:00
class AnalyticsBouncerTest ( BouncerTestCase ) :
2023-11-19 19:45:19 +01:00
TIME_ZERO = datetime ( 1988 , 3 , 14 , tzinfo = timezone . utc )
2019-01-31 00:39:02 +01:00
2023-12-14 17:33:35 +01:00
def assertPushNotificationsAre(self, should_be: bool) -> None:
    """Assert that every Realm in the database has its
    push_notifications_enabled flag equal to should_be."""
    observed_flags = set(
        Realm.objects.all().distinct().values_list("push_notifications_enabled", flat=True)
    )
    self.assertEqual({should_be}, observed_flags)
2024-03-18 01:18:53 +01:00
@override
def setUp(self) -> None:
    # Clear the redis marker recording that push notifications recently
    # worked, so every test in this class starts from a known-clean state.
    redis_client.delete(PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY)
    return super().setUp()
2024-07-16 22:52:01 +02:00
@activate_push_notification_service()
@responses.activate
def test_analytics_failure_api(self) -> None:
    """Exercise the failure modes of talking to the push bouncer's
    analytics endpoints, and verify how each failure affects the
    realms' push_notifications_enabled flag: hard failures disable it,
    while transient failures (recently-working redis record, 502)
    leave it enabled.
    """
    assert settings.ZULIP_SERVICES_URL is not None
    ANALYTICS_URL = settings.ZULIP_SERVICES_URL + "/api/v1/remotes/server/analytics"
    ANALYTICS_STATUS_URL = ANALYTICS_URL + "/status"

    # ConnectionError with no record of push notifications recently working:
    # treated as a real failure, disabling push notifications.
    with (
        responses.RequestsMock() as resp,
        self.assertLogs("zulip.analytics", level="WARNING") as mock_warning,
    ):
        resp.add(responses.GET, ANALYTICS_STATUS_URL, body=ConnectionError())
        Realm.objects.all().update(push_notifications_enabled=True)
        send_server_data_to_push_bouncer()
        self.assertEqual(
            "WARNING:zulip.analytics:ConnectionError while trying to connect to push notification bouncer",
            mock_warning.output[0],
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
        self.assertPushNotificationsAre(False)

    # Simulate ConnectionError again, but this time with a redis record indicating
    # that push notifications have recently worked fine.
    with (
        responses.RequestsMock() as resp,
        self.assertLogs("zulip.analytics", level="WARNING") as mock_warning,
    ):
        resp.add(responses.GET, ANALYTICS_STATUS_URL, body=ConnectionError())
        Realm.objects.all().update(push_notifications_enabled=True)
        record_push_notifications_recently_working()
        send_server_data_to_push_bouncer()
        self.assertEqual(
            "WARNING:zulip.analytics:ConnectionError while trying to connect to push notification bouncer",
            mock_warning.output[0],
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
        # push_notifications_enabled shouldn't get set to False, because this is treated
        # as a transient error.
        self.assertPushNotificationsAre(True)
        # However after an hour has passed without seeing push notifications
        # working, we take the error seriously.
        with time_machine.travel(now() + timedelta(minutes=61), tick=False):
            send_server_data_to_push_bouncer()
            self.assertEqual(
                "WARNING:zulip.analytics:ConnectionError while trying to connect to push notification bouncer",
                mock_warning.output[1],
            )
            self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 2))
            self.assertPushNotificationsAre(False)
        # Clean up the "recently working" record so it doesn't leak into
        # the scenarios below.
        redis_client.delete(
            redis_utils.REDIS_KEY_PREFIX + PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY
        )

    # A non-JSON response body is an unexpected-format error, logged with
    # a traceback; push notifications get disabled.
    with (
        responses.RequestsMock() as resp,
        self.assertLogs("zulip.analytics", level="WARNING") as mock_warning,
    ):
        resp.add(responses.GET, ANALYTICS_STATUS_URL, body="This is not JSON")
        Realm.objects.all().update(push_notifications_enabled=True)
        send_server_data_to_push_bouncer()
        self.assertTrue(
            mock_warning.output[0].startswith(
                f"ERROR:zulip.analytics:Exception communicating with {settings.ZULIP_SERVICES_URL}\nTraceback",
            )
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
        self.assertPushNotificationsAre(False)

    # A 502 from the bouncer is treated as transient, so the flag stays True.
    with responses.RequestsMock() as resp, self.assertLogs("", level="WARNING") as mock_warning:
        resp.add(responses.GET, ANALYTICS_STATUS_URL, body="Server error", status=502)
        Realm.objects.all().update(push_notifications_enabled=True)
        send_server_data_to_push_bouncer()
        self.assertEqual(
            "WARNING:root:Received 502 from push notification bouncer",
            mock_warning.output[0],
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
        self.assertPushNotificationsAre(True)

    # A 401 on the status GET endpoint: logged and push notifications disabled.
    with (
        responses.RequestsMock() as resp,
        self.assertLogs("zulip.analytics", level="WARNING") as mock_warning,
    ):
        Realm.objects.all().update(push_notifications_enabled=True)
        resp.add(
            responses.GET,
            ANALYTICS_STATUS_URL,
            status=401,
            json={"CODE": "UNAUTHORIZED", "msg": "Some problem", "result": "error"},
        )
        send_server_data_to_push_bouncer()
        self.assertIn(
            "WARNING:zulip.analytics:Some problem",
            mock_warning.output[0],
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))
        self.assertPushNotificationsAre(False)

    # A 401 on the analytics POST endpoint (after a successful status GET):
    # also logged and push notifications disabled.
    with (
        responses.RequestsMock() as resp,
        self.assertLogs("zulip.analytics", level="WARNING") as mock_warning,
    ):
        Realm.objects.all().update(push_notifications_enabled=True)
        resp.add(
            responses.GET,
            ANALYTICS_STATUS_URL,
            json={
                "last_realm_count_id": 0,
                "last_installation_count_id": 0,
                "last_realmauditlog_id": 0,
            },
        )
        resp.add(
            responses.POST,
            ANALYTICS_URL,
            status=401,
            json={"CODE": "UNAUTHORIZED", "msg": "Some problem", "result": "error"},
        )
        send_server_data_to_push_bouncer()
        self.assertIn(
            "WARNING:zulip.analytics:Some problem",
            mock_warning.output[0],
        )
        self.assertTrue(resp.assert_call_count(ANALYTICS_URL, 1))
        self.assertPushNotificationsAre(False)
2024-07-16 22:52:01 +02:00
@activate_push_notification_service(submit_usage_statistics=True)
@responses.activate
def test_analytics_api(self) -> None:
    """This is a variant of the below test_push_api, but using the full
    push notification bouncer flow.

    Walks through the analytics-upload protocol end to end: initial
    sync, the various ANALYTICS_DATA_UPLOAD_LEVEL settings, RemoteRealm
    attribute syncing (with audit-log entries), incremental row syncing,
    forbidden properties, out-of-order data, duplicate-row dropping, and
    org_type validation.
    """
    assert settings.ZULIP_SERVICES_URL is not None
    ANALYTICS_URL = settings.ZULIP_SERVICES_URL + "/api/v1/remotes/server/analytics"
    ANALYTICS_STATUS_URL = ANALYTICS_URL + "/status"
    user = self.example_user("hamlet")
    end_time = self.TIME_ZERO

    self.add_mock_response()
    # Send any existing data over, so that we can start the test with a "clean" slate
    remote_server = self.server
    assert remote_server is not None
    assert remote_server.last_version is None

    send_server_data_to_push_bouncer()
    self.assertTrue(responses.assert_call_count(ANALYTICS_STATUS_URL, 1))

    # Remember the highest pre-existing RealmAuditLog id, so we can count
    # only the rows created by this test below.
    audit_log = RealmAuditLog.objects.all().order_by("id").last()
    assert audit_log is not None
    audit_log_max_id = audit_log.id

    # The initial sync should have recorded the server's version.
    remote_server.refresh_from_db()
    assert remote_server.last_version == ZULIP_VERSION

    remote_audit_log_count = RemoteRealmAuditLog.objects.count()

    self.assertEqual(RemoteRealmCount.objects.count(), 0)
    self.assertEqual(RemoteInstallationCount.objects.count(), 0)

    def check_counts(
        analytics_status_mock_request_call_count: int,
        analytics_mock_request_call_count: int,
        remote_realm_count: int,
        remote_installation_count: int,
        remote_realm_audit_log: int,
    ) -> None:
        # Helper asserting both the number of HTTP calls made to the
        # bouncer endpoints and the number of synced rows on the
        # bouncer side.
        self.assertTrue(
            responses.assert_call_count(
                ANALYTICS_STATUS_URL, analytics_status_mock_request_call_count
            )
        )
        self.assertTrue(
            responses.assert_call_count(ANALYTICS_URL, analytics_mock_request_call_count)
        )
        self.assertEqual(RemoteRealmCount.objects.count(), remote_realm_count)
        self.assertEqual(RemoteInstallationCount.objects.count(), remote_installation_count)
        self.assertEqual(
            RemoteRealmAuditLog.objects.count(), remote_audit_log_count + remote_realm_audit_log
        )

    # Create some rows we'll send to remote server
    # LoggingCountStat that should be included;
    # i.e. not in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
    messages_read_logging_stat = LoggingCountStat(
        "messages_read::hour", UserCount, CountStat.HOUR
    )
    RealmCount.objects.create(
        realm=user.realm,
        property=messages_read_logging_stat.property,
        end_time=end_time,
        value=5,
    )
    InstallationCount.objects.create(
        property=messages_read_logging_stat.property,
        end_time=end_time,
        value=5,
    )
    # LoggingCountStat that should not be included;
    # i.e. in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
    invites_sent_logging_stat = LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY)
    RealmCount.objects.create(
        realm=user.realm,
        property=invites_sent_logging_stat.property,
        end_time=end_time,
        value=5,
    )
    InstallationCount.objects.create(
        property=invites_sent_logging_stat.property,
        end_time=end_time,
        value=5,
    )
    # Event type in SYNCED_BILLING_EVENTS -- should be included
    RealmAuditLog.objects.create(
        realm=user.realm,
        modified_user=user,
        event_type=AuditLogEventType.USER_CREATED,
        event_time=end_time,
        extra_data=orjson.dumps(
            {
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
            }
        ).decode(),
    )
    # Event type not in SYNCED_BILLING_EVENTS -- should not be included
    RealmAuditLog.objects.create(
        realm=user.realm,
        modified_user=user,
        event_type=AuditLogEventType.REALM_LOGO_CHANGED,
        event_time=end_time,
        extra_data=orjson.dumps({"foo": "bar"}).decode(),
    )
    self.assertEqual(RealmCount.objects.count(), 2)
    self.assertEqual(InstallationCount.objects.count(), 2)
    self.assertEqual(RealmAuditLog.objects.filter(id__gt=audit_log_max_id).count(), 2)

    with self.settings(ANALYTICS_DATA_UPLOAD_LEVEL=AnalyticsDataUploadLevel.BILLING):
        # With this setting, we don't send RealmCounts and InstallationCounts.
        send_server_data_to_push_bouncer()
    check_counts(2, 2, 0, 0, 1)

    with self.settings(ANALYTICS_DATA_UPLOAD_LEVEL=AnalyticsDataUploadLevel.ALL):
        # With ALL data upload enabled, but 'consider_usage_statistics=False',
        # we don't send RealmCount and InstallationCounts.
        send_server_data_to_push_bouncer(consider_usage_statistics=False)
    check_counts(3, 3, 0, 0, 1)

    send_server_data_to_push_bouncer()
    check_counts(4, 4, 1, 1, 1)

    # Verify the full set of RemoteRealm registrations matches the
    # local Realms, field by field.
    self.assertEqual(
        list(
            RemoteRealm.objects.order_by("id").values(
                "server_id",
                "uuid",
                "uuid_owner_secret",
                "host",
                "name",
                "org_type",
                "authentication_methods",
                "realm_date_created",
                "registration_deactivated",
                "realm_deactivated",
                "plan_type",
                "is_system_bot_realm",
            )
        ),
        [
            {
                "server_id": self.server.id,
                "uuid": realm.uuid,
                "uuid_owner_secret": realm.uuid_owner_secret,
                "host": realm.host,
                "name": realm.name,
                "org_type": realm.org_type,
                "authentication_methods": realm.authentication_methods_dict(),
                "realm_date_created": realm.date_created,
                "registration_deactivated": False,
                "realm_deactivated": False,
                "plan_type": RemoteRealm.PLAN_TYPE_SELF_MANAGED,
                "is_system_bot_realm": realm.string_id == "zulipinternal",
            }
            for realm in Realm.objects.order_by("id")
        ],
    )

    # Modify a realm and verify the remote realm data that should get updated, get updated.
    zephyr_realm = get_realm("zephyr")
    zephyr_original_host = zephyr_realm.host
    zephyr_realm.string_id = "zephyr2"

    zephyr_original_name = zephyr_realm.name
    zephyr_realm.name = "Zephyr2"

    zephyr_original_org_type = zephyr_realm.org_type
    self.assertEqual(zephyr_realm.org_type, Realm.ORG_TYPES["business"]["id"])
    do_change_realm_org_type(
        zephyr_realm, Realm.ORG_TYPES["government"]["id"], acting_user=user
    )

    # date_created can't be updated.
    original_date_created = zephyr_realm.date_created
    zephyr_realm.date_created = now()
    zephyr_realm.save()

    zephyr_original_authentication_methods = zephyr_realm.authentication_methods_dict()
    # Sanity check to make sure the set up is how we think.
    self.assertEqual(zephyr_original_authentication_methods["Email"], True)

    new_auth_method_dict = {
        "Google": False,
        "Email": False,
        "GitHub": False,
        "Apple": False,
        "Dev": True,
        "SAML": True,
        "GitLab": False,
        "OpenID Connect": False,
    }
    do_set_realm_authentication_methods(zephyr_realm, new_auth_method_dict, acting_user=user)

    # Deactivation is synced.
    do_deactivate_realm(
        zephyr_realm, acting_user=None, deactivation_reason="owner_request", email_owners=False
    )

    send_server_data_to_push_bouncer()
    check_counts(5, 5, 1, 1, 7)

    zephyr_remote_realm = RemoteRealm.objects.get(uuid=zephyr_realm.uuid)
    self.assertEqual(zephyr_remote_realm.host, zephyr_realm.host)
    self.assertEqual(zephyr_remote_realm.realm_date_created, original_date_created)
    self.assertEqual(zephyr_remote_realm.realm_deactivated, True)
    self.assertEqual(zephyr_remote_realm.name, zephyr_realm.name)
    self.assertEqual(zephyr_remote_realm.authentication_methods, new_auth_method_dict)
    self.assertEqual(zephyr_remote_realm.org_type, Realm.ORG_TYPES["government"]["id"])

    # Verify the RemoteRealmAuditLog entries created.
    remote_audit_logs = (
        RemoteRealmAuditLog.objects.filter(
            event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
            remote_realm=zephyr_remote_realm,
        )
        .order_by("id")
        .values("event_type", "remote_id", "realm_id", "extra_data")
    )
    self.assertEqual(
        list(remote_audit_logs),
        [
            dict(
                event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                remote_id=None,
                realm_id=zephyr_realm.id,
                extra_data={
                    "attr_name": "host",
                    "old_value": zephyr_original_host,
                    "new_value": zephyr_realm.host,
                },
            ),
            dict(
                event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                remote_id=None,
                realm_id=zephyr_realm.id,
                extra_data={
                    "attr_name": "org_type",
                    "old_value": zephyr_original_org_type,
                    "new_value": zephyr_realm.org_type,
                },
            ),
            dict(
                event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                remote_id=None,
                realm_id=zephyr_realm.id,
                extra_data={
                    "attr_name": "name",
                    "old_value": zephyr_original_name,
                    "new_value": zephyr_realm.name,
                },
            ),
            dict(
                event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                remote_id=None,
                realm_id=zephyr_realm.id,
                extra_data={
                    "attr_name": "authentication_methods",
                    "old_value": zephyr_original_authentication_methods,
                    "new_value": new_auth_method_dict,
                },
            ),
            dict(
                event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                remote_id=None,
                realm_id=zephyr_realm.id,
                extra_data={
                    "attr_name": "realm_deactivated",
                    "old_value": False,
                    "new_value": True,
                },
            ),
        ],
    )

    # Test having no new rows
    send_server_data_to_push_bouncer()
    check_counts(6, 6, 1, 1, 7)

    # Test only having new RealmCount rows
    RealmCount.objects.create(
        realm=user.realm,
        property=messages_read_logging_stat.property,
        end_time=end_time + timedelta(days=1),
        value=6,
    )
    RealmCount.objects.create(
        realm=user.realm,
        property=messages_read_logging_stat.property,
        end_time=end_time + timedelta(days=2),
        value=9,
    )
    send_server_data_to_push_bouncer()
    check_counts(7, 7, 3, 1, 7)

    # Test only having new InstallationCount rows
    InstallationCount.objects.create(
        property=messages_read_logging_stat.property,
        end_time=end_time + timedelta(days=1),
        value=6,
    )
    send_server_data_to_push_bouncer()
    check_counts(8, 8, 3, 2, 7)

    # Test only having new RealmAuditLog rows
    # Non-synced event
    RealmAuditLog.objects.create(
        realm=user.realm,
        modified_user=user,
        event_type=AuditLogEventType.REALM_LOGO_CHANGED,
        event_time=end_time,
        extra_data={"data": "foo"},
    )
    send_server_data_to_push_bouncer()
    check_counts(9, 9, 3, 2, 7)
    # Synced event
    RealmAuditLog.objects.create(
        realm=user.realm,
        modified_user=user,
        event_type=AuditLogEventType.USER_REACTIVATED,
        event_time=end_time,
        extra_data={
            RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
        },
    )
    with self.settings(ANALYTICS_DATA_UPLOAD_LEVEL=AnalyticsDataUploadLevel.BASIC):
        # With the BASIC level, RealmAuditLog rows are not sent.
        send_server_data_to_push_bouncer()
        check_counts(10, 10, 3, 2, 7)
    # Now, with ANALYTICS_DATA_UPLOAD_LEVEL back to the baseline for this test,
    # the new RealmAuditLog event will be sent.
    send_server_data_to_push_bouncer()
    check_counts(11, 11, 3, 2, 8)

    # Now create an InstallationCount with a property that's not supposed
    # to be tracked by the remote server - since the bouncer itself tracks
    # the RemoteInstallationCount with this property. We want to verify
    # that the remote server will fail at sending analytics to the bouncer
    # with such an InstallationCount - since syncing it should not be allowed.
    forbidden_installation_count = InstallationCount.objects.create(
        property="mobile_pushes_received::day",
        end_time=end_time,
        value=5,
    )
    with self.assertLogs("zulip.analytics", level="WARNING") as warn_log:
        send_server_data_to_push_bouncer()
    self.assertEqual(
        warn_log.output,
        ["WARNING:zulip.analytics:Invalid property mobile_pushes_received::day"],
    )
    # The analytics endpoint call counts increase by 1, but the actual RemoteCounts remain unchanged,
    # since syncing the data failed.
    check_counts(12, 12, 3, 2, 8)
    forbidden_installation_count.delete()

    # POSTing rows whose ids are lower than what the bouncer has already
    # seen must be rejected as out-of-order data.
    (realm_count_data, installation_count_data, realmauditlog_data) = build_analytics_data(
        RealmCount.objects.all(), InstallationCount.objects.all(), RealmAuditLog.objects.all()
    )
    request = AnalyticsRequest.model_construct(
        realm_counts=realm_count_data,
        installation_counts=installation_count_data,
        realmauditlog_rows=realmauditlog_data,
        realms=[],
        version=None,
        merge_base=None,
        api_feature_level=None,
    )
    result = self.uuid_post(
        self.server_uuid,
        "/api/v1/remotes/server/analytics",
        request.model_dump(
            round_trip=True, exclude={"realms", "version", "merge_base", "api_feature_level"}
        ),
        subdomain="",
    )
    self.assert_json_error(result, "Data is out of order.")

    # Adjust the id of all existing rows so that they get re-sent.
    # This is equivalent to running `./manage.py clear_analytics_tables`
    RealmCount.objects.all().update(id=F("id") + RealmCount.objects.latest("id").id)
    InstallationCount.objects.all().update(
        id=F("id") + InstallationCount.objects.latest("id").id
    )
    with self.assertLogs(level="WARNING") as warn_log:
        send_server_data_to_push_bouncer()
    self.assertEqual(
        warn_log.output,
        [
            f"WARNING:root:Dropped 3 duplicated rows while saving 3 rows of zilencer_remoterealmcount for server demo.example.com/{self.server_uuid}",
            f"WARNING:root:Dropped 2 duplicated rows while saving 2 rows of zilencer_remoteinstallationcount for server demo.example.com/{self.server_uuid}",
        ],
    )
    # Only the request counts go up -- all of the other rows' duplicates are dropped
    check_counts(13, 13, 3, 2, 8)

    # Test that only valid org_type values are accepted - integers defined in OrgTypeEnum.
    realms_data = get_realms_info_for_push_bouncer()
    # Not a valid org_type value:
    realms_data[0].org_type = 11

    request = AnalyticsRequest.model_construct(
        realm_counts=[],
        installation_counts=[],
        realmauditlog_rows=[],
        realms=realms_data,
        version=None,
        merge_base=None,
        api_feature_level=None,
    )
    result = self.uuid_post(
        self.server_uuid,
        "/api/v1/remotes/server/analytics",
        request.model_dump(
            round_trip=True, exclude={"version", "merge_base", "api_feature_level"}
        ),
        subdomain="",
    )
    self.assert_json_error(
        result, 'Invalid realms[0]["org_type"]: Value error, Not a valid org_type value'
    )
2024-07-16 22:52:01 +02:00
    @activate_push_notification_service(submit_usage_statistics=True)
    @responses.activate
    def test_analytics_api_foreign_keys_to_remote_realm(self) -> None:
        """Verify population of the remote_realm foreign key on bouncer-side rows.

        Rows synced before the bouncer has realms data are stored with
        remote_realm=NULL; a later sync that includes realms info must
        backfill the foreign key, and rows synced after that must get it
        set immediately.
        """
        self.add_mock_response()
        user = self.example_user("hamlet")
        end_time = self.TIME_ZERO

        # Create some rows we'll send to remote server
        messages_read_logging_stat = LoggingCountStat(
            "messages_read::hour", UserCount, CountStat.HOUR
        )
        realm_count = RealmCount.objects.create(
            realm=user.realm,
            property=messages_read_logging_stat.property,
            end_time=end_time,
            value=5,
        )
        installation_count = InstallationCount.objects.create(
            property=messages_read_logging_stat.property,
            end_time=end_time,
            value=5,
        )
        realm_audit_log = RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=AuditLogEventType.USER_CREATED,
            event_time=end_time,
            extra_data=orjson.dumps(
                {
                    RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
                }
            ).decode(),
        )
        realm_count_data, installation_count_data, realmauditlog_data = build_analytics_data(
            RealmCount.objects.all(), InstallationCount.objects.all(), RealmAuditLog.objects.all()
        )

        # This first post should fail because of excessive audit log event types.
        request = AnalyticsRequest.model_construct(
            realm_counts=realm_count_data,
            installation_counts=installation_count_data,
            realmauditlog_rows=realmauditlog_data,
            realms=[],
            version=None,
            merge_base=None,
            api_feature_level=None,
        )
        result = self.uuid_post(
            self.server_uuid,
            "/api/v1/remotes/server/analytics",
            request.model_dump(
                round_trip=True, exclude={"version", "merge_base", "api_feature_level"}
            ),
            subdomain="",
        )
        self.assert_json_error(result, "Invalid event type.")

        # Start again only using synced billing events.
        realm_count_data, installation_count_data, realmauditlog_data = build_analytics_data(
            RealmCount.objects.all(),
            InstallationCount.objects.all(),
            RealmAuditLog.objects.filter(event_type__in=RemoteRealmAuditLog.SYNCED_BILLING_EVENTS),
        )

        # Send the data to the bouncer without any realms data. This should lead
        # to successful saving of the data, but with the remote_realm foreign key
        # set to NULL.
        request = AnalyticsRequest.model_construct(
            realm_counts=realm_count_data,
            installation_counts=installation_count_data,
            realmauditlog_rows=realmauditlog_data,
            realms=[],
            version=None,
            merge_base=None,
            api_feature_level=None,
        )
        result = self.uuid_post(
            self.server_uuid,
            "/api/v1/remotes/server/analytics",
            request.model_dump(
                round_trip=True, exclude={"version", "merge_base", "api_feature_level"}
            ),
            subdomain="",
        )
        self.assert_json_success(result)
        remote_realm_count = RemoteRealmCount.objects.latest("id")
        remote_installation_count = RemoteInstallationCount.objects.latest("id")
        remote_realm_audit_log = RemoteRealmAuditLog.objects.latest("id")
        self.assertEqual(remote_realm_count.remote_id, realm_count.id)
        self.assertEqual(remote_realm_count.remote_realm, None)
        self.assertEqual(remote_installation_count.remote_id, installation_count.id)
        # InstallationCount/RemoteInstallationCount don't have realm/remote_realm foreign
        # keys, because they're aggregated over all realms.
        self.assertEqual(remote_realm_audit_log.remote_id, realm_audit_log.id)
        self.assertEqual(remote_realm_audit_log.remote_realm, None)

        # A full sync includes realms info, which lets the bouncer backfill
        # remote_realm on the rows saved above with NULL.
        send_server_data_to_push_bouncer()

        remote_realm_count.refresh_from_db()
        remote_installation_count.refresh_from_db()
        remote_realm_audit_log.refresh_from_db()

        remote_realm = RemoteRealm.objects.get(uuid=user.realm.uuid)
        self.assertEqual(remote_realm_count.remote_realm, remote_realm)
        self.assertEqual(remote_realm_audit_log.remote_realm, remote_realm)

        current_remote_realm_count_amount = RemoteRealmCount.objects.count()
        current_remote_realm_audit_log_amount = RemoteRealmAuditLog.objects.count()

        # Now create and send new data (including realm info) and verify it has .remote_realm
        # set as it should.
        RealmCount.objects.create(
            realm=user.realm,
            property=messages_read_logging_stat.property,
            end_time=end_time + timedelta(days=1),
            value=6,
        )
        InstallationCount.objects.create(
            property=messages_read_logging_stat.property,
            end_time=end_time + timedelta(days=1),
            value=6,
        )
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=AuditLogEventType.USER_CREATED,
            event_time=end_time,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
            },
        )
        send_server_data_to_push_bouncer()

        # Make sure new data was created, so that we're actually testing what we think.
        self.assertEqual(RemoteRealmCount.objects.count(), current_remote_realm_count_amount + 1)
        self.assertEqual(
            RemoteRealmAuditLog.objects.count(), current_remote_realm_audit_log_amount + 1
        )
        for remote_realm_count in RemoteRealmCount.objects.filter(realm_id=user.realm.id):
            self.assertEqual(remote_realm_count.remote_realm, remote_realm)
        for remote_realm_audit_log in RemoteRealmAuditLog.objects.filter(realm_id=user.realm.id):
            self.assertEqual(remote_realm_audit_log.remote_realm, remote_realm)
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( submit_usage_statistics = True )
2021-06-11 22:51:27 +02:00
@responses.activate
def test_analytics_api_invalid ( self ) - > None :
2019-01-31 00:39:02 +01:00
""" This is a variant of the below test_push_api, but using the full
push notification bouncer flow
"""
2021-06-11 22:51:27 +02:00
self . add_mock_response ( )
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2019-01-31 00:39:02 +01:00
end_time = self . TIME_ZERO
2021-02-12 08:20:45 +01:00
realm_stat = LoggingCountStat ( " invalid count stat " , RealmCount , CountStat . DAY )
2019-01-31 00:39:02 +01:00
RealmCount . objects . create (
2021-02-12 08:19:30 +01:00
realm = user . realm , property = realm_stat . property , end_time = end_time , value = 5
)
2019-01-31 00:39:02 +01:00
self . assertEqual ( RealmCount . objects . count ( ) , 1 )
self . assertEqual ( RemoteRealmCount . objects . count ( ) , 0 )
2023-11-17 19:43:25 +01:00
with self . assertLogs ( " zulip.analytics " , level = " WARNING " ) as m :
2023-12-11 14:24:13 +01:00
send_server_data_to_push_bouncer ( )
2023-11-17 19:43:25 +01:00
self . assertEqual ( m . output , [ " WARNING:zulip.analytics:Invalid property invalid count stat " ] )
2019-01-31 00:39:02 +01:00
self . assertEqual ( RemoteRealmCount . objects . count ( ) , 0 )
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2023-10-30 23:50:53 +01:00
@responses.activate
def test_remote_realm_duplicate_uuid ( self ) - > None :
"""
Tests for a case where a RemoteRealm with a certain uuid is already registered for one server ,
and then another server tries to register the same uuid . This generally shouldn ' t happen,
because export - > import of a realm should re - generate the uuid , but we should have error
handling for this edge case nonetheless .
"""
2023-12-01 18:06:22 +01:00
# Start by deleting existing registration, to have a clean slate.
RemoteRealm . objects . all ( ) . delete ( )
2023-10-30 23:50:53 +01:00
second_server = RemoteZulipServer . objects . create (
uuid = uuid . uuid4 ( ) ,
api_key = " magic_secret_api_key2 " ,
hostname = " demo2.example.com " ,
last_updated = now ( ) ,
)
self . add_mock_response ( )
user = self . example_user ( " hamlet " )
realm = user . realm
RemoteRealm . objects . create (
server = second_server ,
uuid = realm . uuid ,
uuid_owner_secret = realm . uuid_owner_secret ,
host = realm . host ,
realm_date_created = realm . date_created ,
registration_deactivated = False ,
realm_deactivated = False ,
2023-12-14 00:17:55 +01:00
plan_type = RemoteRealm . PLAN_TYPE_SELF_MANAGED ,
2023-10-30 23:50:53 +01:00
)
2023-11-17 19:43:25 +01:00
with transaction . atomic ( ) , self . assertLogs ( " zulip.analytics " , level = " WARNING " ) as m :
2023-10-30 23:50:53 +01:00
# The usual atomic() wrapper to avoid IntegrityError breaking the test's
# transaction.
2023-12-11 14:24:13 +01:00
send_server_data_to_push_bouncer ( )
2023-11-17 19:43:25 +01:00
self . assertEqual ( m . output , [ " WARNING:zulip.analytics:Duplicate registration detected. " ] )
2023-10-30 23:50:53 +01:00
2019-10-03 02:01:36 +02:00
# Servers on Zulip 2.0.6 and earlier only send realm_counts and installation_counts data,
# and don't send realmauditlog_rows. Make sure that continues to work.
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
def test_old_two_table_format ( self ) - > None :
self . add_mock_response ( )
2019-10-03 02:01:36 +02:00
# Send fixture generated with Zulip 2.0 code
2021-02-12 08:19:30 +01:00
send_to_push_bouncer (
2021-02-12 08:20:45 +01:00
" POST " ,
" server/analytics " ,
2021-02-12 08:19:30 +01:00
{
2024-05-31 22:06:19 +02:00
" realm_counts " : ' [ { " id " :1, " property " : " messages_sent:is_bot:hour " , " subgroup " : " false " , " end_time " :574300800.0, " value " :5, " realm " :2}] ' ,
2021-02-12 08:20:45 +01:00
" installation_counts " : " [] " ,
" version " : ' " 2.0.6+git " ' ,
2021-02-12 08:19:30 +01:00
} ,
)
2024-07-16 22:52:01 +02:00
assert settings . ZULIP_SERVICES_URL is not None
ANALYTICS_URL = settings . ZULIP_SERVICES_URL + " /api/v1/remotes/server/analytics "
2021-06-11 22:51:27 +02:00
self . assertTrue ( responses . assert_call_count ( ANALYTICS_URL , 1 ) )
2019-10-03 02:01:36 +02:00
self . assertEqual ( RemoteRealmCount . objects . count ( ) , 1 )
self . assertEqual ( RemoteInstallationCount . objects . count ( ) , 0 )
self . assertEqual ( RemoteRealmAuditLog . objects . count ( ) , 0 )
    # Make sure we aren't sending data we don't mean to, even if we don't store it.
    @activate_push_notification_service()
    @responses.activate
    def test_only_sending_intended_realmauditlog_data(self) -> None:
        """Verify that only SYNCED_BILLING_EVENTS rows, restricted to
        REALMAUDITLOG_PUSHED_FIELDS, appear in the payload sent to the bouncer.
        """
        self.add_mock_response()
        user = self.example_user("hamlet")
        # Event type in SYNCED_BILLING_EVENTS -- should be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=AuditLogEventType.USER_REACTIVATED,
            event_time=self.TIME_ZERO,
            extra_data={
                RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user.realm),
            },
        )
        # Event type not in SYNCED_BILLING_EVENTS -- should not be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=AuditLogEventType.REALM_LOGO_CHANGED,
            event_time=self.TIME_ZERO,
            extra_data=orjson.dumps({"foo": "bar"}).decode(),
        )

        # send_server_data_to_push_bouncer calls send_to_push_bouncer twice.
        # We need to distinguish the first and second calls.
        first_call = True

        def check_for_unwanted_data(*args: Any) -> Any:
            # Inspect only the second (POST) call's arguments; the first call
            # is skipped by flipping the nonlocal flag.
            nonlocal first_call
            if first_call:
                first_call = False
            else:
                # Test that we're respecting SYNCED_BILLING_EVENTS
                self.assertIn(f'"event_type":{AuditLogEventType.USER_REACTIVATED}', str(args))
                self.assertNotIn(f'"event_type":{AuditLogEventType.REALM_LOGO_CHANGED}', str(args))
                # Test that we're respecting REALMAUDITLOG_PUSHED_FIELDS
                self.assertIn("backfilled", str(args))
                self.assertNotIn("modified_user", str(args))
            # Delegate to the real transport so the flow still completes.
            return send_to_push_bouncer(*args)

        with mock.patch(
            "zerver.lib.remote_server.send_to_push_bouncer", side_effect=check_for_unwanted_data
        ):
            send_server_data_to_push_bouncer()
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
def test_realmauditlog_data_mapping ( self ) - > None :
self . add_mock_response ( )
2021-02-12 08:20:45 +01:00
user = self . example_user ( " hamlet " )
2023-06-02 17:33:05 +02:00
user_count = realm_user_count_by_role ( user . realm )
2019-10-03 02:01:36 +02:00
log_entry = RealmAuditLog . objects . create (
2021-02-12 08:19:30 +01:00
realm = user . realm ,
modified_user = user ,
backfilled = True ,
2024-08-30 18:15:41 +02:00
event_type = AuditLogEventType . USER_REACTIVATED ,
2021-02-12 08:19:30 +01:00
event_time = self . TIME_ZERO ,
2023-06-02 17:33:05 +02:00
extra_data = orjson . dumps ( { RealmAuditLog . ROLE_COUNT : user_count } ) . decode ( ) ,
2021-02-12 08:19:30 +01:00
)
2023-12-11 14:24:13 +01:00
send_server_data_to_push_bouncer ( )
2021-02-12 08:20:45 +01:00
remote_log_entry = RemoteRealmAuditLog . objects . order_by ( " id " ) . last ( )
2021-07-24 16:56:39 +02:00
assert remote_log_entry is not None
2021-12-22 14:37:12 +01:00
self . assertEqual ( str ( remote_log_entry . server . uuid ) , self . server_uuid )
2019-10-03 02:01:36 +02:00
self . assertEqual ( remote_log_entry . remote_id , log_entry . id )
self . assertEqual ( remote_log_entry . event_time , self . TIME_ZERO )
self . assertEqual ( remote_log_entry . backfilled , True )
2023-06-02 17:33:05 +02:00
assert remote_log_entry . extra_data is not None
2023-07-13 19:46:06 +02:00
self . assertEqual ( remote_log_entry . extra_data , { RealmAuditLog . ROLE_COUNT : user_count } )
2024-08-30 18:15:41 +02:00
self . assertEqual ( remote_log_entry . event_type , AuditLogEventType . USER_REACTIVATED )
2019-10-03 02:01:36 +02:00
2023-07-13 19:46:06 +02:00
    # This verifies that the bouncer is backwards-compatible with remote servers using
    # TextField to store extra_data.
    @activate_push_notification_service()
    @responses.activate
    def test_realmauditlog_string_extra_data(self) -> None:
        self.add_mock_response()

        def verify_request_with_overridden_extra_data(
            request_extra_data: object,
            *,
            expected_extra_data: object = None,
            skip_audit_log_check: bool = False,
        ) -> None:
            """Send a sync with extra_data replaced by request_extra_data, then
            check the stored bouncer-side row matches expected_extra_data
            (unless skip_audit_log_check is set, for malformed payloads)."""
            user = self.example_user("hamlet")
            log_entry = RealmAuditLog.objects.create(
                realm=user.realm,
                modified_user=user,
                event_type=AuditLogEventType.USER_REACTIVATED,
                event_time=self.TIME_ZERO,
                extra_data=orjson.dumps(
                    {
                        RealmAuditLog.ROLE_COUNT: {
                            RealmAuditLog.ROLE_COUNT_HUMANS: {},
                        }
                    }
                ).decode(),
            )

            # We use this to patch send_to_push_bouncer so that extra_data in the
            # legacy format gets sent to the bouncer.
            def transform_realmauditlog_extra_data(
                method: str,
                endpoint: str,
                post_data: bytes | Mapping[str, str | int | None | bytes],
                extra_headers: Mapping[str, str] = {},
            ) -> dict[str, Any]:
                if endpoint == "server/analytics":
                    assert isinstance(post_data, dict)
                    assert isinstance(post_data["realmauditlog_rows"], str)
                    original_data = orjson.loads(post_data["realmauditlog_rows"])
                    # We replace the extra_data with another fake example to verify that
                    # the bouncer actually gets requested with extra_data being string
                    new_data = [{**row, "extra_data": request_extra_data} for row in original_data]
                    post_data["realmauditlog_rows"] = orjson.dumps(new_data).decode()
                return send_to_push_bouncer(method, endpoint, post_data, extra_headers)

            with mock.patch(
                "zerver.lib.remote_server.send_to_push_bouncer",
                side_effect=transform_realmauditlog_extra_data,
            ):
                send_server_data_to_push_bouncer()

            if skip_audit_log_check:
                return

            remote_log_entry = RemoteRealmAuditLog.objects.order_by("id").last()
            assert remote_log_entry is not None
            self.assertEqual(str(remote_log_entry.server.uuid), self.server_uuid)
            self.assertEqual(remote_log_entry.remote_id, log_entry.id)
            self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
            self.assertEqual(remote_log_entry.extra_data, expected_extra_data)

        # Pre-migration extra_data
        verify_request_with_overridden_extra_data(
            request_extra_data=orjson.dumps(
                {
                    RealmAuditLog.ROLE_COUNT: {
                        RealmAuditLog.ROLE_COUNT_HUMANS: {},
                    }
                }
            ).decode(),
            expected_extra_data={
                RealmAuditLog.ROLE_COUNT: {
                    RealmAuditLog.ROLE_COUNT_HUMANS: {},
                }
            },
        )
        verify_request_with_overridden_extra_data(request_extra_data=None, expected_extra_data={})

        # Post-migration extra_data
        verify_request_with_overridden_extra_data(
            request_extra_data={
                RealmAuditLog.ROLE_COUNT: {
                    RealmAuditLog.ROLE_COUNT_HUMANS: {},
                }
            },
            expected_extra_data={
                RealmAuditLog.ROLE_COUNT: {
                    RealmAuditLog.ROLE_COUNT_HUMANS: {},
                }
            },
        )
        verify_request_with_overridden_extra_data(
            request_extra_data={},
            expected_extra_data={},
        )

        # Invalid extra_data
        with self.assertLogs("zulip.analytics", level="WARNING") as m:
            verify_request_with_overridden_extra_data(
                request_extra_data="{malformedjson:",
                skip_audit_log_check=True,
            )
        self.assertIn("Malformed audit log data", m.output[0])
2024-07-16 22:52:01 +02:00
    @activate_push_notification_service()
    @responses.activate
    def test_realm_properties_after_send_analytics(self) -> None:
        """Verify that realm.push_notifications_enabled and
        realm.push_notifications_enabled_end_timestamp are updated according
        to the billing state the bouncer reports, across the various
        customer/plan/license-count combinations.
        """
        self.add_mock_response()

        # No customer exists; billed license count is under the limit ->
        # push notifications stay enabled with no expiry timestamp.
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer", return_value=None
            ) as m,
            mock.patch(
                "corporate.lib.stripe.RemoteServerBillingSession.current_count_for_billed_licenses",
                return_value=10,
            ),
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(realm.push_notifications_enabled_end_timestamp, None)

        # No customer, but billed license count exceeds the free limit ->
        # push notifications get disabled.
        with (
            mock.patch(
                "zilencer.views.RemoteRealmBillingSession.get_customer", return_value=None
            ) as m,
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                return_value=11,
            ),
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, False)
                self.assertEqual(realm.push_notifications_enabled_end_timestamp, None)

        # Customer exists but has no current plan -> enabled.
        dummy_customer = mock.MagicMock()
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_customer,
            ),
            mock.patch("corporate.lib.stripe.get_current_plan_by_customer", return_value=None) as m,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(realm.push_notifications_enabled_end_timestamp, None)

        # Customer with no plan, but license count over the limit -> disabled.
        dummy_customer = mock.MagicMock()
        with (
            mock.patch(
                "zilencer.views.RemoteRealmBillingSession.get_customer", return_value=dummy_customer
            ),
            mock.patch("corporate.lib.stripe.get_current_plan_by_customer", return_value=None) as m,
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                return_value=11,
            ),
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, False)
                self.assertEqual(realm.push_notifications_enabled_end_timestamp, None)

        # On the Community plan the license count check is skipped entirely.
        RemoteRealm.objects.filter(server=self.server).update(
            plan_type=RemoteRealm.PLAN_TYPE_COMMUNITY
        )
        with (
            mock.patch(
                "zilencer.views.RemoteRealmBillingSession.get_customer", return_value=dummy_customer
            ),
            mock.patch("corporate.lib.stripe.get_current_plan_by_customer", return_value=None),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses"
            ) as m,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_not_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(realm.push_notifications_enabled_end_timestamp, None)

        # Reset the plan type to test remaining cases.
        RemoteRealm.objects.filter(server=self.server).update(
            plan_type=RemoteRealm.PLAN_TYPE_SELF_MANAGED
        )

        # Plan scheduled for downgrade at end of cycle, over the license limit ->
        # enabled, but with an expiry timestamp at the next billing cycle.
        dummy_customer_plan = mock.MagicMock()
        dummy_customer_plan.status = CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE
        dummy_date = datetime(year=2023, month=12, day=3, tzinfo=timezone.utc)
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_customer,
            ),
            mock.patch(
                "corporate.lib.stripe.get_current_plan_by_customer",
                return_value=dummy_customer_plan,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                return_value=11,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_next_billing_cycle",
                return_value=dummy_date,
            ) as m,
            self.assertLogs("zulip.analytics", level="INFO") as info_log,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(
                    realm.push_notifications_enabled_end_timestamp,
                    dummy_date,
                )
            self.assertIn(
                "INFO:zulip.analytics:Reported 0 records",
                info_log.output[0],
            )

        # Same downgrade case, but the license count is unavailable
        # (MissingDataError) -> same outcome.
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_customer,
            ),
            mock.patch(
                "corporate.lib.stripe.get_current_plan_by_customer",
                return_value=dummy_customer_plan,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                side_effect=MissingDataError,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_next_billing_cycle",
                return_value=dummy_date,
            ) as m,
            self.assertLogs("zulip.analytics", level="INFO") as info_log,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(
                    realm.push_notifications_enabled_end_timestamp,
                    dummy_date,
                )
            self.assertIn(
                "INFO:zulip.analytics:Reported 0 records",
                info_log.output[0],
            )

        # Downgrade-scheduled plan but license count within limits ->
        # enabled, with no expiry timestamp.
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_customer,
            ),
            mock.patch(
                "corporate.lib.stripe.get_current_plan_by_customer",
                return_value=dummy_customer_plan,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                return_value=10,
            ),
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(
                    realm.push_notifications_enabled_end_timestamp,
                    None,
                )

        # ACTIVE plan -> enabled unconditionally.
        dummy_customer_plan = mock.MagicMock()
        dummy_customer_plan.status = CustomerPlan.ACTIVE
        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_customer,
            ),
            mock.patch(
                "corporate.lib.stripe.get_current_plan_by_customer",
                return_value=dummy_customer_plan,
            ),
            self.assertLogs("zulip.analytics", level="INFO") as info_log,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(
                    realm.push_notifications_enabled_end_timestamp,
                    None,
                )
            self.assertIn(
                "INFO:zulip.analytics:Reported 0 records",
                info_log.output[0],
            )

        # Remote realm is on an inactive plan. Remote server on active plan.
        # ACTIVE plan takes precedence.
        dummy_remote_realm_customer = mock.MagicMock()
        dummy_remote_server_customer = mock.MagicMock()
        dummy_remote_server_customer_plan = mock.MagicMock()
        dummy_remote_server_customer_plan.status = CustomerPlan.ACTIVE

        def get_current_plan_by_customer(customer: mock.MagicMock) -> mock.MagicMock | None:
            # Only the server-level customer has an (ACTIVE) plan.
            assert customer in [dummy_remote_realm_customer, dummy_remote_server_customer]
            if customer == dummy_remote_server_customer:
                return dummy_remote_server_customer_plan
            return None

        with (
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.get_customer",
                return_value=dummy_remote_realm_customer,
            ),
            mock.patch(
                "corporate.lib.stripe.RemoteServerBillingSession.get_customer",
                return_value=dummy_remote_server_customer,
            ),
            mock.patch("zilencer.views.RemoteServerBillingSession.sync_license_ledger_if_needed"),
            mock.patch(
                "corporate.lib.stripe.get_current_plan_by_customer",
                side_effect=get_current_plan_by_customer,
            ) as m,
        ):
            send_server_data_to_push_bouncer(consider_usage_statistics=False)
            m.assert_called()
            realms = Realm.objects.all()
            for realm in realms:
                self.assertEqual(realm.push_notifications_enabled, True)
                self.assertEqual(
                    realm.push_notifications_enabled_end_timestamp,
                    None,
                )

        # If the bouncer POST fails, push notifications get disabled locally.
        with (
            mock.patch("zerver.lib.remote_server.send_to_push_bouncer") as m,
            self.assertLogs("zulip.analytics", level="WARNING") as exception_log,
        ):
            get_response = {
                "last_realm_count_id": 0,
                "last_installation_count_id": 0,
                "last_realmauditlog_id": 0,
            }

            def mock_send_to_push_bouncer_response(method: str, *args: Any) -> dict[str, int]:
                # GET succeeds; the subsequent POST raises a retryable error.
                if method == "POST":
                    raise PushNotificationBouncerRetryLaterError("Some problem")
                return get_response

            m.side_effect = mock_send_to_push_bouncer_response

            send_server_data_to_push_bouncer(consider_usage_statistics=False)

            realms = Realm.objects.all()
            for realm in realms:
                self.assertFalse(realm.push_notifications_enabled)
        self.assertEqual(
            exception_log.output,
            ["WARNING:zulip.analytics:Some problem"],
        )

        send_server_data_to_push_bouncer(consider_usage_statistics=False)

        # Final sanity check: the bouncer-side RemoteRealm rows mirror the
        # local realms exactly.
        self.assertEqual(
            list(
                RemoteRealm.objects.order_by("id").values(
                    "server_id",
                    "uuid",
                    "uuid_owner_secret",
                    "host",
                    "realm_date_created",
                    "registration_deactivated",
                    "realm_deactivated",
                    "plan_type",
                )
            ),
            [
                {
                    "server_id": self.server.id,
                    "uuid": realm.uuid,
                    "uuid_owner_secret": realm.uuid_owner_secret,
                    "host": realm.host,
                    "realm_date_created": realm.date_created,
                    "registration_deactivated": False,
                    "realm_deactivated": False,
                    "plan_type": RemoteRealm.PLAN_TYPE_SELF_MANAGED,
                }
                for realm in Realm.objects.order_by("id")
            ],
        )
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2023-12-13 21:40:54 +01:00
@responses.activate
2023-12-15 16:01:04 +01:00
def test_deleted_realm ( self ) - > None :
2023-12-13 21:40:54 +01:00
self . add_mock_response ( )
2023-12-15 16:01:04 +01:00
logger = logging . getLogger ( " zulip.analytics " )
2023-12-13 21:40:54 +01:00
realm_info = get_realms_info_for_push_bouncer ( )
# Hard-delete a realm to test the non existent realm uuid case.
2024-02-24 00:58:58 +01:00
zephyr_realm = get_realm ( " zephyr " )
assert zephyr_realm is not None
deleted_realm_uuid = zephyr_realm . uuid
zephyr_realm . delete ( )
2023-12-13 21:40:54 +01:00
2023-12-15 16:01:04 +01:00
# This mock causes us to still send data to the bouncer as if the realm existed,
# causing the bouncer to include its corresponding info in the response. Through
# that, we're testing our graceful handling of seeing a non-existent realm uuid
# in that response.
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.remote_server.get_realms_info_for_push_bouncer " , return_value = realm_info
) as m ,
self . assertLogs ( logger , level = " WARNING " ) as analytics_logger ,
) :
2023-12-13 21:40:54 +01:00
send_server_data_to_push_bouncer ( consider_usage_statistics = False )
m . assert_called ( )
realms = Realm . objects . all ( )
for realm in realms :
self . assertEqual ( realm . push_notifications_enabled , True )
self . assertEqual ( realm . push_notifications_enabled_end_timestamp , None )
self . assertEqual (
analytics_logger . output ,
[
" WARNING:zulip.analytics: "
f " Received unexpected realm UUID from bouncer { deleted_realm_uuid } "
] ,
)
2023-12-15 16:01:04 +01:00
# Now we want to test the other side of this - bouncer's handling
# of a deleted realm.
2024-07-12 02:30:32 +02:00
with (
self . assertLogs ( logger , level = " WARNING " ) as analytics_logger ,
mock . patch ( " zilencer.views.RemoteRealmBillingSession.on_paid_plan " , return_value = True ) ,
2024-02-29 08:30:13 +01:00
) :
2023-12-15 16:01:04 +01:00
# This time the logger shouldn't get triggered - because the bouncer doesn't
# include .realm_locally_deleted realms in its response.
# Note: This is hacky, because until Python 3.10 we don't have access to
# assertNoLogs - and regular assertLogs demands that the logger gets triggered.
# So we do a dummy warning ourselves here, to satisfy it.
# TODO: Replace this with assertNoLogs once we fully upgrade to Python 3.10.
logger . warning ( " Dummy warning " )
send_server_data_to_push_bouncer ( consider_usage_statistics = False )
remote_realm_for_deleted_realm = RemoteRealm . objects . get ( uuid = deleted_realm_uuid )
2024-01-05 02:43:54 +01:00
self . assertEqual ( remote_realm_for_deleted_realm . registration_deactivated , False )
2023-12-15 16:01:04 +01:00
self . assertEqual ( remote_realm_for_deleted_realm . realm_locally_deleted , True )
self . assertEqual ( analytics_logger . output , [ " WARNING:zulip.analytics:Dummy warning " ] )
audit_log = RemoteRealmAuditLog . objects . latest ( " id " )
self . assertEqual ( audit_log . event_type , RemoteRealmAuditLog . REMOTE_REALM_LOCALLY_DELETED )
self . assertEqual ( audit_log . remote_realm , remote_realm_for_deleted_realm )
2024-02-29 08:30:13 +01:00
from django . core . mail import outbox
email = outbox [ - 1 ]
self . assert_length ( email . to , 1 )
self . assertEqual ( email . to [ 0 ] , " sales@zulip.com " )
billing_session = RemoteRealmBillingSession ( remote_realm = remote_realm_for_deleted_realm )
self . assertIn (
f " Support URL: { billing_session . support_url ( ) } " ,
email . body ,
)
2024-03-20 15:46:12 +01:00
self . assertIn (
f " Internal billing notice for { billing_session . billing_entity_display_name } . " ,
email . body ,
)
self . assertIn (
" Investigate why remote realm is marked as locally deleted when it ' s on a paid plan. " ,
email . body ,
)
2024-02-29 08:30:13 +01:00
self . assertEqual (
f " { billing_session . billing_entity_display_name } on paid plan marked as locally deleted " ,
email . subject ,
)
2024-02-24 00:58:58 +01:00
# Restore the deleted realm to verify that the bouncer correctly handles that
2024-05-31 23:06:32 +02:00
# by toggling off .realm_locally_deleted.
2024-02-24 00:58:58 +01:00
restored_zephyr_realm = do_create_realm ( " zephyr " , " Zephyr " )
restored_zephyr_realm . uuid = deleted_realm_uuid
restored_zephyr_realm . save ( )
send_server_data_to_push_bouncer ( consider_usage_statistics = False )
remote_realm_for_deleted_realm . refresh_from_db ( )
self . assertEqual ( remote_realm_for_deleted_realm . realm_locally_deleted , False )
audit_log = RemoteRealmAuditLog . objects . latest ( " id " )
self . assertEqual (
audit_log . event_type , RemoteRealmAuditLog . REMOTE_REALM_LOCALLY_DELETED_RESTORED
)
self . assertEqual ( audit_log . remote_realm , remote_realm_for_deleted_realm )
2021-02-12 08:19:30 +01:00
2017-05-11 10:55:05 +02:00
class PushNotificationTest(BouncerTestCase):
    """Shared fixtures for push notification tests: a sender/recipient pair,
    a helper to create test messages, and APNs/FCM mocks and device tokens.
    """

    @override
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.sending_client = get_client("test")
        self.sender = self.example_user("hamlet")
        self.personal_recipient_user = self.example_user("othello")

    def get_message(self, type: int, type_id: int, realm_id: int) -> Message:
        """Create and save a test message addressed to the given recipient."""
        recipient, _ = Recipient.objects.get_or_create(
            type_id=type_id,
            type=type,
        )

        message = Message(
            sender=self.sender,
            recipient=recipient,
            realm_id=realm_id,
            content="This is test content",
            rendered_content="This is test content",
            date_sent=now(),
            sending_client=self.sending_client,
        )
        message.set_topic_name("Test topic")
        message.save()
        return message

    @contextmanager
    def mock_apns(self) -> Iterator[tuple[APNsContext, mock.AsyncMock]]:
        """Patch get_apns_context() to return a mocked APNs connection.

        Yields the APNsContext plus the AsyncMock standing in for
        APNs.send_notification, so callers can set return values and
        inspect the calls that were made.
        """
        apns = mock.Mock(spec=aioapns.APNs)
        apns.send_notification = mock.AsyncMock()
        apns_context = APNsContext(
            apns=apns,
            loop=asyncio.new_event_loop(),
        )
        try:
            with mock.patch("zerver.lib.push_notifications.get_apns_context") as mock_get:
                mock_get.return_value = apns_context
                yield apns_context, apns.send_notification
        finally:
            # Always close the event loop we created, even on test failure.
            apns_context.loop.close()

    def setup_apns_tokens(self) -> None:
        """Register local and remote APNs device tokens for the test user."""
        self.tokens = [("aaaa", "org.zulip.Zulip"), ("bbbb", "com.zulip.flutter")]
        for token, appid in self.tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.APNS,
                token=hex_to_b64(token),
                user=self.user_profile,
                ios_app_id=appid,
            )

        self.remote_tokens = [
            ("cccc", "dddd", "org.zulip.Zulip"),
            ("eeee", "ffff", "com.zulip.flutter"),
        ]
        for id_token, uuid_token, appid in self.remote_tokens:
            # We want to set up both types of RemotePushDeviceToken here:
            # the legacy one with user_id and the new with user_uuid.
            # This allows tests to work with either, without needing to
            # do their own setup.
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.APNS,
                token=hex_to_b64(id_token),
                ios_app_id=appid,
                user_id=self.user_profile.id,
                server=self.server,
            )
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.APNS,
                token=hex_to_b64(uuid_token),
                ios_app_id=appid,
                user_uuid=self.user_profile.uuid,
                server=self.server,
            )

    @contextmanager
    def mock_fcm(self) -> Iterator[tuple[mock.MagicMock, mock.MagicMock]]:
        """Patch the FCM app and messaging module used by push_notifications."""
        with (
            mock.patch("zerver.lib.push_notifications.fcm_app") as mock_fcm_app,
            mock.patch("zerver.lib.push_notifications.firebase_messaging") as mock_fcm_messaging,
        ):
            yield mock_fcm_app, mock_fcm_messaging

    def setup_fcm_tokens(self) -> None:
        """Register local and remote FCM device tokens for the test user."""
        self.fcm_tokens = ["1111", "2222"]
        for token in self.fcm_tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.FCM,
                token=hex_to_b64(token),
                user=self.user_profile,
                ios_app_id=None,
            )

        self.remote_fcm_tokens = [("dddd", "eeee")]
        for id_token, uuid_token in self.remote_fcm_tokens:
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.FCM,
                token=hex_to_b64(id_token),
                user_id=self.user_profile.id,
                server=self.server,
            )
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.FCM,
                token=hex_to_b64(uuid_token),
                user_uuid=self.user_profile.uuid,
                server=self.server,
            )

    def make_fcm_success_response(self, tokens: list[str]) -> firebase_messaging.BatchResponse:
        """Build a BatchResponse reporting a successful send for every token."""
        # Named send_responses to avoid shadowing the module-level
        # `responses` import.
        send_responses = [
            firebase_messaging.SendResponse(exception=None, resp=dict(name=str(idx)))
            for idx in range(len(tokens))
        ]
        return firebase_messaging.BatchResponse(send_responses)

    def make_fcm_error_response(
        self, token: str, exception: firebase_exceptions.FirebaseError
    ) -> firebase_messaging.BatchResponse:
        """Build a BatchResponse reporting a single failed send."""
        error_response = firebase_messaging.SendResponse(exception=exception, resp=None)
        return firebase_messaging.BatchResponse([error_response])
2021-02-12 08:19:30 +01:00
2017-05-11 10:55:05 +02:00
class HandlePushNotificationTest(PushNotificationTest):
    DEFAULT_SUBDOMAIN = ""

    def soft_deactivate_main_user(self) -> None:
        """Soft-deactivate the main test user (Hamlet) and keep a reference."""
        hamlet = self.example_user("hamlet")
        self.user_profile = hamlet
        self.soft_deactivate_user(hamlet)
2023-10-12 19:43:45 +02:00
@override
2024-07-12 02:30:17 +02:00
def request_callback ( self , request : PreparedRequest ) - > tuple [ int , ResponseHeaders , bytes ] :
2021-06-11 22:51:27 +02:00
assert request . url is not None # allow mypy to infer url is present.
2024-07-16 22:52:01 +02:00
assert settings . ZULIP_SERVICES_URL is not None
local_url = request . url . replace ( settings . ZULIP_SERVICES_URL , " " )
2021-12-17 08:14:22 +01:00
assert isinstance ( request . body , bytes )
2021-06-11 22:51:27 +02:00
result = self . uuid_post (
self . server_uuid , local_url , request . body , content_type = " application/json "
)
return ( result . status_code , result . headers , result . content )
2017-05-11 10:55:05 +02:00
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
2017-11-05 10:51:25 +01:00
def test_end_to_end ( self ) - > None :
2021-06-11 22:51:27 +02:00
self . add_mock_response ( )
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2017-05-17 07:16:20 +02:00
2023-09-29 20:04:09 +02:00
time_sent = now ( ) . replace ( microsecond = 0 )
2023-09-18 17:07:34 +02:00
with time_machine . travel ( time_sent , tick = False ) :
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
UserMessage . objects . create (
user_profile = self . user_profile ,
message = message ,
)
2017-05-11 10:55:05 +02:00
2023-11-19 19:45:19 +01:00
time_received = time_sent + timedelta ( seconds = 1 , milliseconds = 234 )
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2017-09-10 00:47:36 +02:00
}
2024-07-12 02:30:32 +02:00
with (
time_machine . travel ( time_received , tick = False ) ,
self . mock_fcm ( ) as (
mock_fcm_app ,
mock_fcm_messaging ,
) ,
self . mock_apns ( ) as ( apns_context , send_notification ) ,
mock . patch (
" corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses " ,
return_value = 10 ,
) ,
self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as pn_logger ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) as views_logger ,
) :
2017-05-17 07:16:20 +02:00
apns_devices = [
2019-02-08 23:09:20 +01:00
( b64_to_hex ( device . token ) , device . ios_app_id , device . token )
2021-02-12 08:19:30 +01:00
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS )
2017-05-11 10:55:05 +02:00
]
2017-05-17 07:16:20 +02:00
gcm_devices = [
2019-02-08 23:09:20 +01:00
( b64_to_hex ( device . token ) , device . ios_app_id , device . token )
2024-06-13 20:53:09 +02:00
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . FCM )
2017-05-17 07:16:20 +02:00
]
2024-06-13 14:38:58 +02:00
mock_fcm_messaging . send_each . return_value = self . make_fcm_success_response (
[ device [ 2 ] for device in gcm_devices ]
)
2023-08-29 00:43:51 +02:00
send_notification . return_value . is_successful = True
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2023-11-06 20:25:28 +01:00
self . assertEqual (
{
( args [ 0 ] [ 0 ] . device_token , args [ 0 ] [ 0 ] . apns_topic )
for args in send_notification . call_args_list
} ,
{
( device . token , device . ios_app_id )
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS )
} ,
)
2021-10-20 01:16:18 +02:00
self . assertEqual (
views_logger . output ,
[
2023-09-18 17:07:34 +02:00
" INFO:zilencer.views: "
f " Remote queuing latency for 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { self . user_profile . id } ><uuid: { self . user_profile . uuid } > "
2023-09-29 20:04:09 +02:00
" is 1 seconds " ,
2021-10-20 01:16:18 +02:00
" INFO:zilencer.views: "
2023-05-27 02:30:33 +02:00
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { self . user_profile . id } ><uuid: { self . user_profile . uuid } >: "
2023-09-18 17:07:34 +02:00
f " { len ( gcm_devices ) } via FCM devices, { len ( apns_devices ) } via APNs devices " ,
2021-10-20 01:16:18 +02:00
] ,
)
2017-08-19 01:38:11 +02:00
for _ , _ , token in apns_devices :
2021-07-25 14:26:24 +02:00
self . assertIn (
" INFO:zerver.lib.push_notifications: "
2023-05-27 02:30:33 +02:00
f " APNs: Success sending for user <id: { self . user_profile . id } ><uuid: { self . user_profile . uuid } > to device { token } " ,
2021-10-20 01:16:18 +02:00
pn_logger . output ,
2021-02-12 08:19:30 +01:00
)
2024-06-13 14:38:58 +02:00
for idx , ( _ , _ , token ) in enumerate ( gcm_devices ) :
2021-07-25 14:26:24 +02:00
self . assertIn (
2024-06-13 14:38:58 +02:00
f " INFO:zerver.lib.push_notifications:FCM: Sent message with ID: { idx } to { token } " ,
2021-10-20 01:16:18 +02:00
pn_logger . output ,
2020-05-02 08:44:14 +02:00
)
2017-05-11 10:55:05 +02:00
2023-11-07 15:12:37 +01:00
remote_realm_count = RealmCount . objects . values ( " property " , " subgroup " , " value " ) . last ( )
self . assertEqual (
remote_realm_count ,
dict (
property = " mobile_pushes_sent::day " ,
subgroup = None ,
value = len ( gcm_devices ) + len ( apns_devices ) ,
) ,
)
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2024-02-07 00:38:35 +01:00
@responses.activate
def test_end_to_end_failure_due_to_no_plan ( self ) - > None :
self . add_mock_response ( )
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2024-02-07 00:38:35 +01:00
self . server . last_api_feature_level = 237
self . server . save ( )
realm = self . user_profile . realm
realm . push_notifications_enabled = True
realm . save ( )
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
UserMessage . objects . create (
user_profile = self . user_profile ,
message = message ,
)
missed_message = {
" message_id " : message . id ,
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
}
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses " ,
return_value = 100 ,
) as mock_current_count ,
self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as pn_logger ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) ,
) :
2024-02-07 00:38:35 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
self . assertEqual (
pn_logger . output ,
[
f " INFO:zerver.lib.push_notifications:Sending push notifications to mobile clients for user { self . user_profile . id } " ,
2024-02-16 19:20:02 +01:00
" WARNING:zerver.lib.push_notifications:Bouncer refused to send push notification: Your plan doesn ' t allow sending push notifications. Reason provided by the server: Push notifications access with 10+ users requires signing up for a plan. https://zulip.com/plans/ " ,
2024-02-07 00:38:35 +01:00
] ,
)
realm . refresh_from_db ( )
self . assertEqual ( realm . push_notifications_enabled , False )
self . assertEqual ( realm . push_notifications_enabled_end_timestamp , None )
# Now verify the flag will correctly get flipped back if the server stops
# rejecting our notification.
# This will put us within the allowed number of users to use push notifications
# for free, so the server will accept our next request.
mock_current_count . return_value = 5
new_message_id = self . send_personal_message (
self . example_user ( " othello " ) , self . user_profile
)
new_missed_message = {
" message_id " : new_message_id ,
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
}
handle_push_notification ( self . user_profile . id , new_missed_message )
self . assertIn (
f " Sent mobile push notifications for user { self . user_profile . id } " ,
pn_logger . output [ - 1 ] ,
)
realm . refresh_from_db ( )
self . assertEqual ( realm . push_notifications_enabled , True )
self . assertEqual ( realm . push_notifications_enabled_end_timestamp , None )
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
2020-07-14 08:12:46 +02:00
def test_unregistered_client ( self ) - > None :
2021-06-11 22:51:27 +02:00
self . add_mock_response ( )
2020-07-14 08:12:46 +02:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2020-07-14 08:12:46 +02:00
2023-09-29 20:04:09 +02:00
time_sent = now ( ) . replace ( microsecond = 0 )
2023-09-18 17:07:34 +02:00
with time_machine . travel ( time_sent , tick = False ) :
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
UserMessage . objects . create (
user_profile = self . user_profile ,
message = message ,
)
2020-07-14 08:12:46 +02:00
2023-11-19 19:45:19 +01:00
time_received = time_sent + timedelta ( seconds = 1 , milliseconds = 234 )
2020-07-14 08:12:46 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2020-07-14 08:12:46 +02:00
}
2024-07-12 02:30:32 +02:00
with (
time_machine . travel ( time_received , tick = False ) ,
self . mock_fcm ( ) as (
mock_fcm_app ,
mock_fcm_messaging ,
) ,
self . mock_apns ( ) as ( apns_context , send_notification ) ,
mock . patch (
" corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses " ,
return_value = 10 ,
) ,
self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as pn_logger ,
self . assertLogs ( " zilencer.views " , level = " INFO " ) as views_logger ,
) :
2020-07-14 08:12:46 +02:00
apns_devices = [
( b64_to_hex ( device . token ) , device . ios_app_id , device . token )
2021-02-12 08:19:30 +01:00
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS )
2020-07-14 08:12:46 +02:00
]
gcm_devices = [
( b64_to_hex ( device . token ) , device . ios_app_id , device . token )
2024-06-13 20:53:09 +02:00
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . FCM )
2020-07-14 08:12:46 +02:00
]
2023-12-12 00:06:37 +01:00
# Reset the local registrations for the user to make them compatible
# with the RemotePushDeviceToken entries.
PushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS ) . delete ( )
[
PushDeviceToken . objects . create (
kind = PushDeviceToken . APNS ,
token = device . token ,
user = self . user_profile ,
ios_app_id = device . ios_app_id ,
)
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS )
]
2024-06-13 20:53:09 +02:00
PushDeviceToken . objects . filter ( kind = PushDeviceToken . FCM ) . delete ( )
2023-12-12 00:06:37 +01:00
[
PushDeviceToken . objects . create (
2024-06-13 20:53:09 +02:00
kind = PushDeviceToken . FCM ,
2023-12-12 00:06:37 +01:00
token = device . token ,
user = self . user_profile ,
ios_app_id = device . ios_app_id ,
)
2024-06-13 20:53:09 +02:00
for device in RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . FCM )
2023-12-12 00:06:37 +01:00
]
2024-06-13 14:38:58 +02:00
mock_fcm_messaging . send_each . return_value = self . make_fcm_success_response (
[ gcm_devices [ 0 ] [ 2 ] ]
)
2023-08-29 00:43:51 +02:00
send_notification . return_value . is_successful = False
send_notification . return_value . description = " Unregistered "
2023-12-12 00:06:37 +01:00
# Ensure the setup is as expected:
self . assertNotEqual (
PushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS ) . count ( ) , 0
)
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2021-10-20 01:16:18 +02:00
self . assertEqual (
views_logger . output ,
[
2023-09-18 17:07:34 +02:00
" INFO:zilencer.views: "
f " Remote queuing latency for 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { self . user_profile . id } ><uuid: { self . user_profile . uuid } > "
2023-09-29 20:04:09 +02:00
" is 1 seconds " ,
2021-10-20 01:16:18 +02:00
" INFO:zilencer.views: "
2023-05-27 02:30:33 +02:00
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { self . user_profile . id } ><uuid: { self . user_profile . uuid } >: "
2023-09-18 17:07:34 +02:00
f " { len ( gcm_devices ) } via FCM devices, { len ( apns_devices ) } via APNs devices " ,
2021-10-20 01:16:18 +02:00
] ,
)
2018-05-21 20:20:23 +02:00
for _ , _ , token in apns_devices :
2021-07-25 14:26:24 +02:00
self . assertIn (
" INFO:zerver.lib.push_notifications: "
f " APNs: Removing invalid/expired token { token } (Unregistered) " ,
2021-10-20 01:16:18 +02:00
pn_logger . output ,
2020-05-02 08:44:14 +02:00
)
2023-12-12 00:06:37 +01:00
self . assertIn (
" INFO:zerver.lib.push_notifications:Deleting push tokens based on response from bouncer: "
2024-03-01 03:06:34 +01:00
f " Android: [], Apple: { sorted ( [ token for _ , _ , token in apns_devices ] ) } " ,
2023-12-12 00:06:37 +01:00
pn_logger . output ,
)
2021-02-12 08:19:30 +01:00
self . assertEqual (
RemotePushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS ) . count ( ) , 0
)
2023-12-12 00:06:37 +01:00
# Local registrations have also been deleted:
self . assertEqual ( PushDeviceToken . objects . filter ( kind = PushDeviceToken . APNS ) . count ( ) , 0 )
2018-05-21 20:20:23 +02:00
2024-07-16 22:52:01 +02:00
@activate_push_notification_service ( )
2021-06-11 22:51:27 +02:00
@responses.activate
2019-12-02 19:46:11 +01:00
def test_connection_error ( self ) - > None :
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2017-08-18 09:04:52 +02:00
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2017-08-18 09:04:52 +02:00
UserMessage . objects . create (
user_profile = self . user_profile ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2017-08-18 09:04:52 +02:00
)
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" user_profile_id " : self . user_profile . id ,
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2017-09-10 00:47:36 +02:00
}
2024-07-16 22:52:01 +02:00
assert settings . ZULIP_SERVICES_URL is not None
URL = settings . ZULIP_SERVICES_URL + " /api/v1/remotes/push/notify "
2021-06-11 22:51:27 +02:00
responses . add ( responses . POST , URL , body = ConnectionError ( ) )
2024-06-16 00:53:14 +02:00
with self . assertRaises ( PushNotificationBouncerRetryLaterError ) :
handle_push_notification ( self . user_profile . id , missed_message )
2017-08-18 09:04:52 +02:00
2023-11-23 22:10:26 +01:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_configured " , return_value = True )
2024-07-16 22:52:01 +02:00
@override_settings ( ZULIP_SERVICE_PUSH_NOTIFICATIONS = False , ZULIP_SERVICES = set ( ) )
2018-12-11 07:05:40 +01:00
def test_read_message ( self , mock_push_notifications : mock . MagicMock ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2021-06-10 14:38:54 +02:00
usermessage = UserMessage . objects . create (
2017-05-11 10:55:05 +02:00
user_profile = user_profile ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2017-05-11 10:55:05 +02:00
)
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2017-09-10 00:47:36 +02:00
}
2021-06-10 14:38:54 +02:00
# If the message is unread, we should send push notifications.
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification " , return_value = 1
) as mock_send_apple ,
mock . patch (
" zerver.lib.push_notifications.send_android_push_notification " , return_value = 1
) as mock_send_android ,
) :
2021-06-10 14:38:54 +02:00
handle_push_notification ( user_profile . id , missed_message )
mock_send_apple . assert_called_once ( )
mock_send_android . assert_called_once ( )
# If the message has been read, don't send push notifications.
usermessage . flags . read = True
usermessage . save ( )
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification " , return_value = 1
) as mock_send_apple ,
mock . patch (
" zerver.lib.push_notifications.send_android_push_notification " , return_value = 1
) as mock_send_android ,
) :
2021-06-10 14:38:54 +02:00
handle_push_notification ( user_profile . id , missed_message )
mock_send_apple . assert_not_called ( )
mock_send_android . assert_not_called ( )
2017-05-11 10:55:05 +02:00
2018-12-05 19:36:58 +01:00
def test_deleted_message ( self ) - > None :
2022-02-08 00:13:33 +01:00
""" Simulates the race where message is deleted before handling push notifications """
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2018-12-05 19:36:58 +01:00
UserMessage . objects . create (
user_profile = user_profile ,
flags = UserMessage . flags . read ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2018-12-05 19:36:58 +01:00
)
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2018-12-05 19:36:58 +01:00
}
# Now, delete the message the normal way
2024-07-17 05:11:28 +02:00
do_delete_messages ( user_profile . realm , [ message ] , acting_user = None )
2018-12-05 19:36:58 +01:00
2020-10-29 20:21:18 +01:00
# This mock.patch() should be assertNoLogs once that feature
# is added to Python.
2024-07-12 02:30:32 +02:00
with (
mock . patch ( " zerver.lib.push_notifications.uses_notification_bouncer " ) as mock_check ,
mock . patch ( " logging.error " ) as mock_logging_error ,
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( user_profile . id , missed_message )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2018-12-05 19:44:25 +01:00
# Check we didn't proceed through and didn't log anything.
2018-12-05 19:36:58 +01:00
mock_check . assert_not_called ( )
2018-12-05 19:44:25 +01:00
mock_logging_error . assert_not_called ( )
2018-12-05 19:36:58 +01:00
def test_missing_message ( self ) - > None :
""" Simulates the race where message is missing when handling push notifications """
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2018-12-05 19:36:58 +01:00
UserMessage . objects . create (
user_profile = user_profile ,
flags = UserMessage . flags . read ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2018-12-05 19:36:58 +01:00
)
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2018-12-05 19:36:58 +01:00
}
# Now delete the message forcefully, so it just doesn't exist.
message . delete ( )
# This should log an error
2024-07-12 02:30:32 +02:00
with (
mock . patch ( " zerver.lib.push_notifications.uses_notification_bouncer " ) as mock_check ,
self . assertLogs ( level = " INFO " ) as mock_logging_info ,
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( user_profile . id , missed_message )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2018-12-05 19:36:58 +01:00
# Check we didn't proceed through.
mock_check . assert_not_called ( )
2021-02-12 08:19:30 +01:00
self . assertEqual (
mock_logging_info . output ,
[
f " INFO:root:Unexpected message access failure handling push notifications: { user_profile . id } { missed_message [ ' message_id ' ] } "
] ,
)
2018-12-05 19:36:58 +01:00
2017-11-05 10:51:25 +01:00
def test_send_notifications_to_bouncer ( self ) - > None :
2023-12-12 00:06:37 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2023-12-12 00:06:37 +01:00
user_profile = self . user_profile
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2017-05-11 10:55:05 +02:00
UserMessage . objects . create (
user_profile = user_profile ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2017-05-11 10:55:05 +02:00
)
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2017-09-10 00:47:36 +02:00
}
2024-07-12 02:30:32 +02:00
with (
2024-07-16 22:52:01 +02:00
activate_push_notification_service ( ) ,
2024-07-12 02:30:32 +02:00
mock . patch (
" zerver.lib.push_notifications.get_message_payload_apns " ,
return_value = { " apns " : True } ,
2023-12-14 21:37:13 +01:00
) ,
2024-07-12 02:30:32 +02:00
mock . patch (
" zerver.lib.push_notifications.get_message_payload_gcm " ,
return_value = ( { " gcm " : True } , { } ) ,
) ,
mock . patch (
" zerver.lib.push_notifications.send_json_to_push_bouncer " ,
return_value = dict (
total_android_devices = 3 ,
total_apple_devices = 5 ,
deleted_devices = DevicesToCleanUpDict ( android_devices = [ ] , apple_devices = [ ] ) ,
realm = None ,
) ,
) as mock_send ,
self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as mock_logging_info ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( user_profile . id , missed_message )
2021-02-12 08:19:30 +01:00
mock_send . assert_called_with (
2023-12-14 21:37:13 +01:00
" POST " ,
" push/notify " ,
{
" user_uuid " : str ( user_profile . uuid ) ,
" user_id " : user_profile . id ,
" realm_uuid " : str ( user_profile . realm . uuid ) ,
" apns_payload " : { " apns " : True } ,
" gcm_payload " : { " gcm " : True } ,
" gcm_options " : { } ,
" android_devices " : list (
2024-06-13 20:53:09 +02:00
PushDeviceToken . objects . filter ( user = user_profile , kind = PushDeviceToken . FCM )
2023-12-14 21:37:13 +01:00
. order_by ( " id " )
. values_list ( " token " , flat = True )
) ,
" apple_devices " : list (
PushDeviceToken . objects . filter ( user = user_profile , kind = PushDeviceToken . APNS )
. order_by ( " id " )
. values_list ( " token " , flat = True )
) ,
} ,
2021-02-12 08:19:30 +01:00
)
2023-12-14 21:37:13 +01:00
2021-09-28 14:17:16 +02:00
self . assertEqual (
mock_logging_info . output ,
[
f " INFO:zerver.lib.push_notifications:Sending push notifications to mobile clients for user { user_profile . id } " ,
f " INFO:zerver.lib.push_notifications:Sent mobile push notifications for user { user_profile . id } through bouncer: 3 via FCM devices, 5 via APNs devices " ,
] ,
)
2017-05-11 10:55:05 +02:00
2017-11-05 10:51:25 +01:00
def test_non_bouncer_push ( self ) - > None :
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2017-05-11 10:55:05 +02:00
UserMessage . objects . create (
user_profile = self . user_profile ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2017-05-11 10:55:05 +02:00
)
android_devices = list (
2024-06-13 20:53:09 +02:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . FCM )
2021-02-12 08:19:30 +01:00
)
2017-05-11 10:55:05 +02:00
apple_devices = list (
2021-02-12 08:19:30 +01:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . APNS )
)
2017-05-11 10:55:05 +02:00
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2017-09-10 00:47:36 +02:00
}
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.get_message_payload_apns " ,
return_value = { " apns " : True } ,
) ,
mock . patch (
" zerver.lib.push_notifications.get_message_payload_gcm " ,
return_value = ( { " gcm " : True } , { } ) ,
) ,
mock . patch (
# Simulate the send...push_notification functions returning a number of successes
# lesser than the number of devices, so that we can verify correct CountStat counting.
" zerver.lib.push_notifications.send_apple_push_notification " ,
return_value = len ( apple_devices ) - 1 ,
) as mock_send_apple ,
mock . patch (
" zerver.lib.push_notifications.send_android_push_notification " ,
return_value = len ( android_devices ) - 1 ,
) as mock_send_android ,
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2023-01-02 20:50:23 +01:00
user_identity = UserPushIdentityCompat ( user_id = self . user_profile . id )
2022-02-23 20:25:30 +01:00
mock_send_apple . assert_called_with ( user_identity , apple_devices , { " apns " : True } )
mock_send_android . assert_called_with ( user_identity , android_devices , { " gcm " : True } , { } )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2017-05-11 10:55:05 +02:00
2023-11-07 15:12:37 +01:00
remote_realm_count = RealmCount . objects . values ( " property " , " subgroup " , " value " ) . last ( )
self . assertEqual (
remote_realm_count ,
dict (
property = " mobile_pushes_sent::day " ,
subgroup = None ,
value = len ( android_devices ) + len ( apple_devices ) - 2 ,
) ,
)
2018-07-28 14:31:45 +02:00
def test_send_remove_notifications_to_bouncer ( self ) - > None :
2023-12-12 00:06:37 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2023-12-12 00:06:37 +01:00
user_profile = self . user_profile
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2018-07-28 14:31:45 +02:00
UserMessage . objects . create (
user_profile = user_profile ,
2019-02-14 01:23:55 +01:00
message = message ,
flags = UserMessage . flags . active_mobile_push_notification ,
2018-07-28 14:31:45 +02:00
)
2024-07-12 02:30:32 +02:00
with (
2024-07-16 22:52:01 +02:00
activate_push_notification_service ( ) ,
2024-07-12 02:30:32 +02:00
mock . patch ( " zerver.lib.push_notifications.send_notifications_to_bouncer " ) as mock_send ,
) :
2019-02-14 01:08:51 +01:00
handle_remove_push_notification ( user_profile . id , [ message . id ] )
2020-06-02 18:09:26 +02:00
mock_send . assert_called_with (
2023-11-07 15:12:37 +01:00
user_profile ,
2020-06-02 18:09:26 +02:00
{
2021-02-12 08:20:45 +01:00
" badge " : 0 ,
" custom " : {
" zulip " : {
" server " : " testserver " ,
" realm_id " : self . sender . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : self . sender . realm . name ,
2021-02-12 08:20:45 +01:00
" realm_uri " : " http://zulip.testserver " ,
2024-05-07 06:08:52 +02:00
" realm_url " : " http://zulip.testserver " ,
2021-02-12 08:20:45 +01:00
" user_id " : self . user_profile . id ,
" event " : " remove " ,
" zulip_message_ids " : str ( message . id ) ,
2020-06-02 18:09:26 +02:00
} ,
} ,
} ,
2019-02-14 01:08:51 +01:00
{
2021-02-12 08:20:45 +01:00
" server " : " testserver " ,
" realm_id " : self . sender . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : self . sender . realm . name ,
2021-02-12 08:20:45 +01:00
" realm_uri " : " http://zulip.testserver " ,
2024-05-07 06:08:52 +02:00
" realm_url " : " http://zulip.testserver " ,
2021-02-12 08:20:45 +01:00
" user_id " : self . user_profile . id ,
" event " : " remove " ,
" zulip_message_ids " : str ( message . id ) ,
" zulip_message_id " : message . id ,
2019-02-14 01:08:51 +01:00
} ,
2021-02-12 08:20:45 +01:00
{ " priority " : " normal " } ,
2023-12-12 01:32:43 +01:00
list (
PushDeviceToken . objects . filter (
2024-06-13 20:53:09 +02:00
user = user_profile , kind = PushDeviceToken . FCM
2023-12-12 01:32:43 +01:00
) . order_by ( " id " )
) ,
list (
PushDeviceToken . objects . filter (
user = user_profile , kind = PushDeviceToken . APNS
) . order_by ( " id " )
) ,
2021-02-12 08:19:30 +01:00
)
user_message = UserMessage . objects . get ( user_profile = self . user_profile , message = message )
2019-02-14 01:23:55 +01:00
self . assertEqual ( user_message . flags . active_mobile_push_notification , False )
2018-07-28 14:31:45 +02:00
def test_non_bouncer_push_remove ( self ) - > None :
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2018-07-28 14:31:45 +02:00
UserMessage . objects . create (
user_profile = self . user_profile ,
2019-02-14 01:23:55 +01:00
message = message ,
flags = UserMessage . flags . active_mobile_push_notification ,
2018-07-28 14:31:45 +02:00
)
android_devices = list (
2024-06-13 20:53:09 +02:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . FCM )
2021-02-12 08:19:30 +01:00
)
2018-07-28 14:31:45 +02:00
2020-06-02 18:09:26 +02:00
apple_devices = list (
2021-02-12 08:19:30 +01:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . APNS )
)
2020-06-02 18:09:26 +02:00
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
mock . patch (
# Simulate the send...push_notification functions returning a number of successes
# lesser than the number of devices, so that we can verify correct CountStat counting.
" zerver.lib.push_notifications.send_android_push_notification " ,
return_value = len ( apple_devices ) - 1 ,
) as mock_send_android ,
mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification " ,
return_value = len ( apple_devices ) - 1 ,
) as mock_send_apple ,
) :
2019-02-14 01:08:51 +01:00
handle_remove_push_notification ( self . user_profile . id , [ message . id ] )
2022-03-09 23:40:34 +01:00
mock_push_notifications . assert_called_once ( )
2023-01-02 20:50:23 +01:00
user_identity = UserPushIdentityCompat ( user_id = self . user_profile . id )
2019-02-14 01:08:51 +01:00
mock_send_android . assert_called_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2019-02-14 01:08:51 +01:00
android_devices ,
{
2021-02-12 08:20:45 +01:00
" server " : " testserver " ,
" realm_id " : self . sender . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : self . sender . realm . name ,
2021-02-12 08:20:45 +01:00
" realm_uri " : " http://zulip.testserver " ,
2024-05-07 06:08:52 +02:00
" realm_url " : " http://zulip.testserver " ,
2021-02-12 08:20:45 +01:00
" user_id " : self . user_profile . id ,
" event " : " remove " ,
" zulip_message_ids " : str ( message . id ) ,
" zulip_message_id " : message . id ,
2019-02-14 01:08:51 +01:00
} ,
2021-02-12 08:20:45 +01:00
{ " priority " : " normal " } ,
2021-02-12 08:19:30 +01:00
)
2020-06-02 18:09:26 +02:00
mock_send_apple . assert_called_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2020-06-02 18:09:26 +02:00
apple_devices ,
2021-02-12 08:19:30 +01:00
{
2021-02-12 08:20:45 +01:00
" badge " : 0 ,
" custom " : {
" zulip " : {
" server " : " testserver " ,
" realm_id " : self . sender . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : self . sender . realm . name ,
2021-02-12 08:20:45 +01:00
" realm_uri " : " http://zulip.testserver " ,
2024-05-07 06:08:52 +02:00
" realm_url " : " http://zulip.testserver " ,
2021-02-12 08:20:45 +01:00
" user_id " : self . user_profile . id ,
" event " : " remove " ,
" zulip_message_ids " : str ( message . id ) ,
2021-02-12 08:19:30 +01:00
}
} ,
} ,
)
user_message = UserMessage . objects . get ( user_profile = self . user_profile , message = message )
2019-02-14 01:23:55 +01:00
self . assertEqual ( user_message . flags . active_mobile_push_notification , False )
2018-07-28 14:31:45 +02:00
2023-11-07 15:12:37 +01:00
remote_realm_count = RealmCount . objects . values ( " property " , " subgroup " , " value " ) . last ( )
self . assertEqual (
remote_realm_count ,
dict (
property = " mobile_pushes_sent::day " ,
subgroup = None ,
value = len ( android_devices ) + len ( apple_devices ) - 2 ,
) ,
)
2017-11-05 10:51:25 +01:00
def test_user_message_does_not_exist ( self ) - > None :
2017-11-10 00:51:06 +01:00
""" This simulates a condition that should only be an error if the user is
not long - term idle ; we fake it , though , in the sense that the user should
not have received the message in the first place """
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_stream " )
sender = self . example_user ( " iago " )
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
self . subscribe ( sender , " public_stream " )
2020-03-07 11:43:05 +01:00
message_id = self . send_stream_message ( sender , " public_stream " , " test " )
2021-02-12 08:20:45 +01:00
missed_message = { " message_id " : message_id }
2024-07-12 02:30:32 +02:00
with (
self . assertLogs ( " zerver.lib.push_notifications " , level = " ERROR " ) as logger ,
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2021-07-25 14:26:24 +02:00
self . assertEqual (
" ERROR:zerver.lib.push_notifications: "
f " Could not find UserMessage with message_id { message_id } and user_id { self . user_profile . id } "
" \n NoneType: None " , # This is an effect of using `exc_info=True` in the actual logger.
logger . output [ 0 ] ,
2020-05-02 08:44:14 +02:00
)
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2017-11-10 00:51:06 +01:00
2022-03-09 23:30:38 +01:00
def test_user_message_does_not_exist_remove ( self ) - > None :
""" This simulates a condition that should only be an error if the user is
not long - term idle ; we fake it , though , in the sense that the user should
not have received the message in the first place """
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2022-03-09 23:30:38 +01:00
self . make_stream ( " public_stream " )
sender = self . example_user ( " iago " )
self . subscribe ( sender , " public_stream " )
message_id = self . send_stream_message ( sender , " public_stream " , " test " )
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
mock . patch (
" zerver.lib.push_notifications.send_android_push_notification " , return_value = 1
) as mock_send_android ,
mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification " , return_value = 1
) as mock_send_apple ,
) :
2022-03-09 23:30:38 +01:00
handle_remove_push_notification ( self . user_profile . id , [ message_id ] )
mock_push_notifications . assert_called_once ( )
mock_send_android . assert_called_once ( )
mock_send_apple . assert_called_once ( )
2017-11-17 10:47:43 +01:00
def test_user_message_soft_deactivated ( self ) - > None :
2017-11-10 00:51:06 +01:00
""" This simulates a condition that should only be an error if the user is
not long - term idle ; we fake it , though , in the sense that the user should
not have received the message in the first place """
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
2024-06-14 19:24:10 +02:00
self . setup_fcm_tokens ( )
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_stream " )
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
sender = self . example_user ( " iago " )
2021-02-12 08:20:45 +01:00
self . subscribe ( self . user_profile , " public_stream " )
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
self . subscribe ( sender , " public_stream " )
2020-08-09 14:15:58 +02:00
logger_string = " zulip.soft_deactivation "
2021-02-12 08:20:45 +01:00
with self . assertLogs ( logger_string , level = " INFO " ) as info_logs :
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
2021-02-12 08:19:30 +01:00
self . assertEqual (
info_logs . output ,
[
f " INFO: { logger_string } :Soft deactivated user { self . user_profile . id } " ,
f " INFO: { logger_string } :Soft-deactivated batch of 1 users; 0 remain to process " ,
] ,
)
2020-03-07 11:43:05 +01:00
message_id = self . send_stream_message ( sender , " public_stream " , " test " )
2017-11-10 00:51:06 +01:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message_id ,
2023-07-24 14:26:43 +02:00
" trigger " : NotificationTriggers . STREAM_PUSH ,
2017-11-10 00:51:06 +01:00
}
android_devices = list (
2024-06-13 20:53:09 +02:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . FCM )
2021-02-12 08:19:30 +01:00
)
2017-11-10 00:51:06 +01:00
apple_devices = list (
2021-02-12 08:19:30 +01:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . APNS )
)
2024-07-12 02:30:32 +02:00
with (
mock . patch (
" zerver.lib.push_notifications.get_message_payload_apns " ,
return_value = { " apns " : True } ,
) ,
mock . patch (
" zerver.lib.push_notifications.get_message_payload_gcm " ,
return_value = ( { " gcm " : True } , { } ) ,
) ,
mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification " , return_value = 1
) as mock_send_apple ,
mock . patch (
" zerver.lib.push_notifications.send_android_push_notification " , return_value = 1
) as mock_send_android ,
mock . patch ( " zerver.lib.push_notifications.logger.error " ) as mock_logger ,
mock . patch (
" zerver.lib.push_notifications.push_notifications_configured " , return_value = True
) as mock_push_notifications ,
) :
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2017-11-10 00:51:06 +01:00
mock_logger . assert_not_called ( )
2023-01-02 20:50:23 +01:00
user_identity = UserPushIdentityCompat ( user_id = self . user_profile . id )
2022-02-23 20:25:30 +01:00
mock_send_apple . assert_called_with ( user_identity , apple_devices , { " apns " : True } )
mock_send_android . assert_called_with ( user_identity , android_devices , { " gcm " : True } , { } )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2017-05-11 10:55:05 +02:00
2023-12-08 20:53:31 +01:00
@override_settings ( MAX_GROUP_SIZE_FOR_MENTION_REACTIVATION = 2 )
2023-11-23 22:10:26 +01:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_configured " , return_value = True )
2022-04-15 22:07:22 +02:00
def test_user_push_soft_reactivate_soft_deactivated_user (
self , mock_push_notifications : mock . MagicMock
) - > None :
othello = self . example_user ( " othello " )
cordelia = self . example_user ( " cordelia " )
2023-12-08 20:53:31 +01:00
zulip_realm = get_realm ( " zulip " )
# user groups having upto 'MAX_GROUP_SIZE_FOR_MENTION_REACTIVATION'
# members are small user groups.
small_user_group = check_add_user_group (
zulip_realm ,
" small_user_group " ,
[ self . user_profile , othello ] ,
2022-11-21 03:37:11 +01:00
acting_user = None ,
2022-04-15 22:07:22 +02:00
)
2023-12-08 20:53:31 +01:00
large_user_group = check_add_user_group (
zulip_realm , " large_user_group " , [ self . user_profile ] , acting_user = None
)
subgroup = check_add_user_group (
zulip_realm , " subgroup " , [ othello , cordelia ] , acting_user = None
)
add_subgroups_to_user_group ( large_user_group , [ subgroup ] , acting_user = None )
2022-04-15 22:07:22 +02:00
# Personal mention in a stream message should soft reactivate the user
2023-08-19 15:49:00 +02:00
def mention_in_stream ( ) - > None :
2022-04-15 22:07:22 +02:00
mention = f " @** { self . user_profile . full_name } ** "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
2023-07-24 14:26:43 +02:00
{
" message_id " : stream_mentioned_message_id ,
" trigger " : NotificationTriggers . MENTION ,
} ,
2022-04-15 22:07:22 +02:00
)
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
self . expect_soft_reactivation ( self . user_profile , mention_in_stream )
2023-06-19 16:26:12 +02:00
# Direct message should soft reactivate the user
2023-08-19 15:49:00 +02:00
def direct_message ( ) - > None :
2022-04-15 22:07:22 +02:00
# Soft reactivate the user by sending a personal message
personal_message_id = self . send_personal_message ( othello , self . user_profile , " Message " )
handle_push_notification (
self . user_profile . id ,
2023-07-24 14:26:43 +02:00
{
" message_id " : personal_message_id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2023-07-24 14:26:43 +02:00
} ,
2022-04-15 22:07:22 +02:00
)
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
self . expect_soft_reactivation ( self . user_profile , direct_message )
2023-07-05 11:59:56 +02:00
# User FOLLOWS the topic.
# 'wildcard_mentions_notify' is disabled to verify the corner case when only
# 'enable_followed_topic_wildcard_mentions_notify' is enabled (True by default).
do_set_user_topic_visibility_policy (
self . user_profile ,
get_stream ( " Denmark " , self . user_profile . realm ) ,
" test " ,
visibility_policy = UserTopic . VisibilityPolicy . FOLLOWED ,
)
do_change_user_setting (
self . user_profile , " wildcard_mentions_notify " , False , acting_user = None
)
2023-06-09 19:43:36 +02:00
# Topic wildcard mention in followed topic should soft reactivate the user
# user should be a topic participant
2023-10-09 20:41:12 +02:00
self . send_stream_message ( self . user_profile , " Denmark " , " topic participant " )
2023-08-19 15:49:00 +02:00
def send_topic_wildcard_mention ( ) - > None :
2023-06-09 19:43:36 +02:00
mention = " @**topic** "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{
" message_id " : stream_mentioned_message_id ,
2023-07-24 14:26:43 +02:00
" trigger " : NotificationTriggers . TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC ,
2023-06-09 19:43:36 +02:00
} ,
)
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
self . expect_soft_reactivation ( self . user_profile , send_topic_wildcard_mention )
2023-06-03 16:51:38 +02:00
# Stream wildcard mention in followed topic should NOT soft reactivate the user
2023-08-19 15:49:00 +02:00
def send_stream_wildcard_mention ( ) - > None :
2023-06-21 21:15:23 +02:00
mention = " @**all** "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{
" message_id " : stream_mentioned_message_id ,
2023-07-24 14:26:43 +02:00
" trigger " : NotificationTriggers . STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC ,
2023-06-21 21:15:23 +02:00
} ,
)
2023-07-05 11:59:56 +02:00
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
self . expect_to_stay_long_term_idle ( self . user_profile , send_stream_wildcard_mention )
2023-07-05 11:59:56 +02:00
# Reset
do_set_user_topic_visibility_policy (
self . user_profile ,
get_stream ( " Denmark " , self . user_profile . realm ) ,
" test " ,
visibility_policy = UserTopic . VisibilityPolicy . INHERIT ,
)
do_change_user_setting (
self . user_profile , " wildcard_mentions_notify " , True , acting_user = None
)
2023-06-21 21:15:23 +02:00
2023-06-09 19:43:36 +02:00
# Topic Wildcard mention should soft reactivate the user
2023-08-19 15:49:00 +02:00
self . expect_soft_reactivation ( self . user_profile , send_topic_wildcard_mention )
2023-06-09 19:43:36 +02:00
2023-06-03 16:51:38 +02:00
# Stream Wildcard mention should NOT soft reactivate the user
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
self . expect_to_stay_long_term_idle ( self . user_profile , send_stream_wildcard_mention )
2022-04-15 22:07:22 +02:00
2023-12-08 20:53:31 +01:00
# Small group mention should soft reactivate the user
def send_small_group_mention ( ) - > None :
mention = " @*small_user_group* "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{
" message_id " : stream_mentioned_message_id ,
" trigger " : NotificationTriggers . MENTION ,
" mentioned_user_group_id " : small_user_group . id ,
} ,
)
self . soft_deactivate_main_user ( )
self . expect_soft_reactivation ( self . user_profile , send_small_group_mention )
# Large group mention should NOT soft reactivate the user
def send_large_group_mention ( ) - > None :
2022-04-15 22:07:22 +02:00
mention = " @*large_user_group* "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{
" message_id " : stream_mentioned_message_id ,
2023-07-24 14:26:43 +02:00
" trigger " : NotificationTriggers . MENTION ,
2022-04-15 22:07:22 +02:00
" mentioned_user_group_id " : large_user_group . id ,
} ,
)
2023-08-19 15:49:00 +02:00
self . soft_deactivate_main_user ( )
2023-12-08 20:53:31 +01:00
self . expect_to_stay_long_term_idle ( self . user_profile , send_large_group_mention )
2023-08-19 15:49:00 +02:00
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.logger.info " )
2023-11-23 22:10:26 +01:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_configured " , return_value = True )
2021-02-12 08:19:30 +01:00
def test_user_push_notification_already_active (
self , mock_push_notifications : mock . MagicMock , mock_info : mock . MagicMock
) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2020-07-14 08:12:46 +02:00
UserMessage . objects . create (
user_profile = user_profile ,
flags = UserMessage . flags . active_mobile_push_notification ,
message = message ,
)
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
2023-08-04 19:54:41 +02:00
" trigger " : NotificationTriggers . DIRECT_MESSAGE ,
2020-07-14 08:12:46 +02:00
}
handle_push_notification ( user_profile . id , missed_message )
mock_push_notifications . assert_called_once ( )
# Check we didn't proceed ahead and function returned.
mock_info . assert_not_called ( )
2021-02-12 08:19:30 +01:00
2017-08-29 01:03:29 +02:00
class TestAPNs(PushNotificationTest):
    """Tests for the low-level APNs send path
    (zerver.lib.push_notifications.send_apple_push_notification) and its
    helpers, using mocked APNs connections."""

    def devices(self) -> list[DeviceToken]:
        # All APNs device tokens registered for the main test user.
        return list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
        )

    def send(
        self,
        devices: list[PushDeviceToken | RemotePushDeviceToken] | None = None,
        payload_data: Mapping[str, Any] = {},
    ) -> None:
        # Convenience wrapper: send a notification for the main test user to
        # the given devices (defaults to all of that user's APNs tokens).
        send_apple_push_notification(
            UserPushIdentityCompat(user_id=self.user_profile.id),
            devices if devices is not None else self.devices(),
            payload_data,
        )

    def test_get_apns_context(self) -> None:
        """This test is pretty hacky, and needs to carefully reset the state
        it modifies in order to avoid leaking state that can lead to
        nondeterministic results for other tests.
        """
        import zerver.lib.push_notifications

        zerver.lib.push_notifications.get_apns_context.cache_clear()
        try:
            with (
                self.settings(APNS_CERT_FILE="/foo.pem"),
                mock.patch("ssl.SSLContext.load_cert_chain") as mock_load_cert_chain,
            ):
                apns_context = get_apns_context()
                assert apns_context is not None
                try:
                    mock_load_cert_chain.assert_called_once_with("/foo.pem")
                    # The APNs client must run on the event loop the context created.
                    assert apns_context.apns.pool.loop == apns_context.loop
                finally:
                    # Close the event loop created for this context.
                    apns_context.loop.close()
        finally:
            # Reset the cache for `get_apns_context` so that we don't
            # leak changes to the rest of the world.
            zerver.lib.push_notifications.get_apns_context.cache_clear()

    def test_not_configured(self) -> None:
        # With no APNs context configured, sending must be a debug-logged
        # no-op, and initialize_push_notifications must log a warning.
        self.setup_apns_tokens()
        with (
            mock.patch("zerver.lib.push_notifications.get_apns_context") as mock_get,
            self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger,
        ):
            mock_get.return_value = None
            self.send()
            notification_drop_log = (
                "DEBUG:zerver.lib.push_notifications:"
                "APNs: Dropping a notification because nothing configured.  "
                "Set ZULIP_SERVICES_URL (or APNS_CERT_FILE)."
            )

            from zerver.lib.push_notifications import initialize_push_notifications

            initialize_push_notifications()
            mobile_notifications_not_configured_log = (
                "WARNING:zerver.lib.push_notifications:"
                "Mobile push notifications are not configured.\n"
                "  See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html"
            )

            self.assertEqual(
                [notification_drop_log, mobile_notifications_not_configured_log], logger.output
            )

    def test_success(self) -> None:
        # A successful send logs one INFO line per device.
        self.setup_apns_tokens()
        with (
            self.mock_apns() as (apns_context, send_notification),
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger,
        ):
            send_notification.return_value.is_successful = True
            self.send()
            for device in self.devices():
                self.assertIn(
                    f"INFO:zerver.lib.push_notifications:APNs: Success sending for user <id:{self.user_profile.id}> to device {device.token}",
                    logger.output,
                )

    def test_http_retry_eventually_fails(self) -> None:
        # A connection-level aioapns error surfaces as an ERROR log that
        # suggests checking certificate expiration.
        self.setup_apns_tokens()
        with (
            self.mock_apns() as (apns_context, send_notification),
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger,
        ):
            send_notification.side_effect = aioapns.exceptions.ConnectionError()
            self.send(devices=self.devices()[0:1])
            self.assertIn(
                f"ERROR:zerver.lib.push_notifications:APNs: ConnectionError sending for user <id:{self.user_profile.id}> to device {self.devices()[0].token}; check certificate expiration",
                logger.output,
            )

    def test_other_exception(self) -> None:
        # Any unexpected exception while sending is caught and logged.
        self.setup_apns_tokens()
        with (
            self.mock_apns() as (apns_context, send_notification),
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger,
        ):
            send_notification.side_effect = IOError
            self.send(devices=self.devices()[0:1])
            # assertIn against logger.output[1]: the entry also carries the
            # traceback after this message prefix.
            self.assertIn(
                f"ERROR:zerver.lib.push_notifications:APNs: Error sending for user <id:{self.user_profile.id}> to device {self.devices()[0].token}",
                logger.output[1],
            )

    def test_internal_server_error(self) -> None:
        # An unsuccessful APNs response is logged as a WARNING including the
        # response's description.
        self.setup_apns_tokens()
        with (
            self.mock_apns() as (apns_context, send_notification),
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger,
        ):
            send_notification.return_value.is_successful = False
            send_notification.return_value.description = "InternalServerError"
            self.send(devices=self.devices()[0:1])
            self.assertIn(
                f"WARNING:zerver.lib.push_notifications:APNs: Failed to send for user <id:{self.user_profile.id}> to device {self.devices()[0].token}: InternalServerError",
                logger.output,
            )

    def test_log_missing_ios_app_id(self) -> None:
        # A remote APNs token without an ios_app_id is reported via ERROR log.
        device = RemotePushDeviceToken.objects.create(
            kind=RemotePushDeviceToken.APNS,
            token="1234",
            ios_app_id=None,
            user_id=self.user_profile.id,
            server=self.server,
        )
        with (
            self.mock_apns() as (apns_context, send_notification),
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger,
        ):
            send_notification.return_value.is_successful = True
            self.send(devices=[device])
            self.assertIn(
                f"ERROR:zerver.lib.push_notifications:APNs: Missing ios_app_id for user <id:{self.user_profile.id}> device {device.token}",
                logger.output,
            )

    def test_modernize_apns_payload(self) -> None:
        # Legacy payloads (top-level message_ids) are converted to the modern
        # shape with custom.zulip.message_ids; modern payloads pass through.
        payload = {
            "alert": "Message from Hamlet",
            "badge": 0,
            "custom": {"zulip": {"message_ids": [3]}},
        }
        self.assertEqual(
            modernize_apns_payload(
                {"alert": "Message from Hamlet", "message_ids": [3], "badge": 0}
            ),
            payload,
        )
        self.assertEqual(modernize_apns_payload(payload), payload)

    @mock.patch("zerver.lib.push_notifications.push_notifications_configured", return_value=True)
    def test_apns_badge_count(self, mock_push_notifications: mock.MagicMock) -> None:
        # get_apns_badge_count is currently pinned at 0; the "future" variant
        # tracks the user's true unread push-relevant count.
        user_profile = self.example_user("othello")
        # Test APNs badge count for personal messages.
        message_ids = [
            self.send_personal_message(self.sender, user_profile, "Content of message")
            for i in range(3)
        ]
        self.assertEqual(get_apns_badge_count(user_profile), 0)
        self.assertEqual(get_apns_badge_count_future(user_profile), 3)
        # Similarly, test APNs badge count for stream mention.
        stream = self.subscribe(user_profile, "Denmark")
        message_ids += [
            self.send_stream_message(
                self.sender, stream.name, "Hi, @**Othello, the Moor of Venice**"
            )
            for i in range(2)
        ]
        self.assertEqual(get_apns_badge_count(user_profile), 0)
        self.assertEqual(get_apns_badge_count_future(user_profile), 5)
        num_messages = len(message_ids)
        # Mark the messages as read and test whether
        # the count decreases correctly.
        for i, message_id in enumerate(message_ids):
            with self.captureOnCommitCallbacks(execute=True):
                do_update_message_flags(user_profile, "add", "read", [message_id])
            self.assertEqual(get_apns_badge_count(user_profile), 0)
            self.assertEqual(get_apns_badge_count_future(user_profile), num_messages - i - 1)
        mock_push_notifications.assert_called()
2021-02-12 08:19:30 +01:00
2017-05-11 09:26:00 +02:00
class TestGetAPNsPayload ( PushNotificationTest ) :
2019-02-14 00:54:56 +01:00
    def test_get_message_payload_apns_personal_message(self) -> None:
        """Payload for a one-on-one direct message: alert title is the
        sender's full name and recipient_type is "private"."""
        user_profile = self.example_user("othello")
        message_id = self.send_personal_message(
            self.sender,
            user_profile,
            "Content of personal message",
        )
        message = Message.objects.get(id=message_id)
        payload = get_message_payload_apns(
            user_profile, message, NotificationTriggers.DIRECT_MESSAGE
        )
        expected = {
            "alert": {
                "title": "King Hamlet",
                "subtitle": "",
                "body": message.content,
            },
            "badge": 0,
            "sound": "default",
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "private",
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": user_profile.id,
                    "time": datetime_to_timestamp(message.date_sent),
                },
            },
        }
        self.assertDictEqual(payload, expected)
2023-11-23 22:10:26 +01:00
    @mock.patch("zerver.lib.push_notifications.push_notifications_configured", return_value=True)
    def test_get_message_payload_apns_group_direct_message(
        self, mock_push_notifications: mock.MagicMock
    ) -> None:
        """Payload for a group direct message: title lists all recipients and
        pm_users holds the sorted, comma-separated recipient user IDs."""
        user_profile = self.example_user("othello")
        message_id = self.send_group_direct_message(
            self.sender, [self.example_user("othello"), self.example_user("cordelia")]
        )
        message = Message.objects.get(id=message_id)
        payload = get_message_payload_apns(
            user_profile, message, NotificationTriggers.DIRECT_MESSAGE
        )
        expected = {
            "alert": {
                "title": "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice",
                "subtitle": "King Hamlet:",
                "body": message.content,
            },
            "sound": "default",
            "badge": 0,
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "private",
                    # All subscribers to this group-DM recipient, sorted by ID.
                    "pm_users": ",".join(
                        str(user_profile_id)
                        for user_profile_id in sorted(
                            s.user_profile_id
                            for s in Subscription.objects.filter(recipient=message.recipient)
                        )
                    ),
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": user_profile.id,
                    "time": datetime_to_timestamp(message.date_sent),
                },
            },
        }
        self.assertDictEqual(payload, expected)
        mock_push_notifications.assert_called()
2017-12-12 05:40:11 +01:00
2023-08-29 12:37:41 +02:00
    def _test_get_message_payload_apns_stream_message(self, trigger: str) -> None:
        """Shared check for stream-message payloads: the given trigger should
        produce the standard stream payload (stream name/id, topic, etc.)."""
        stream = Stream.objects.filter(name="Verona").get()
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        payload = get_message_payload_apns(self.sender, message, trigger)
        expected = {
            "alert": {
                "title": "#Verona > Test topic",
                "subtitle": "King Hamlet:",
                "body": message.content,
            },
            "sound": "default",
            "badge": 0,
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "stream",
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "stream": stream.name,
                    "stream_id": stream.id,
                    "topic": message.topic_name(),
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": self.sender.id,
                    "time": datetime_to_timestamp(message.date_sent),
                },
            },
        }
        self.assertDictEqual(payload, expected)
2023-08-29 12:37:41 +02:00
def test_get_message_payload_apns_stream_message ( self ) - > None :
self . _test_get_message_payload_apns_stream_message ( NotificationTriggers . STREAM_PUSH )
def test_get_message_payload_apns_followed_topic_message ( self ) - > None :
self . _test_get_message_payload_apns_stream_message ( NotificationTriggers . FOLLOWED_TOPIC_PUSH )
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
    def test_get_message_payload_apns_stream_mention(self) -> None:
        """Payload for a personal @-mention in a stream: subtitle says the
        sender mentioned you, and user_id is the mentioned recipient."""
        user_profile = self.example_user("othello")
        stream = Stream.objects.filter(name="Verona").get()
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        payload = get_message_payload_apns(user_profile, message, NotificationTriggers.MENTION)
        expected = {
            "alert": {
                "title": "#Verona > Test topic",
                "subtitle": "King Hamlet mentioned you:",
                "body": message.content,
            },
            "sound": "default",
            "badge": 0,
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "stream",
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "stream": stream.name,
                    "stream_id": stream.id,
                    "topic": message.topic_name(),
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": user_profile.id,
                    "time": datetime_to_timestamp(message.date_sent),
                },
            },
        }
        self.assertDictEqual(payload, expected)
2020-05-20 10:01:15 +02:00
    def test_get_message_payload_apns_user_group_mention(self) -> None:
        """Payload for a user-group mention: subtitle names the group, and
        mentioned_user_group_id/name are included in the custom data."""
        user_profile = self.example_user("othello")
        user_group = check_add_user_group(
            get_realm("zulip"), "test_user_group", [user_profile], acting_user=None
        )
        stream = Stream.objects.filter(name="Verona").get()
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        payload = get_message_payload_apns(
            user_profile, message, NotificationTriggers.MENTION, user_group.id, user_group.name
        )
        expected = {
            "alert": {
                "title": "#Verona > Test topic",
                "subtitle": "King Hamlet mentioned @test_user_group:",
                "body": message.content,
            },
            "sound": "default",
            "badge": 0,
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "stream",
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "stream": stream.name,
                    "stream_id": stream.id,
                    "topic": message.topic_name(),
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": user_profile.id,
                    "mentioned_user_group_id": user_group.id,
                    "mentioned_user_group_name": user_group.name,
                    "time": datetime_to_timestamp(message.date_sent),
                }
            },
        }
        self.assertDictEqual(payload, expected)
2023-08-29 12:24:10 +02:00
    def _test_get_message_payload_apns_wildcard_mention(self, trigger: str) -> None:
        """Shared check for the wildcard-mention triggers: each should render
        the "mentioned everyone" subtitle in the stream payload."""
        user_profile = self.example_user("othello")
        stream = Stream.objects.filter(name="Verona").get()
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        payload = get_message_payload_apns(
            user_profile,
            message,
            trigger,
        )
        expected = {
            "alert": {
                "title": "#Verona > Test topic",
                "subtitle": "King Hamlet mentioned everyone:",
                "body": message.content,
            },
            "sound": "default",
            "badge": 0,
            "custom": {
                "zulip": {
                    "message_ids": [message.id],
                    "recipient_type": "stream",
                    "sender_email": self.sender.email,
                    "sender_id": self.sender.id,
                    "stream": stream.name,
                    "stream_id": stream.id,
                    "topic": message.topic_name(),
                    "server": settings.EXTERNAL_HOST,
                    "realm_id": self.sender.realm.id,
                    "realm_name": self.sender.realm.name,
                    # realm_uri is the legacy name; realm_url is its successor.
                    "realm_uri": self.sender.realm.url,
                    "realm_url": self.sender.realm.url,
                    "user_id": user_profile.id,
                    "time": datetime_to_timestamp(message.date_sent),
                },
            },
        }
        self.assertDictEqual(payload, expected)
2023-08-29 12:24:10 +02:00
def test_get_message_payload_apns_topic_wildcard_mention_in_followed_topic ( self ) - > None :
self . _test_get_message_payload_apns_wildcard_mention (
NotificationTriggers . TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC
)
2023-06-03 16:51:38 +02:00
def test_get_message_payload_apns_stream_wildcard_mention_in_followed_topic ( self ) - > None :
2023-08-29 12:24:10 +02:00
self . _test_get_message_payload_apns_wildcard_mention (
NotificationTriggers . STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC
2023-06-22 14:48:24 +02:00
)
2023-06-07 19:19:33 +02:00
def test_get_message_payload_apns_topic_wildcard_mention ( self ) - > None :
2023-08-29 12:24:10 +02:00
self . _test_get_message_payload_apns_wildcard_mention (
NotificationTriggers . TOPIC_WILDCARD_MENTION
2023-06-07 19:19:33 +02:00
)
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
def test_get_message_payload_apns_stream_wildcard_mention ( self ) - > None :
2023-08-29 12:24:10 +02:00
self . _test_get_message_payload_apns_wildcard_mention (
NotificationTriggers . STREAM_WILDCARD_MENTION
2021-09-03 16:49:27 +02:00
)
2017-05-11 09:26:00 +02:00
2021-02-12 08:19:30 +01:00
@override_settings ( PUSH_NOTIFICATION_REDACT_CONTENT = True )
2019-02-14 00:54:56 +01:00
def test_get_message_payload_apns_redacted_content ( self ) - > None :
2018-10-19 00:09:18 +02:00
user_profile = self . example_user ( " othello " )
2024-07-04 14:05:48 +02:00
message_id = self . send_group_direct_message (
2021-02-12 08:20:45 +01:00
self . sender , [ self . example_user ( " othello " ) , self . example_user ( " cordelia " ) ]
2021-02-12 08:19:30 +01:00
)
2018-02-16 23:18:47 +01:00
message = Message . objects . get ( id = message_id )
2021-09-03 16:49:27 +02:00
payload = get_message_payload_apns (
2023-08-04 19:54:41 +02:00
user_profile , message , NotificationTriggers . DIRECT_MESSAGE
2021-09-03 16:49:27 +02:00
)
2017-10-10 11:14:10 +02:00
expected = {
2021-02-12 08:20:45 +01:00
" alert " : {
2021-04-11 16:26:54 +02:00
" title " : " Cordelia, Lear ' s daughter, King Hamlet, Othello, the Moor of Venice " ,
2021-02-12 08:20:45 +01:00
" subtitle " : " King Hamlet: " ,
2024-03-22 17:37:51 +01:00
" body " : " New message " ,
2017-10-10 11:14:10 +02:00
} ,
2021-02-12 08:20:45 +01:00
" sound " : " default " ,
" badge " : 0 ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " private " ,
" pm_users " : " , " . join (
2021-12-04 02:15:21 +01:00
str ( user_profile_id )
for user_profile_id in sorted (
s . user_profile_id
2021-12-04 01:54:29 +01:00
for s in Subscription . objects . filter ( recipient = message . recipient )
)
2021-02-12 08:19:30 +01:00
) ,
2021-02-12 08:20:45 +01:00
" sender_email " : self . sender . email ,
" sender_id " : self . sender . id ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : self . sender . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : self . sender . realm . name ,
2024-05-06 15:27:22 +02:00
" realm_uri " : self . sender . realm . url ,
2024-05-07 06:08:52 +02:00
" realm_url " : self . sender . realm . url ,
2019-04-20 17:21:26 +02:00
" user_id " : user_profile . id ,
2023-09-29 20:04:09 +02:00
" time " : datetime_to_timestamp ( message . date_sent ) ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
} ,
} ,
2017-10-10 11:14:10 +02:00
}
self . assertDictEqual ( payload , expected )
2023-11-06 10:41:08 +01:00
def test_get_message_payload_apns_stream_message_from_inaccessible_user ( self ) - > None :
self . set_up_db_for_testing_user_access ( )
# Unsubscribe hamlet so that the guest user cannot access hamlet.
self . unsubscribe ( self . sender , " test_stream1 " )
# Reset email visibility to everyone so that we can make sure
# that sender_email field is not set to real email.
reset_email_visibility_to_everyone_in_zulip_realm ( )
hamlet = self . example_user ( " hamlet " )
polonius = self . example_user ( " polonius " )
stream = Stream . objects . get ( name = " test_stream1 " )
# We reset the self.sender field here such that it is set
# to the UserProfile object with latest "realm" field.
self . sender = hamlet
message = self . get_message ( Recipient . STREAM , stream . id , stream . realm_id )
payload = get_message_payload_apns (
polonius , message , NotificationTriggers . STREAM_PUSH , can_access_sender = False
)
expected = {
" alert " : {
" title " : " #test_stream1 > Test topic " ,
" subtitle " : " Unknown user: " ,
" body " : message . content ,
} ,
" sound " : " default " ,
" badge " : 0 ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " stream " ,
" sender_email " : f " user { hamlet . id } @zulip.testserver " ,
" sender_id " : hamlet . id ,
" stream " : stream . name ,
" stream_id " : stream . id ,
" topic " : message . topic_name ( ) ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : hamlet . realm . id ,
2023-12-07 19:03:21 +01:00
" realm_name " : hamlet . realm . name ,
2024-05-06 15:27:22 +02:00
" realm_uri " : hamlet . realm . url ,
2024-05-07 06:08:52 +02:00
" realm_url " : hamlet . realm . url ,
2023-11-06 10:41:08 +01:00
" user_id " : polonius . id ,
" time " : datetime_to_timestamp ( message . date_sent ) ,
}
} ,
}
self . assertDictEqual ( payload , expected )
2021-02-12 08:19:30 +01:00
2017-05-11 10:15:00 +02:00
class TestGetGCMPayload(PushNotificationTest):
    """Tests for the FCM ("GCM") payload builder, get_message_payload_gcm."""

    def _test_get_message_payload_gcm_stream_message(
        self,
        truncate_content: bool = False,
        mentioned_user_group_id: int | None = None,
        mentioned_user_group_name: str | None = None,
    ) -> None:
        """Shared driver: build a stream message and verify the FCM payload
        and options, optionally exercising truncation / group mentions."""
        stream = Stream.objects.filter(name="Verona").get()
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        expected_content = message.content
        if truncate_content:
            # Make the content long enough that it gets truncated to 200
            # characters plus an ellipsis.
            message.content = "a" * 210
            message.rendered_content = "a" * 210
            message.save()
            expected_content = "a" * 200 + "…"

        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(
            hamlet, message, mentioned_user_group_id, mentioned_user_group_name
        )
        expected_payload: dict[str, Any] = {
            "user_id": hamlet.id,
            "event": "message",
            "zulip_message_id": message.id,
            "time": datetime_to_timestamp(message.date_sent),
            "content": expected_content,
            "content_truncated": truncate_content,
            "server": settings.EXTERNAL_HOST,
            "realm_id": hamlet.realm.id,
            "realm_name": hamlet.realm.name,
            "realm_uri": hamlet.realm.url,
            "realm_url": hamlet.realm.url,
            "sender_id": hamlet.id,
            "sender_email": hamlet.email,
            "sender_full_name": "King Hamlet",
            "sender_avatar_url": absolute_avatar_url(message.sender),
            "recipient_type": "stream",
            "stream": stream.name,
            "stream_id": stream.id,
            "topic": message.topic_name(),
        }
        if mentioned_user_group_id is not None:
            expected_payload["mentioned_user_group_id"] = mentioned_user_group_id
            expected_payload["mentioned_user_group_name"] = mentioned_user_group_name
        self.assertDictEqual(payload, expected_payload)
        self.assertDictEqual(gcm_options, {"priority": "high"})

    # Here, the payload is notification trigger independent. This test covers
    # the case when the trigger for push notifications is personal mention,
    # wildcard mention, stream push, or followed topic push.
    def test_get_message_payload_gcm_stream_message(self) -> None:
        self._test_get_message_payload_gcm_stream_message()

    def test_get_message_payload_gcm_stream_message_truncate_content(self) -> None:
        self._test_get_message_payload_gcm_stream_message(truncate_content=True)

    def test_get_message_payload_gcm_user_group_mention(self) -> None:
        # Note that the @mobile_team user group doesn't actually
        # exist; this test is just verifying the formatting logic.
        self._test_get_message_payload_gcm_stream_message(
            mentioned_user_group_id=3,
            mentioned_user_group_name="mobile_team",
        )

    def test_get_message_payload_gcm_direct_message(self) -> None:
        """FCM payload for a 1:1 direct message."""
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        self.assertDictEqual(
            payload,
            {
                "user_id": hamlet.id,
                "event": "message",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                "content": message.content,
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_name": hamlet.realm.name,
                "realm_uri": hamlet.realm.url,
                "realm_url": hamlet.realm.url,
                "sender_id": hamlet.id,
                "sender_email": hamlet.email,
                "sender_full_name": "King Hamlet",
                "sender_avatar_url": absolute_avatar_url(message.sender),
                "recipient_type": "private",
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})

    @override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
    def test_get_message_payload_gcm_redacted_content(self) -> None:
        """With redaction enabled, the FCM content is the generic placeholder."""
        stream = Stream.objects.get(name="Denmark")
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        self.assertDictEqual(
            payload,
            {
                "user_id": hamlet.id,
                "event": "message",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                # The real content must not be sent.
                "content": "New message",
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_name": hamlet.realm.name,
                "realm_uri": hamlet.realm.url,
                "realm_url": hamlet.realm.url,
                "sender_id": hamlet.id,
                "sender_email": hamlet.email,
                "sender_full_name": "King Hamlet",
                "sender_avatar_url": absolute_avatar_url(message.sender),
                "recipient_type": "stream",
                "topic": "Test topic",
                "stream": "Denmark",
                "stream_id": stream.id,
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})

    def test_get_message_payload_gcm_stream_message_from_inaccessible_user(self) -> None:
        """When the recipient cannot access the sender, only a dummy email and
        the "Unknown user" identity may appear in the FCM payload."""
        self.set_up_db_for_testing_user_access()
        # Unsubscribe hamlet so that the guest user cannot access hamlet.
        self.unsubscribe(self.sender, "test_stream1")
        # Reset email visibility to everyone so that we can make sure
        # that sender_email field is not set to real email.
        reset_email_visibility_to_everyone_in_zulip_realm()
        hamlet = self.example_user("hamlet")
        polonius = self.example_user("polonius")
        stream = Stream.objects.get(name="test_stream1")
        # We reset the self.sender field here such that it is set
        # to the UserProfile object with latest "realm" field.
        self.sender = hamlet
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        payload, gcm_options = get_message_payload_gcm(polonius, message, can_access_sender=False)
        self.assertDictEqual(
            payload,
            {
                "user_id": polonius.id,
                "event": "message",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                "content": message.content,
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_name": hamlet.realm.name,
                "realm_uri": hamlet.realm.url,
                "realm_url": hamlet.realm.url,
                "sender_id": hamlet.id,
                "sender_email": f"user{hamlet.id}@zulip.testserver",
                "sender_full_name": "Unknown user",
                "sender_avatar_url": get_avatar_for_inaccessible_user(),
                "recipient_type": "stream",
                "stream": stream.name,
                "stream_id": stream.id,
                "topic": message.topic_name(),
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})
2017-10-10 11:14:10 +02:00
2023-12-12 00:06:37 +01:00
class TestSendNotificationsToBouncer(PushNotificationTest):
    """Tests for send_notifications_to_bouncer's interaction with the
    push-notification bouncer API."""

    def test_send_notifications_to_bouncer_when_no_devices(self) -> None:
        """With no registered devices, the bouncer must not be contacted."""
        user = self.example_user("hamlet")
        with (
            mock.patch("zerver.lib.remote_server.send_to_push_bouncer") as mock_send,
            self.assertLogs("zerver.lib.push_notifications", level="INFO") as mock_logging_info,
        ):
            send_notifications_to_bouncer(
                user, {"apns": True}, {"gcm": True}, {}, android_devices=[], apple_devices=[]
            )
            self.assertIn(
                f"INFO:zerver.lib.push_notifications:Skipping contacting the bouncer for user {user.id} because there are no registered devices",
                mock_logging_info.output,
            )
        mock_send.assert_not_called()

    @mock.patch("zerver.lib.remote_server.send_to_push_bouncer")
    def test_send_notifications_to_bouncer(self, mock_send: mock.MagicMock) -> None:
        """Happy path: payloads are forwarded, analytics are recorded, and
        devices the bouncer reports as deleted are cleaned up locally."""
        user = self.example_user("hamlet")
        self.setup_apns_tokens()
        self.setup_fcm_tokens()

        android_devices = PushDeviceToken.objects.filter(kind=PushDeviceToken.FCM)
        apple_devices = PushDeviceToken.objects.filter(kind=PushDeviceToken.APNS)
        self.assertNotEqual(android_devices.count(), 0)
        self.assertNotEqual(apple_devices.count(), 0)
        # NOTE: evaluating the token list comprehensions here caches the
        # querysets, which matters because the devices get deleted below.
        mock_send.return_value = {
            "total_android_devices": 1,
            "total_apple_devices": 3,
            # This response tests the logic of the server taking
            # deleted_devices from the bouncer and deleting the
            # corresponding PushDeviceTokens - because the bouncer is
            # communicating that those devices have been deleted due
            # to failures from Apple/Google and have no further user.
            "deleted_devices": DevicesToCleanUpDict(
                android_devices=[device.token for device in android_devices],
                apple_devices=[device.token for device in apple_devices],
            ),
            "realm": {"can_push": True, "expected_end_timestamp": None},
        }
        with self.assertLogs("zerver.lib.push_notifications", level="INFO") as mock_logging_info:
            send_notifications_to_bouncer(
                user, {"apns": True}, {"gcm": True}, {}, list(android_devices), list(apple_devices)
            )
        post_data = {
            "user_uuid": user.uuid,
            "user_id": user.id,
            "realm_uuid": user.realm.uuid,
            "apns_payload": {"apns": True},
            "gcm_payload": {"gcm": True},
            "gcm_options": {},
            "android_devices": [device.token for device in android_devices],
            "apple_devices": [device.token for device in apple_devices],
        }
        mock_send.assert_called_with(
            "POST",
            "push/notify",
            orjson.dumps(post_data),
            extra_headers={"Content-type": "application/json"},
        )
        self.assertIn(
            f"INFO:zerver.lib.push_notifications:Sent mobile push notifications for user {user.id} through bouncer: 1 via FCM devices, 3 via APNs devices",
            mock_logging_info.output,
        )

        remote_realm_count = RealmCount.objects.values("property", "subgroup", "value").last()
        self.assertEqual(
            remote_realm_count,
            dict(
                property="mobile_pushes_sent::day",
                subgroup=None,
                value=4,
            ),
        )

        # The bouncer's deleted_devices response must have removed every token.
        self.assertEqual(PushDeviceToken.objects.filter(kind=PushDeviceToken.APNS).count(), 0)
        self.assertEqual(PushDeviceToken.objects.filter(kind=PushDeviceToken.FCM).count(), 0)

        # Now simulating getting "can_push" as False from the bouncer and verify
        # that we update the realm value.
        mock_send.return_value = {
            "total_android_devices": 1,
            "total_apple_devices": 3,
            "realm": {"can_push": False, "expected_end_timestamp": None},
            "deleted_devices": DevicesToCleanUpDict(
                android_devices=[],
                apple_devices=[],
            ),
        }
        send_notifications_to_bouncer(
            user, {"apns": True}, {"gcm": True}, {}, list(android_devices), list(apple_devices)
        )
        user.realm.refresh_from_db()
        self.assertEqual(user.realm.push_notifications_enabled, False)
2017-05-11 10:31:31 +02:00
2024-07-16 22:52:01 +02:00
@activate_push_notification_service()
class TestSendToPushBouncer(ZulipTestCase):
    """Tests for send_to_push_bouncer's handling of HTTP error responses."""

    def add_mock_response(
        self, body: bytes = orjson.dumps({"msg": "error"}), status: int = 200
    ) -> None:
        """Register a canned bouncer /register response with `responses`."""
        assert settings.ZULIP_SERVICES_URL is not None
        URL = settings.ZULIP_SERVICES_URL + "/api/v1/remotes/register"
        responses.add(responses.POST, URL, body=body, status=status)

    @responses.activate
    def test_500_error(self) -> None:
        self.add_mock_response(status=500)
        with (
            self.assertLogs(level="WARNING") as m,
            self.assertRaises(PushNotificationBouncerServerError),
        ):
            send_to_push_bouncer("POST", "register", {"data": "true"})
        self.assertEqual(m.output, ["WARNING:root:Received 500 from push notification bouncer"])

    @responses.activate
    def test_400_error(self) -> None:
        self.add_mock_response(status=400)
        with self.assertRaises(JsonableError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(exc.exception.msg, "error")

    @responses.activate
    def test_400_error_invalid_server_key(self) -> None:
        from zilencer.auth import InvalidZulipServerError

        # This is the exception our decorator uses for an invalid Zulip server
        error_response = json_response_from_error(InvalidZulipServerError("testRole"))
        self.add_mock_response(body=error_response.content, status=error_response.status_code)
        with self.assertRaises(PushNotificationBouncerError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(
            str(exc.exception),
            "Push notifications bouncer error: "
            "Zulip server auth failure: testRole is not registered -- did you run `manage.py register_server`?",
        )

    @responses.activate
    def test_400_error_when_content_is_not_serializable(self) -> None:
        # A non-JSON body on a 400 surfaces as a JSON decode error.
        self.add_mock_response(body=b"/", status=400)
        with self.assertRaises(orjson.JSONDecodeError):
            send_to_push_bouncer("POST", "register", {"msg": "true"})

    @responses.activate
    def test_300_error(self) -> None:
        # Any unexpected status class is reported as a bouncer error.
        self.add_mock_response(body=b"/", status=300)
        with self.assertRaises(PushNotificationBouncerError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(
            str(exc.exception), "Push notification bouncer returned unexpected status code 300"
        )
2017-05-12 09:47:31 +02:00
2020-06-08 18:38:50 +02:00
class TestPushApi(BouncerTestCase):
    """End-to-end tests of the device-token registration endpoints, with and
    without the push notification bouncer enabled."""

    @responses.activate
    def test_push_api_error_handling(self) -> None:
        user = self.example_user("cordelia")
        self.login_user(user)

        endpoints: list[tuple[str, str, Mapping[str, str]]] = [
            ("/json/users/me/apns_device_token", "apple-tokenaz", {"appid": "org.zulip.Zulip"}),
            ("/json/users/me/android_gcm_reg_id", "android-token", {}),
        ]
        # Test error handling
        for endpoint, label, appid in endpoints:
            # Try adding/removing tokens that are too big...
            broken_token = "a" * 5000  # too big
            result = self.client_post(endpoint, {"token": broken_token, **appid})
            self.assert_json_error(result, "Empty or invalid length token")

            if label == "apple-tokenaz":
                result = self.client_post(
                    endpoint, {"token": "xyz has non-hex characters", **appid}
                )
                self.assert_json_error(result, "Invalid APNS token")

            result = self.client_delete(endpoint, {"token": broken_token})
            self.assert_json_error(result, "Empty or invalid length token")

            # Try adding with missing or invalid appid...
            if appid:
                result = self.client_post(endpoint, {"token": label})
                self.assert_json_error(result, "Missing 'appid' argument")

                result = self.client_post(endpoint, {"token": label, "appid": "'; tables --"})
                self.assert_json_error(result, "appid has invalid format")

            # Try to remove a non-existent token...
            result = self.client_delete(endpoint, {"token": "abcd1234"})
            self.assert_json_error(result, "Token does not exist")

            # Use push notification bouncer and try to remove non-existing tokens.
            with (
                activate_push_notification_service(),
                responses.RequestsMock() as resp,
            ):
                assert settings.ZULIP_SERVICES_URL is not None
                URL = settings.ZULIP_SERVICES_URL + "/api/v1/remotes/push/unregister"
                resp.add_callback(responses.POST, URL, callback=self.request_callback)
                result = self.client_delete(endpoint, {"token": "abcd1234"})
                self.assert_json_error(result, "Token does not exist")
                self.assertTrue(resp.assert_call_count(URL, 1))

    @responses.activate
    def test_push_api_add_and_remove_device_tokens(self) -> None:
        user = self.example_user("cordelia")
        self.login_user(user)

        no_bouncer_requests: list[tuple[str, str, Mapping[str, str]]] = [
            ("/json/users/me/apns_device_token", "apple-tokenaa", {"appid": "org.zulip.Zulip"}),
            ("/json/users/me/android_gcm_reg_id", "android-token-1", {}),
        ]
        bouncer_requests: list[tuple[str, str, Mapping[str, str]]] = [
            ("/json/users/me/apns_device_token", "apple-tokenbb", {"appid": "org.zulip.Zulip"}),
            ("/json/users/me/android_gcm_reg_id", "android-token-2", {}),
        ]

        # Add tokens without using push notification bouncer.
        for endpoint, token, appid in no_bouncer_requests:
            # Test that we can push twice.
            result = self.client_post(endpoint, {"token": token, **appid})
            self.assert_json_success(result)
            result = self.client_post(endpoint, {"token": token, **appid})
            self.assert_json_success(result)
            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assert_length(tokens, 1)
            self.assertEqual(tokens[0].token, token)

        with activate_push_notification_service():
            self.add_mock_response()
            # Enable push notification bouncer and add tokens.
            for endpoint, token, appid in bouncer_requests:
                # Test that we can push twice.
                result = self.client_post(endpoint, {"token": token, **appid})
                self.assert_json_success(result)
                result = self.client_post(endpoint, {"token": token, **appid})
                self.assert_json_success(result)
                tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
                self.assert_length(tokens, 1)
                self.assertEqual(tokens[0].token, token)
                remote_tokens = list(
                    RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
                )
                self.assert_length(remote_tokens, 1)
                self.assertEqual(remote_tokens[0].token, token)

        # PushDeviceToken will include all the device tokens.
        token_values = list(PushDeviceToken.objects.values_list("token", flat=True))
        self.assertEqual(
            token_values, ["apple-tokenaa", "android-token-1", "apple-tokenbb", "android-token-2"]
        )

        # RemotePushDeviceToken will only include tokens of
        # the devices using push notification bouncer.
        remote_token_values = list(RemotePushDeviceToken.objects.values_list("token", flat=True))
        self.assertEqual(sorted(remote_token_values), ["android-token-2", "apple-tokenbb"])

        # Test removing tokens without using push notification bouncer.
        for endpoint, token, appid in no_bouncer_requests:
            result = self.client_delete(endpoint, {"token": token})
            self.assert_json_success(result)
            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assert_length(tokens, 0)

        # Use push notification bouncer and test removing device tokens.
        # Tokens will be removed both locally and remotely.
        with activate_push_notification_service():
            for endpoint, token, appid in bouncer_requests:
                result = self.client_delete(endpoint, {"token": token})
                self.assert_json_success(result)
                tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
                remote_tokens = list(
                    RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
                )
                self.assert_length(tokens, 0)
                self.assert_length(remote_tokens, 0)

        # Verify that the above process indeed removed all the tokens we created.
        self.assertEqual(RemotePushDeviceToken.objects.all().count(), 0)
        self.assertEqual(PushDeviceToken.objects.all().count(), 0)
2021-02-12 08:19:30 +01:00
2020-07-01 04:19:54 +02:00
class GCMParseOptionsTest(ZulipTestCase):
    """Validation and priority-resolution tests for parse_fcm_options."""

    def test_invalid_option(self) -> None:
        # An unrecognized option key must be rejected outright.
        with self.assertRaises(JsonableError):
            parse_fcm_options({"invalid": True}, {})

    def test_invalid_priority_value(self) -> None:
        # Only known priority values are accepted.
        with self.assertRaises(JsonableError):
            parse_fcm_options({"priority": "invalid"}, {})

    def test_default_priority(self) -> None:
        # With no explicit priority, new-message events default to "high"
        # and everything else to "normal".
        self.assertEqual(parse_fcm_options({}, {"event": "message"}), "high")
        self.assertEqual(parse_fcm_options({}, {"event": "remove"}), "normal")
        self.assertEqual(parse_fcm_options({}, {}), "normal")

    def test_explicit_priority(self) -> None:
        # An explicitly requested priority always wins over the default.
        self.assertEqual(parse_fcm_options({"priority": "normal"}, {}), "normal")
        self.assertEqual(parse_fcm_options({"priority": "high"}, {}), "high")
2021-02-12 08:19:30 +01:00
2019-02-08 22:44:55 +01:00
2024-06-13 14:38:58 +02:00
@mock.patch("zerver.lib.push_notifications.fcm_app")
@mock.patch("zerver.lib.push_notifications.firebase_messaging")
class FCMSendTest(PushNotificationTest):
    """Tests for the FCM (Android) push-delivery code path.

    The class-level patches replace the firebase_admin messaging module and
    the initialized FCM app object, so every test method receives the two
    corresponding mocks as extra arguments (innermost patch first).
    """

    @override
    def setUp(self) -> None:
        super().setUp()
        self.setup_fcm_tokens()

    def get_fcm_data(self, **kwargs: Any) -> dict[str, Any]:
        """Return a basic FCM payload dict, with any overrides applied on top."""
        data = {
            "key 1": "Data 1",
            "key 2": "Data 2",
        }
        data.update(kwargs)
        return data

    def test_fcm_is_none(self, mock_fcm_messaging: mock.MagicMock, fcm_app: mock.MagicMock) -> None:
        """When no FCM app is configured, sending is skipped with a debug log."""
        fcm_app.__bool__.return_value = False
        with self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger:
            send_android_push_notification_to_user(self.user_profile, {}, {})
            self.assertEqual(
                "DEBUG:zerver.lib.push_notifications:"
                "Skipping sending a FCM push notification since ZULIP_SERVICE_PUSH_NOTIFICATIONS "
                "and ANDROID_FCM_CREDENTIALS_PATH are both unset",
                logger.output[0],
            )

    def test_send_raises_error(
        self, mock_fcm_messaging: mock.MagicMock, fcm_app: mock.MagicMock
    ) -> None:
        """A FirebaseError raised by send_each is caught and logged, not propagated."""
        mock_fcm_messaging.send_each.side_effect = firebase_exceptions.FirebaseError(
            firebase_exceptions.UNKNOWN, "error"
        )
        with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
            send_android_push_notification_to_user(self.user_profile, {}, {})
            self.assertIn(
                "WARNING:zerver.lib.push_notifications:Error while pushing to FCM\nTraceback",
                logger.output[0],
            )

    @mock.patch("zerver.lib.push_notifications.logger.warning")
    def test_success(
        self,
        mock_warning: mock.MagicMock,
        mock_fcm_messaging: mock.MagicMock,
        fcm_app: mock.MagicMock,
    ) -> None:
        """A fully successful batch logs one line per device plus a summary."""
        # (Removed a dead local here: a `res` dict was built from
        # self.fcm_tokens but never read; the mocked response below is what
        # actually drives the code under test.)
        response = self.make_fcm_success_response(self.fcm_tokens)
        mock_fcm_messaging.send_each.return_value = response

        data = self.get_fcm_data()
        with self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger:
            send_android_push_notification_to_user(self.user_profile, data, {})
            self.assert_length(logger.output, 3)

            log_msg1 = f"INFO:zerver.lib.push_notifications:FCM: Sending notification for local user <id:{self.user_profile.id}> to 2 devices"
            log_msg2 = f"INFO:zerver.lib.push_notifications:FCM: Sent message with ID: {response.responses[0].message_id} to {hex_to_b64(self.fcm_tokens[0])}"
            log_msg3 = f"INFO:zerver.lib.push_notifications:FCM: Sent message with ID: {response.responses[1].message_id} to {hex_to_b64(self.fcm_tokens[1])}"

            self.assertEqual([log_msg1, log_msg2, log_msg3], logger.output)
        mock_warning.assert_not_called()

    def test_not_registered(
        self, mock_fcm_messaging: mock.MagicMock, fcm_app: mock.MagicMock
    ) -> None:
        """An UnregisteredError response causes the stale token to be deleted."""
        token = hex_to_b64("1111")
        response = self.make_fcm_error_response(
            token, firebase_messaging.UnregisteredError("Requested entity was not found.")
        )
        mock_fcm_messaging.send_each.return_value = response

        def get_count(hex_token: str) -> int:
            # Number of registered FCM device tokens matching hex_token.
            token = hex_to_b64(hex_token)
            return PushDeviceToken.objects.filter(token=token, kind=PushDeviceToken.FCM).count()

        self.assertEqual(get_count("1111"), 1)

        data = self.get_fcm_data()
        with self.assertLogs("zerver.lib.push_notifications", level="INFO") as logger:
            send_android_push_notification_to_user(self.user_profile, data, {})
            self.assertEqual(
                f"INFO:zerver.lib.push_notifications:FCM: Sending notification for local user <id:{self.user_profile.id}> to 2 devices",
                logger.output[0],
            )
            self.assertEqual(
                f"INFO:zerver.lib.push_notifications:FCM: Removing {token} due to NOT_FOUND",
                logger.output[1],
            )
        self.assertEqual(get_count("1111"), 0)

    def test_failure(self, mock_fcm_messaging: mock.MagicMock, fcm_app: mock.MagicMock) -> None:
        """A generic per-token delivery failure is logged as a warning."""
        token = hex_to_b64("1111")
        response = self.make_fcm_error_response(token, firebase_exceptions.UnknownError("Failed"))
        mock_fcm_messaging.send_each.return_value = response

        data = self.get_fcm_data()
        with self.assertLogs("zerver.lib.push_notifications", level="WARNING") as logger:
            send_android_push_notification_to_user(self.user_profile, data, {})
            msg = f"WARNING:zerver.lib.push_notifications:FCM: Delivery failed for {token}: <class 'firebase_admin.exceptions.UnknownError'>:Failed"
            self.assertEqual(msg, logger.output[0])
2016-12-14 13:23:05 +01:00
2021-02-12 08:19:30 +01:00
2019-02-14 01:08:51 +01:00
class TestClearOnRead(ZulipTestCase):
    """Marking messages as read should queue clearing of active mobile pushes."""

    def test_mark_stream_as_read(self) -> None:
        num_messages = 3
        hamlet = self.example_user("hamlet")
        hamlet.enable_stream_push_notifications = True
        hamlet.save()
        stream = self.subscribe(hamlet, "Denmark")

        message_ids = [
            self.send_stream_message(self.example_user("iago"), stream.name, f"yo {i}")
            for i in range(num_messages)
        ]
        # Flag each of those messages as carrying an active mobile push
        # notification for Hamlet.
        UserMessage.objects.filter(
            user_profile_id=hamlet.id,
            message_id__in=message_ids,
        ).update(flags=F("flags").bitor(UserMessage.flags.active_mobile_push_notification))

        with mock_queue_publish("zerver.actions.message_flags.queue_event_on_commit") as mock_publish:
            assert stream.recipient_id is not None
            do_mark_stream_messages_as_read(hamlet, stream.recipient_id)
            batches = []
            for call in mock_publish.call_args_list:
                event = call[0][1]
                batches.append(event["message_ids"])

        # Everything should have been queued for clearing in a single batch,
        # covering exactly the messages we flagged.
        self.assert_length(batches, 1)
        self.assertEqual(sum(len(batch) for batch in batches), len(message_ids))
        self.assertEqual({msg_id for batch in batches for msg_id in batch}, set(message_ids))
2019-02-14 01:08:51 +01:00
2021-02-12 08:19:30 +01:00
2017-10-06 23:16:29 +02:00
class TestPushNotificationsContent(ZulipTestCase):
    """Conversion of rendered message HTML into plain-text push content."""

    def test_fixtures(self) -> None:
        # Reuse the shared markdown fixtures; only cases declaring a
        # text_content field specify an expected plain-text rendering.
        fixtures = orjson.loads(self.fixture_data("markdown_test_cases.json"))
        for case in fixtures["regular_tests"]:
            if "text_content" not in case:
                continue
            with self.subTest(markdown_test_case=case["name"]):
                plain_text = get_mobile_push_content(case["expected_output"])
                self.assertEqual(plain_text, case["text_content"])

    def test_backend_only_fixtures(self) -> None:
        # Cases that depend on server-side state (realm emoji, mention and
        # stream ids), so they can't live in the shared fixture file.
        realm = get_realm("zulip")
        cordelia = self.example_user("cordelia")
        stream = get_stream("Verona", realm)

        cases = [
            (
                "realm_emoji",
                f'<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/{realm.id}/emoji/green_tick.png" title="green tick"> realm emoji.</p>',
                "Testing :green_tick: realm emoji.",
            ),
            (
                "mentions",
                f'<p>Mentioning <span class="user-mention" data-user-id="{cordelia.id}">@Cordelia, Lear\'s daughter</span>.</p>',
                "Mentioning @Cordelia, Lear's daughter.",
            ),
            (
                "stream_names",
                f'<p>Testing stream names <a class="stream" data-stream-id="{stream.id}" href="/#narrow/stream/Verona">#Verona</a>.</p>',
                "Testing stream names #Verona.",
            ),
        ]

        for _name, rendered_content, expected_output in cases:
            self.assertEqual(get_mobile_push_content(rendered_content), expected_output)
2018-05-04 01:40:46 +02:00
2021-02-12 08:19:30 +01:00
2020-08-27 22:46:39 +02:00
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
class PushBouncerSignupTest(ZulipTestCase):
    """Tests for the push-bouncer server registration endpoint.

    Covers registration, re-registration (hostname/key/contact updates),
    deactivation, and the various validation rules on the submitted fields.
    """

    @override
    def setUp(self) -> None:
        super().setUp()
        # Set up a MX lookup mock for all the tests, so that they don't have to
        # worry about it failing, unless they intentionally want to set it up
        # to happen.
        self.mxlookup_patcher = mock.patch("DNS.mxlookup", return_value=[(0, "")])
        self.mxlookup_mock = self.mxlookup_patcher.start()

    @override
    def tearDown(self) -> None:
        self.mxlookup_patcher.stop()
        # Run the base class teardown as well; setUp calls super().setUp(),
        # so skipping super().tearDown() would leak test state.
        super().tearDown()

    def test_deactivate_remote_server(self) -> None:
        """Deactivation works once, is audit-logged, and blocks later requests."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@zulip.com")

        result = self.uuid_post(zulip_org_id, "/api/v1/remotes/server/deactivate", subdomain="")
        self.assert_json_success(result)

        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        remote_realm_audit_log = RemoteZulipServerAuditLog.objects.filter(
            event_type=AuditLogEventType.REMOTE_SERVER_DEACTIVATED
        ).last()
        assert remote_realm_audit_log is not None
        self.assertTrue(server.deactivated)

        # Now test that trying to deactivate again reports the right error.
        result = self.uuid_post(
            zulip_org_id, "/api/v1/remotes/server/deactivate", request, subdomain=""
        )
        self.assert_json_error(
            result,
            "The mobile push notification service registration for your server has been deactivated",
            status_code=401,
        )

        # Now try to do a request to server/register again. Normally, this updates
        # the server's registration details. But the server is deactivated, so it
        # should return the corresponding error.
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(
            result,
            "The mobile push notification service registration for your server has been deactivated",
            status_code=401,
        )

    def test_push_signup_invalid_host(self) -> None:
        """A hostname that fails validation is rejected."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="invalid-host",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "invalid-host is not a valid hostname")

    def test_push_signup_invalid_zulip_org_id(self) -> None:
        """Malformed org ids are rejected: bad UUIDs and wrong lengths."""
        zulip_org_id = "x" * RemoteZulipServer.UUID_LENGTH
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Invalid UUID")

        # This looks mostly like a proper UUID, but isn't actually a valid UUIDv4,
        # which makes it slip past a basic validation via initializing uuid.UUID with it.
        # Thus we should test this scenario separately.
        zulip_org_id = "18cedb98-5222-5f34-50a9-fc418e1ba972"
        request["zulip_org_id"] = zulip_org_id
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Invalid UUID")

        # check if zulip org id is of allowed length
        zulip_org_id = "18cedb98"
        request["zulip_org_id"] = zulip_org_id
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "zulip_org_id is not length 36")

    def test_push_signup_invalid_zulip_org_key(self) -> None:
        """An org key of the wrong length is rejected."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(63)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="invalid-host",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "zulip_org_key is not length 64")

    def test_push_signup_success(self) -> None:
        """Registration succeeds and re-registration updates mutable fields."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        time_sent = now()
        with time_machine.travel(time_sent, tick=False):
            result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@zulip.com")
        self.assertEqual(server.last_request_datetime, time_sent)

        # Update our hostname
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="zulip.example.com",
            contact_email="server-admin@zulip.com",
        )
        with time_machine.travel(time_sent + timedelta(minutes=1), tick=False):
            result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "zulip.example.com")
        self.assertEqual(server.contact_email, "server-admin@zulip.com")
        self.assertEqual(server.last_request_datetime, time_sent + timedelta(minutes=1))

        # Now test rotating our key
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
            new_org_key=get_random_string(64),
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@zulip.com")
        zulip_org_key = request["new_org_key"]
        self.assertEqual(server.api_key, zulip_org_key)

        # Update contact_email
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="zulip.example.com",
            contact_email="new-server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "zulip.example.com")
        self.assertEqual(server.contact_email, "new-server-admin@zulip.com")

        # Now test trying to double-create with a new random key fails
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=get_random_string(64),
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(
            result, f"Zulip server auth failure: key does not match role {zulip_org_id}"
        )

    def test_register_duplicate_hostname(self) -> None:
        """A second server cannot register an already-claimed hostname."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")

        new_zulip_org_id = str(uuid.uuid4())
        new_zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=new_zulip_org_id,
            zulip_org_key=new_zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@zulip.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "A server with hostname example.com already exists")

    def test_register_contact_email_validation_rules(self) -> None:
        """contact_email must be syntactically valid, non-disposable, and have MX records."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
        )

        request["contact_email"] = "server-admin"
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Enter a valid email address.")

        request["contact_email"] = "admin@example.com"
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Invalid email address.")

        # An example disposable domain.
        request["contact_email"] = "admin@mailnator.com"
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Please use your real email address.")

        request["contact_email"] = "admin@zulip.com"
        with mock.patch("zilencer.views.dns_resolver.Resolver") as resolver:
            # Domain resolves (has an A record) but has no MX records.
            resolver.return_value.resolve.side_effect = DNSNoAnswer
            resolver.return_value.resolve_name.return_value = ["whee"]
            result = self.client_post("/api/v1/remotes/server/register", request)
            self.assert_json_error(
                result, "zulip.com is invalid because it does not have any MX records"
            )

        with mock.patch("zilencer.views.dns_resolver.Resolver") as resolver:
            # Domain doesn't resolve at all.
            resolver.return_value.resolve.side_effect = DNSNoAnswer
            resolver.return_value.resolve_name.side_effect = DNSNoAnswer
            result = self.client_post("/api/v1/remotes/server/register", request)
            self.assert_json_error(result, "zulip.com does not exist")

        with mock.patch("zilencer.views.dns_resolver.Resolver") as resolver:
            # MX records present: registration succeeds.
            resolver.return_value.resolve.return_value = ["whee"]
            result = self.client_post("/api/v1/remotes/server/register", request)
            self.assert_json_success(result)
2024-01-26 17:06:12 +01:00
2022-02-23 20:25:30 +01:00
2023-01-02 20:50:23 +01:00
class TestUserPushIdentityCompat ( ZulipTestCase ) :
2022-02-23 20:25:30 +01:00
def test_filter_q ( self ) - > None :
2023-01-02 20:50:23 +01:00
user_identity_id = UserPushIdentityCompat ( user_id = 1 )
user_identity_uuid = UserPushIdentityCompat ( user_uuid = " aaaa " )
user_identity_both = UserPushIdentityCompat ( user_id = 1 , user_uuid = " aaaa " )
2022-02-23 20:25:30 +01:00
self . assertEqual ( user_identity_id . filter_q ( ) , Q ( user_id = 1 ) )
self . assertEqual ( user_identity_uuid . filter_q ( ) , Q ( user_uuid = " aaaa " ) )
self . assertEqual ( user_identity_both . filter_q ( ) , Q ( user_uuid = " aaaa " ) | Q ( user_id = 1 ) )
def test_eq ( self ) - > None :
2023-01-02 20:50:23 +01:00
user_identity_a = UserPushIdentityCompat ( user_id = 1 )
user_identity_b = UserPushIdentityCompat ( user_id = 1 )
user_identity_c = UserPushIdentityCompat ( user_id = 2 )
2022-02-23 20:25:30 +01:00
self . assertEqual ( user_identity_a , user_identity_b )
self . assertNotEqual ( user_identity_a , user_identity_c )
# An integer can't be equal to an instance of the class.
self . assertNotEqual ( user_identity_a , 1 )