2021-06-08 02:45:49 +02:00
import asyncio
2020-06-11 00:54:34 +02:00
import base64
2019-01-31 00:39:02 +01:00
import datetime
2021-06-11 22:51:27 +02:00
import re
2020-06-11 00:54:34 +02:00
import uuid
from contextlib import contextmanager
2022-10-06 11:56:48 +02:00
from typing import Any , Dict , Iterator , List , Mapping , Optional , Tuple , Union
2020-08-27 22:46:39 +02:00
from unittest import mock , skipUnless
2021-06-11 22:51:27 +02:00
from urllib import parse
2016-08-03 11:11:25 +02:00
2022-02-08 08:42:25 +01:00
import aioapns
2020-08-07 01:09:47 +02:00
import orjson
2021-06-11 22:51:27 +02:00
import responses
2016-08-03 11:11:25 +02:00
from django . conf import settings
2019-04-23 22:32:12 +02:00
from django . db import transaction
2022-02-23 20:25:30 +01:00
from django . db . models import F , Q
2021-08-15 18:35:37 +02:00
from django . http . response import ResponseHeaders
2020-06-11 00:54:34 +02:00
from django . test import override_settings
2018-05-04 01:40:46 +02:00
from django . utils . crypto import get_random_string
2020-06-11 00:54:34 +02:00
from django . utils . timezone import now
2021-06-11 22:51:27 +02:00
from requests . exceptions import ConnectionError
from requests . models import PreparedRequest
2016-08-03 11:11:25 +02:00
2019-01-31 00:39:02 +01:00
from analytics . lib . counts import CountStat , LoggingCountStat
from analytics . models import InstallationCount , RealmCount
2022-07-17 13:00:21 +02:00
from zerver . actions . message_delete import do_delete_messages
2022-04-14 23:54:53 +02:00
from zerver . actions . message_flags import do_mark_stream_messages_as_read , do_update_message_flags
2022-12-14 06:45:55 +01:00
from zerver . actions . user_groups import check_add_user_group
2022-04-14 23:49:26 +02:00
from zerver . actions . user_settings import do_regenerate_api_key
2021-07-16 22:11:10 +02:00
from zerver . lib . avatar import absolute_avatar_url
from zerver . lib . exceptions import JsonableError
2019-02-08 23:09:20 +01:00
from zerver . lib . push_notifications import (
2021-06-08 02:45:49 +02:00
APNsContext ,
2020-06-11 00:54:34 +02:00
DeviceToken ,
2023-01-02 20:50:23 +01:00
UserPushIdentityCompat ,
2019-02-08 23:09:20 +01:00
b64_to_hex ,
2020-06-02 18:09:26 +02:00
get_apns_badge_count ,
2020-07-16 07:05:02 +02:00
get_apns_badge_count_future ,
2021-06-08 02:45:49 +02:00
get_apns_context ,
2019-02-14 00:54:56 +01:00
get_message_payload_apns ,
get_message_payload_gcm ,
2019-02-08 23:09:20 +01:00
get_mobile_push_content ,
handle_push_notification ,
handle_remove_push_notification ,
hex_to_b64 ,
modernize_apns_payload ,
parse_gcm_options ,
send_android_push_notification_to_user ,
send_apple_push_notification ,
send_notifications_to_bouncer ,
)
2020-06-11 00:54:34 +02:00
from zerver . lib . remote_server import (
2022-11-17 09:30:48 +01:00
PushNotificationBouncerError ,
2020-06-11 00:54:34 +02:00
PushNotificationBouncerRetryLaterError ,
build_analytics_data ,
send_analytics_to_remote_server ,
2021-07-16 22:11:10 +02:00
send_to_push_bouncer ,
2020-06-11 00:54:34 +02:00
)
2021-08-14 03:18:13 +02:00
from zerver . lib . response import json_response_from_error
2020-06-11 00:54:34 +02:00
from zerver . lib . soft_deactivation import do_soft_deactivate_users
2020-07-01 04:19:54 +02:00
from zerver . lib . test_classes import ZulipTestCase
2020-08-14 10:03:36 +02:00
from zerver . lib . test_helpers import mock_queue_publish
2021-07-16 22:11:10 +02:00
from zerver . lib . timestamp import datetime_to_timestamp
2020-06-11 00:54:34 +02:00
from zerver . models import (
Message ,
2021-07-09 13:38:12 +02:00
NotificationTriggers ,
2020-06-11 00:54:34 +02:00
PushDeviceToken ,
RealmAuditLog ,
Recipient ,
Stream ,
Subscription ,
UserMessage ,
get_client ,
2021-07-16 22:11:10 +02:00
get_display_recipient ,
2020-06-11 00:54:34 +02:00
get_realm ,
get_stream ,
2022-02-23 20:27:39 +01:00
get_user_profile_by_id ,
2020-06-11 00:54:34 +02:00
)
2022-01-12 23:45:01 +01:00
from zilencer . models import RemoteZulipServerAuditLog
2020-08-27 22:46:39 +02:00
if settings . ZILENCER_ENABLED :
from zilencer . models import (
RemoteInstallationCount ,
RemotePushDeviceToken ,
RemoteRealmAuditLog ,
RemoteRealmCount ,
RemoteZulipServer ,
)
2016-10-27 23:55:31 +02:00
2021-02-12 08:19:30 +01:00
2020-08-27 22:46:39 +02:00
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
class BouncerTestCase(ZulipTestCase):
    """Base class for tests exercising the push notification bouncer.

    Registers a RemoteZulipServer for the test and (via
    `add_mock_response`) transparently proxies HTTP requests aimed at
    PUSH_NOTIFICATION_BOUNCER_URL back into the local Django test
    client, so the full server -> bouncer flow can be tested in-process.
    """

    def setUp(self) -> None:
        # Fixed uuid so tests can assert on error messages containing it.
        self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
        self.server = RemoteZulipServer(
            uuid=self.server_uuid,
            api_key="magic_secret_api_key",
            hostname="demo.example.com",
            last_updated=now(),
        )
        self.server.save()
        super().setUp()

    def tearDown(self) -> None:
        RemoteZulipServer.objects.filter(uuid=self.server_uuid).delete()
        super().tearDown()

    def request_callback(self, request: PreparedRequest) -> Tuple[int, ResponseHeaders, bytes]:
        """Forward a `responses`-intercepted bouncer request to the local
        test client, returning (status_code, headers, body)."""
        assert isinstance(request.body, str) or request.body is None
        params: Dict[str, List[str]] = parse.parse_qs(request.body)
        # In Python 3, the values of the dict from `parse_qs` are
        # in a list, because there might be multiple values.
        # But since we are sending values with no same keys, hence
        # we can safely pick the first value.
        data = {k: v[0] for k, v in params.items()}
        assert request.url is not None  # allow mypy to infer url is present.
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        local_url = request.url.replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
        if request.method == "POST":
            result = self.uuid_post(self.server_uuid, local_url, data, subdomain="")
        elif request.method == "GET":
            result = self.uuid_get(self.server_uuid, local_url, data, subdomain="")
        else:
            # Only GET/POST callbacks are registered in add_mock_response;
            # previously any other method raised a confusing UnboundLocalError
            # on `result` below. Fail explicitly instead.
            raise AssertionError(f"Unsupported method in bouncer mock: {request.method}")
        return (result.status_code, result.headers, result.content)

    def add_mock_response(self) -> None:
        # Match any endpoint with the PUSH_NOTIFICATION_BOUNCER_URL.
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        COMPILED_URL = re.compile(settings.PUSH_NOTIFICATION_BOUNCER_URL + ".*")
        responses.add_callback(responses.POST, COMPILED_URL, callback=self.request_callback)
        responses.add_callback(responses.GET, COMPILED_URL, callback=self.request_callback)

    def get_generic_payload(self, method: str = "register") -> Dict[str, Any]:
        """Return a minimal valid payload for the register/unregister endpoints."""
        user_id = 10
        token = "111222"
        token_kind = PushDeviceToken.GCM

        return {"user_id": user_id, "token": token, "token_kind": token_kind}
2021-02-12 08:19:30 +01:00
2017-05-16 08:05:31 +02:00
class PushBouncerNotificationTest ( BouncerTestCase ) :
2017-08-29 06:28:30 +02:00
DEFAULT_SUBDOMAIN = " "
2017-11-05 10:51:25 +01:00
def test_unregister_remote_push_user_params ( self ) - > None :
2016-10-27 23:55:31 +02:00
token = " 111222 "
token_kind = PushDeviceToken . GCM
2021-02-12 08:20:45 +01:00
endpoint = " /api/v1/remotes/push/unregister "
result = self . uuid_post ( self . server_uuid , endpoint , { " token_kind " : token_kind } )
2016-10-27 23:55:31 +02:00
self . assert_json_error ( result , " Missing ' token ' argument " )
2021-02-12 08:20:45 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , { " token " : token } )
2016-10-27 23:55:31 +02:00
self . assert_json_error ( result , " Missing ' token_kind ' argument " )
2017-08-29 06:28:30 +02:00
# We need the root ('') subdomain to be in use for this next
# test, since the push bouncer API is only available there:
2021-02-12 08:20:45 +01:00
hamlet = self . example_user ( " hamlet " )
2017-08-29 06:28:30 +02:00
realm = get_realm ( " zulip " )
realm . string_id = " "
realm . save ( )
2020-03-10 11:48:26 +01:00
result = self . api_post (
hamlet ,
endpoint ,
dict ( user_id = 15 , token = token , token_kind = token_kind ) ,
2021-02-12 08:20:45 +01:00
subdomain = " " ,
2020-03-10 11:48:26 +01:00
)
2016-10-27 23:55:31 +02:00
self . assert_json_error ( result , " Must validate with valid Zulip server API key " )
2022-01-14 04:20:39 +01:00
# Try with deactivated remote servers
self . server . deactivated = True
self . server . save ( )
result = self . uuid_post ( self . server_uuid , endpoint , self . get_generic_payload ( " unregister " ) )
self . assert_json_error_contains (
result ,
" The mobile push notification service registration for your server has been deactivated " ,
401 ,
)
2022-02-08 00:13:33 +01:00
def test_register_remote_push_user_params ( self ) - > None :
2016-10-27 23:55:31 +02:00
token = " 111222 "
user_id = 11
token_kind = PushDeviceToken . GCM
2021-02-12 08:20:45 +01:00
endpoint = " /api/v1/remotes/push/register "
2016-10-27 23:55:31 +02:00
2021-02-12 08:19:30 +01:00
result = self . uuid_post (
2021-02-12 08:20:45 +01:00
self . server_uuid , endpoint , { " user_id " : user_id , " token_kind " : token_kind }
2021-02-12 08:19:30 +01:00
)
2016-10-27 23:55:31 +02:00
self . assert_json_error ( result , " Missing ' token ' argument " )
2021-02-12 08:20:45 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , { " user_id " : user_id , " token " : token } )
2016-10-27 23:55:31 +02:00
self . assert_json_error ( result , " Missing ' token_kind ' argument " )
2021-02-12 08:19:30 +01:00
result = self . uuid_post (
2021-02-12 08:20:45 +01:00
self . server_uuid , endpoint , { " token " : token , " token_kind " : token_kind }
2021-02-12 08:19:30 +01:00
)
2022-02-23 20:25:30 +01:00
self . assert_json_error ( result , " Missing user_id or user_uuid " )
2021-02-12 08:19:30 +01:00
result = self . uuid_post (
2021-02-12 08:20:45 +01:00
self . server_uuid , endpoint , { " user_id " : user_id , " token " : token , " token_kind " : 17 }
2021-02-12 08:19:30 +01:00
)
2017-07-07 18:29:45 +02:00
self . assert_json_error ( result , " Invalid token type " )
2016-10-27 23:55:31 +02:00
2021-02-12 08:20:45 +01:00
hamlet = self . example_user ( " hamlet " )
2020-03-12 14:17:25 +01:00
2017-08-29 06:28:30 +02:00
# We need the root ('') subdomain to be in use for this next
# test, since the push bouncer API is only available there:
realm = get_realm ( " zulip " )
realm . string_id = " "
realm . save ( )
2020-03-10 11:48:26 +01:00
result = self . api_post (
hamlet ,
endpoint ,
dict ( user_id = user_id , token_kind = token_kind , token = token ) ,
)
2017-08-29 06:28:30 +02:00
self . assert_json_error ( result , " Must validate with valid Zulip server API key " )
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post (
self . server_uuid ,
endpoint ,
dict ( user_id = user_id , token_kind = token_kind , token = token ) ,
2021-02-12 08:19:30 +01:00
subdomain = " zulip " ,
)
self . assert_json_error (
result , " Invalid subdomain for push notifications bouncer " , status_code = 401
)
2018-04-26 07:08:44 +02:00
2018-04-26 06:36:34 +02:00
# We do a bit of hackery here to the API_KEYS cache just to
# make the code simple for sending an incorrect API key.
2021-02-12 08:20:45 +01:00
self . API_KEYS [ self . server_uuid ] = " invalid "
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post (
2021-02-12 08:19:30 +01:00
self . server_uuid , endpoint , dict ( user_id = user_id , token_kind = token_kind , token = token )
)
self . assert_json_error (
2021-12-22 14:37:12 +01:00
result ,
" Zulip server auth failure: key does not match role 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe " ,
status_code = 401 ,
2021-02-12 08:19:30 +01:00
)
2018-04-26 06:36:34 +02:00
2020-01-16 22:02:06 +01:00
del self . API_KEYS [ self . server_uuid ]
2018-04-26 06:36:34 +02:00
2022-01-04 21:59:51 +01:00
self . API_KEYS [ " invalid_uuid " ] = " invalid "
result = self . uuid_post (
" invalid_uuid " ,
endpoint ,
dict ( user_id = user_id , token_kind = token_kind , token = token ) ,
subdomain = " zulip " ,
)
self . assert_json_error (
result ,
" Zulip server auth failure: invalid_uuid is not registered -- did you run `manage.py register_server`? " ,
status_code = 401 ,
)
del self . API_KEYS [ " invalid_uuid " ]
2021-12-22 14:37:12 +01:00
credentials_uuid = str ( uuid . uuid4 ( ) )
credentials = " {} : {} " . format ( credentials_uuid , " invalid " )
2021-08-02 23:20:39 +02:00
api_auth = " Basic " + base64 . b64encode ( credentials . encode ( ) ) . decode ( )
2021-02-12 08:19:30 +01:00
result = self . client_post (
endpoint ,
2021-02-12 08:20:45 +01:00
{ " user_id " : user_id , " token_kind " : token_kind , " token " : token } ,
2021-02-12 08:19:30 +01:00
HTTP_AUTHORIZATION = api_auth ,
)
self . assert_json_error (
2021-10-19 03:30:05 +02:00
result ,
2021-12-22 14:37:12 +01:00
f " Zulip server auth failure: { credentials_uuid } is not registered -- did you run `manage.py register_server`? " ,
2021-10-19 03:30:05 +02:00
status_code = 401 ,
2021-02-12 08:19:30 +01:00
)
2018-04-26 06:50:37 +02:00
2022-01-14 04:20:39 +01:00
# Try with deactivated remote servers
self . server . deactivated = True
self . server . save ( )
result = self . uuid_post ( self . server_uuid , endpoint , self . get_generic_payload ( " register " ) )
self . assert_json_error_contains (
result ,
" The mobile push notification service registration for your server has been deactivated " ,
401 ,
)
2023-04-10 00:23:59 +02:00
def test_register_device_deduplication ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
token = " 111222 "
user_id = hamlet . id
user_uuid = str ( hamlet . uuid )
token_kind = PushDeviceToken . GCM
endpoint = " /api/v1/remotes/push/register "
# First we create a legacy user_id registration.
result = self . uuid_post (
self . server_uuid ,
endpoint ,
{ " user_id " : user_id , " token_kind " : token_kind , " token " : token } ,
)
self . assert_json_success ( result )
registrations = list ( RemotePushDeviceToken . objects . filter ( token = token ) )
self . assert_length ( registrations , 1 )
self . assertEqual ( registrations [ 0 ] . user_id , user_id )
self . assertEqual ( registrations [ 0 ] . user_uuid , None )
# Register same user+device with uuid now. The old registration should be deleted
# to avoid duplication.
result = self . uuid_post (
self . server_uuid ,
endpoint ,
{ " user_id " : user_id , " user_uuid " : user_uuid , " token_kind " : token_kind , " token " : token } ,
)
registrations = list ( RemotePushDeviceToken . objects . filter ( token = token ) )
self . assert_length ( registrations , 1 )
self . assertEqual ( registrations [ 0 ] . user_id , None )
self . assertEqual ( str ( registrations [ 0 ] . user_uuid ) , user_uuid )
2017-11-05 10:51:25 +01:00
def test_remote_push_user_endpoints ( self ) - > None :
2016-10-27 23:55:31 +02:00
endpoints = [
2021-02-12 08:20:45 +01:00
( " /api/v1/remotes/push/register " , " register " ) ,
( " /api/v1/remotes/push/unregister " , " unregister " ) ,
2016-10-27 23:55:31 +02:00
]
for endpoint , method in endpoints :
payload = self . get_generic_payload ( method )
# Verify correct results are success
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , payload )
2016-10-27 23:55:31 +02:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
remote_tokens = RemotePushDeviceToken . objects . filter ( token = payload [ " token " ] )
token_count = 1 if method == " register " else 0
2021-05-17 05:41:32 +02:00
self . assert_length ( remote_tokens , token_count )
2016-10-27 23:55:31 +02:00
# Try adding/removing tokens that are too big...
2017-05-07 20:00:17 +02:00
broken_token = " x " * 5000 # too big
2021-02-12 08:20:45 +01:00
payload [ " token " ] = broken_token
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , payload )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Empty or invalid length token " )
2016-10-27 23:55:31 +02:00
2021-09-28 14:17:16 +02:00
def test_send_notification_endpoint ( self ) - > None :
hamlet = self . example_user ( " hamlet " )
server = RemoteZulipServer . objects . get ( uuid = self . server_uuid )
token = " aaaa "
2021-11-09 01:32:19 +01:00
android_tokens = [ ]
2023-04-10 00:55:16 +02:00
uuid_android_tokens = [ ]
2021-09-28 14:17:16 +02:00
for i in [ " aa " , " bb " ] :
2021-11-09 01:32:19 +01:00
android_tokens . append (
RemotePushDeviceToken . objects . create (
kind = RemotePushDeviceToken . GCM ,
token = hex_to_b64 ( token + i ) ,
user_id = hamlet . id ,
server = server ,
)
2021-09-28 14:17:16 +02:00
)
2023-04-10 00:55:16 +02:00
# Create a duplicate, newer uuid-based registration for the same user to verify
# the bouncer will handle that correctly, without triggering a duplicate notification,
# and will delete the old, legacy registration.
uuid_android_tokens . append (
RemotePushDeviceToken . objects . create (
kind = RemotePushDeviceToken . GCM ,
token = hex_to_b64 ( token + i ) ,
user_uuid = str ( hamlet . uuid ) ,
server = server ,
)
)
2021-11-09 01:32:19 +01:00
apple_token = RemotePushDeviceToken . objects . create (
2021-09-28 14:17:16 +02:00
kind = RemotePushDeviceToken . APNS ,
token = hex_to_b64 ( token ) ,
user_id = hamlet . id ,
server = server ,
)
2021-11-09 01:32:19 +01:00
many_ids = " , " . join ( str ( i ) for i in range ( 1 , 250 ) )
2021-09-28 14:17:16 +02:00
payload = {
" user_id " : hamlet . id ,
2023-04-10 00:55:16 +02:00
" user_uuid " : str ( hamlet . uuid ) ,
2021-11-09 01:32:19 +01:00
" gcm_payload " : { " event " : " remove " , " zulip_message_ids " : many_ids } ,
2022-01-03 23:06:42 +01:00
" apns_payload " : {
" badge " : 0 ,
" custom " : { " zulip " : { " event " : " remove " , " zulip_message_ids " : many_ids } } ,
} ,
2021-09-28 14:17:16 +02:00
" gcm_options " : { } ,
}
2021-11-09 01:32:19 +01:00
with mock . patch (
" zilencer.views.send_android_push_notification "
) as android_push , mock . patch (
2021-09-28 14:17:16 +02:00
" zilencer.views.send_apple_push_notification "
2021-11-09 01:32:19 +01:00
) as apple_push , self . assertLogs (
" zilencer.views " , level = " INFO "
) as logger :
2021-09-28 14:17:16 +02:00
result = self . uuid_post (
self . server_uuid ,
" /api/v1/remotes/push/notify " ,
payload ,
content_type = " application/json " ,
)
2022-06-07 01:37:01 +02:00
data = self . assert_json_success ( result )
2021-09-28 14:17:16 +02:00
self . assertEqual (
{ " result " : " success " , " msg " : " " , " total_android_devices " : 2 , " total_apple_devices " : 1 } ,
data ,
)
2021-10-20 01:16:18 +02:00
self . assertEqual (
logger . output ,
[
" INFO:zilencer.views: "
2023-04-10 00:55:16 +02:00
f " Deduplicating push registrations for server id: { server . id } user id: { hamlet . id } uuid: { str ( hamlet . uuid ) } and tokens: { sorted ( [ t . token for t in android_tokens [ : ] ] ) } " ,
" INFO:zilencer.views: "
f " Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id: { hamlet . id } ><uuid: { str ( hamlet . uuid ) } >: "
" 2 via FCM devices, 1 via APNs devices " ,
2021-10-20 01:16:18 +02:00
] ,
)
2022-02-23 20:25:30 +01:00
2023-04-10 00:55:16 +02:00
user_identity = UserPushIdentityCompat ( user_id = hamlet . id , user_uuid = str ( hamlet . uuid ) )
2021-11-09 01:32:19 +01:00
apple_push . assert_called_once_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2021-11-09 01:32:19 +01:00
[ apple_token ] ,
2022-01-03 23:06:42 +01:00
{
" badge " : 0 ,
" custom " : {
" zulip " : {
" event " : " remove " ,
" zulip_message_ids " : " , " . join ( str ( i ) for i in range ( 50 , 250 ) ) ,
}
} ,
} ,
2021-11-09 01:32:19 +01:00
remote = server ,
)
android_push . assert_called_once_with (
2022-02-23 20:25:30 +01:00
user_identity ,
2023-04-10 00:55:16 +02:00
list ( reversed ( uuid_android_tokens ) ) ,
2021-11-09 01:32:19 +01:00
{ " event " : " remove " , " zulip_message_ids " : " , " . join ( str ( i ) for i in range ( 50 , 250 ) ) } ,
{ } ,
remote = server ,
)
2021-09-28 14:17:16 +02:00
2019-11-19 03:12:54 +01:00
def test_remote_push_unregister_all ( self ) - > None :
2021-02-12 08:20:45 +01:00
payload = self . get_generic_payload ( " register " )
2019-11-19 03:12:54 +01:00
# Verify correct results are success
2021-02-12 08:20:45 +01:00
result = self . uuid_post ( self . server_uuid , " /api/v1/remotes/push/register " , payload )
2019-11-19 03:12:54 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
remote_tokens = RemotePushDeviceToken . objects . filter ( token = payload [ " token " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( remote_tokens , 1 )
2021-02-12 08:19:30 +01:00
result = self . uuid_post (
2021-02-12 08:20:45 +01:00
self . server_uuid , " /api/v1/remotes/push/unregister/all " , dict ( user_id = 10 )
2021-02-12 08:19:30 +01:00
)
2019-11-19 03:12:54 +01:00
self . assert_json_success ( result )
2021-02-12 08:20:45 +01:00
remote_tokens = RemotePushDeviceToken . objects . filter ( token = payload [ " token " ] )
2021-05-17 05:41:32 +02:00
self . assert_length ( remote_tokens , 0 )
2019-11-19 03:12:54 +01:00
2017-11-05 10:51:25 +01:00
def test_invalid_apns_token ( self ) - > None :
2017-07-07 18:18:37 +02:00
endpoints = [
2021-02-12 08:20:45 +01:00
( " /api/v1/remotes/push/register " , " apple-token " ) ,
2017-07-07 18:18:37 +02:00
]
for endpoint , method in endpoints :
payload = {
2021-02-12 08:20:45 +01:00
" user_id " : 10 ,
" token " : " xyz uses non-hex characters " ,
" token_kind " : PushDeviceToken . APNS ,
2017-07-07 18:18:37 +02:00
}
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
result = self . uuid_post ( self . server_uuid , endpoint , payload )
2021-02-12 08:20:45 +01:00
self . assert_json_error ( result , " Invalid APNS token " )
2017-07-07 18:18:37 +02:00
2021-02-12 08:20:45 +01:00
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_push_bouncer_api(self) -> None:
        """This is a variant of the below test_push_api, but using the full
        push notification bouncer flow: token registration requests made to
        the local /json/ endpoints are forwarded to a mocked bouncer, and we
        verify the resulting RemotePushDeviceToken rows as well as the error
        handling when the bouncer is unreachable or returns a 500.
        """
        self.add_mock_response()
        user = self.example_user("cordelia")
        self.login_user(user)
        server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
        # (endpoint, token, token_kind) triples for both mobile platforms.
        endpoints = [
            ("/json/users/me/apns_device_token", "apple-tokenaz", RemotePushDeviceToken.APNS),
            ("/json/users/me/android_gcm_reg_id", "android-token", RemotePushDeviceToken.GCM),
        ]
        # Test error handling
        for endpoint, token, kind in endpoints:
            # Try adding/removing tokens that are too big...
            broken_token = "a" * 5000  # too big
            result = self.client_post(
                endpoint, {"token": broken_token, "token_kind": kind}, subdomain="zulip"
            )
            self.assert_json_error(result, "Empty or invalid length token")

            result = self.client_delete(
                endpoint, {"token": broken_token, "token_kind": kind}, subdomain="zulip"
            )
            self.assert_json_error(result, "Empty or invalid length token")

            # Try to remove a non-existent token...
            result = self.client_delete(
                endpoint, {"token": "abcd1234", "token_kind": kind}, subdomain="zulip"
            )
            self.assert_json_error(result, "Token does not exist")

            assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
            URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/register"
            # Bouncer unreachable: the ConnectionError from requests should
            # surface to the client as a 502 with a descriptive message.
            with responses.RequestsMock() as resp, self.assertLogs(level="ERROR") as error_log:
                resp.add(responses.POST, URL, body=ConnectionError(), status=502)
                result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
                self.assert_json_error(
                    result,
                    "ConnectionError while trying to connect to push notification bouncer",
                    502,
                )
                self.assertIn(
                    f"ERROR:django.request:Bad Gateway: {endpoint}\nTraceback",
                    error_log.output[0],
                )

            # Bouncer returns a 500: we relay it as a 502 and log a warning.
            with responses.RequestsMock() as resp, self.assertLogs(level="WARNING") as warn_log:
                resp.add(responses.POST, URL, body=orjson.dumps({"msg": "error"}), status=500)
                result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
                self.assert_json_error(result, "Received 500 from push notification bouncer", 502)
                self.assertEqual(
                    warn_log.output[0],
                    "WARNING:root:Received 500 from push notification bouncer",
                )
                self.assertIn(
                    f"ERROR:django.request:Bad Gateway: {endpoint}\nTraceback", warn_log.output[1]
                )

        # Add tokens
        for endpoint, token, kind in endpoints:
            # Test that we can push twice
            result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
            self.assert_json_success(result)
            result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
            self.assert_json_success(result)
            # Re-registering the same token must not create a duplicate row.
            tokens = list(
                RemotePushDeviceToken.objects.filter(
                    user_uuid=user.uuid, token=token, server=server
                )
            )
            self.assert_length(tokens, 1)
            self.assertEqual(tokens[0].token, token)

        # User should have tokens for both devices now.
        tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
        self.assert_length(tokens, 2)

        # Remove tokens
        for endpoint, token, kind in endpoints:
            result = self.client_delete(endpoint, {"token": token}, subdomain="zulip")
            self.assert_json_success(result)
            tokens = list(
                RemotePushDeviceToken.objects.filter(
                    user_uuid=user.uuid, token=token, server=server
                )
            )
            self.assert_length(tokens, 0)

        # Re-add copies of those tokens
        for endpoint, token, kind in endpoints:
            result = self.client_post(endpoint, {"token": token}, subdomain="zulip")
            self.assert_json_success(result)
        tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
        self.assert_length(tokens, 2)

        # Now we want to remove them using the bouncer after an API key change.
        # First we test error handling in case of issues with the bouncer:
        with mock.patch(
            "zerver.worker.queue_processors.clear_push_device_tokens",
            side_effect=PushNotificationBouncerRetryLaterError("test"),
        ), mock.patch("zerver.worker.queue_processors.retry_event") as mock_retry:
            do_regenerate_api_key(user, user)
            mock_retry.assert_called()
            # We didn't manage to communicate with the bouncer, so the tokens are still there:
            tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
            self.assert_length(tokens, 2)

        # Now we successfully remove them:
        do_regenerate_api_key(user, user)
        tokens = list(RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, server=server))
        self.assert_length(tokens, 0)
2019-11-19 03:12:54 +01:00
2021-02-12 08:19:30 +01:00
2019-01-31 00:39:02 +01:00
class AnalyticsBouncerTest(BouncerTestCase):
    """Tests for syncing analytics (counts and audit-log rows) from a remote
    Zulip server to the push notification bouncer."""

    # Fixed, timezone-aware reference time so test data is deterministic.
    TIME_ZERO = datetime.datetime(1988, 3, 14, tzinfo=datetime.timezone.utc)

    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_analytics_api(self) -> None:
        """This is a variant of the below test_push_api, but using the full
        push notification bouncer flow
        """
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        ANALYTICS_URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/server/analytics"
        ANALYTICS_STATUS_URL = ANALYTICS_URL + "/status"
        user = self.example_user("hamlet")
        end_time = self.TIME_ZERO

        # A ConnectionError talking to the bouncer should be logged as a
        # warning, not crash the sync.
        with responses.RequestsMock() as resp, self.assertLogs(level="WARNING") as mock_warning:
            resp.add(responses.GET, ANALYTICS_STATUS_URL, body=ConnectionError())
            send_analytics_to_remote_server()
            self.assertIn(
                "WARNING:root:ConnectionError while trying to connect to push notification bouncer\nTraceback",
                mock_warning.output[0],
            )
            self.assertTrue(resp.assert_call_count(ANALYTICS_STATUS_URL, 1))

        self.add_mock_response()
        # Send any existing data over, so that we can start the test with a "clean" slate
        audit_log = RealmAuditLog.objects.all().order_by("id").last()
        assert audit_log is not None
        audit_log_max_id = audit_log.id
        send_analytics_to_remote_server()
        self.assertTrue(responses.assert_call_count(ANALYTICS_STATUS_URL, 1))
        remote_audit_log_count = RemoteRealmAuditLog.objects.count()
        self.assertEqual(RemoteRealmCount.objects.count(), 0)
        self.assertEqual(RemoteInstallationCount.objects.count(), 0)

        def check_counts(
            analytics_status_mock_request_call_count: int,
            analytics_mock_request_call_count: int,
            remote_realm_count: int,
            remote_installation_count: int,
            remote_realm_audit_log: int,
        ) -> None:
            # Helper asserting cumulative HTTP call counts to the bouncer and
            # the number of synced rows on the bouncer side.
            self.assertTrue(
                responses.assert_call_count(
                    ANALYTICS_STATUS_URL, analytics_status_mock_request_call_count
                )
            )
            self.assertTrue(
                responses.assert_call_count(ANALYTICS_URL, analytics_mock_request_call_count)
            )
            self.assertEqual(RemoteRealmCount.objects.count(), remote_realm_count)
            self.assertEqual(RemoteInstallationCount.objects.count(), remote_installation_count)
            self.assertEqual(
                RemoteRealmAuditLog.objects.count(), remote_audit_log_count + remote_realm_audit_log
            )

        # Create some rows we'll send to remote server
        realm_stat = LoggingCountStat("invites_sent::day", RealmCount, CountStat.DAY)
        RealmCount.objects.create(
            realm=user.realm, property=realm_stat.property, end_time=end_time, value=5
        )
        InstallationCount.objects.create(
            property=realm_stat.property,
            end_time=end_time,
            value=5,
            # We set a subgroup here to work around:
            # https://github.com/zulip/zulip/issues/12362
            subgroup="test_subgroup",
        )
        # Event type in SYNCED_BILLING_EVENTS -- should be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.USER_CREATED,
            event_time=end_time,
            extra_data="data",
        )
        # Event type not in SYNCED_BILLING_EVENTS -- should not be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.REALM_LOGO_CHANGED,
            event_time=end_time,
            extra_data="data",
        )
        self.assertEqual(RealmCount.objects.count(), 1)
        self.assertEqual(InstallationCount.objects.count(), 1)
        self.assertEqual(RealmAuditLog.objects.filter(id__gt=audit_log_max_id).count(), 2)

        send_analytics_to_remote_server()
        check_counts(2, 2, 1, 1, 1)

        # Test having no new rows
        send_analytics_to_remote_server()
        check_counts(3, 2, 1, 1, 1)

        # Test only having new RealmCount rows
        RealmCount.objects.create(
            realm=user.realm,
            property=realm_stat.property,
            end_time=end_time + datetime.timedelta(days=1),
            value=6,
        )
        RealmCount.objects.create(
            realm=user.realm,
            property=realm_stat.property,
            end_time=end_time + datetime.timedelta(days=2),
            value=9,
        )
        send_analytics_to_remote_server()
        check_counts(4, 3, 3, 1, 1)

        # Test only having new InstallationCount rows
        InstallationCount.objects.create(
            property=realm_stat.property, end_time=end_time + datetime.timedelta(days=1), value=6
        )
        send_analytics_to_remote_server()
        check_counts(5, 4, 3, 2, 1)

        # Test only having new RealmAuditLog rows
        # Non-synced event
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.REALM_LOGO_CHANGED,
            event_time=end_time,
            extra_data="data",
        )
        send_analytics_to_remote_server()
        check_counts(6, 4, 3, 2, 1)
        # Synced event
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.USER_REACTIVATED,
            event_time=end_time,
            extra_data="data",
        )
        send_analytics_to_remote_server()
        check_counts(7, 5, 3, 2, 2)

        # Submitting the same rows again directly must be rejected as
        # out-of-order data (the bouncer already has them).
        (realm_count_data, installation_count_data, realmauditlog_data) = build_analytics_data(
            RealmCount.objects.all(), InstallationCount.objects.all(), RealmAuditLog.objects.all()
        )
        result = self.uuid_post(
            self.server_uuid,
            "/api/v1/remotes/server/analytics",
            {
                "realm_counts": orjson.dumps(realm_count_data).decode(),
                "installation_counts": orjson.dumps(installation_count_data).decode(),
                "realmauditlog_rows": orjson.dumps(realmauditlog_data).decode(),
            },
            subdomain="",
        )
        self.assert_json_error(result, "Data is out of order.")

        # With validation disabled, the duplicate rows hit the database and
        # produce an IntegrityError, reported as "Invalid data.".
        with mock.patch("zilencer.views.validate_incoming_table_data"), self.assertLogs(
            level="WARNING"
        ) as warn_log:
            # We need to wrap a transaction here to avoid the
            # IntegrityError that will be thrown in here from breaking
            # the unittest transaction.
            with transaction.atomic():
                result = self.uuid_post(
                    self.server_uuid,
                    "/api/v1/remotes/server/analytics",
                    {
                        "realm_counts": orjson.dumps(realm_count_data).decode(),
                        "installation_counts": orjson.dumps(installation_count_data).decode(),
                        "realmauditlog_rows": orjson.dumps(realmauditlog_data).decode(),
                    },
                    subdomain="",
                )
            self.assert_json_error(result, "Invalid data.")
        self.assertEqual(
            warn_log.output,
            [
                "WARNING:root:Invalid data saving zilencer_remoteinstallationcount for server demo.example.com/6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
            ],
        )

    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_analytics_api_invalid(self) -> None:
        """This is a variant of the below test_push_api, but using the full
        push notification bouncer flow
        """
        self.add_mock_response()
        user = self.example_user("hamlet")
        end_time = self.TIME_ZERO

        # A count stat whose property the bouncer doesn't recognize should be
        # skipped with a warning, not synced.
        realm_stat = LoggingCountStat("invalid count stat", RealmCount, CountStat.DAY)
        RealmCount.objects.create(
            realm=user.realm, property=realm_stat.property, end_time=end_time, value=5
        )

        self.assertEqual(RealmCount.objects.count(), 1)
        self.assertEqual(RemoteRealmCount.objects.count(), 0)
        with self.assertLogs(level="WARNING") as m:
            send_analytics_to_remote_server()
        self.assertEqual(m.output, ["WARNING:root:Invalid property invalid count stat"])
        self.assertEqual(RemoteRealmCount.objects.count(), 0)

    # Servers on Zulip 2.0.6 and earlier only send realm_counts and installation_counts data,
    # and don't send realmauditlog_rows. Make sure that continues to work.
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_old_two_table_format(self) -> None:
        self.add_mock_response()
        # Send fixture generated with Zulip 2.0 code
        send_to_push_bouncer(
            "POST",
            "server/analytics",
            {
                "realm_counts": '[{"id":1,"property":"invites_sent::day","subgroup":null,"end_time":574300800.0,"value":5,"realm":2}]',
                "installation_counts": "[]",
                "version": '"2.0.6+git"',
            },
        )
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        ANALYTICS_URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/server/analytics"
        self.assertTrue(responses.assert_call_count(ANALYTICS_URL, 1))
        self.assertEqual(RemoteRealmCount.objects.count(), 1)
        self.assertEqual(RemoteInstallationCount.objects.count(), 0)
        self.assertEqual(RemoteRealmAuditLog.objects.count(), 0)

    # Make sure we aren't sending data we don't mean to, even if we don't store it.
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_only_sending_intended_realmauditlog_data(self) -> None:
        self.add_mock_response()
        user = self.example_user("hamlet")
        # Event type in SYNCED_BILLING_EVENTS -- should be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.USER_REACTIVATED,
            event_time=self.TIME_ZERO,
            extra_data="data",
        )
        # Event type not in SYNCED_BILLING_EVENTS -- should not be included
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=RealmAuditLog.REALM_LOGO_CHANGED,
            event_time=self.TIME_ZERO,
            extra_data="data",
        )

        # send_analytics_to_remote_server calls send_to_push_bouncer twice.
        # We need to distinguish the first and second calls.
        first_call = True

        def check_for_unwanted_data(*args: Any) -> Any:
            # Inspect the second (data-bearing) call's payload before
            # delegating to the real send_to_push_bouncer.
            nonlocal first_call
            if first_call:
                first_call = False
            else:
                # Test that we're respecting SYNCED_BILLING_EVENTS
                self.assertIn(f'"event_type": {RealmAuditLog.USER_REACTIVATED}', str(args))
                self.assertNotIn(f'"event_type": {RealmAuditLog.REALM_LOGO_CHANGED}', str(args))
                # Test that we're respecting REALMAUDITLOG_PUSHED_FIELDS
                self.assertIn("backfilled", str(args))
                self.assertNotIn("modified_user", str(args))
            return send_to_push_bouncer(*args)

        with mock.patch(
            "zerver.lib.remote_server.send_to_push_bouncer", side_effect=check_for_unwanted_data
        ):
            send_analytics_to_remote_server()

    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_realmauditlog_data_mapping(self) -> None:
        """Verify the field-by-field mapping of a synced RealmAuditLog row to
        its RemoteRealmAuditLog counterpart on the bouncer."""
        self.add_mock_response()
        user = self.example_user("hamlet")
        log_entry = RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            backfilled=True,
            event_type=RealmAuditLog.USER_REACTIVATED,
            event_time=self.TIME_ZERO,
            extra_data="data",
        )
        send_analytics_to_remote_server()
        remote_log_entry = RemoteRealmAuditLog.objects.order_by("id").last()
        assert remote_log_entry is not None
        self.assertEqual(str(remote_log_entry.server.uuid), self.server_uuid)
        self.assertEqual(remote_log_entry.remote_id, log_entry.id)
        self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
        self.assertEqual(remote_log_entry.backfilled, True)
        self.assertEqual(remote_log_entry.extra_data, "data")
        self.assertEqual(remote_log_entry.event_type, RealmAuditLog.USER_REACTIVATED)
2017-05-11 10:55:05 +02:00
class PushNotificationTest(BouncerTestCase):
    """Shared fixture for push-notification tests: a sending user, a test
    client, plus helpers to create a message and to register device tokens
    both locally (PushDeviceToken) and bouncer-side (RemotePushDeviceToken)."""

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.sending_client = get_client("test")
        self.sender = self.example_user("hamlet")
        self.personal_recipient_user = self.example_user("othello")

    def get_message(self, type: int, type_id: int, realm_id: int) -> Message:
        """Create, save, and return a minimal test message addressed to the
        (type, type_id) recipient in the given realm."""
        recipient, _ = Recipient.objects.get_or_create(type_id=type_id, type=type)
        msg = Message(
            sender=self.sender,
            recipient=recipient,
            realm_id=realm_id,
            content="This is test content",
            rendered_content="This is test content",
            date_sent=now(),
            sending_client=self.sending_client,
        )
        msg.set_topic_name("Test topic")
        msg.save()
        return msg

    @contextmanager
    def mock_apns(self) -> Iterator[APNsContext]:
        """Yield an APNsContext whose connection is a Mock, patching
        get_apns_context to return it; the event loop is always closed on
        exit, even if the body raises."""
        context = APNsContext(apns=mock.Mock(), loop=asyncio.new_event_loop())
        try:
            with mock.patch("zerver.lib.push_notifications.get_apns_context") as mock_get:
                mock_get.return_value = context
                yield context
        finally:
            context.loop.close()

    def setup_apns_tokens(self) -> None:
        """Register APNs tokens for self.user_profile, locally and remotely."""
        self.tokens = ["aaaa", "bbbb"]
        for raw_token in self.tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.APNS,
                token=hex_to_b64(raw_token),
                user=self.user_profile,
                ios_app_id=settings.ZULIP_IOS_APP_ID,
            )

        self.remote_tokens = [("cccc", "ffff")]
        for id_token, uuid_token in self.remote_tokens:
            # We want to set up both types of RemotePushDeviceToken here:
            # the legacy one with user_id and the new with user_uuid.
            # This allows tests to work with either, without needing to
            # do their own setup.
            remote_server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.APNS,
                token=hex_to_b64(id_token),
                user_id=self.user_profile.id,
                server=remote_server,
            )
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.APNS,
                token=hex_to_b64(uuid_token),
                user_uuid=self.user_profile.uuid,
                server=remote_server,
            )

    def setup_gcm_tokens(self) -> None:
        """Register GCM tokens for self.user_profile, locally and remotely."""
        self.gcm_tokens = ["1111", "2222"]
        for raw_token in self.gcm_tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.GCM,
                token=hex_to_b64(raw_token),
                user=self.user_profile,
                ios_app_id=None,
            )

        self.remote_gcm_tokens = [("dddd", "eeee")]
        for id_token, uuid_token in self.remote_gcm_tokens:
            # Mirror setup_apns_tokens: one legacy user_id row and one
            # user_uuid row per token pair.
            remote_server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.GCM,
                token=hex_to_b64(id_token),
                user_id=self.user_profile.id,
                server=remote_server,
            )
            RemotePushDeviceToken.objects.create(
                kind=RemotePushDeviceToken.GCM,
                token=hex_to_b64(uuid_token),
                user_uuid=self.user_profile.uuid,
                server=remote_server,
            )
2021-02-12 08:19:30 +01:00
2017-05-11 10:55:05 +02:00
class HandlePushNotificationTest ( PushNotificationTest ) :
2017-08-29 06:28:30 +02:00
DEFAULT_SUBDOMAIN = " "
2021-08-15 18:35:37 +02:00
def request_callback ( self , request : PreparedRequest ) - > Tuple [ int , ResponseHeaders , bytes ] :
2021-06-11 22:51:27 +02:00
assert request . url is not None # allow mypy to infer url is present.
2021-08-15 18:35:37 +02:00
assert settings . PUSH_NOTIFICATION_BOUNCER_URL is not None
2021-06-11 22:51:27 +02:00
local_url = request . url . replace ( settings . PUSH_NOTIFICATION_BOUNCER_URL , " " )
2021-12-17 08:14:22 +01:00
assert isinstance ( request . body , bytes )
2021-06-11 22:51:27 +02:00
result = self . uuid_post (
self . server_uuid , local_url , request . body , content_type = " application/json "
)
return ( result . status_code , result . headers , result . content )
2017-05-11 10:55:05 +02:00
2021-06-11 22:51:27 +02:00
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_end_to_end(self) -> None:
        # End-to-end happy path through the bouncer: the mocked bouncer URL is
        # routed back into this test server (add_mock_response), and both the
        # FCM and APNs sends are simulated as successful.
        self.add_mock_response()
        self.setup_apns_tokens()
        self.setup_gcm_tokens()

        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
        )

        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        with mock.patch(
            "zerver.lib.push_notifications.gcm_client"
        ) as mock_gcm, self.mock_apns() as apns_context, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as pn_logger, self.assertLogs(
            "zilencer.views", level="INFO"
        ) as views_logger:
            apns_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS)
            ]
            gcm_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
            ]
            # Report FCM success for every registered device.
            mock_gcm.json_request.return_value = {
                "success": {device[2]: message.id for device in gcm_devices}
            }
            # Every APNs send succeeds.
            apns_context.apns.send_notification = mock.AsyncMock()
            apns_context.apns.send_notification.return_value.is_successful = True
            handle_push_notification(self.user_profile.id, missed_message)
            self.assertEqual(
                views_logger.output,
                [
                    "INFO:zilencer.views: "
                    f"Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}>: "
                    f"{len(gcm_devices)} via FCM devices, {len(apns_devices)} via APNs devices"
                ],
            )
            # Each APNs device should log a per-device success line...
            for _, _, token in apns_devices:
                self.assertIn(
                    "INFO:zerver.lib.push_notifications: "
                    f"APNs: Success sending for user <id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}> to device {token}",
                    pn_logger.output,
                )
            # ...and each FCM device a "Sent" line.
            for _, _, token in gcm_devices:
                self.assertIn(
                    f"INFO:zerver.lib.push_notifications:GCM: Sent {token} as {message.id}",
                    pn_logger.output,
                )
2017-05-11 10:55:05 +02:00
2021-06-11 22:51:27 +02:00
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_unregistered_client(self) -> None:
        # When APNs rejects a token with "Unregistered", the bouncer should
        # log the removal and delete the RemotePushDeviceToken rows.
        self.add_mock_response()
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
        )
        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        with mock.patch(
            "zerver.lib.push_notifications.gcm_client"
        ) as mock_gcm, self.mock_apns() as apns_context, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as pn_logger, self.assertLogs(
            "zilencer.views", level="INFO"
        ) as views_logger:
            apns_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS)
            ]
            gcm_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
            ]
            mock_gcm.json_request.return_value = {"success": {gcm_devices[0][2]: message.id}}
            # Simulate APNs failing every send with an "Unregistered" status.
            apns_context.apns.send_notification = mock.AsyncMock()
            apns_context.apns.send_notification.return_value.is_successful = False
            apns_context.apns.send_notification.return_value.description = "Unregistered"
            handle_push_notification(self.user_profile.id, missed_message)
            self.assertEqual(
                views_logger.output,
                [
                    "INFO:zilencer.views: "
                    f"Sending mobile push notifications for remote user 6cde5f7a-1f7e-4978-9716-49f69ebfc9fe:<id:{str(self.user_profile.id)}><uuid:{str(self.user_profile.uuid)}>: "
                    f"{len(gcm_devices)} via FCM devices, {len(apns_devices)} via APNs devices"
                ],
            )
            for _, _, token in apns_devices:
                self.assertIn(
                    "INFO:zerver.lib.push_notifications: "
                    f"APNs: Removing invalid/expired token {token} (Unregistered)",
                    pn_logger.output,
                )
            # All APNs tokens should have been purged from the bouncer.
            self.assertEqual(
                RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS).count(), 0
            )
2018-05-21 20:20:23 +02:00
2021-06-11 22:51:27 +02:00
    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    @responses.activate
    def test_connection_error(self) -> None:
        # A network failure reaching the bouncer must surface as
        # PushNotificationBouncerRetryLaterError so the caller can retry.
        self.setup_apns_tokens()
        self.setup_gcm_tokens()

        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
        )
        missed_message = {
            "user_profile_id": self.user_profile.id,
            "message_id": message.id,
            "trigger": "private_message",
        }
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/notify"
        # Make the bouncer endpoint raise a ConnectionError on POST.
        responses.add(responses.POST, URL, body=ConnectionError())
        with mock.patch("zerver.lib.push_notifications.gcm_client") as mock_gcm:
            gcm_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.GCM)
            ]
            mock_gcm.json_request.return_value = {"success": {gcm_devices[0][2]: message.id}}
            with self.assertRaises(PushNotificationBouncerRetryLaterError):
                handle_push_notification(self.user_profile.id, missed_message)
2017-08-18 09:04:52 +02:00
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_enabled " , return_value = True )
2018-12-11 07:05:40 +01:00
def test_read_message ( self , mock_push_notifications : mock . MagicMock ) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2021-06-10 14:38:54 +02:00
usermessage = UserMessage . objects . create (
2017-05-11 10:55:05 +02:00
user_profile = user_profile ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
message = message ,
2017-05-11 10:55:05 +02:00
)
2017-09-10 00:47:36 +02:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
" trigger " : " private_message " ,
2017-09-10 00:47:36 +02:00
}
2021-06-10 14:38:54 +02:00
# If the message is unread, we should send push notifications.
with mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification "
) as mock_send_apple , mock . patch (
" zerver.lib.push_notifications.send_android_push_notification "
) as mock_send_android :
handle_push_notification ( user_profile . id , missed_message )
mock_send_apple . assert_called_once ( )
mock_send_android . assert_called_once ( )
# If the message has been read, don't send push notifications.
usermessage . flags . read = True
usermessage . save ( )
with mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification "
) as mock_send_apple , mock . patch (
" zerver.lib.push_notifications.send_android_push_notification "
) as mock_send_android :
handle_push_notification ( user_profile . id , missed_message )
mock_send_apple . assert_not_called ( )
mock_send_android . assert_not_called ( )
2017-05-11 10:55:05 +02:00
2018-12-05 19:36:58 +01:00
    def test_deleted_message(self) -> None:
        """Simulates the race where message is deleted before handling push notifications"""
        user_profile = self.example_user("hamlet")
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=user_profile,
            flags=UserMessage.flags.read,
            message=message,
        )
        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        # Now, delete the message the normal way
        do_delete_messages(user_profile.realm, [message])

        # This mock.patch() should be assertNoLogs once that feature
        # is added to Python.
        with mock.patch(
            "zerver.lib.push_notifications.uses_notification_bouncer"
        ) as mock_check, mock.patch("logging.error") as mock_logging_error, mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications:
            handle_push_notification(user_profile.id, missed_message)
            mock_push_notifications.assert_called_once()
            # Check we didn't proceed through and didn't log anything.
            mock_check.assert_not_called()
            mock_logging_error.assert_not_called()
2018-12-05 19:36:58 +01:00
    def test_missing_message(self) -> None:
        """Simulates the race where message is missing when handling push notifications"""
        user_profile = self.example_user("hamlet")
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=user_profile,
            flags=UserMessage.flags.read,
            message=message,
        )
        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        # Now delete the message forcefully, so it just doesn't exist.
        message.delete()
        # This should log an error
        with mock.patch(
            "zerver.lib.push_notifications.uses_notification_bouncer"
        ) as mock_check, self.assertLogs(level="INFO") as mock_logging_info, mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications:
            handle_push_notification(user_profile.id, missed_message)
            mock_push_notifications.assert_called_once()
            # Check we didn't proceed through.
            mock_check.assert_not_called()
            self.assertEqual(
                mock_logging_info.output,
                [
                    f"INFO:root:Unexpected message access failure handling push notifications: {user_profile.id} {missed_message['message_id']}"
                ],
            )
2018-12-05 19:36:58 +01:00
2017-11-05 10:51:25 +01:00
    def test_send_notifications_to_bouncer(self) -> None:
        # With the bouncer configured, handle_push_notification should pass
        # the computed APNs/GCM payloads to send_notifications_to_bouncer and
        # log the device counts the bouncer reports back (3 FCM, 5 APNs here).
        user_profile = self.example_user("hamlet")
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=user_profile,
            message=message,
        )
        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), mock.patch(
            "zerver.lib.push_notifications.get_message_payload_apns", return_value={"apns": True}
        ), mock.patch(
            "zerver.lib.push_notifications.get_message_payload_gcm",
            return_value=({"gcm": True}, {}),
        ), mock.patch(
            "zerver.lib.push_notifications.send_notifications_to_bouncer", return_value=(3, 5)
        ) as mock_send, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as mock_logging_info:
            handle_push_notification(user_profile.id, missed_message)
            mock_send.assert_called_with(
                user_profile.id,
                {"apns": True},
                {"gcm": True},
                {},
            )
            self.assertEqual(
                mock_logging_info.output,
                [
                    f"INFO:zerver.lib.push_notifications:Sending push notifications to mobile clients for user {user_profile.id}",
                    f"INFO:zerver.lib.push_notifications:Sent mobile push notifications for user {user_profile.id} through bouncer: 3 via FCM devices, 5 via APNs devices",
                ],
            )
2017-05-11 10:55:05 +02:00
2017-11-05 10:51:25 +01:00
    def test_non_bouncer_push(self) -> None:
        # Without the bouncer, the server delivers directly to APNs/FCM using
        # its locally registered PushDeviceTokens.
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
        )
        android_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.GCM)
        )
        apple_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
        )

        missed_message = {
            "message_id": message.id,
            "trigger": "private_message",
        }
        with mock.patch(
            "zerver.lib.push_notifications.get_message_payload_apns", return_value={"apns": True}
        ), mock.patch(
            "zerver.lib.push_notifications.get_message_payload_gcm",
            return_value=({"gcm": True}, {}),
        ), mock.patch(
            "zerver.lib.push_notifications.send_apple_push_notification"
        ) as mock_send_apple, mock.patch(
            "zerver.lib.push_notifications.send_android_push_notification"
        ) as mock_send_android, mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications:
            handle_push_notification(self.user_profile.id, missed_message)
        # The direct send paths should receive the stubbed payloads, keyed by
        # the user's id identity.
        user_identity = UserPushIdentityCompat(user_id=self.user_profile.id)
        mock_send_apple.assert_called_with(user_identity, apple_devices, {"apns": True})
        mock_send_android.assert_called_with(user_identity, android_devices, {"gcm": True}, {})
        mock_push_notifications.assert_called_once()
2017-05-11 10:55:05 +02:00
2018-07-28 14:31:45 +02:00
    def test_send_remove_notifications_to_bouncer(self) -> None:
        # handle_remove_push_notification should send "remove" payloads via
        # the bouncer and clear the active_mobile_push_notification flag.
        user_profile = self.example_user("hamlet")
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=user_profile,
            message=message,
            flags=UserMessage.flags.active_mobile_push_notification,
        )
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), mock.patch(
            "zerver.lib.push_notifications.send_notifications_to_bouncer"
        ) as mock_send:
            handle_remove_push_notification(user_profile.id, [message.id])
            # APNs payload (badge reset + "remove" event), then GCM payload,
            # then GCM options.
            mock_send.assert_called_with(
                user_profile.id,
                {
                    "badge": 0,
                    "custom": {
                        "zulip": {
                            "server": "testserver",
                            "realm_id": self.sender.realm.id,
                            "realm_uri": "http://zulip.testserver",
                            "user_id": self.user_profile.id,
                            "event": "remove",
                            "zulip_message_ids": str(message.id),
                        },
                    },
                },
                {
                    "server": "testserver",
                    "realm_id": self.sender.realm.id,
                    "realm_uri": "http://zulip.testserver",
                    "user_id": self.user_profile.id,
                    "event": "remove",
                    "zulip_message_ids": str(message.id),
                    "zulip_message_id": message.id,
                },
                {"priority": "normal"},
            )
        user_message = UserMessage.objects.get(user_profile=self.user_profile, message=message)
        self.assertEqual(user_message.flags.active_mobile_push_notification, False)
2018-07-28 14:31:45 +02:00
    def test_non_bouncer_push_remove(self) -> None:
        # Direct (non-bouncer) delivery of "remove" notifications; afterwards
        # the active_mobile_push_notification flag must be cleared.
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
            flags=UserMessage.flags.active_mobile_push_notification,
        )
        android_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.GCM)
        )

        apple_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
        )

        with mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications, mock.patch(
            "zerver.lib.push_notifications.send_android_push_notification"
        ) as mock_send_android, mock.patch(
            "zerver.lib.push_notifications.send_apple_push_notification"
        ) as mock_send_apple:
            handle_remove_push_notification(self.user_profile.id, [message.id])
            mock_push_notifications.assert_called_once()
        user_identity = UserPushIdentityCompat(user_id=self.user_profile.id)
        mock_send_android.assert_called_with(
            user_identity,
            android_devices,
            {
                "server": "testserver",
                "realm_id": self.sender.realm.id,
                "realm_uri": "http://zulip.testserver",
                "user_id": self.user_profile.id,
                "event": "remove",
                "zulip_message_ids": str(message.id),
                "zulip_message_id": message.id,
            },
            {"priority": "normal"},
        )
        mock_send_apple.assert_called_with(
            user_identity,
            apple_devices,
            {
                "badge": 0,
                "custom": {
                    "zulip": {
                        "server": "testserver",
                        "realm_id": self.sender.realm.id,
                        "realm_uri": "http://zulip.testserver",
                        "user_id": self.user_profile.id,
                        "event": "remove",
                        "zulip_message_ids": str(message.id),
                    }
                },
            },
        )
        user_message = UserMessage.objects.get(user_profile=self.user_profile, message=message)
        self.assertEqual(user_message.flags.active_mobile_push_notification, False)
2018-07-28 14:31:45 +02:00
2017-11-05 10:51:25 +01:00
    def test_user_message_does_not_exist(self) -> None:
        """This simulates a condition that should only be an error if the user is
        not long-term idle; we fake it, though, in the sense that the user should
        not have received the message in the first place"""
        self.make_stream("public_stream")
        sender = self.example_user("iago")
        self.subscribe(sender, "public_stream")
        # self.user_profile never receives this message, so no UserMessage
        # row exists for it -- that is the condition under test.
        message_id = self.send_stream_message(sender, "public_stream", "test")
        missed_message = {"message_id": message_id}
        with self.assertLogs("zerver.lib.push_notifications", level="ERROR") as logger, mock.patch(
            "zerver.lib.push_notifications.push_notifications_enabled", return_value=True
        ) as mock_push_notifications:
            handle_push_notification(self.user_profile.id, missed_message)
            self.assertEqual(
                "ERROR:zerver.lib.push_notifications: "
                f"Could not find UserMessage with message_id {message_id} and user_id {self.user_profile.id}"
                "\nNoneType: None",  # This is an effect of using `exc_info=True` in the actual logger.
                logger.output[0],
            )
            mock_push_notifications.assert_called_once()
2017-11-10 00:51:06 +01:00
2022-03-09 23:30:38 +01:00
def test_user_message_does_not_exist_remove ( self ) - > None :
""" This simulates a condition that should only be an error if the user is
not long - term idle ; we fake it , though , in the sense that the user should
not have received the message in the first place """
self . setup_apns_tokens ( )
self . setup_gcm_tokens ( )
self . make_stream ( " public_stream " )
sender = self . example_user ( " iago " )
self . subscribe ( sender , " public_stream " )
message_id = self . send_stream_message ( sender , " public_stream " , " test " )
with mock . patch (
" zerver.lib.push_notifications.push_notifications_enabled " , return_value = True
) as mock_push_notifications , mock . patch (
" zerver.lib.push_notifications.send_android_push_notification "
) as mock_send_android , mock . patch (
" zerver.lib.push_notifications.send_apple_push_notification "
) as mock_send_apple :
handle_remove_push_notification ( self . user_profile . id , [ message_id ] )
mock_push_notifications . assert_called_once ( )
mock_send_android . assert_called_once ( )
mock_send_apple . assert_called_once ( )
2017-11-17 10:47:43 +01:00
def test_user_message_soft_deactivated ( self ) - > None :
2017-11-10 00:51:06 +01:00
""" This simulates a condition that should only be an error if the user is
not long - term idle ; we fake it , though , in the sense that the user should
not have received the message in the first place """
2019-02-08 23:42:24 +01:00
self . setup_apns_tokens ( )
self . setup_gcm_tokens ( )
2021-02-12 08:20:45 +01:00
self . make_stream ( " public_stream " )
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
sender = self . example_user ( " iago " )
2021-02-12 08:20:45 +01:00
self . subscribe ( self . user_profile , " public_stream " )
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
self . subscribe ( sender , " public_stream " )
2020-08-09 14:15:58 +02:00
logger_string = " zulip.soft_deactivation "
2021-02-12 08:20:45 +01:00
with self . assertLogs ( logger_string , level = " INFO " ) as info_logs :
2020-07-26 14:05:18 +02:00
do_soft_deactivate_users ( [ self . user_profile ] )
2021-02-12 08:19:30 +01:00
self . assertEqual (
info_logs . output ,
[
f " INFO: { logger_string } :Soft deactivated user { self . user_profile . id } " ,
f " INFO: { logger_string } :Soft-deactivated batch of 1 users; 0 remain to process " ,
] ,
)
2020-03-07 11:43:05 +01:00
message_id = self . send_stream_message ( sender , " public_stream " , " test " )
2017-11-10 00:51:06 +01:00
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message_id ,
" trigger " : " stream_push_notify " ,
2017-11-10 00:51:06 +01:00
}
android_devices = list (
2021-02-12 08:19:30 +01:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . GCM )
)
2017-11-10 00:51:06 +01:00
apple_devices = list (
2021-02-12 08:19:30 +01:00
PushDeviceToken . objects . filter ( user = self . user_profile , kind = PushDeviceToken . APNS )
)
with mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.get_message_payload_apns " , return_value = { " apns " : True }
2021-02-12 08:19:30 +01:00
) , mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.get_message_payload_gcm " ,
return_value = ( { " gcm " : True } , { } ) ,
2021-02-12 08:19:30 +01:00
) , mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.send_apple_push_notification "
2021-02-12 08:19:30 +01:00
) as mock_send_apple , mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.send_android_push_notification "
2021-02-12 08:19:30 +01:00
) as mock_send_android , mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.logger.error "
2021-02-12 08:19:30 +01:00
) as mock_logger , mock . patch (
2021-02-12 08:20:45 +01:00
" zerver.lib.push_notifications.push_notifications_enabled " , return_value = True
2021-02-12 08:19:30 +01:00
) as mock_push_notifications :
2019-02-08 23:09:20 +01:00
handle_push_notification ( self . user_profile . id , missed_message )
2017-11-10 00:51:06 +01:00
mock_logger . assert_not_called ( )
2023-01-02 20:50:23 +01:00
user_identity = UserPushIdentityCompat ( user_id = self . user_profile . id )
2022-02-23 20:25:30 +01:00
mock_send_apple . assert_called_with ( user_identity , apple_devices , { " apns " : True } )
mock_send_android . assert_called_with ( user_identity , android_devices , { " gcm " : True } , { } )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called_once ( )
2017-05-11 10:55:05 +02:00
2022-04-15 22:07:22 +02:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_enabled " , return_value = True )
def test_user_push_soft_reactivate_soft_deactivated_user (
self , mock_push_notifications : mock . MagicMock
) - > None :
othello = self . example_user ( " othello " )
cordelia = self . example_user ( " cordelia " )
2022-12-14 06:45:55 +01:00
large_user_group = check_add_user_group (
get_realm ( " zulip " ) ,
2022-11-21 03:37:11 +01:00
" large_user_group " ,
[ self . user_profile , othello , cordelia ] ,
acting_user = None ,
2022-04-15 22:07:22 +02:00
)
# Personal mention in a stream message should soft reactivate the user
with self . soft_deactivate_and_check_long_term_idle ( self . user_profile , expected = False ) :
mention = f " @** { self . user_profile . full_name } ** "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{ " message_id " : stream_mentioned_message_id , " trigger " : " mentioned " } ,
)
# Private message should soft reactivate the user
with self . soft_deactivate_and_check_long_term_idle ( self . user_profile , expected = False ) :
# Soft reactivate the user by sending a personal message
personal_message_id = self . send_personal_message ( othello , self . user_profile , " Message " )
handle_push_notification (
self . user_profile . id ,
{ " message_id " : personal_message_id , " trigger " : " private_message " } ,
)
# Wild card mention should NOT soft reactivate the user
with self . soft_deactivate_and_check_long_term_idle ( self . user_profile , expected = True ) :
# Soft reactivate the user by sending a personal message
mention = " @**all** "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{ " message_id " : stream_mentioned_message_id , " trigger " : " wildcard_mentioned " } ,
)
# Group mention should NOT soft reactivate the user
with self . soft_deactivate_and_check_long_term_idle ( self . user_profile , expected = True ) :
# Soft reactivate the user by sending a personal message
mention = " @*large_user_group* "
stream_mentioned_message_id = self . send_stream_message ( othello , " Denmark " , mention )
handle_push_notification (
self . user_profile . id ,
{
" message_id " : stream_mentioned_message_id ,
" trigger " : " mentioned " ,
" mentioned_user_group_id " : large_user_group . id ,
} ,
)
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.logger.info " )
@mock.patch ( " zerver.lib.push_notifications.push_notifications_enabled " , return_value = True )
2021-02-12 08:19:30 +01:00
def test_user_push_notification_already_active (
self , mock_push_notifications : mock . MagicMock , mock_info : mock . MagicMock
) - > None :
2021-02-12 08:20:45 +01:00
user_profile = self . example_user ( " hamlet " )
2022-09-27 21:42:31 +02:00
message = self . get_message (
Recipient . PERSONAL ,
type_id = self . personal_recipient_user . id ,
realm_id = self . personal_recipient_user . realm_id ,
)
2020-07-14 08:12:46 +02:00
UserMessage . objects . create (
user_profile = user_profile ,
flags = UserMessage . flags . active_mobile_push_notification ,
message = message ,
)
missed_message = {
2021-02-12 08:20:45 +01:00
" message_id " : message . id ,
" trigger " : " private_message " ,
2020-07-14 08:12:46 +02:00
}
handle_push_notification ( user_profile . id , missed_message )
mock_push_notifications . assert_called_once ( )
# Check we didn't proceed ahead and function returned.
mock_info . assert_not_called ( )
2021-02-12 08:19:30 +01:00
2017-08-29 01:03:29 +02:00
class TestAPNs(PushNotificationTest):
    """Tests for the APNs (Apple push) sending path."""

    def devices(self) -> List[DeviceToken]:
        """Return all APNs device tokens registered for the test user."""
        return list(
            PushDeviceToken.objects.filter(user=self.user_profile, kind=PushDeviceToken.APNS)
        )

    def send(
        self,
        devices: Optional[List[Union[PushDeviceToken, RemotePushDeviceToken]]] = None,
        payload_data: Mapping[str, Any] = {},
    ) -> None:
        """Invoke send_apple_push_notification for the test user.

        Defaults to all of the user's APNs devices when `devices` is None.
        """
        send_apple_push_notification(
            UserPushIdentityCompat(user_id=self.user_profile.id),
            devices if devices is not None else self.devices(),
            payload_data,
        )

    def test_get_apns_context(self) -> None:
        """This test is pretty hacky, and needs to carefully reset the state
        it modifies in order to avoid leaking state that can lead to
        nondeterministic results for other tests.
        """
        import zerver.lib.push_notifications

        zerver.lib.push_notifications.get_apns_context.cache_clear()
        try:
            with self.settings(APNS_CERT_FILE="/foo.pem"), mock.patch(
                "ssl.SSLContext.load_cert_chain"
            ) as mock_load_cert_chain:
                apns_context = get_apns_context()
                assert apns_context is not None
                try:
                    mock_load_cert_chain.assert_called_once_with("/foo.pem")
                    assert apns_context.apns.pool.loop == apns_context.loop
                finally:
                    # Close the event loop created for the APNs connection.
                    apns_context.loop.close()
        finally:
            # Reset the cache for `get_apns_context` so that we don't
            # leak changes to the rest of the world.
            zerver.lib.push_notifications.get_apns_context.cache_clear()

    def test_not_configured(self) -> None:
        """With no APNs context configured, notifications are dropped with a
        debug log, and initialize_push_notifications warns."""
        self.setup_apns_tokens()
        with mock.patch(
            "zerver.lib.push_notifications.get_apns_context"
        ) as mock_get, self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger:
            mock_get.return_value = None
            self.send()
            notification_drop_log = (
                "DEBUG:zerver.lib.push_notifications:"
                "APNs: Dropping a notification because nothing configured.  "
                "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE)."
            )

            from zerver.lib.push_notifications import initialize_push_notifications

            initialize_push_notifications()
            mobile_notifications_not_configured_log = (
                "WARNING:zerver.lib.push_notifications:"
                "Mobile push notifications are not configured.\n  "
                "See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html"
            )

            self.assertEqual(
                [notification_drop_log, mobile_notifications_not_configured_log], logger.output
            )

    def test_success(self) -> None:
        """A successful send is logged once per device."""
        self.setup_apns_tokens()
        with self.mock_apns() as apns_context, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as logger:
            apns_context.apns.send_notification = mock.AsyncMock()
            apns_context.apns.send_notification.return_value.is_successful = True
            self.send()
            for device in self.devices():
                self.assertIn(
                    f"INFO:zerver.lib.push_notifications:APNs: Success sending for user <id:{self.user_profile.id}> to device {device.token}",
                    logger.output,
                )

    def test_http_retry_eventually_fails(self) -> None:
        """A persistent aioapns ConnectionError is logged as an error."""
        self.setup_apns_tokens()
        with self.mock_apns() as apns_context, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as logger:
            apns_context.apns.send_notification = mock.AsyncMock(
                side_effect=aioapns.exceptions.ConnectionError()
            )
            self.send(devices=self.devices()[0:1])
            self.assertIn(
                f"ERROR:zerver.lib.push_notifications:APNs: ConnectionError sending for user <id:{self.user_profile.id}> to device {self.devices()[0].token}; check certificate expiration",
                logger.output,
            )

    def test_internal_server_error(self) -> None:
        """An unsuccessful APNs response is logged as a warning with the
        server-provided description."""
        self.setup_apns_tokens()
        with self.mock_apns() as apns_context, self.assertLogs(
            "zerver.lib.push_notifications", level="INFO"
        ) as logger:
            apns_context.apns.send_notification = mock.AsyncMock()
            apns_context.apns.send_notification.return_value.is_successful = False
            apns_context.apns.send_notification.return_value.description = "InternalServerError"
            self.send(devices=self.devices()[0:1])
            self.assertIn(
                f"WARNING:zerver.lib.push_notifications:APNs: Failed to send for user <id:{self.user_profile.id}> to device {self.devices()[0].token}: InternalServerError",
                logger.output,
            )

    def test_modernize_apns_payload(self) -> None:
        """Legacy payloads (top-level message_ids) are converted to the modern
        nested `custom.zulip` form; modern payloads pass through unchanged."""
        payload = {
            "alert": "Message from Hamlet",
            "badge": 0,
            "custom": {"zulip": {"message_ids": [3]}},
        }
        self.assertEqual(
            modernize_apns_payload(
                {"alert": "Message from Hamlet", "message_ids": [3], "badge": 0}
            ),
            payload,
        )
        self.assertEqual(modernize_apns_payload(payload), payload)

    @mock.patch("zerver.lib.push_notifications.push_notifications_enabled", return_value=True)
    def test_apns_badge_count(self, mock_push_notifications: mock.MagicMock) -> None:
        user_profile = self.example_user("othello")
        # Test APNs badge count for personal messages.
        message_ids = [
            self.send_personal_message(self.sender, user_profile, "Content of message")
            for i in range(3)
        ]
        self.assertEqual(get_apns_badge_count(user_profile), 0)
        self.assertEqual(get_apns_badge_count_future(user_profile), 3)
        # Similarly, test APNs badge count for stream mention.
        stream = self.subscribe(user_profile, "Denmark")
        message_ids += [
            self.send_stream_message(
                self.sender, stream.name, "Hi, @**Othello, the Moor of Venice**"
            )
            for i in range(2)
        ]
        self.assertEqual(get_apns_badge_count(user_profile), 0)
        self.assertEqual(get_apns_badge_count_future(user_profile), 5)
        num_messages = len(message_ids)
        # Mark the messages as read and test whether
        # the count decreases correctly.
        for i, message_id in enumerate(message_ids):
            do_update_message_flags(user_profile, "add", "read", [message_id])
            self.assertEqual(get_apns_badge_count(user_profile), 0)
            self.assertEqual(get_apns_badge_count_future(user_profile), num_messages - i - 1)
        mock_push_notifications.assert_called()
2021-02-12 08:19:30 +01:00
2017-05-11 09:26:00 +02:00
class TestGetAPNsPayload ( PushNotificationTest ) :
2019-02-14 00:54:56 +01:00
def test_get_message_payload_apns_personal_message ( self ) - > None :
2018-10-04 23:31:04 +02:00
user_profile = self . example_user ( " othello " )
message_id = self . send_personal_message (
2020-03-07 11:43:05 +01:00
self . sender ,
user_profile ,
2021-02-12 08:20:45 +01:00
" Content of personal message " ,
2018-10-04 23:31:04 +02:00
)
message = Message . objects . get ( id = message_id )
2021-09-03 16:49:27 +02:00
payload = get_message_payload_apns (
user_profile , message , NotificationTriggers . PRIVATE_MESSAGE
)
2018-10-04 23:31:04 +02:00
expected = {
2021-02-12 08:20:45 +01:00
" alert " : {
" title " : " King Hamlet " ,
" subtitle " : " " ,
" body " : message . content ,
2018-10-04 23:31:04 +02:00
} ,
2021-02-12 08:20:45 +01:00
" badge " : 0 ,
" sound " : " default " ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " private " ,
" sender_email " : self . sender . email ,
" sender_id " : self . sender . id ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : self . sender . realm . id ,
" realm_uri " : self . sender . realm . uri ,
2019-04-20 17:21:26 +02:00
" user_id " : user_profile . id ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
} ,
} ,
2018-10-04 23:31:04 +02:00
}
self . assertDictEqual ( payload , expected )
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.push_notifications_enabled " , return_value = True )
2021-02-12 08:19:30 +01:00
def test_get_message_payload_apns_huddle_message (
self , mock_push_notifications : mock . MagicMock
) - > None :
2018-10-19 00:09:18 +02:00
user_profile = self . example_user ( " othello " )
2018-02-16 23:18:47 +01:00
message_id = self . send_huddle_message (
2021-02-12 08:20:45 +01:00
self . sender , [ self . example_user ( " othello " ) , self . example_user ( " cordelia " ) ]
2021-02-12 08:19:30 +01:00
)
2018-02-16 23:18:47 +01:00
message = Message . objects . get ( id = message_id )
2021-09-03 16:49:27 +02:00
payload = get_message_payload_apns (
user_profile , message , NotificationTriggers . PRIVATE_MESSAGE
)
2017-05-11 09:26:00 +02:00
expected = {
2021-02-12 08:20:45 +01:00
" alert " : {
2021-04-11 16:26:54 +02:00
" title " : " Cordelia, Lear ' s daughter, King Hamlet, Othello, the Moor of Venice " ,
2021-02-12 08:20:45 +01:00
" subtitle " : " King Hamlet: " ,
" body " : message . content ,
2017-08-31 22:27:46 +02:00
} ,
2021-02-12 08:20:45 +01:00
" sound " : " default " ,
" badge " : 0 ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " private " ,
" pm_users " : " , " . join (
2021-12-04 02:15:21 +01:00
str ( user_profile_id )
for user_profile_id in sorted (
s . user_profile_id
2021-12-04 01:54:29 +01:00
for s in Subscription . objects . filter ( recipient = message . recipient )
)
2021-02-12 08:19:30 +01:00
) ,
2021-02-12 08:20:45 +01:00
" sender_email " : self . sender . email ,
" sender_id " : self . sender . id ,
" server " : settings . EXTERNAL_HOST ,
" realm_id " : self . sender . realm . id ,
" realm_uri " : self . sender . realm . uri ,
2019-04-20 17:21:26 +02:00
" user_id " : user_profile . id ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
} ,
} ,
2017-12-12 05:40:11 +01:00
}
self . assertDictEqual ( payload , expected )
2018-12-11 07:05:40 +01:00
mock_push_notifications . assert_called ( )
2017-12-12 05:40:11 +01:00
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
def test_get_message_payload_apns_stream_message ( self ) - > None :
2021-02-12 08:20:45 +01:00
stream = Stream . objects . filter ( name = " Verona " ) . get ( )
2022-09-27 21:42:31 +02:00
message = self . get_message ( Recipient . STREAM , stream . id , stream . realm_id )
2021-09-03 16:49:27 +02:00
payload = get_message_payload_apns ( self . sender , message , NotificationTriggers . STREAM_PUSH )
2018-10-04 23:31:04 +02:00
expected = {
2021-02-12 08:20:45 +01:00
" alert " : {
2021-05-10 07:02:14 +02:00
" title " : " #Verona > Test topic " ,
2021-02-12 08:20:45 +01:00
" subtitle " : " King Hamlet: " ,
" body " : message . content ,
2018-10-04 23:31:04 +02:00
} ,
2021-02-12 08:20:45 +01:00
" sound " : " default " ,
" badge " : 0 ,
" custom " : {
" zulip " : {
" message_ids " : [ message . id ] ,
" recipient_type " : " stream " ,
" sender_email " : self . sender . email ,
" sender_id " : self . sender . id ,
2019-02-08 23:09:20 +01:00
" stream " : get_display_recipient ( message . recipient ) ,
2022-01-29 22:59:06 +01:00
" stream_id " : stream . id ,
2018-11-10 16:11:12 +01:00
" topic " : message . topic_name ( ) ,
2021-02-12 08:20:45 +01:00
" server " : settings . EXTERNAL_HOST ,
" realm_id " : self . sender . realm . id ,
" realm_uri " : self . sender . realm . uri ,
2020-03-12 14:17:25 +01:00
" user_id " : self . sender . id ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
} ,
} ,
2018-10-04 23:31:04 +02:00
}
self . assertDictEqual ( payload , expected )
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
def test_get_message_payload_apns_stream_mention(self) -> None:
    """A personal @-mention in a stream yields an APNs payload whose
    subtitle names the mention and whose custom data describes the stream."""
    user_profile = self.example_user("othello")
    stream = Stream.objects.get(name="Verona")
    message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
    payload = get_message_payload_apns(user_profile, message, NotificationTriggers.MENTION)
    # Build the expected custom data separately for readability.
    zulip_data = {
        "message_ids": [message.id],
        "recipient_type": "stream",
        "sender_email": self.sender.email,
        "sender_id": self.sender.id,
        "stream": get_display_recipient(message.recipient),
        "stream_id": stream.id,
        "topic": message.topic_name(),
        "server": settings.EXTERNAL_HOST,
        "realm_id": self.sender.realm.id,
        "realm_uri": self.sender.realm.uri,
        "user_id": user_profile.id,
    }
    expected = {
        "alert": {
            "title": "#Verona > Test topic",
            "subtitle": "King Hamlet mentioned you:",
            "body": message.content,
        },
        "sound": "default",
        "badge": 0,
        "custom": {"zulip": zulip_data},
    }
    self.assertDictEqual(payload, expected)
2020-05-20 10:01:15 +02:00
def test_get_message_payload_apns_user_group_mention(self) -> None:
    """A user-group mention adds the group's id/name to the APNs payload."""
    user_profile = self.example_user("othello")
    user_group = check_add_user_group(
        get_realm("zulip"), "test_user_group", [user_profile], acting_user=None
    )
    stream = Stream.objects.get(name="Verona")
    message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
    payload = get_message_payload_apns(
        user_profile, message, NotificationTriggers.MENTION, user_group.id, user_group.name
    )
    zulip_data = {
        "message_ids": [message.id],
        "recipient_type": "stream",
        "sender_email": self.sender.email,
        "sender_id": self.sender.id,
        "stream": get_display_recipient(message.recipient),
        "stream_id": stream.id,
        "topic": message.topic_name(),
        "server": settings.EXTERNAL_HOST,
        "realm_id": self.sender.realm.id,
        "realm_uri": self.sender.realm.uri,
        "user_id": user_profile.id,
        # Extra fields present only for group mentions.
        "mentioned_user_group_id": user_group.id,
        "mentioned_user_group_name": user_group.name,
    }
    expected = {
        "alert": {
            "title": "#Verona > Test topic",
            "subtitle": "King Hamlet mentioned @test_user_group:",
            "body": message.content,
        },
        "sound": "default",
        "badge": 0,
        "custom": {"zulip": zulip_data},
    }
    self.assertDictEqual(payload, expected)
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
def test_get_message_payload_apns_stream_wildcard_mention(self) -> None:
    """A wildcard (@everyone-style) mention produces the 'mentioned
    everyone' subtitle in the APNs payload."""
    user_profile = self.example_user("othello")
    stream = Stream.objects.get(name="Verona")
    message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
    payload = get_message_payload_apns(
        user_profile, message, NotificationTriggers.WILDCARD_MENTION
    )
    zulip_data = {
        "message_ids": [message.id],
        "recipient_type": "stream",
        "sender_email": self.sender.email,
        "sender_id": self.sender.id,
        "stream": get_display_recipient(message.recipient),
        "stream_id": stream.id,
        "topic": message.topic_name(),
        "server": settings.EXTERNAL_HOST,
        "realm_id": self.sender.realm.id,
        "realm_uri": self.sender.realm.uri,
        "user_id": user_profile.id,
    }
    expected = {
        "alert": {
            "title": "#Verona > Test topic",
            "subtitle": "King Hamlet mentioned everyone:",
            "body": message.content,
        },
        "sound": "default",
        "badge": 0,
        "custom": {"zulip": zulip_data},
    }
    self.assertDictEqual(payload, expected)
2021-02-12 08:19:30 +01:00
@override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
def test_get_message_payload_apns_redacted_content(self) -> None:
    """With content redaction enabled, the APNs body is replaced by the
    fixed redaction notice while metadata is kept intact."""
    user_profile = self.example_user("othello")
    message_id = self.send_huddle_message(
        self.sender, [self.example_user("othello"), self.example_user("cordelia")]
    )
    message = Message.objects.get(id=message_id)
    payload = get_message_payload_apns(
        user_profile, message, NotificationTriggers.PRIVATE_MESSAGE
    )
    # Comma-separated, sorted ids of everyone on the huddle recipient.
    pm_users = ",".join(
        str(user_profile_id)
        for user_profile_id in sorted(
            s.user_profile_id
            for s in Subscription.objects.filter(recipient=message.recipient)
        )
    )
    expected = {
        "alert": {
            "title": "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice",
            "subtitle": "King Hamlet:",
            "body": "*This organization has disabled including message content in mobile push notifications*",
        },
        "sound": "default",
        "badge": 0,
        "custom": {
            "zulip": {
                "message_ids": [message.id],
                "recipient_type": "private",
                "pm_users": pm_users,
                "sender_email": self.sender.email,
                "sender_id": self.sender.id,
                "server": settings.EXTERNAL_HOST,
                "realm_id": self.sender.realm.id,
                "realm_uri": self.sender.realm.uri,
                "user_id": user_profile.id,
            },
        },
    }
    self.assertDictEqual(payload, expected)
2021-02-12 08:19:30 +01:00
2017-05-11 10:15:00 +02:00
class TestGetGCMPayload(PushNotificationTest):
    """Tests for get_message_payload_gcm across the various notification
    triggers (mentions, private messages, stream notifications, redaction)."""

    def _test_get_message_payload_gcm_mentions(
        self,
        trigger: str,
        alert: str,
        *,
        mentioned_user_group_id: Optional[int] = None,
        mentioned_user_group_name: Optional[str] = None,
    ) -> None:
        # Shared driver for the mention flavors: sends an over-long stream
        # message and checks truncation plus the full payload shape.
        stream = Stream.objects.get(name="Verona")
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        message.content = "a" * 210
        message.rendered_content = "a" * 210
        message.save()

        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(
            hamlet, message, trigger, mentioned_user_group_id, mentioned_user_group_name
        )
        expected_payload = {
            "user_id": hamlet.id,
            "event": "message",
            "alert": alert,
            "zulip_message_id": message.id,
            "time": datetime_to_timestamp(message.date_sent),
            # 210 chars of content gets truncated to 200 plus an ellipsis.
            "content": "a" * 200 + "…",
            "content_truncated": True,
            "server": settings.EXTERNAL_HOST,
            "realm_id": hamlet.realm.id,
            "realm_uri": hamlet.realm.uri,
            "sender_id": hamlet.id,
            "sender_email": hamlet.email,
            "sender_full_name": "King Hamlet",
            "sender_avatar_url": absolute_avatar_url(message.sender),
            "recipient_type": "stream",
            "stream": get_display_recipient(message.recipient),
            "stream_id": stream.id,
            "topic": message.topic_name(),
        }
        if mentioned_user_group_id is not None:
            expected_payload["mentioned_user_group_id"] = mentioned_user_group_id
            expected_payload["mentioned_user_group_name"] = mentioned_user_group_name
        self.assertDictEqual(payload, expected_payload)
        self.assertDictEqual(gcm_options, {"priority": "high"})

    def test_get_message_payload_gcm_personal_mention(self) -> None:
        self._test_get_message_payload_gcm_mentions(
            "mentioned", "King Hamlet mentioned you in #Verona"
        )

    def test_get_message_payload_gcm_user_group_mention(self) -> None:
        # Note that the @mobile_team user group doesn't actually
        # exist; this test is just verifying the formatting logic.
        self._test_get_message_payload_gcm_mentions(
            "mentioned",
            "King Hamlet mentioned @mobile_team in #Verona",
            mentioned_user_group_id=3,
            mentioned_user_group_name="mobile_team",
        )

    def test_get_message_payload_gcm_wildcard_mention(self) -> None:
        self._test_get_message_payload_gcm_mentions(
            "wildcard_mentioned", "King Hamlet mentioned everyone in #Verona"
        )

    def test_get_message_payload_gcm_private_message(self) -> None:
        """A 1:1 private message produces a 'private' recipient_type payload."""
        message = self.get_message(
            Recipient.PERSONAL,
            type_id=self.personal_recipient_user.id,
            realm_id=self.personal_recipient_user.realm_id,
        )
        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(
            hamlet, message, NotificationTriggers.PRIVATE_MESSAGE
        )
        self.assertDictEqual(
            payload,
            {
                "user_id": hamlet.id,
                "event": "message",
                "alert": "New direct message from King Hamlet",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                "content": message.content,
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_uri": hamlet.realm.uri,
                "sender_id": hamlet.id,
                "sender_email": hamlet.email,
                "sender_full_name": "King Hamlet",
                "sender_avatar_url": absolute_avatar_url(message.sender),
                "recipient_type": "private",
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})

    def test_get_message_payload_gcm_stream_notifications(self) -> None:
        """A stream-push-triggered notification includes stream/topic fields."""
        stream = Stream.objects.get(name="Denmark")
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(
            hamlet, message, NotificationTriggers.STREAM_PUSH
        )
        self.assertDictEqual(
            payload,
            {
                "user_id": hamlet.id,
                "event": "message",
                "alert": "New stream message from King Hamlet in #Denmark",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                "content": message.content,
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_uri": hamlet.realm.uri,
                "sender_id": hamlet.id,
                "sender_email": hamlet.email,
                "sender_full_name": "King Hamlet",
                "sender_avatar_url": absolute_avatar_url(message.sender),
                "recipient_type": "stream",
                "topic": "Test topic",
                "stream": "Denmark",
                "stream_id": stream.id,
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})

    @override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
    def test_get_message_payload_gcm_redacted_content(self) -> None:
        """With redaction on, content is replaced but all metadata survives."""
        stream = Stream.objects.get(name="Denmark")
        message = self.get_message(Recipient.STREAM, stream.id, stream.realm_id)
        hamlet = self.example_user("hamlet")
        payload, gcm_options = get_message_payload_gcm(
            hamlet, message, NotificationTriggers.STREAM_PUSH
        )
        self.assertDictEqual(
            payload,
            {
                "user_id": hamlet.id,
                "event": "message",
                "alert": "New stream message from King Hamlet in #Denmark",
                "zulip_message_id": message.id,
                "time": datetime_to_timestamp(message.date_sent),
                "content": "*This organization has disabled including message content in mobile push notifications*",
                "content_truncated": False,
                "server": settings.EXTERNAL_HOST,
                "realm_id": hamlet.realm.id,
                "realm_uri": hamlet.realm.uri,
                "sender_id": hamlet.id,
                "sender_email": hamlet.email,
                "sender_full_name": "King Hamlet",
                "sender_avatar_url": absolute_avatar_url(message.sender),
                "recipient_type": "stream",
                "topic": "Test topic",
                "stream": "Denmark",
                "stream_id": stream.id,
            },
        )
        self.assertDictEqual(gcm_options, {"priority": "high"})
2017-10-10 11:14:10 +02:00
2017-05-11 10:31:31 +02:00
class TestSendNotificationsToBouncer(ZulipTestCase):
    """Verifies the exact request body sent to the push bouncer."""

    @mock.patch("zerver.lib.remote_server.send_to_push_bouncer")
    def test_send_notifications_to_bouncer(self, mock_send: mock.MagicMock) -> None:
        mock_send.return_value = {"total_android_devices": 1, "total_apple_devices": 3}
        total_android_devices, total_apple_devices = send_notifications_to_bouncer(
            1, {"apns": True}, {"gcm": True}, {}
        )
        # The bouncer receives both payloads, keyed by the user's id and uuid.
        post_data = {
            "user_uuid": get_user_profile_by_id(1).uuid,
            "user_id": 1,
            "apns_payload": {"apns": True},
            "gcm_payload": {"gcm": True},
            "gcm_options": {},
        }
        mock_send.assert_called_with(
            "POST",
            "push/notify",
            orjson.dumps(post_data),
            extra_headers={"Content-type": "application/json"},
        )
        # Device counts are passed straight through from the bouncer response.
        self.assertEqual(total_android_devices, 1)
        self.assertEqual(total_apple_devices, 3)
2021-02-12 08:19:30 +01:00
2017-05-11 10:31:31 +02:00
2021-06-11 22:51:27 +02:00
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
class TestSendToPushBouncer(ZulipTestCase):
    """Exercises send_to_push_bouncer's handling of the HTTP status codes
    the bouncer can return (2xx, 3xx, 4xx, 5xx, unparseable bodies)."""

    def add_mock_response(
        self, body: bytes = orjson.dumps({"msg": "error"}), status: int = 200
    ) -> None:
        # Register a canned response for the bouncer's /register endpoint.
        assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
        URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/register"
        responses.add(responses.POST, URL, body=body, status=status)

    @responses.activate
    def test_500_error(self) -> None:
        # Server-side errors should be retried later, with a warning logged.
        self.add_mock_response(status=500)
        with self.assertLogs(level="WARNING") as m:
            with self.assertRaises(PushNotificationBouncerRetryLaterError):
                send_to_push_bouncer("POST", "register", {"data": "true"})
        self.assertEqual(m.output, ["WARNING:root:Received 500 from push notification bouncer"])

    @responses.activate
    def test_400_error(self) -> None:
        # A 400 with a JSON error body surfaces as a JsonableError.
        self.add_mock_response(status=400)
        with self.assertRaises(JsonableError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(exc.exception.msg, "error")

    @responses.activate
    def test_400_error_invalid_server_key(self) -> None:
        from zilencer.auth import InvalidZulipServerError

        # This is the exception our decorator uses for an invalid Zulip server
        error_response = json_response_from_error(InvalidZulipServerError("testRole"))
        self.add_mock_response(body=error_response.content, status=error_response.status_code)
        with self.assertRaises(PushNotificationBouncerError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(
            str(exc.exception),
            "Push notifications bouncer error: "
            "Zulip server auth failure: testRole is not registered -- did you run `manage.py register_server`?",
        )

    @responses.activate
    def test_400_error_when_content_is_not_serializable(self) -> None:
        # A 400 whose body isn't valid JSON propagates the decode error.
        self.add_mock_response(body=b"/", status=400)
        with self.assertRaises(orjson.JSONDecodeError):
            send_to_push_bouncer("POST", "register", {"msg": "true"})

    @responses.activate
    def test_300_error(self) -> None:
        # Any status outside the handled ranges is reported as unexpected.
        self.add_mock_response(body=b"/", status=300)
        with self.assertRaises(PushNotificationBouncerError) as exc:
            send_to_push_bouncer("POST", "register", {"msg": "true"})
        self.assertEqual(
            str(exc.exception), "Push notification bouncer returned unexpected status code 300"
        )
2017-05-12 09:47:31 +02:00
2020-06-08 18:38:50 +02:00
class TestPushApi(BouncerTestCase):
    """Tests the device-token registration endpoints, both with and
    without the push notification bouncer configured."""

    @responses.activate
    def test_push_api_error_handling(self) -> None:
        user = self.example_user("cordelia")
        self.login_user(user)
        endpoints = [
            ("/json/users/me/apns_device_token", "apple-tokenaz"),
            ("/json/users/me/android_gcm_reg_id", "android-token"),
        ]

        # Test error handling
        for endpoint, label in endpoints:
            # Try adding/removing tokens that are too big...
            broken_token = "a" * 5000  # too big
            result = self.client_post(endpoint, {"token": broken_token})
            self.assert_json_error(result, "Empty or invalid length token")

            if label == "apple-tokenaz":
                # APNs tokens must additionally be hex.
                result = self.client_post(endpoint, {"token": "xyz has non-hex characters"})
                self.assert_json_error(result, "Invalid APNS token")

            result = self.client_delete(endpoint, {"token": broken_token})
            self.assert_json_error(result, "Empty or invalid length token")

            # Try to remove a non-existent token...
            result = self.client_delete(endpoint, {"token": "abcd1234"})
            self.assert_json_error(result, "Token does not exist")

            # Use push notification bouncer and try to remove non-existing tokens.
            with self.settings(
                PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"
            ), responses.RequestsMock() as resp:
                assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
                URL = settings.PUSH_NOTIFICATION_BOUNCER_URL + "/api/v1/remotes/push/unregister"
                resp.add_callback(responses.POST, URL, callback=self.request_callback)
                result = self.client_delete(endpoint, {"token": "abcd1234"})
                self.assert_json_error(result, "Token does not exist")
                self.assertTrue(resp.assert_call_count(URL, 1))

    @responses.activate
    def test_push_api_add_and_remove_device_tokens(self) -> None:
        user = self.example_user("cordelia")
        self.login_user(user)

        no_bouncer_requests = [
            ("/json/users/me/apns_device_token", "apple-tokenaa"),
            ("/json/users/me/android_gcm_reg_id", "android-token-1"),
        ]
        bouncer_requests = [
            ("/json/users/me/apns_device_token", "apple-tokenbb"),
            ("/json/users/me/android_gcm_reg_id", "android-token-2"),
        ]

        # Add tokens without using push notification bouncer.
        for endpoint, token in no_bouncer_requests:
            # Test that we can push twice.
            result = self.client_post(endpoint, {"token": token})
            self.assert_json_success(result)
            result = self.client_post(endpoint, {"token": token})
            self.assert_json_success(result)
            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assert_length(tokens, 1)
            self.assertEqual(tokens[0].token, token)

        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"):
            self.add_mock_response()
            # Enable push notification bouncer and add tokens.
            for endpoint, token in bouncer_requests:
                # Test that we can push twice.
                result = self.client_post(endpoint, {"token": token})
                self.assert_json_success(result)
                result = self.client_post(endpoint, {"token": token})
                self.assert_json_success(result)
                tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
                self.assert_length(tokens, 1)
                self.assertEqual(tokens[0].token, token)
                remote_tokens = list(
                    RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
                )
                self.assert_length(remote_tokens, 1)
                self.assertEqual(remote_tokens[0].token, token)

        # PushDeviceToken will include all the device tokens.
        token_values = list(PushDeviceToken.objects.values_list("token", flat=True))
        self.assertEqual(
            token_values, ["apple-tokenaa", "android-token-1", "apple-tokenbb", "android-token-2"]
        )

        # RemotePushDeviceToken will only include tokens of
        # the devices using push notification bouncer.
        remote_token_values = list(RemotePushDeviceToken.objects.values_list("token", flat=True))
        self.assertEqual(remote_token_values, ["apple-tokenbb", "android-token-2"])

        # Test removing tokens without using push notification bouncer.
        for endpoint, token in no_bouncer_requests:
            result = self.client_delete(endpoint, {"token": token})
            self.assert_json_success(result)
            tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assert_length(tokens, 0)

        # Use push notification bouncer and test removing device tokens.
        # Tokens will be removed both locally and remotely.
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com"):
            for endpoint, token in bouncer_requests:
                result = self.client_delete(endpoint, {"token": token})
                self.assert_json_success(result)
                tokens = list(PushDeviceToken.objects.filter(user=user, token=token))
                remote_tokens = list(
                    RemotePushDeviceToken.objects.filter(user_uuid=user.uuid, token=token)
                )
                self.assert_length(tokens, 0)
                self.assert_length(remote_tokens, 0)

        # Verify that the above process indeed removed all the tokens we created.
        self.assertEqual(RemotePushDeviceToken.objects.all().count(), 0)
        self.assertEqual(PushDeviceToken.objects.all().count(), 0)
2021-02-12 08:19:30 +01:00
2020-07-01 04:19:54 +02:00
class GCMParseOptionsTest(ZulipTestCase):
    """Tests for parse_gcm_options: option validation and the priority
    defaulting rules (high for "message" events, normal otherwise)."""

    def test_invalid_option(self) -> None:
        with self.assertRaises(JsonableError):
            parse_gcm_options({"invalid": True}, {})

    def test_invalid_priority_value(self) -> None:
        with self.assertRaises(JsonableError):
            parse_gcm_options({"priority": "invalid"}, {})

    def test_default_priority(self) -> None:
        self.assertEqual("high", parse_gcm_options({}, {"event": "message"}))
        self.assertEqual("normal", parse_gcm_options({}, {"event": "remove"}))
        self.assertEqual("normal", parse_gcm_options({}, {}))

    def test_explicit_priority(self) -> None:
        # An explicit priority always wins over the event-based default.
        self.assertEqual("normal", parse_gcm_options({"priority": "normal"}, {}))
        self.assertEqual("high", parse_gcm_options({"priority": "high"}, {}))
2019-02-08 22:44:55 +01:00
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.gcm_client " )
2019-02-08 22:44:55 +01:00
class GCMSendTest ( PushNotificationTest ) :
2017-11-05 10:51:25 +01:00
def setUp(self) -> None:
    """Register the fixture GCM device tokens on top of the base setup."""
    super().setUp()
    self.setup_gcm_tokens()
2016-08-08 14:20:41 +02:00
2017-11-05 10:51:25 +01:00
def get_gcm_data(self, **kwargs: Any) -> Dict[str, Any]:
    """Return a sample GCM data dict; keyword arguments override or
    extend the two default entries."""
    return {
        "key 1": "Data 1",
        "key 2": "Data 2",
        **kwargs,
    }
2021-07-25 14:26:24 +02:00
def test_gcm_is_none(self, mock_gcm: mock.MagicMock) -> None:
    """When the GCM client is falsy (unconfigured), sending is skipped
    with a DEBUG log rather than an error."""
    mock_gcm.__bool__.return_value = False
    with self.assertLogs("zerver.lib.push_notifications", level="DEBUG") as logger:
        send_android_push_notification_to_user(self.user_profile, {}, {})
        self.assertEqual(
            "DEBUG:zerver.lib.push_notifications:"
            "Skipping sending a GCM push notification since PUSH_NOTIFICATION_BOUNCER_URL "
            "and ANDROID_GCM_API_KEY are both unset",
            logger.output[0],
        )
2016-08-08 14:20:41 +02:00
2021-07-25 14:26:24 +02:00
def test_json_request_raises_ioerror ( self , mock_gcm : mock . MagicMock ) - > None :
2022-01-22 07:56:30 +01:00
mock_gcm . json_request . side_effect = OSError ( " error " )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " WARNING " ) as logger :
send_android_push_notification_to_user ( self . user_profile , { } , { } )
self . assertIn (
" WARNING:zerver.lib.push_notifications:Error while pushing to GCM \n Traceback " ,
logger . output [ 0 ] ,
)
2017-05-17 09:58:27 +02:00
2021-02-12 08:20:45 +01:00
@mock.patch ( " zerver.lib.push_notifications.logger.warning " )
2021-07-25 14:26:24 +02:00
def test_success ( self , mock_warning : mock . MagicMock , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
res [ " success " ] = { token : ind for ind , token in enumerate ( self . gcm_tokens ) }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
2021-10-20 01:52:23 +02:00
self . assert_length ( logger . output , 3 )
2022-03-10 13:31:16 +01:00
log_msg1 = f " INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id: { self . user_profile . id } > to 2 devices "
2021-10-20 01:52:23 +02:00
log_msg2 = f " INFO:zerver.lib.push_notifications:GCM: Sent { 1111 } as { 0 } "
log_msg3 = f " INFO:zerver.lib.push_notifications:GCM: Sent { 2222 } as { 1 } "
self . assertEqual ( [ log_msg1 , log_msg2 , log_msg3 ] , logger . output )
2016-08-08 14:20:41 +02:00
mock_warning . assert_not_called ( )
2021-07-25 14:26:24 +02:00
def test_canonical_equal ( self , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
res [ " canonical " ] = { 1 : 1 }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " WARNING " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
self . assertEqual (
f " WARNING:zerver.lib.push_notifications:GCM: Got canonical ref but it already matches our ID { 1 } ! " ,
logger . output [ 0 ] ,
2020-05-02 08:44:14 +02:00
)
2016-08-08 14:20:41 +02:00
2021-07-25 14:26:24 +02:00
def test_canonical_pushdevice_not_present ( self , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
t1 = hex_to_b64 ( " 1111 " )
t2 = hex_to_b64 ( " 3333 " )
res [ " canonical " ] = { t1 : t2 }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
2018-05-11 01:39:38 +02:00
def get_count ( hex_token : str ) - > int :
2019-02-08 23:09:20 +01:00
token = hex_to_b64 ( hex_token )
2021-02-12 08:19:30 +01:00
return PushDeviceToken . objects . filter ( token = token , kind = PushDeviceToken . GCM ) . count ( )
2016-08-08 14:20:41 +02:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 1 )
self . assertEqual ( get_count ( " 3333 " ) , 0 )
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " WARNING " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
msg = f " WARNING:zerver.lib.push_notifications:GCM: Got canonical ref { t2 } replacing { t1 } but new ID not registered! Updating. "
self . assertEqual ( msg , logger . output [ 0 ] )
2016-08-08 14:20:41 +02:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 0 )
self . assertEqual ( get_count ( " 3333 " ) , 1 )
2016-08-08 14:20:41 +02:00
2021-07-25 14:26:24 +02:00
def test_canonical_pushdevice_different ( self , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
old_token = hex_to_b64 ( " 1111 " )
new_token = hex_to_b64 ( " 2222 " )
res [ " canonical " ] = { old_token : new_token }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
2018-05-11 01:39:38 +02:00
def get_count ( hex_token : str ) - > int :
2019-02-08 23:09:20 +01:00
token = hex_to_b64 ( hex_token )
2021-02-12 08:19:30 +01:00
return PushDeviceToken . objects . filter ( token = token , kind = PushDeviceToken . GCM ) . count ( )
2016-08-08 14:20:41 +02:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 1 )
self . assertEqual ( get_count ( " 2222 " ) , 1 )
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
self . assertEqual (
2022-03-10 13:31:16 +01:00
f " INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id: { self . user_profile . id } > to 2 devices " ,
2021-07-25 14:26:24 +02:00
logger . output [ 0 ] ,
)
2021-10-20 01:52:23 +02:00
self . assertEqual (
f " INFO:zerver.lib.push_notifications:GCM: Got canonical ref { new_token } , dropping { old_token } " ,
logger . output [ 1 ] ,
)
2016-08-08 14:20:41 +02:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 0 )
self . assertEqual ( get_count ( " 2222 " ) , 1 )
2016-08-08 14:20:41 +02:00
2021-07-25 14:26:24 +02:00
def test_not_registered ( self , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
token = hex_to_b64 ( " 1111 " )
res [ " errors " ] = { " NotRegistered " : [ token ] }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
2018-05-11 01:39:38 +02:00
def get_count ( hex_token : str ) - > int :
2019-02-08 23:09:20 +01:00
token = hex_to_b64 ( hex_token )
2021-02-12 08:19:30 +01:00
return PushDeviceToken . objects . filter ( token = token , kind = PushDeviceToken . GCM ) . count ( )
2016-08-08 14:20:41 +02:00
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 1 )
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " INFO " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
self . assertEqual (
2022-03-10 13:31:16 +01:00
f " INFO:zerver.lib.push_notifications:GCM: Sending notification for local user <id: { self . user_profile . id } > to 2 devices " ,
2021-10-20 01:52:23 +02:00
logger . output [ 0 ] ,
)
self . assertEqual (
f " INFO:zerver.lib.push_notifications:GCM: Removing { token } " ,
logger . output [ 1 ] ,
2021-07-25 14:26:24 +02:00
)
2021-02-12 08:20:45 +01:00
self . assertEqual ( get_count ( " 1111 " ) , 0 )
2016-08-08 14:20:41 +02:00
2021-07-25 14:26:24 +02:00
def test_failure ( self , mock_gcm : mock . MagicMock ) - > None :
2016-12-13 08:41:48 +01:00
res = { }
2021-02-12 08:20:45 +01:00
token = hex_to_b64 ( " 1111 " )
res [ " errors " ] = { " Failed " : [ token ] }
2019-02-08 23:04:42 +01:00
mock_gcm . json_request . return_value = res
2016-08-08 14:20:41 +02:00
data = self . get_gcm_data ( )
2021-07-25 14:26:24 +02:00
with self . assertLogs ( " zerver.lib.push_notifications " , level = " WARNING " ) as logger :
send_android_push_notification_to_user ( self . user_profile , data , { } )
msg = f " WARNING:zerver.lib.push_notifications:GCM: Delivery to { token } failed: Failed "
self . assertEqual ( msg , logger . output [ 0 ] )
2016-12-14 13:23:05 +01:00
2021-02-12 08:19:30 +01:00
2019-02-14 01:08:51 +01:00
class TestClearOnRead(ZulipTestCase):
    def test_mark_stream_as_read(self) -> None:
        """Marking a whole stream as read should enqueue exactly one batch
        containing the IDs of every message with an active mobile push
        notification, so clients can clear those notifications."""
        num_messages = 3
        hamlet = self.example_user("hamlet")
        hamlet.enable_stream_push_notifications = True
        hamlet.save()
        stream = self.subscribe(hamlet, "Denmark")

        sender = self.example_user("iago")
        message_ids = []
        for i in range(num_messages):
            message_ids.append(self.send_stream_message(sender, stream.name, f"yo {i}"))

        # Pretend each of these messages triggered a mobile push that is
        # still active on Hamlet's devices.
        UserMessage.objects.filter(
            user_profile_id=hamlet.id,
            message_id__in=message_ids,
        ).update(flags=F("flags").bitor(UserMessage.flags.active_mobile_push_notification))

        with mock_queue_publish("zerver.actions.message_flags.queue_json_publish") as mock_publish:
            assert stream.recipient_id is not None
            do_mark_stream_messages_as_read(hamlet, stream.recipient_id)
            batches = [call[0][1]["message_ids"] for call in mock_publish.call_args_list]

        self.assert_length(batches, 1)
        self.assertEqual(sum(len(batch) for batch in batches), len(message_ids))
        self.assertEqual({msg_id for batch in batches for msg_id in batch}, set(message_ids))
2019-02-14 01:08:51 +01:00
2021-02-12 08:19:30 +01:00
2017-10-06 23:16:29 +02:00
class TestPushNotificationsContent(ZulipTestCase):
    def test_fixtures(self) -> None:
        """Every shared markdown fixture that declares a text_content field
        should round-trip through get_mobile_push_content."""
        fixtures = orjson.loads(self.fixture_data("markdown_test_cases.json"))
        for case in fixtures["regular_tests"]:
            if "text_content" not in case:
                continue
            with self.subTest(markdown_test_case=case["name"]):
                rendered = get_mobile_push_content(case["expected_output"])
                self.assertEqual(rendered, case["text_content"])

    def test_backend_only_fixtures(self) -> None:
        """Cases that depend on server-side IDs (realm emoji, mentions,
        stream links) and so can't live in the shared fixture file."""
        realm = get_realm("zulip")
        cordelia = self.example_user("cordelia")
        stream = get_stream("Verona", realm)

        cases = [
            {
                "name": "realm_emoji",
                "rendered_content": f'<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/{realm.id}/emoji/green_tick.png" title="green tick"> realm emoji.</p>',
                "expected_output": "Testing :green_tick: realm emoji.",
            },
            {
                "name": "mentions",
                "rendered_content": f'<p>Mentioning <span class="user-mention" data-user-id="{cordelia.id}">@Cordelia, Lear\'s daughter</span>.</p>',
                "expected_output": "Mentioning @Cordelia, Lear's daughter.",
            },
            {
                "name": "stream_names",
                "rendered_content": f'<p>Testing stream names <a class="stream" data-stream-id="{stream.id}" href="/#narrow/stream/Verona">#Verona</a>.</p>',
                "expected_output": "Testing stream names #Verona.",
            },
        ]

        for case in cases:
            self.assertEqual(
                get_mobile_push_content(case["rendered_content"]),
                case["expected_output"],
            )
2018-05-04 01:40:46 +02:00
2021-02-12 08:19:30 +01:00
2020-08-27 22:46:39 +02:00
@skipUnless(settings.ZILENCER_ENABLED, "requires zilencer")
class PushBouncerSignupTest(ZulipTestCase):
    """Tests for the push bouncer's remote-server registration endpoint
    (/api/v1/remotes/server/register) and server deactivation."""

    def test_deactivate_remote_server(self) -> None:
        """Register a remote server, deactivate it, and verify an audit-log
        row is written and repeat deactivation returns a 401 error."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@example.com")

        result = self.uuid_post(zulip_org_id, "/api/v1/remotes/server/deactivate", subdomain="")
        self.assert_json_success(result)

        # Re-fetch: the deactivated flag was updated server-side.
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        remote_realm_audit_log = RemoteZulipServerAuditLog.objects.filter(
            event_type=RealmAuditLog.REMOTE_SERVER_DEACTIVATED
        ).last()
        assert remote_realm_audit_log is not None
        self.assertTrue(server.deactivated)

        # Now test that trying to deactivate again reports the right error.
        result = self.uuid_post(
            zulip_org_id, "/api/v1/remotes/server/deactivate", request, subdomain=""
        )
        self.assert_json_error(
            result,
            "The mobile push notification service registration for your server has been deactivated",
            status_code=401,
        )

    def test_push_signup_invalid_host(self) -> None:
        """Registration with a hostname lacking a TLD is rejected."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="invalid-host",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "invalid-host is not a valid hostname")

    def test_push_signup_invalid_email(self) -> None:
        """Registration with a malformed contact email is rejected."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Enter a valid email address.")

    def test_push_signup_invalid_zulip_org_id(self) -> None:
        """Registration with an org ID that is not a valid UUID is rejected."""
        # Right length, but not UUID-shaped at all.
        zulip_org_id = "x" * RemoteZulipServer.UUID_LENGTH
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Invalid UUID")

        # This looks mostly like a proper UUID, but isn't actually a valid UUIDv4,
        # which makes it slip past a basic validation via initializing uuid.UUID with it.
        # Thus we should test this scenario separately.
        zulip_org_id = "18cedb98-5222-5f34-50a9-fc418e1ba972"
        request["zulip_org_id"] = zulip_org_id
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Invalid UUID")

    def test_push_signup_success(self) -> None:
        """Full happy-path flow: register, update hostname, rotate the API
        key, update contact info, and finally verify that re-registering
        with a wrong key fails authentication."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@example.com")

        # Update our hostname
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="zulip.example.com",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "zulip.example.com")
        self.assertEqual(server.contact_email, "server-admin@example.com")

        # Now test rotating our key
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="example.com",
            contact_email="server-admin@example.com",
            new_org_key=get_random_string(64),
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "example.com")
        self.assertEqual(server.contact_email, "server-admin@example.com")
        # Subsequent requests must authenticate with the rotated key.
        zulip_org_key = request["new_org_key"]
        self.assertEqual(server.api_key, zulip_org_key)

        # Update our hostname
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=zulip_org_key,
            hostname="zulip.example.com",
            contact_email="new-server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_success(result)
        server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
        self.assertEqual(server.hostname, "zulip.example.com")
        self.assertEqual(server.contact_email, "new-server-admin@example.com")

        # Now test trying to double-create with a new random key fails
        request = dict(
            zulip_org_id=zulip_org_id,
            zulip_org_key=get_random_string(64),
            hostname="example.com",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(
            result, f"Zulip server auth failure: key does not match role {zulip_org_id}"
        )
2022-02-23 20:25:30 +01:00
2023-01-02 20:50:23 +01:00
class TestUserPushIdentityCompat(ZulipTestCase):
    def test_filter_q(self) -> None:
        """filter_q builds a Q on whichever identifiers the identity
        carries, ORing both together when both are present."""
        only_id = UserPushIdentityCompat(user_id=1)
        only_uuid = UserPushIdentityCompat(user_uuid="aaaa")
        both = UserPushIdentityCompat(user_id=1, user_uuid="aaaa")

        self.assertEqual(only_id.filter_q(), Q(user_id=1))
        self.assertEqual(only_uuid.filter_q(), Q(user_uuid="aaaa"))
        self.assertEqual(both.filter_q(), Q(user_uuid="aaaa") | Q(user_id=1))

    def test_eq(self) -> None:
        """Equality compares identifier values; unrelated types never
        compare equal."""
        first = UserPushIdentityCompat(user_id=1)
        second = UserPushIdentityCompat(user_id=1)
        third = UserPushIdentityCompat(user_id=2)

        self.assertEqual(first, second)
        self.assertNotEqual(first, third)
        # An integer can't be equal to an instance of the class.
        self.assertNotEqual(first, 1)