2018-02-09 23:19:00 +01:00
|
|
|
from contextlib import contextmanager
|
2019-01-31 00:39:02 +01:00
|
|
|
import datetime
|
2017-08-29 01:05:20 +02:00
|
|
|
import itertools
|
2017-08-18 09:04:52 +02:00
|
|
|
import requests
|
2016-08-03 11:11:25 +02:00
|
|
|
import mock
|
2016-08-08 14:20:41 +02:00
|
|
|
from mock import call
|
2019-02-02 23:53:44 +01:00
|
|
|
from typing import Any, Dict, List, Optional
|
2016-08-03 11:11:25 +02:00
|
|
|
|
2018-04-26 06:50:37 +02:00
|
|
|
import base64
|
2017-10-06 23:16:29 +02:00
|
|
|
import os
|
2016-10-27 23:55:31 +02:00
|
|
|
import ujson
|
2018-05-04 01:40:46 +02:00
|
|
|
import uuid
|
2016-08-08 14:20:41 +02:00
|
|
|
|
2019-02-02 23:53:44 +01:00
|
|
|
from django.test import override_settings
|
2016-08-03 11:11:25 +02:00
|
|
|
from django.conf import settings
|
2016-10-27 23:55:31 +02:00
|
|
|
from django.http import HttpResponse
|
2019-04-23 22:32:12 +02:00
|
|
|
from django.db import transaction
|
2019-02-14 01:08:51 +01:00
|
|
|
from django.db.models import F
|
2018-05-04 01:40:46 +02:00
|
|
|
from django.utils.crypto import get_random_string
|
2019-01-31 00:39:02 +01:00
|
|
|
from django.utils.timezone import utc as timezone_utc
|
2016-08-03 11:11:25 +02:00
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
from analytics.lib.counts import CountStat, LoggingCountStat
|
|
|
|
from analytics.models import InstallationCount, RealmCount
|
2017-05-08 17:54:11 +02:00
|
|
|
from zerver.models import (
|
2017-05-11 09:12:21 +02:00
|
|
|
PushDeviceToken,
|
|
|
|
Message,
|
2017-05-11 10:55:05 +02:00
|
|
|
UserMessage,
|
2017-11-29 02:49:11 +01:00
|
|
|
receives_offline_email_notifications,
|
|
|
|
receives_offline_push_notifications,
|
2017-05-08 17:54:11 +02:00
|
|
|
receives_online_notifications,
|
2017-08-17 16:55:32 +02:00
|
|
|
receives_stream_notifications,
|
2017-05-11 09:12:21 +02:00
|
|
|
get_client,
|
2017-08-29 06:28:30 +02:00
|
|
|
get_realm,
|
2019-07-24 07:47:59 +02:00
|
|
|
get_stream,
|
2017-05-11 09:12:21 +02:00
|
|
|
Recipient,
|
2019-10-03 02:01:36 +02:00
|
|
|
RealmAuditLog,
|
2017-05-11 10:15:00 +02:00
|
|
|
Stream,
|
2018-02-16 23:18:47 +01:00
|
|
|
Subscription,
|
2017-05-08 17:54:11 +02:00
|
|
|
)
|
2019-11-19 03:12:54 +01:00
|
|
|
from zerver.lib.actions import (
|
|
|
|
do_delete_messages,
|
|
|
|
do_mark_stream_messages_as_read,
|
|
|
|
do_regenerate_api_key,
|
|
|
|
)
|
2017-11-10 00:51:06 +01:00
|
|
|
from zerver.lib.soft_deactivation import do_soft_deactivate_users
|
2019-02-08 23:09:20 +01:00
|
|
|
from zerver.lib.push_notifications import (
|
|
|
|
absolute_avatar_url,
|
|
|
|
b64_to_hex,
|
|
|
|
datetime_to_timestamp,
|
|
|
|
DeviceToken,
|
|
|
|
get_apns_client,
|
|
|
|
get_display_recipient,
|
2019-02-14 00:54:56 +01:00
|
|
|
get_message_payload_apns,
|
|
|
|
get_message_payload_gcm,
|
2019-02-08 23:09:20 +01:00
|
|
|
get_mobile_push_content,
|
|
|
|
handle_push_notification,
|
|
|
|
handle_remove_push_notification,
|
|
|
|
hex_to_b64,
|
|
|
|
modernize_apns_payload,
|
|
|
|
num_push_devices_for_user,
|
|
|
|
parse_gcm_options,
|
|
|
|
send_android_push_notification_to_user,
|
|
|
|
send_apple_push_notification,
|
|
|
|
send_notifications_to_bouncer,
|
|
|
|
send_to_push_bouncer,
|
|
|
|
)
|
2019-01-31 00:39:02 +01:00
|
|
|
from zerver.lib.remote_server import send_analytics_to_remote_server, \
|
2019-12-02 19:46:11 +01:00
|
|
|
build_analytics_data, PushNotificationBouncerException, PushNotificationBouncerRetryLaterError
|
2019-01-31 00:39:02 +01:00
|
|
|
from zerver.lib.request import JsonableError
|
2016-11-10 19:30:09 +01:00
|
|
|
from zerver.lib.test_classes import (
|
2019-02-08 22:44:55 +01:00
|
|
|
TestCase, ZulipTestCase,
|
2016-11-10 19:30:09 +01:00
|
|
|
)
|
2016-08-03 11:11:25 +02:00
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
from zilencer.models import RemoteZulipServer, RemotePushDeviceToken, \
|
2019-10-03 02:01:36 +02:00
|
|
|
RemoteRealmCount, RemoteInstallationCount, RemoteRealmAuditLog
|
2016-10-27 23:55:31 +02:00
|
|
|
from django.utils.timezone import now
|
|
|
|
|
2017-10-06 23:16:29 +02:00
|
|
|
# Path to the zerver/ package directory (two dirname() hops up from this
# test module).  NOTE(review): __file__ may be a relative path here;
# confirm no caller needs an absolute path.
ZERVER_DIR = os.path.dirname(os.path.dirname(__file__))
|
|
|
|
|
2017-05-16 08:05:31 +02:00
|
|
|
class BouncerTestCase(ZulipTestCase):
    """Base class for push-notification bouncer tests.

    Creates a RemoteZulipServer fixture for authentication and provides
    bounce_request(), which routes bouncer HTTP traffic through the
    Django test client instead of the real network.
    """

    def setUp(self) -> None:
        # Register the fake remote server that bouncer endpoints
        # authenticate against.
        self.server_uuid = "1234-abcd"
        RemoteZulipServer(
            uuid=self.server_uuid,
            api_key="magic_secret_api_key",
            hostname="demo.example.com",
            last_updated=now(),
        ).save()
        super().setUp()

    def tearDown(self) -> None:
        # Drop the remote-server fixture created in setUp.
        RemoteZulipServer.objects.filter(uuid=self.server_uuid).delete()
        super().tearDown()

    def bounce_request(self, *args: Any, **kwargs: Any) -> HttpResponse:
        """Carry out a push notification bouncer request using the
        Django test browser, rather than python-requests.
        """
        # args[0] is the HTTP method, args[1] the full bouncer URL.
        http_method = args[0]
        local_url = args[1].replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
        if http_method == "POST":
            return self.uuid_post(self.server_uuid, local_url,
                                  kwargs['data'], subdomain='')
        if http_method == "GET":
            return self.uuid_get(self.server_uuid, local_url,
                                 kwargs['data'], subdomain='')
        raise AssertionError("Unsupported method for bounce_request")

    def get_generic_payload(self, method: str='register') -> Dict[str, Any]:
        # NOTE: `method` is currently unused in the body; the same payload
        # serves both the register and unregister endpoints.
        return {
            'user_id': 10,
            'token': "111222",
            'token_kind': PushDeviceToken.GCM,
        }
|
|
|
|
|
|
|
|
class PushBouncerNotificationTest(BouncerTestCase):
    # The push bouncer API is only available on the root ('') subdomain,
    # so default all requests in this test class to it.
    DEFAULT_SUBDOMAIN = ""
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_unregister_remote_push_user_params(self) -> None:
|
2016-10-27 23:55:31 +02:00
|
|
|
token = "111222"
|
|
|
|
token_kind = PushDeviceToken.GCM
|
|
|
|
|
|
|
|
endpoint = '/api/v1/remotes/push/unregister'
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'token_kind': token_kind})
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Missing 'token' argument")
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'token': token})
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Missing 'token_kind' argument")
|
2017-08-29 06:28:30 +02:00
|
|
|
|
|
|
|
# We need the root ('') subdomain to be in use for this next
|
|
|
|
# test, since the push bouncer API is only available there:
|
2020-03-10 11:48:26 +01:00
|
|
|
hamlet = self.example_user('hamlet')
|
2017-08-29 06:28:30 +02:00
|
|
|
realm = get_realm("zulip")
|
|
|
|
realm.string_id = ""
|
|
|
|
realm.save()
|
|
|
|
|
2020-03-10 11:48:26 +01:00
|
|
|
result = self.api_post(
|
|
|
|
hamlet,
|
|
|
|
endpoint,
|
|
|
|
dict(user_id=15, token=token, token_kind=token_kind),
|
|
|
|
subdomain='',
|
|
|
|
)
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Must validate with valid Zulip server API key")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_register_remote_push_user_paramas(self) -> None:
|
2016-10-27 23:55:31 +02:00
|
|
|
token = "111222"
|
|
|
|
user_id = 11
|
|
|
|
token_kind = PushDeviceToken.GCM
|
|
|
|
|
|
|
|
endpoint = '/api/v1/remotes/push/register'
|
|
|
|
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'user_id': user_id, 'token_kind': token_kind})
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Missing 'token' argument")
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'user_id': user_id, 'token': token})
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Missing 'token_kind' argument")
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'token': token, 'token_kind': token_kind})
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, "Missing 'user_id' argument")
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, {'user_id': user_id, 'token': token, 'token_kind': 17})
|
2017-07-07 18:29:45 +02:00
|
|
|
self.assert_json_error(result, "Invalid token type")
|
2016-10-27 23:55:31 +02:00
|
|
|
|
2020-03-12 14:17:25 +01:00
|
|
|
hamlet = self.example_user('hamlet')
|
|
|
|
|
2020-03-10 11:48:26 +01:00
|
|
|
result = self.api_post(
|
2020-03-12 14:17:25 +01:00
|
|
|
hamlet,
|
2020-03-10 11:48:26 +01:00
|
|
|
endpoint,
|
|
|
|
dict(user_id=user_id, token_kin=token_kind, token=token),
|
|
|
|
)
|
2017-08-29 06:28:30 +02:00
|
|
|
self.assert_json_error(result, "Account is not associated with this subdomain",
|
|
|
|
status_code=401)
|
|
|
|
|
|
|
|
# We need the root ('') subdomain to be in use for this next
|
|
|
|
# test, since the push bouncer API is only available there:
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
realm.string_id = ""
|
|
|
|
realm.save()
|
|
|
|
|
2020-03-10 11:48:26 +01:00
|
|
|
result = self.api_post(
|
|
|
|
hamlet,
|
|
|
|
endpoint,
|
|
|
|
dict(user_id=user_id, token_kind=token_kind, token=token),
|
|
|
|
)
|
2017-08-29 06:28:30 +02:00
|
|
|
self.assert_json_error(result, "Must validate with valid Zulip server API key")
|
|
|
|
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(
|
|
|
|
self.server_uuid,
|
|
|
|
endpoint,
|
|
|
|
dict(user_id=user_id, token_kind=token_kind, token=token),
|
|
|
|
subdomain="zulip")
|
2018-04-26 07:08:44 +02:00
|
|
|
self.assert_json_error(result, "Invalid subdomain for push notifications bouncer",
|
|
|
|
status_code=401)
|
|
|
|
|
2018-04-26 06:36:34 +02:00
|
|
|
# We do a bit of hackery here to the API_KEYS cache just to
|
|
|
|
# make the code simple for sending an incorrect API key.
|
2020-01-16 22:02:06 +01:00
|
|
|
self.API_KEYS[self.server_uuid] = 'invalid'
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(
|
|
|
|
self.server_uuid,
|
|
|
|
endpoint,
|
|
|
|
dict(user_id=user_id, token_kind=token_kind, token=token))
|
2018-04-26 06:36:34 +02:00
|
|
|
self.assert_json_error(result, "Zulip server auth failure: key does not match role 1234-abcd",
|
|
|
|
status_code=401)
|
|
|
|
|
2020-01-16 22:02:06 +01:00
|
|
|
del self.API_KEYS[self.server_uuid]
|
2018-04-26 06:36:34 +02:00
|
|
|
|
2018-04-26 06:50:37 +02:00
|
|
|
credentials = "%s:%s" % ("5678-efgh", 'invalid')
|
|
|
|
api_auth = 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
|
|
|
|
result = self.client_post(endpoint, {'user_id': user_id,
|
|
|
|
'token_kind': token_kind,
|
|
|
|
'token': token},
|
|
|
|
HTTP_AUTHORIZATION = api_auth)
|
|
|
|
self.assert_json_error(result, "Zulip server auth failure: 5678-efgh is not registered",
|
|
|
|
status_code=401)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_remote_push_user_endpoints(self) -> None:
|
2016-10-27 23:55:31 +02:00
|
|
|
endpoints = [
|
|
|
|
('/api/v1/remotes/push/register', 'register'),
|
|
|
|
('/api/v1/remotes/push/unregister', 'unregister'),
|
|
|
|
]
|
|
|
|
|
|
|
|
for endpoint, method in endpoints:
|
|
|
|
payload = self.get_generic_payload(method)
|
|
|
|
|
|
|
|
# Verify correct results are success
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, payload)
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload['token'])
|
|
|
|
token_count = 1 if method == 'register' else 0
|
|
|
|
self.assertEqual(len(remote_tokens), token_count)
|
|
|
|
|
|
|
|
# Try adding/removing tokens that are too big...
|
2017-05-07 20:00:17 +02:00
|
|
|
broken_token = "x" * 5000 # too big
|
2016-10-27 23:55:31 +02:00
|
|
|
payload['token'] = broken_token
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, payload)
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, 'Empty or invalid length token')
|
|
|
|
|
2019-11-19 03:12:54 +01:00
|
|
|
def test_remote_push_unregister_all(self) -> None:
|
|
|
|
payload = self.get_generic_payload('register')
|
|
|
|
|
|
|
|
# Verify correct results are success
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid,
|
|
|
|
'/api/v1/remotes/push/register', payload)
|
2019-11-19 03:12:54 +01:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload['token'])
|
|
|
|
self.assertEqual(len(remote_tokens), 1)
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid,
|
|
|
|
'/api/v1/remotes/push/unregister/all',
|
|
|
|
dict(user_id=10))
|
2019-11-19 03:12:54 +01:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
remote_tokens = RemotePushDeviceToken.objects.filter(token=payload['token'])
|
|
|
|
self.assertEqual(len(remote_tokens), 0)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_invalid_apns_token(self) -> None:
|
2017-07-07 18:18:37 +02:00
|
|
|
endpoints = [
|
2017-07-31 20:36:49 +02:00
|
|
|
('/api/v1/remotes/push/register', 'apple-token'),
|
2017-07-07 18:18:37 +02:00
|
|
|
]
|
|
|
|
|
|
|
|
for endpoint, method in endpoints:
|
|
|
|
payload = {
|
|
|
|
'user_id': 10,
|
|
|
|
'token': 'xyz uses non-hex characters',
|
|
|
|
'token_kind': PushDeviceToken.APNS,
|
|
|
|
}
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(self.server_uuid, endpoint, payload)
|
2017-07-07 18:18:37 +02:00
|
|
|
self.assert_json_error(result, 'Invalid APNS token')
|
|
|
|
|
2016-10-27 23:55:31 +02:00
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-04-25 20:50:12 +02:00
|
|
|
def test_push_bouncer_api(self, mock_request: Any) -> None:
|
2016-10-27 23:55:31 +02:00
|
|
|
"""This is a variant of the below test_push_api, but using the full
|
|
|
|
push notification bouncer flow
|
|
|
|
"""
|
2019-04-25 20:50:12 +02:00
|
|
|
mock_request.side_effect = self.bounce_request
|
2017-05-07 19:39:30 +02:00
|
|
|
user = self.example_user('cordelia')
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2016-10-27 23:55:31 +02:00
|
|
|
server = RemoteZulipServer.objects.get(uuid=self.server_uuid)
|
|
|
|
|
|
|
|
endpoints = [
|
2017-07-07 18:29:45 +02:00
|
|
|
('/json/users/me/apns_device_token', 'apple-tokenaz', RemotePushDeviceToken.APNS),
|
|
|
|
('/json/users/me/android_gcm_reg_id', 'android-token', RemotePushDeviceToken.GCM),
|
2016-10-27 23:55:31 +02:00
|
|
|
]
|
|
|
|
|
|
|
|
# Test error handling
|
2019-12-03 20:19:38 +01:00
|
|
|
for endpoint, token, kind in endpoints:
|
2016-10-27 23:55:31 +02:00
|
|
|
# Try adding/removing tokens that are too big...
|
2017-07-07 18:18:37 +02:00
|
|
|
broken_token = "a" * 5000 # too big
|
2017-07-07 18:29:45 +02:00
|
|
|
result = self.client_post(endpoint, {'token': broken_token,
|
2017-08-29 06:28:30 +02:00
|
|
|
'token_kind': kind},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, 'Empty or invalid length token')
|
|
|
|
|
2017-07-07 18:29:45 +02:00
|
|
|
result = self.client_delete(endpoint, {'token': broken_token,
|
2017-08-29 06:28:30 +02:00
|
|
|
'token_kind': kind},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, 'Empty or invalid length token')
|
|
|
|
|
|
|
|
# Try to remove a non-existent token...
|
2017-07-07 18:29:45 +02:00
|
|
|
result = self.client_delete(endpoint, {'token': 'abcd1234',
|
2017-08-29 06:28:30 +02:00
|
|
|
'token_kind': kind},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_error(result, 'Token does not exist')
|
|
|
|
|
2019-12-03 20:19:38 +01:00
|
|
|
with mock.patch('zerver.lib.remote_server.requests.request',
|
|
|
|
side_effect=requests.ConnectionError):
|
|
|
|
result = self.client_post(endpoint, {'token': token},
|
|
|
|
subdomain="zulip")
|
|
|
|
self.assert_json_error(
|
|
|
|
result, "ConnectionError while trying to connect to push notification bouncer", 502)
|
|
|
|
|
|
|
|
with mock.patch('zerver.lib.remote_server.requests.request',
|
|
|
|
return_value=Result(status=500)):
|
|
|
|
result = self.client_post(endpoint, {'token': token},
|
|
|
|
subdomain="zulip")
|
|
|
|
self.assert_json_error(result, "Received 500 from push notification bouncer", 502)
|
|
|
|
|
2016-10-27 23:55:31 +02:00
|
|
|
# Add tokens
|
2017-07-07 18:29:45 +02:00
|
|
|
for endpoint, token, kind in endpoints:
|
2016-10-27 23:55:31 +02:00
|
|
|
# Test that we can push twice
|
2017-08-29 06:28:30 +02:00
|
|
|
result = self.client_post(endpoint, {'token': token},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2017-08-29 06:28:30 +02:00
|
|
|
result = self.client_post(endpoint, {'token': token},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id, token=token,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 1)
|
|
|
|
self.assertEqual(tokens[0].token, token)
|
|
|
|
|
|
|
|
# User should have tokens for both devices now.
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 2)
|
|
|
|
|
|
|
|
# Remove tokens
|
2017-07-07 18:29:45 +02:00
|
|
|
for endpoint, token, kind in endpoints:
|
|
|
|
result = self.client_delete(endpoint, {'token': token,
|
2017-08-29 06:28:30 +02:00
|
|
|
'token_kind': kind},
|
|
|
|
subdomain="zulip")
|
2016-10-27 23:55:31 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id, token=token,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 0)
|
|
|
|
|
2019-11-19 03:12:54 +01:00
|
|
|
# Re-add copies of those tokens
|
|
|
|
for endpoint, token, kind in endpoints:
|
|
|
|
result = self.client_post(endpoint, {'token': token},
|
|
|
|
subdomain="zulip")
|
|
|
|
self.assert_json_success(result)
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 2)
|
|
|
|
|
2019-12-03 20:19:38 +01:00
|
|
|
# Now we want to remove them using the bouncer after an API key change.
|
|
|
|
# First we test error handling in case of issues with the bouncer:
|
|
|
|
with mock.patch('zerver.worker.queue_processors.clear_push_device_tokens',
|
|
|
|
side_effect=PushNotificationBouncerRetryLaterError("test")), \
|
|
|
|
mock.patch('zerver.worker.queue_processors.retry_event') as mock_retry:
|
|
|
|
do_regenerate_api_key(user, user)
|
|
|
|
mock_retry.assert_called()
|
|
|
|
|
|
|
|
# We didn't manage to communicate with the bouncer, to the tokens are still there:
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 2)
|
|
|
|
|
|
|
|
# Now we succesfully remove them:
|
2019-11-19 03:12:54 +01:00
|
|
|
do_regenerate_api_key(user, user)
|
|
|
|
tokens = list(RemotePushDeviceToken.objects.filter(user_id=user.id,
|
|
|
|
server=server))
|
|
|
|
self.assertEqual(len(tokens), 0)
|
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
class AnalyticsBouncerTest(BouncerTestCase):
|
|
|
|
TIME_ZERO = datetime.datetime(1988, 3, 14).replace(tzinfo=timezone_utc)
|
|
|
|
|
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-04-25 20:50:12 +02:00
|
|
|
def test_analytics_api(self, mock_request: Any) -> None:
|
2019-01-31 00:39:02 +01:00
|
|
|
"""This is a variant of the below test_push_api, but using the full
|
|
|
|
push notification bouncer flow
|
|
|
|
"""
|
2019-04-25 20:50:12 +02:00
|
|
|
mock_request.side_effect = self.bounce_request
|
2019-01-31 00:39:02 +01:00
|
|
|
user = self.example_user('hamlet')
|
|
|
|
end_time = self.TIME_ZERO
|
|
|
|
|
2019-12-03 20:19:38 +01:00
|
|
|
with mock.patch('zerver.lib.remote_server.requests.request',
|
|
|
|
side_effect=requests.ConnectionError), \
|
|
|
|
mock.patch('zerver.lib.remote_server.logging.warning') as mock_warning:
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
mock_warning.assert_called_once_with(
|
|
|
|
"ConnectionError while trying to connect to push notification bouncer")
|
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
# Send any existing data over, so that we can start the test with a "clean" slate
|
|
|
|
audit_log_max_id = RealmAuditLog.objects.all().order_by('id').last().id
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
self.assertEqual(mock_request.call_count, 2)
|
|
|
|
remote_audit_log_count = RemoteRealmAuditLog.objects.count()
|
|
|
|
self.assertEqual(RemoteRealmCount.objects.count(), 0)
|
|
|
|
self.assertEqual(RemoteInstallationCount.objects.count(), 0)
|
|
|
|
|
|
|
|
def check_counts(mock_request_call_count: int, remote_realm_count: int,
|
|
|
|
remote_installation_count: int, remote_realm_audit_log: int) -> None:
|
|
|
|
self.assertEqual(mock_request.call_count, mock_request_call_count)
|
|
|
|
self.assertEqual(RemoteRealmCount.objects.count(), remote_realm_count)
|
|
|
|
self.assertEqual(RemoteInstallationCount.objects.count(), remote_installation_count)
|
|
|
|
self.assertEqual(RemoteRealmAuditLog.objects.count(),
|
|
|
|
remote_audit_log_count + remote_realm_audit_log)
|
|
|
|
|
|
|
|
# Create some rows we'll send to remote server
|
2019-01-31 00:39:02 +01:00
|
|
|
realm_stat = LoggingCountStat('invites_sent::day', RealmCount, CountStat.DAY)
|
|
|
|
RealmCount.objects.create(
|
|
|
|
realm=user.realm, property=realm_stat.property, end_time=end_time, value=5)
|
|
|
|
InstallationCount.objects.create(
|
2019-04-23 22:32:12 +02:00
|
|
|
property=realm_stat.property, end_time=end_time, value=5,
|
|
|
|
# We set a subgroup here to work around:
|
|
|
|
# https://github.com/zulip/zulip/issues/12362
|
|
|
|
subgroup="test_subgroup")
|
2019-10-03 02:01:36 +02:00
|
|
|
# Event type in SYNCED_BILLING_EVENTS -- should be included
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.USER_CREATED,
|
|
|
|
event_time=end_time, extra_data='data')
|
|
|
|
# Event type not in SYNCED_BILLING_EVENTS -- should not be included
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.REALM_LOGO_CHANGED,
|
|
|
|
event_time=end_time, extra_data='data')
|
2019-01-31 00:39:02 +01:00
|
|
|
self.assertEqual(RealmCount.objects.count(), 1)
|
|
|
|
self.assertEqual(InstallationCount.objects.count(), 1)
|
2019-10-03 02:01:36 +02:00
|
|
|
self.assertEqual(RealmAuditLog.objects.filter(id__gt=audit_log_max_id).count(), 2)
|
2019-01-31 00:39:02 +01:00
|
|
|
|
|
|
|
send_analytics_to_remote_server()
|
2019-10-03 02:01:36 +02:00
|
|
|
check_counts(4, 1, 1, 1)
|
|
|
|
|
|
|
|
# Test having no new rows
|
2019-01-31 00:39:02 +01:00
|
|
|
send_analytics_to_remote_server()
|
2019-10-03 02:01:36 +02:00
|
|
|
check_counts(5, 1, 1, 1)
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
# Test only having new RealmCount rows
|
2019-01-31 00:39:02 +01:00
|
|
|
RealmCount.objects.create(
|
|
|
|
realm=user.realm, property=realm_stat.property, end_time=end_time + datetime.timedelta(days=1), value=6)
|
|
|
|
RealmCount.objects.create(
|
|
|
|
realm=user.realm, property=realm_stat.property, end_time=end_time + datetime.timedelta(days=2), value=9)
|
|
|
|
send_analytics_to_remote_server()
|
2019-10-03 02:01:36 +02:00
|
|
|
check_counts(7, 3, 1, 1)
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
# Test only having new InstallationCount rows
|
2019-01-31 00:39:02 +01:00
|
|
|
InstallationCount.objects.create(
|
|
|
|
property=realm_stat.property, end_time=end_time + datetime.timedelta(days=1), value=6)
|
|
|
|
send_analytics_to_remote_server()
|
2019-10-03 02:01:36 +02:00
|
|
|
check_counts(9, 3, 2, 1)
|
|
|
|
|
|
|
|
# Test only having new RealmAuditLog rows
|
|
|
|
# Non-synced event
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.REALM_LOGO_CHANGED,
|
|
|
|
event_time=end_time, extra_data='data')
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
check_counts(10, 3, 2, 1)
|
|
|
|
# Synced event
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.USER_REACTIVATED,
|
|
|
|
event_time=end_time, extra_data='data')
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
check_counts(12, 3, 2, 2)
|
2019-01-31 00:39:02 +01:00
|
|
|
|
|
|
|
(realm_count_data,
|
2019-10-03 02:01:36 +02:00
|
|
|
installation_count_data,
|
|
|
|
realmauditlog_data) = build_analytics_data(RealmCount.objects.all(),
|
|
|
|
InstallationCount.objects.all(),
|
|
|
|
RealmAuditLog.objects.all())
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(
|
|
|
|
self.server_uuid,
|
|
|
|
'/api/v1/remotes/server/analytics',
|
|
|
|
{'realm_counts': ujson.dumps(realm_count_data),
|
|
|
|
'installation_counts': ujson.dumps(installation_count_data),
|
|
|
|
'realmauditlog_rows': ujson.dumps(realmauditlog_data)},
|
|
|
|
subdomain="")
|
2019-01-31 00:39:02 +01:00
|
|
|
self.assert_json_error(result, "Data is out of order.")
|
|
|
|
|
2019-10-03 01:54:36 +02:00
|
|
|
with mock.patch("zilencer.views.validate_incoming_table_data"):
|
2019-04-23 22:32:12 +02:00
|
|
|
# We need to wrap a transaction here to avoid the
|
|
|
|
# IntegrityError that will be thrown in here from breaking
|
|
|
|
# the unittest transaction.
|
|
|
|
with transaction.atomic():
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(
|
2019-04-23 22:32:12 +02:00
|
|
|
self.server_uuid,
|
|
|
|
'/api/v1/remotes/server/analytics',
|
|
|
|
{'realm_counts': ujson.dumps(realm_count_data),
|
2019-10-03 02:01:36 +02:00
|
|
|
'installation_counts': ujson.dumps(installation_count_data),
|
|
|
|
'realmauditlog_rows': ujson.dumps(realmauditlog_data)},
|
2019-04-23 22:32:12 +02:00
|
|
|
subdomain="")
|
|
|
|
self.assert_json_error(result, "Invalid data.")
|
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-04-25 20:50:12 +02:00
|
|
|
def test_analytics_api_invalid(self, mock_request: Any) -> None:
|
2019-01-31 00:39:02 +01:00
|
|
|
"""This is a variant of the below test_push_api, but using the full
|
|
|
|
push notification bouncer flow
|
|
|
|
"""
|
2019-04-25 20:50:12 +02:00
|
|
|
mock_request.side_effect = self.bounce_request
|
2019-01-31 00:39:02 +01:00
|
|
|
user = self.example_user('hamlet')
|
|
|
|
end_time = self.TIME_ZERO
|
|
|
|
|
|
|
|
realm_stat = LoggingCountStat('invalid count stat', RealmCount, CountStat.DAY)
|
|
|
|
RealmCount.objects.create(
|
|
|
|
realm=user.realm, property=realm_stat.property, end_time=end_time, value=5)
|
|
|
|
|
|
|
|
self.assertEqual(RealmCount.objects.count(), 1)
|
|
|
|
|
|
|
|
self.assertEqual(RemoteRealmCount.objects.count(), 0)
|
2019-09-03 03:47:10 +02:00
|
|
|
with mock.patch('zerver.lib.remote_server.logging.warning') as log_warning:
|
2019-01-31 00:39:02 +01:00
|
|
|
send_analytics_to_remote_server()
|
2019-09-03 03:47:10 +02:00
|
|
|
log_warning.assert_called_once()
|
2019-01-31 00:39:02 +01:00
|
|
|
self.assertEqual(RemoteRealmCount.objects.count(), 0)
|
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
# Servers on Zulip 2.0.6 and earlier only send realm_counts and installation_counts data,
|
|
|
|
# and don't send realmauditlog_rows. Make sure that continues to work.
|
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-10-03 02:01:36 +02:00
|
|
|
def test_old_two_table_format(self, mock_request: Any) -> None:
|
|
|
|
mock_request.side_effect = self.bounce_request
|
|
|
|
# Send fixture generated with Zulip 2.0 code
|
|
|
|
send_to_push_bouncer('POST', 'server/analytics', {
|
|
|
|
'realm_counts': '[{"id":1,"property":"invites_sent::day","subgroup":null,"end_time":574300800.0,"value":5,"realm":2}]', # lint:ignore
|
|
|
|
'installation_counts': '[]',
|
|
|
|
'version': '"2.0.6+git"'})
|
|
|
|
self.assertEqual(mock_request.call_count, 1)
|
|
|
|
self.assertEqual(RemoteRealmCount.objects.count(), 1)
|
|
|
|
self.assertEqual(RemoteInstallationCount.objects.count(), 0)
|
|
|
|
self.assertEqual(RemoteRealmAuditLog.objects.count(), 0)
|
|
|
|
|
|
|
|
# Make sure we aren't sending data we don't mean to, even if we don't store it.
|
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-10-03 02:01:36 +02:00
|
|
|
def test_only_sending_intended_realmauditlog_data(self, mock_request: Any) -> None:
|
|
|
|
mock_request.side_effect = self.bounce_request
|
|
|
|
user = self.example_user('hamlet')
|
|
|
|
# Event type in SYNCED_BILLING_EVENTS -- should be included
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.USER_REACTIVATED,
|
|
|
|
event_time=self.TIME_ZERO, extra_data='data')
|
|
|
|
# Event type not in SYNCED_BILLING_EVENTS -- should not be included
|
|
|
|
RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, event_type=RealmAuditLog.REALM_LOGO_CHANGED,
|
|
|
|
event_time=self.TIME_ZERO, extra_data='data')
|
|
|
|
|
|
|
|
def check_for_unwanted_data(*args: Any) -> Any:
|
|
|
|
if check_for_unwanted_data.first_call: # type: ignore
|
|
|
|
check_for_unwanted_data.first_call = False # type: ignore
|
|
|
|
else:
|
|
|
|
# Test that we're respecting SYNCED_BILLING_EVENTS
|
|
|
|
self.assertIn('"event_type":{}'.format(RealmAuditLog.USER_REACTIVATED), str(args))
|
|
|
|
self.assertNotIn('"event_type":{}'.format(RealmAuditLog.REALM_LOGO_CHANGED), str(args))
|
|
|
|
# Test that we're respecting REALMAUDITLOG_PUSHED_FIELDS
|
|
|
|
self.assertIn('backfilled', str(args))
|
|
|
|
self.assertNotIn('modified_user', str(args))
|
|
|
|
return send_to_push_bouncer(*args)
|
|
|
|
|
|
|
|
# send_analytics_to_remote_server calls send_to_push_bouncer twice.
|
|
|
|
# We need to distinguish the first and second calls.
|
|
|
|
check_for_unwanted_data.first_call = True # type: ignore
|
|
|
|
with mock.patch('zerver.lib.remote_server.send_to_push_bouncer',
|
|
|
|
side_effect=check_for_unwanted_data):
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
|
|
|
|
@override_settings(PUSH_NOTIFICATION_BOUNCER_URL='https://push.zulip.org.example.com')
|
2019-12-03 20:08:07 +01:00
|
|
|
@mock.patch('zerver.lib.remote_server.requests.request')
|
2019-10-03 02:01:36 +02:00
|
|
|
def test_realmauditlog_data_mapping(self, mock_request: Any) -> None:
|
|
|
|
mock_request.side_effect = self.bounce_request
|
|
|
|
user = self.example_user('hamlet')
|
|
|
|
log_entry = RealmAuditLog.objects.create(
|
|
|
|
realm=user.realm, modified_user=user, backfilled=True,
|
|
|
|
event_type=RealmAuditLog.USER_REACTIVATED, event_time=self.TIME_ZERO, extra_data='data')
|
|
|
|
send_analytics_to_remote_server()
|
|
|
|
remote_log_entry = RemoteRealmAuditLog.objects.order_by('id').last()
|
|
|
|
self.assertEqual(remote_log_entry.server.uuid, self.server_uuid)
|
|
|
|
self.assertEqual(remote_log_entry.remote_id, log_entry.id)
|
|
|
|
self.assertEqual(remote_log_entry.event_time, self.TIME_ZERO)
|
|
|
|
self.assertEqual(remote_log_entry.backfilled, True)
|
|
|
|
self.assertEqual(remote_log_entry.extra_data, 'data')
|
|
|
|
self.assertEqual(remote_log_entry.event_type, RealmAuditLog.USER_REACTIVATED)
|
|
|
|
|
2017-05-11 10:55:05 +02:00
|
|
|
class PushNotificationTest(BouncerTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def setUp(self) -> None:
|
2017-10-27 08:28:23 +02:00
|
|
|
super().setUp()
|
2017-05-07 21:25:59 +02:00
|
|
|
self.user_profile = self.example_user('hamlet')
|
2017-05-11 09:12:21 +02:00
|
|
|
self.sending_client = get_client('test')
|
2017-05-24 02:42:31 +02:00
|
|
|
self.sender = self.example_user('hamlet')
|
2017-05-11 09:12:21 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def get_message(self, type: int, type_id: int=100) -> Message:
|
2017-05-11 10:15:00 +02:00
|
|
|
recipient, _ = Recipient.objects.get_or_create(
|
|
|
|
type_id=type_id,
|
2017-05-11 09:12:21 +02:00
|
|
|
type=type,
|
|
|
|
)
|
|
|
|
|
2018-11-10 16:48:13 +01:00
|
|
|
message = Message(
|
2017-05-11 09:12:21 +02:00
|
|
|
sender=self.sender,
|
|
|
|
recipient=recipient,
|
|
|
|
content='This is test content',
|
2017-10-07 00:19:01 +02:00
|
|
|
rendered_content='This is test content',
|
2019-08-28 02:43:19 +02:00
|
|
|
date_sent=now(),
|
2017-05-11 09:12:21 +02:00
|
|
|
sending_client=self.sending_client,
|
|
|
|
)
|
2018-11-10 16:48:13 +01:00
|
|
|
message.set_topic_name('Test Topic')
|
|
|
|
message.save()
|
|
|
|
|
|
|
|
return message
|
2017-05-11 09:12:21 +02:00
|
|
|
|
2018-02-09 23:19:00 +01:00
|
|
|
@contextmanager
|
|
|
|
def mock_apns(self) -> mock.MagicMock:
|
|
|
|
mock_apns = mock.Mock()
|
|
|
|
with mock.patch('zerver.lib.push_notifications.get_apns_client') as mock_get:
|
|
|
|
mock_get.return_value = mock_apns
|
|
|
|
yield mock_apns
|
|
|
|
|
2019-02-08 23:42:24 +01:00
|
|
|
def setup_apns_tokens(self) -> None:
|
|
|
|
self.tokens = [u'aaaa', u'bbbb']
|
|
|
|
for token in self.tokens:
|
|
|
|
PushDeviceToken.objects.create(
|
|
|
|
kind=PushDeviceToken.APNS,
|
|
|
|
token=hex_to_b64(token),
|
|
|
|
user=self.user_profile,
|
|
|
|
ios_app_id=settings.ZULIP_IOS_APP_ID)
|
|
|
|
|
|
|
|
self.remote_tokens = [u'cccc']
|
|
|
|
for token in self.remote_tokens:
|
|
|
|
RemotePushDeviceToken.objects.create(
|
|
|
|
kind=RemotePushDeviceToken.APNS,
|
|
|
|
token=hex_to_b64(token),
|
|
|
|
user_id=self.user_profile.id,
|
|
|
|
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
|
|
|
|
)
|
|
|
|
|
|
|
|
def setup_gcm_tokens(self) -> None:
|
|
|
|
self.gcm_tokens = [u'1111', u'2222']
|
|
|
|
for token in self.gcm_tokens:
|
|
|
|
PushDeviceToken.objects.create(
|
|
|
|
kind=PushDeviceToken.GCM,
|
|
|
|
token=hex_to_b64(token),
|
|
|
|
user=self.user_profile,
|
|
|
|
ios_app_id=None)
|
|
|
|
|
|
|
|
self.remote_gcm_tokens = [u'dddd']
|
|
|
|
for token in self.remote_gcm_tokens:
|
|
|
|
RemotePushDeviceToken.objects.create(
|
|
|
|
kind=RemotePushDeviceToken.GCM,
|
|
|
|
token=hex_to_b64(token),
|
|
|
|
user_id=self.user_profile.id,
|
|
|
|
server=RemoteZulipServer.objects.get(uuid=self.server_uuid),
|
|
|
|
)
|
|
|
|
|
2017-05-11 10:55:05 +02:00
|
|
|
class HandlePushNotificationTest(PushNotificationTest):
|
2017-08-29 06:28:30 +02:00
|
|
|
DEFAULT_SUBDOMAIN = ""
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def bounce_request(self, *args: Any, **kwargs: Any) -> HttpResponse:
|
2017-05-11 10:55:05 +02:00
|
|
|
"""This method is used to carry out the push notification bouncer
|
|
|
|
requests using the Django test browser, rather than python-requests.
|
|
|
|
"""
|
|
|
|
# args[0] is method, args[1] is URL.
|
|
|
|
local_url = args[1].replace(settings.PUSH_NOTIFICATION_BOUNCER_URL, "")
|
|
|
|
if args[0] == "POST":
|
tests: Add uuid_get and uuid_post.
We want a clean codepath for the vast majority
of cases of using api_get/api_post, which now
uses email and which we'll soon convert to
accepting `user` as a parameter.
These apis that take two different types of
values for the same parameter make sweeps
like this kinda painful, and they're pretty
easy to avoid by extracting helpers to do
the actual common tasks. So, for example,
here I still keep a common method to
actually encode the credentials (since
the whole encode/decode business is an
annoying detail that you don't want to fix
in two places):
def encode_credentials(self, identifier: str, api_key: str) -> str:
"""
identifier: Can be an email or a remote server uuid.
"""
credentials = "%s:%s" % (identifier, api_key)
return 'Basic ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
But then the rest of the code has two separate
codepaths.
And for the uuid functions, we no longer have
crufty references to realm. (In fairness, realm
will also go away when we introduce users.)
For the `is_remote_server` helper, I just inlined
it, since it's now only needed in one place, and the
name didn't make total sense anyway, plus it wasn't
a super robust check. In context, it's easier
just to use a comment now to say what we're doing:
# If `role` doesn't look like an email, it might be a uuid.
if settings.ZILENCER_ENABLED and role is not None and '@' not in role:
# do stuff
2020-03-10 12:34:25 +01:00
|
|
|
result = self.uuid_post(
|
|
|
|
self.server_uuid,
|
|
|
|
local_url,
|
|
|
|
kwargs['data'],
|
|
|
|
content_type="application/json")
|
2017-05-11 10:55:05 +02:00
|
|
|
else:
|
|
|
|
raise AssertionError("Unsupported method for bounce_request")
|
|
|
|
return result
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_end_to_end(self) -> None:
|
2019-02-08 23:42:24 +01:00
|
|
|
self.setup_apns_tokens()
|
|
|
|
self.setup_gcm_tokens()
|
2017-05-17 07:16:20 +02:00
|
|
|
|
2017-05-11 10:55:05 +02:00
|
|
|
message = self.get_message(Recipient.PERSONAL, type_id=1)
|
|
|
|
UserMessage.objects.create(
|
|
|
|
user_profile=self.user_profile,
|
|
|
|
message=message
|
|
|
|
)
|
|
|
|
|
2017-09-10 00:47:36 +02:00
|
|
|
missed_message = {
|
|
|
|
'message_id': message.id,
|
2017-10-19 06:37:35 +02:00
|
|
|
'trigger': 'private_message',
|
2017-09-10 00:47:36 +02:00
|
|
|
}
|
2017-05-11 10:55:05 +02:00
|
|
|
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=''), \
|
2019-12-03 20:08:07 +01:00
|
|
|
mock.patch('zerver.lib.remote_server.requests.request',
|
2017-05-11 10:55:05 +02:00
|
|
|
side_effect=self.bounce_request), \
|
2019-02-13 02:46:41 +01:00
|
|
|
mock.patch('zerver.lib.push_notifications.gcm_client') as mock_gcm, \
|
2018-02-09 23:19:00 +01:00
|
|
|
self.mock_apns() as mock_apns, \
|
2018-11-27 18:45:45 +01:00
|
|
|
mock.patch('zerver.lib.push_notifications.logger.info') as mock_info, \
|
|
|
|
mock.patch('zerver.lib.push_notifications.logger.warning'):
|
2017-05-17 07:16:20 +02:00
|
|
|
apns_devices = [
|
2019-02-08 23:09:20 +01:00
|
|
|
(b64_to_hex(device.token), device.ios_app_id, device.token)
|
2017-05-17 07:16:20 +02:00
|
|
|
for device in RemotePushDeviceToken.objects.filter(
|
|
|
|
kind=PushDeviceToken.APNS)
|
2017-05-11 10:55:05 +02:00
|
|
|
]
|
2017-05-17 07:16:20 +02:00
|
|
|
gcm_devices = [
|
2019-02-08 23:09:20 +01:00
|
|
|
(b64_to_hex(device.token), device.ios_app_id, device.token)
|
2017-05-17 07:16:20 +02:00
|
|
|
for device in RemotePushDeviceToken.objects.filter(
|
|
|
|
kind=PushDeviceToken.GCM)
|
|
|
|
]
|
|
|
|
mock_gcm.json_request.return_value = {
|
|
|
|
'success': {gcm_devices[0][2]: message.id}}
|
2017-08-19 01:38:11 +02:00
|
|
|
mock_apns.get_notification_result.return_value = 'Success'
|
2019-02-08 23:09:20 +01:00
|
|
|
handle_push_notification(self.user_profile.id, missed_message)
|
2017-08-19 01:38:11 +02:00
|
|
|
for _, _, token in apns_devices:
|
|
|
|
mock_info.assert_any_call(
|
|
|
|
"APNs: Success sending for user %d to device %s",
|
|
|
|
self.user_profile.id, token)
|
2017-05-17 07:16:20 +02:00
|
|
|
for _, _, token in gcm_devices:
|
|
|
|
mock_info.assert_any_call(
|
|
|
|
"GCM: Sent %s as %s" % (token, message.id))
|
2017-05-11 10:55:05 +02:00
|
|
|
|
2018-05-21 20:20:23 +02:00
|
|
|
# Now test the unregistered case
|
2018-08-28 21:11:49 +02:00
|
|
|
mock_apns.get_notification_result.return_value = ('Unregistered', 1234567)
|
2019-02-08 23:09:20 +01:00
|
|
|
handle_push_notification(self.user_profile.id, missed_message)
|
2018-05-21 20:20:23 +02:00
|
|
|
for _, _, token in apns_devices:
|
|
|
|
mock_info.assert_any_call(
|
|
|
|
"APNs: Removing invalid/expired token %s (%s)" %
|
|
|
|
(token, "Unregistered"))
|
|
|
|
self.assertEqual(RemotePushDeviceToken.objects.filter(kind=PushDeviceToken.APNS).count(), 0)
|
|
|
|
|
2019-12-02 19:46:11 +01:00
|
|
|
    def test_connection_error(self) -> None:
        """If the connection to the push-notification bouncer fails with a
        network error, handle_push_notification should surface
        PushNotificationBouncerRetryLaterError so the queue worker can retry.
        """
        self.setup_apns_tokens()
        self.setup_gcm_tokens()

        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message
        )

        missed_message = {
            'user_profile_id': self.user_profile.id,
            'message_id': message.id,
            'trigger': 'private_message',
        }
        # NOTE(review): requests.request is patched twice below; the second
        # (innermost) patch wins, so every bouncer request raises
        # ConnectionError and the bounce_request side_effect is never used.
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=''), \
                mock.patch('zerver.lib.remote_server.requests.request',
                           side_effect=self.bounce_request), \
                mock.patch('zerver.lib.push_notifications.gcm_client') as mock_gcm, \
                mock.patch('zerver.lib.remote_server.requests.request',
                           side_effect=requests.ConnectionError):
            gcm_devices = [
                (b64_to_hex(device.token), device.ios_app_id, device.token)
                for device in RemotePushDeviceToken.objects.filter(
                    kind=PushDeviceToken.GCM)
            ]
            # Pretend GCM would have succeeded for the first device; the
            # connection error should fire before this matters.
            mock_gcm.json_request.return_value = {
                'success': {gcm_devices[0][2]: message.id}}
            with self.assertRaises(PushNotificationBouncerRetryLaterError):
                handle_push_notification(self.user_profile.id, missed_message)
2018-12-11 07:05:40 +01:00
|
|
|
@mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True)
|
|
|
|
def test_disabled_notifications(self, mock_push_notifications: mock.MagicMock) -> None:
|
2017-05-24 02:42:31 +02:00
|
|
|
user_profile = self.example_user('hamlet')
|
2017-05-11 10:55:05 +02:00
|
|
|
user_profile.enable_online_email_notifications = False
|
|
|
|
user_profile.enable_online_push_notifications = False
|
|
|
|
user_profile.enable_offline_email_notifications = False
|
|
|
|
user_profile.enable_offline_push_notifications = False
|
2017-08-17 16:55:32 +02:00
|
|
|
user_profile.enable_stream_push_notifications = False
|
2017-05-11 10:55:05 +02:00
|
|
|
user_profile.save()
|
2019-02-08 23:09:20 +01:00
|
|
|
handle_push_notification(user_profile.id, {})
|
2018-12-11 07:05:40 +01:00
|
|
|
mock_push_notifications.assert_called()
|
2017-05-11 10:55:05 +02:00
|
|
|
|
2018-12-11 07:05:40 +01:00
|
|
|
    @mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True)
    def test_read_message(self, mock_push_notifications: mock.MagicMock) -> None:
        """A message the user has already read should not trigger a push;
        the handler is entered (push_notifications_enabled is consulted)
        but short-circuits on the `read` flag."""
        user_profile = self.example_user('hamlet')
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        # Mark the message as already read for this user.
        UserMessage.objects.create(
            user_profile=user_profile,
            flags=UserMessage.flags.read,
            message=message
        )

        missed_message = {
            'message_id': message.id,
            'trigger': 'private_message',
        }
        handle_push_notification(user_profile.id, missed_message)
        mock_push_notifications.assert_called_once()
2018-12-05 19:36:58 +01:00
|
|
|
    def test_deleted_message(self) -> None:
        """Simulates the race where message is deleted before handling push notifications"""
        user_profile = self.example_user('hamlet')
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=user_profile,
            flags=UserMessage.flags.read,
            message=message
        )
        missed_message = {
            'message_id': message.id,
            'trigger': 'private_message',
        }
        # Now, delete the message the normal way
        do_delete_messages(user_profile.realm, [message])

        with mock.patch('zerver.lib.push_notifications.uses_notification_bouncer') as mock_check, \
                mock.patch('logging.error') as mock_logging_error, \
                mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True) as mock_push_notifications:
            handle_push_notification(user_profile.id, missed_message)
            mock_push_notifications.assert_called_once()
            # Check we didn't proceed through and didn't log anything.
            # A normally-deleted message is an expected race, not an error.
            mock_check.assert_not_called()
            mock_logging_error.assert_not_called()
|
|
|
    def test_missing_message(self) -> None:
        """Simulates the race where message is missing when handling push notifications"""
        user_profile = self.example_user('hamlet')
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=user_profile,
            flags=UserMessage.flags.read,
            message=message
        )
        missed_message = {
            'message_id': message.id,
            'trigger': 'private_message',
        }
        # Now delete the message forcefully, so it just doesn't exist.
        message.delete()

        # This should log an error
        with mock.patch('zerver.lib.push_notifications.uses_notification_bouncer') as mock_check, \
                mock.patch('logging.error') as mock_logging_error, \
                mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True) as mock_push_notifications:
            handle_push_notification(user_profile.id, missed_message)
            mock_push_notifications.assert_called_once()
            # Check we didn't proceed through.
            # Unlike a cleanly-deleted message, a hard-missing row is
            # unexpected and must be reported via logging.error.
            mock_check.assert_not_called()
            mock_logging_error.assert_called_once()
2017-11-05 10:51:25 +01:00
|
|
|
    def test_send_notifications_to_bouncer(self) -> None:
        """When PUSH_NOTIFICATION_BOUNCER_URL is configured, the handler
        forwards the APNs and GCM payloads to send_notifications_to_bouncer
        rather than sending them itself."""
        user_profile = self.example_user('hamlet')
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=user_profile,
            message=message
        )

        missed_message = {
            'message_id': message.id,
            'trigger': 'private_message',
        }
        # Payload-building helpers are stubbed out so we only assert on the
        # plumbing to the bouncer, not on payload contents.
        with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), \
                mock.patch('zerver.lib.push_notifications.get_message_payload_apns',
                           return_value={'apns': True}), \
                mock.patch('zerver.lib.push_notifications.get_message_payload_gcm',
                           return_value=({'gcm': True}, {})), \
                mock.patch('zerver.lib.push_notifications'
                           '.send_notifications_to_bouncer') as mock_send:
            handle_push_notification(user_profile.id, missed_message)
            # Arguments: user id, APNs payload, GCM payload, GCM options.
            mock_send.assert_called_with(user_profile.id,
                                         {'apns': True},
                                         {'gcm': True},
                                         {},
                                         )
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_non_bouncer_push(self) -> None:
        """Without a bouncer configured, the handler sends directly to the
        user's registered APNs and GCM devices."""
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message
        )

        android_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile,
                                           kind=PushDeviceToken.GCM))

        apple_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile,
                                           kind=PushDeviceToken.APNS))

        missed_message = {
            'message_id': message.id,
            'trigger': 'private_message',
        }
        # Stub the payload builders and both transport functions; we assert
        # only on how the transports are invoked.
        with mock.patch('zerver.lib.push_notifications.get_message_payload_apns',
                        return_value={'apns': True}), \
                mock.patch('zerver.lib.push_notifications.get_message_payload_gcm',
                           return_value=({'gcm': True}, {})), \
                mock.patch('zerver.lib.push_notifications'
                           '.send_apple_push_notification') as mock_send_apple, \
                mock.patch('zerver.lib.push_notifications'
                           '.send_android_push_notification') as mock_send_android, \
                mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True) as mock_push_notifications:

            handle_push_notification(self.user_profile.id, missed_message)
            mock_send_apple.assert_called_with(self.user_profile.id,
                                               apple_devices,
                                               {'apns': True})
            mock_send_android.assert_called_with(android_devices, {'gcm': True}, {})
            mock_push_notifications.assert_called_once()
|
2018-07-28 14:31:45 +02:00
|
|
|
def test_send_remove_notifications_to_bouncer(self) -> None:
|
|
|
|
user_profile = self.example_user('hamlet')
|
|
|
|
message = self.get_message(Recipient.PERSONAL, type_id=1)
|
|
|
|
UserMessage.objects.create(
|
|
|
|
user_profile=user_profile,
|
2019-02-14 01:23:55 +01:00
|
|
|
message=message,
|
|
|
|
flags=UserMessage.flags.active_mobile_push_notification,
|
2018-07-28 14:31:45 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
with self.settings(PUSH_NOTIFICATION_BOUNCER_URL=True), \
|
|
|
|
mock.patch('zerver.lib.push_notifications'
|
|
|
|
'.send_notifications_to_bouncer') as mock_send_android, \
|
2019-02-14 00:49:53 +01:00
|
|
|
mock.patch('zerver.lib.push_notifications.get_base_payload',
|
2018-07-28 14:31:45 +02:00
|
|
|
return_value={'gcm': True}):
|
2019-02-14 01:08:51 +01:00
|
|
|
handle_remove_push_notification(user_profile.id, [message.id])
|
|
|
|
mock_send_android.assert_called_with(
|
|
|
|
user_profile.id,
|
|
|
|
{},
|
|
|
|
{
|
|
|
|
'gcm': True,
|
|
|
|
'event': 'remove',
|
|
|
|
'zulip_message_ids': str(message.id),
|
|
|
|
'zulip_message_id': message.id,
|
|
|
|
},
|
|
|
|
{'priority': 'normal'})
|
2019-02-14 01:23:55 +01:00
|
|
|
user_message = UserMessage.objects.get(user_profile=self.user_profile,
|
|
|
|
message=message)
|
|
|
|
self.assertEqual(user_message.flags.active_mobile_push_notification, False)
|
2018-07-28 14:31:45 +02:00
|
|
|
|
|
|
|
    def test_non_bouncer_push_remove(self) -> None:
        """Without a bouncer, a 'remove' event is sent directly to the
        user's Android devices and the active_mobile_push_notification
        flag is cleared afterwards."""
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        message = self.get_message(Recipient.PERSONAL, type_id=1)
        UserMessage.objects.create(
            user_profile=self.user_profile,
            message=message,
            flags=UserMessage.flags.active_mobile_push_notification,
        )

        android_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile,
                                           kind=PushDeviceToken.GCM))

        with mock.patch('zerver.lib.push_notifications'
                        '.send_android_push_notification') as mock_send_android, \
                mock.patch('zerver.lib.push_notifications.get_base_payload',
                           return_value={'gcm': True}):
            handle_remove_push_notification(self.user_profile.id, [message.id])
            mock_send_android.assert_called_with(
                android_devices,
                {
                    'gcm': True,
                    'event': 'remove',
                    'zulip_message_ids': str(message.id),
                    'zulip_message_id': message.id,
                },
                {'priority': 'normal'})
        # The flag must be cleared once the remove event has been sent.
        user_message = UserMessage.objects.get(user_profile=self.user_profile,
                                               message=message)
        self.assertEqual(user_message.flags.active_mobile_push_notification, False)
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_user_message_does_not_exist(self) -> None:
        """This simulates a condition that should only be an error if the user is
        not long-term idle; we fake it, though, in the sense that the user should
        not have received the message in the first place"""
        self.make_stream('public_stream')
        sender = self.example_user('iago')
        message_id = self.send_stream_message(sender, "public_stream", "test")
        # Note: self.user_profile is NOT subscribed, so no UserMessage row
        # exists for them -- the handler should log an error and stop.
        missed_message = {'message_id': message_id}
        with mock.patch('zerver.lib.push_notifications.logger.error') as mock_logger, \
                mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True) as mock_push_notifications:
            handle_push_notification(self.user_profile.id, missed_message)
            mock_logger.assert_called_with("Could not find UserMessage with "
                                           "message_id %s and user_id %s" %
                                           (message_id, self.user_profile.id,))
            mock_push_notifications.assert_called_once()
|
2017-11-17 10:47:43 +01:00
|
|
|
    def test_user_message_soft_deactivated(self) -> None:
        """This simulates a condition that should only be an error if the user is
        not long-term idle; we fake it, though, in the sense that the user should
        not have received the message in the first place"""
        self.setup_apns_tokens()
        self.setup_gcm_tokens()
        self.make_stream('public_stream')
        self.subscribe(self.user_profile, 'public_stream')
        # Soft-deactivate the user: stream messages stop getting UserMessage
        # rows for them, so the handler must tolerate the missing row.
        do_soft_deactivate_users([self.user_profile])

        sender = self.example_user('iago')
        message_id = self.send_stream_message(sender, "public_stream", "test")
        missed_message = {
            'message_id': message_id,
            'trigger': 'stream_push_notify',
        }

        android_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile,
                                           kind=PushDeviceToken.GCM))

        apple_devices = list(
            PushDeviceToken.objects.filter(user=self.user_profile,
                                           kind=PushDeviceToken.APNS))

        with mock.patch('zerver.lib.push_notifications.get_message_payload_apns',
                        return_value={'apns': True}), \
                mock.patch('zerver.lib.push_notifications.get_message_payload_gcm',
                           return_value=({'gcm': True}, {})), \
                mock.patch('zerver.lib.push_notifications'
                           '.send_apple_push_notification') as mock_send_apple, \
                mock.patch('zerver.lib.push_notifications'
                           '.send_android_push_notification') as mock_send_android, \
                mock.patch('zerver.lib.push_notifications.logger.error') as mock_logger, \
                mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True) as mock_push_notifications:
            handle_push_notification(self.user_profile.id, missed_message)
            # No error: a soft-deactivated user legitimately has no
            # UserMessage row, and the notification must still go out.
            mock_logger.assert_not_called()
            mock_send_apple.assert_called_with(self.user_profile.id,
                                               apple_devices,
                                               {'apns': True})
            mock_send_android.assert_called_with(android_devices, {'gcm': True}, {})
            mock_push_notifications.assert_called_once()
2017-08-29 01:03:29 +02:00
|
|
|
class TestAPNs(PushNotificationTest):
    """Tests for the direct APNs sending path (send_apple_push_notification),
    including retry and failure handling around the mocked APNs client."""

    def devices(self) -> List[DeviceToken]:
        """Return this user's registered APNs device tokens."""
        return list(PushDeviceToken.objects.filter(
            user=self.user_profile, kind=PushDeviceToken.APNS))

    def send(self, devices: Optional[List[PushDeviceToken]]=None,
             payload_data: Optional[Dict[str, Any]]=None) -> None:
        """Invoke send_apple_push_notification for `devices` (defaulting to
        all of the user's APNs devices) with `payload_data` (defaulting to
        an empty payload).

        Fix: `payload_data` previously used a mutable default argument
        (`={}`), which is shared across calls; use a None sentinel instead.
        """
        if devices is None:
            devices = self.devices()
        if payload_data is None:
            payload_data = {}
        send_apple_push_notification(
            self.user_profile.id, devices, payload_data)

    def test_get_apns_client(self) -> None:
        """This test is pretty hacky, and needs to carefully reset the state
        it modifies in order to avoid leaking state that can lead to
        nondeterministic results for other tests.
        """
        import zerver.lib.push_notifications
        zerver.lib.push_notifications._apns_client_initialized = False
        try:
            with self.settings(APNS_CERT_FILE='/foo.pem'), \
                    mock.patch('apns2.client.APNsClient') as mock_client:
                client = get_apns_client()
                self.assertEqual(mock_client.return_value, client)
        finally:
            # Reset the values set by `get_apns_client` so that we don't
            # leak changes to the rest of the world.
            zerver.lib.push_notifications._apns_client_initialized = False
            zerver.lib.push_notifications._apns_client = None

    def test_not_configured(self) -> None:
        """With no APNs client configured, sending is a logged no-op and
        initialize_push_notifications warns the administrator."""
        with mock.patch('zerver.lib.push_notifications.get_apns_client') as mock_get, \
                mock.patch('zerver.lib.push_notifications.logger') as mock_logging:
            mock_get.return_value = None
            self.send()
            mock_logging.debug.assert_called_once_with(
                "APNs: Dropping a notification because nothing configured. "
                "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
            mock_logging.warning.assert_not_called()
            from zerver.lib.push_notifications import initialize_push_notifications
            initialize_push_notifications()
            mock_logging.warning.assert_called_once_with(
                "Mobile push notifications are not configured.\n "
                "See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html")

    def test_success(self) -> None:
        """Every device gets a success log line and no warnings."""
        self.setup_apns_tokens()
        with self.mock_apns() as mock_apns, \
                mock.patch('zerver.lib.push_notifications.logger') as mock_logging:
            mock_apns.get_notification_result.return_value = 'Success'
            self.send()
            mock_logging.warning.assert_not_called()
            for device in self.devices():
                mock_logging.info.assert_any_call(
                    "APNs: Success sending for user %d to device %s",
                    self.user_profile.id, device.token)

    def test_http_retry(self) -> None:
        """A transient HTTP/2 stream reset is retried and eventually
        succeeds, logging exactly one warning."""
        import hyper
        self.setup_apns_tokens()
        with self.mock_apns() as mock_apns, \
                mock.patch('zerver.lib.push_notifications.logger') as mock_logging:
            # First attempt raises, all subsequent attempts succeed.
            mock_apns.get_notification_result.side_effect = itertools.chain(
                [hyper.http20.exceptions.StreamResetError()],
                itertools.repeat('Success'))
            self.send()
            mock_logging.warning.assert_called_once_with(
                "APNs: HTTP error sending for user %d to device %s: %s",
                self.user_profile.id, self.devices()[0].token, "StreamResetError")
            for device in self.devices():
                mock_logging.info.assert_any_call(
                    "APNs: Success sending for user %d to device %s",
                    self.user_profile.id, device.token)

    def test_http_retry_pipefail(self) -> None:
        """A BrokenPipeError is likewise retried and eventually succeeds."""
        self.setup_apns_tokens()
        with self.mock_apns() as mock_apns, \
                mock.patch('zerver.lib.push_notifications.logger') as mock_logging:
            mock_apns.get_notification_result.side_effect = itertools.chain(
                [BrokenPipeError()],
                itertools.repeat('Success'))
            self.send()
            mock_logging.warning.assert_called_once_with(
                "APNs: BrokenPipeError sending for user %d to device %s: %s",
                self.user_profile.id, self.devices()[0].token, "BrokenPipeError")
            for device in self.devices():
                mock_logging.info.assert_any_call(
                    "APNs: Success sending for user %d to device %s",
                    self.user_profile.id, device.token)

    def test_http_retry_eventually_fails(self) -> None:
        """After repeated stream resets the sender gives up, logging a
        'retries exhausted' failure."""
        import hyper
        self.setup_apns_tokens()
        with self.mock_apns() as mock_apns, \
                mock.patch('zerver.lib.push_notifications.logger') as mock_logging:
            # Five consecutive failures -- more than the retry budget.
            mock_apns.get_notification_result.side_effect = itertools.chain(
                [hyper.http20.exceptions.StreamResetError()],
                [hyper.http20.exceptions.StreamResetError()],
                [hyper.http20.exceptions.StreamResetError()],
                [hyper.http20.exceptions.StreamResetError()],
                [hyper.http20.exceptions.StreamResetError()],
            )

            self.send(devices=self.devices()[0:1])
            self.assertEqual(mock_logging.warning.call_count, 5)
            mock_logging.warning.assert_called_with(
                'APNs: Failed to send for user %d to device %s: %s',
                self.user_profile.id, self.devices()[0].token, 'HTTP error, retries exhausted')
            self.assertEqual(mock_logging.info.call_count, 1)

    def test_modernize_apns_payload(self) -> None:
        """Legacy payloads (top-level message_ids) are upgraded to the
        modern shape; already-modern payloads pass through unchanged."""
        payload = {'alert': 'Message from Hamlet',
                   'badge': 0,
                   'custom': {'zulip': {'message_ids': [3]}}}
        self.assertEqual(
            modernize_apns_payload(
                {'alert': 'Message from Hamlet',
                 'message_ids': [3]}),
            payload)
        self.assertEqual(
            modernize_apns_payload(payload),
            payload)
2017-05-11 09:26:00 +02:00
|
|
|
class TestGetAPNsPayload(PushNotificationTest):
    """Tests for get_message_payload_apns: the exact APNs payload built for
    personal, huddle, stream, mention, and redacted-content messages."""

    def test_get_message_payload_apns_personal_message(self) -> None:
        user_profile = self.example_user("othello")
        message_id = self.send_personal_message(
            self.sender,
            user_profile,
            'Content of personal message',
        )
        message = Message.objects.get(id=message_id)
        message.trigger = 'private_message'
        payload = get_message_payload_apns(user_profile, message)
        expected = {
            'alert': {
                'title': 'King Hamlet',
                'subtitle': '',
                'body': message.content,
            },
            'badge': 0,
            'sound': 'default',
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'private',
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": user_profile.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)

    @mock.patch('zerver.lib.push_notifications.push_notifications_enabled', return_value = True)
    def test_get_message_payload_apns_huddle_message(self, mock_push_notifications: mock.MagicMock) -> None:
        user_profile = self.example_user("othello")
        message_id = self.send_huddle_message(
            self.sender,
            [self.example_user('othello'), self.example_user('cordelia')])
        message = Message.objects.get(id=message_id)
        message.trigger = 'private_message'
        payload = get_message_payload_apns(user_profile, message)
        expected = {
            'alert': {
                'title': 'Cordelia Lear, King Hamlet, Othello, the Moor of Venice',
                'subtitle': 'King Hamlet:',
                'body': message.content,
            },
            'sound': 'default',
            'badge': 0,
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'private',
                    # Huddles carry the full recipient user-id list.
                    'pm_users': ','.join(
                        str(s.user_profile_id)
                        for s in Subscription.objects.filter(
                            recipient=message.recipient)),
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": user_profile.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)
        mock_push_notifications.assert_called()

    def test_get_message_payload_apns_stream_message(self) -> None:
        stream = Stream.objects.filter(name='Verona').get()
        message = self.get_message(Recipient.STREAM, stream.id)
        message.trigger = 'push_stream_notify'
        message.stream_name = 'Verona'
        payload = get_message_payload_apns(self.sender, message)
        expected = {
            'alert': {
                'title': '#Verona > Test Topic',
                'subtitle': 'King Hamlet:',
                'body': message.content,
            },
            'sound': 'default',
            'badge': 0,
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'stream',
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    "stream": get_display_recipient(message.recipient),
                    "topic": message.topic_name(),
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": self.sender.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)

    def test_get_message_payload_apns_stream_mention(self) -> None:
        user_profile = self.example_user("othello")
        stream = Stream.objects.filter(name='Verona').get()
        message = self.get_message(Recipient.STREAM, stream.id)
        message.trigger = 'mentioned'
        message.stream_name = 'Verona'
        payload = get_message_payload_apns(user_profile, message)
        expected = {
            'alert': {
                'title': '#Verona > Test Topic',
                # A personal mention changes the subtitle wording.
                'subtitle': 'King Hamlet mentioned you:',
                'body': message.content,
            },
            'sound': 'default',
            'badge': 0,
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'stream',
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    "stream": get_display_recipient(message.recipient),
                    "topic": message.topic_name(),
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": user_profile.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)

    def test_get_message_payload_apns_stream_wildcard_mention(self) -> None:
        user_profile = self.example_user("othello")
        stream = Stream.objects.filter(name='Verona').get()
        message = self.get_message(Recipient.STREAM, stream.id)
        message.trigger = 'wildcard_mentioned'
        message.stream_name = 'Verona'
        payload = get_message_payload_apns(user_profile, message)
        expected = {
            'alert': {
                'title': '#Verona > Test Topic',
                # A wildcard (@all/@everyone) mention uses 'everyone'.
                'subtitle': 'King Hamlet mentioned everyone:',
                'body': message.content,
            },
            'sound': 'default',
            'badge': 0,
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'stream',
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    "stream": get_display_recipient(message.recipient),
                    "topic": message.topic_name(),
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": user_profile.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)

    @override_settings(PUSH_NOTIFICATION_REDACT_CONTENT = True)
    def test_get_message_payload_apns_redacted_content(self) -> None:
        user_profile = self.example_user("othello")
        message_id = self.send_huddle_message(
            self.sender,
            [self.example_user('othello'), self.example_user('cordelia')])
        message = Message.objects.get(id=message_id)
        message.trigger = 'private_message'
        payload = get_message_payload_apns(user_profile, message)
        expected = {
            'alert': {
                'title': 'Cordelia Lear, King Hamlet, Othello, the Moor of Venice',
                'subtitle': "King Hamlet:",
                # With redaction enabled, the body never leaks content.
                'body': "***REDACTED***",
            },
            'sound': 'default',
            'badge': 0,
            'custom': {
                'zulip': {
                    'message_ids': [message.id],
                    'recipient_type': 'private',
                    'pm_users': ','.join(
                        str(s.user_profile_id)
                        for s in Subscription.objects.filter(
                            recipient=message.recipient)),
                    'sender_email': self.sender.email,
                    'sender_id': self.sender.id,
                    'server': settings.EXTERNAL_HOST,
                    'realm_id': self.sender.realm.id,
                    'realm_uri': self.sender.realm.uri,
                    "user_id": user_profile.id,
                }
            }
        }
        self.assertDictEqual(payload, expected)
2017-05-11 10:15:00 +02:00
|
|
|
class TestGetGCMPayload(PushNotificationTest):
    """Tests for get_message_payload_gcm across message/trigger types."""

    def _default_payload(self, user: Any, message: Message) -> Dict[str, Any]:
        # Fields shared by every expected GCM payload in these tests.  The
        # test user (hamlet) is also the sender of self.get_message, so the
        # sender_* fields derive from the same user.  Hoisted here because
        # all four tests previously duplicated this dict verbatim.
        return {
            "user_id": user.id,
            "event": "message",
            "zulip_message_id": message.id,
            "time": datetime_to_timestamp(message.date_sent),
            "server": settings.EXTERNAL_HOST,
            "realm_id": user.realm.id,
            "realm_uri": user.realm.uri,
            "sender_id": user.id,
            "sender_email": user.email,
            "sender_full_name": "King Hamlet",
            "sender_avatar_url": absolute_avatar_url(message.sender),
        }

    def test_get_message_payload_gcm(self) -> None:
        """A mention in a stream message; long content gets truncated."""
        stream = Stream.objects.filter(name='Verona').get()
        message = self.get_message(Recipient.STREAM, stream.id)
        message.content = 'a' * 210
        message.rendered_content = 'a' * 210
        message.save()
        message.trigger = 'mentioned'

        hamlet = self.example_user('hamlet')
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        expected = self._default_payload(hamlet, message)
        expected.update({
            "alert": "New mention from King Hamlet",
            # 210 chars of content are truncated to 200 plus an ellipsis.
            "content": 'a' * 200 + '…',
            "content_truncated": True,
            "recipient_type": "stream",
            "stream": get_display_recipient(message.recipient),
            "topic": message.topic_name(),
        })
        self.assertDictEqual(payload, expected)
        self.assertDictEqual(gcm_options, {
            "priority": "high",
        })

    def test_get_message_payload_gcm_personal(self) -> None:
        """A private message; short content is passed through untruncated."""
        message = self.get_message(Recipient.PERSONAL, 1)
        message.trigger = 'private_message'
        hamlet = self.example_user('hamlet')
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        expected = self._default_payload(hamlet, message)
        expected.update({
            "alert": "New private message from King Hamlet",
            "content": message.content,
            "content_truncated": False,
            "recipient_type": "private",
        })
        self.assertDictEqual(payload, expected)
        self.assertDictEqual(gcm_options, {
            "priority": "high",
        })

    def test_get_message_payload_gcm_stream_notifications(self) -> None:
        """A stream-push-notification trigger includes stream and topic."""
        message = self.get_message(Recipient.STREAM, 1)
        message.trigger = 'stream_push_notify'
        message.stream_name = 'Denmark'
        hamlet = self.example_user('hamlet')
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        expected = self._default_payload(hamlet, message)
        expected.update({
            "alert": "New stream message from King Hamlet in Denmark",
            "content": message.content,
            "content_truncated": False,
            "recipient_type": "stream",
            "topic": "Test Topic",
            "stream": "Denmark",
        })
        self.assertDictEqual(payload, expected)
        self.assertDictEqual(gcm_options, {
            "priority": "high",
        })

    @override_settings(PUSH_NOTIFICATION_REDACT_CONTENT=True)
    def test_get_message_payload_gcm_redacted_content(self) -> None:
        """With redaction enabled, content is replaced by the marker."""
        message = self.get_message(Recipient.STREAM, 1)
        message.trigger = 'stream_push_notify'
        message.stream_name = 'Denmark'
        hamlet = self.example_user('hamlet')
        payload, gcm_options = get_message_payload_gcm(hamlet, message)
        expected = self._default_payload(hamlet, message)
        expected.update({
            "alert": "New stream message from King Hamlet in Denmark",
            "content": "***REDACTED***",
            "content_truncated": False,
            "recipient_type": "stream",
            "topic": "Test Topic",
            "stream": "Denmark",
        })
        self.assertDictEqual(payload, expected)
        self.assertDictEqual(gcm_options, {
            "priority": "high",
        })
|
|
|
2017-05-11 10:31:31 +02:00
|
|
|
class TestSendNotificationsToBouncer(ZulipTestCase):
    """Verify the exact HTTP call made to the push notification bouncer."""

    @mock.patch('zerver.lib.remote_server.send_to_push_bouncer')
    def test_send_notifications_to_bouncer(self, mock_send: mock.MagicMock) -> None:
        send_notifications_to_bouncer(1, {'apns': True}, {'gcm': True}, {})
        expected_post_data = {
            'user_id': 1,
            'apns_payload': {'apns': True},
            'gcm_payload': {'gcm': True},
            'gcm_options': {},
        }
        # The payloads are serialized and POSTed as JSON to push/notify.
        mock_send.assert_called_with(
            'POST',
            'push/notify',
            ujson.dumps(expected_post_data),
            extra_headers={'Content-type': 'application/json'})
|
|
2017-11-05 11:49:43 +01:00
|
|
|
class Result:
    """Minimal stand-in for a requests.Response object, used to stub
    requests.request in the bouncer tests below."""

    def __init__(self, status: int=200, content: str=ujson.dumps({'msg': 'error'})) -> None:
        # Mirror the two attributes the bouncer code inspects.
        self.status_code = status
        self.content = content
|
|
2019-02-08 23:42:24 +01:00
|
|
|
class TestSendToPushBouncer(ZulipTestCase):
    """Error-handling behavior of send_to_push_bouncer for the various
    HTTP status codes the bouncer can return."""

    @mock.patch('requests.request', return_value=Result(status=500))
    @mock.patch('logging.warning')
    def test_500_error(self, mock_warning: mock.MagicMock, mock_request: mock.MagicMock) -> None:
        # NOTE: mock.patch decorators are applied bottom-up, so the first
        # mock argument corresponds to the *innermost* decorator
        # (logging.warning).  The parameter names were previously swapped,
        # which made the assertion below check the wrong mock.
        with self.assertRaises(PushNotificationBouncerRetryLaterError):
            send_to_push_bouncer('register', 'register', {'data': True})
        mock_warning.assert_called_once()

    @mock.patch('requests.request', return_value=Result(status=400))
    def test_400_error(self, mock_request: mock.MagicMock) -> None:
        # A 400 with a JSON error body is surfaced as a JsonableError.
        with self.assertRaises(JsonableError) as exc:
            send_to_push_bouncer('register', 'register', {'msg': True})
        self.assertEqual(exc.exception.msg, 'error')

    def test_400_error_invalid_server_key(self) -> None:
        from zerver.decorator import InvalidZulipServerError
        # This is the exception our decorator uses for an invalid Zulip server
        error_obj = InvalidZulipServerError("testRole")
        with mock.patch('requests.request',
                        return_value=Result(status=400,
                                            content=ujson.dumps(error_obj.to_json()))):
            with self.assertRaises(PushNotificationBouncerException) as exc:
                send_to_push_bouncer('register', 'register', {'msg': True})
        self.assertEqual(str(exc.exception),
                         'Push notifications bouncer error: '
                         'Zulip server auth failure: testRole is not registered')

    @mock.patch('requests.request', return_value=Result(status=400, content='/'))
    def test_400_error_when_content_is_not_serializable(self, mock_request: mock.MagicMock) -> None:
        # A 400 body that is not valid JSON propagates ujson's ValueError.
        with self.assertRaises(ValueError) as exc:
            send_to_push_bouncer('register', 'register', {'msg': True})
        self.assertEqual(str(exc.exception),
                         'Expected object or value')

    @mock.patch('requests.request', return_value=Result(status=300, content='/'))
    def test_300_error(self, mock_request: mock.MagicMock) -> None:
        # Any other unexpected status is a generic bouncer exception.
        with self.assertRaises(PushNotificationBouncerException) as exc:
            send_to_push_bouncer('register', 'register', {'msg': True})
        self.assertEqual(str(exc.exception),
                         'Push notification bouncer returned unexpected status code 300')
|
2017-05-12 09:55:12 +02:00
|
|
|
class TestNumPushDevicesForUser(PushNotificationTest):
    """num_push_devices_for_user with and without a kind filter."""

    def test_when_kind_is_none(self) -> None:
        # No kind filter: every registered token is counted.
        self.setup_apns_tokens()
        self.assertEqual(num_push_devices_for_user(self.user_profile), 2)

    def test_when_kind_is_not_none(self) -> None:
        # Filtering by APNS still matches both tokens set up above.
        self.setup_apns_tokens()
        count = num_push_devices_for_user(self.user_profile,
                                          kind=PushDeviceToken.APNS)
        self.assertEqual(count, 2)
|
|
2016-09-15 15:19:46 +02:00
|
|
|
class TestPushApi(ZulipTestCase):
    """End-to-end coverage of the device-token registration endpoints."""

    def test_push_api(self) -> None:
        user = self.example_user('cordelia')
        self.login_user(user)

        endpoints = [
            ('/json/users/me/apns_device_token', 'apple-tokenaz'),
            ('/json/users/me/android_gcm_reg_id', 'android-token'),
        ]

        # Error handling: oversized and malformed tokens must be rejected.
        for endpoint, label in endpoints:
            # Try adding/removing tokens that are too big...
            broken_token = "a" * 5000  # too big
            result = self.client_post(endpoint, {'token': broken_token})
            self.assert_json_error(result, 'Empty or invalid length token')

            if label == 'apple-tokenaz':
                # APNS tokens must be hex.
                result = self.client_post(endpoint, {'token': 'xyz has non-hex characters'})
                self.assert_json_error(result, 'Invalid APNS token')

            result = self.client_delete(endpoint, {'token': broken_token})
            self.assert_json_error(result, 'Empty or invalid length token')

            # Try to remove a non-existent token...
            result = self.client_delete(endpoint, {'token': 'abcd1234'})
            self.assert_json_error(result, 'Token does not exist')

        # Registration is idempotent: posting the same token twice leaves
        # exactly one row.
        for endpoint, token in endpoints:
            for _ in range(2):
                result = self.client_post(endpoint, {'token': token})
                self.assert_json_success(result)

            matching = list(PushDeviceToken.objects.filter(user=user, token=token))
            self.assertEqual(len(matching), 1)
            self.assertEqual(matching[0].token, token)

        # User should have tokens for both devices now.
        self.assertEqual(PushDeviceToken.objects.filter(user=user).count(), 2)

        # Unregistering removes each token.
        for endpoint, token in endpoints:
            result = self.client_delete(endpoint, {'token': token})
            self.assert_json_success(result)
            self.assertEqual(
                PushDeviceToken.objects.filter(user=user, token=token).count(), 0)
|
|
2019-02-08 22:44:55 +01:00
|
|
|
class GCMParseOptionsTest(TestCase):
    """parse_gcm_options: validation and priority defaulting."""

    def test_invalid_option(self) -> None:
        # Unknown option keys are rejected.
        with self.assertRaises(JsonableError):
            parse_gcm_options({"invalid": True}, {})

    def test_invalid_priority_value(self) -> None:
        # Only "normal" and "high" are accepted priorities.
        with self.assertRaises(JsonableError):
            parse_gcm_options({"priority": "invalid"}, {})

    def test_default_priority(self) -> None:
        # "message" events default to high priority; everything else
        # (including no event at all) defaults to normal.
        self.assertEqual(
            "high", parse_gcm_options({}, {"event": "message"}))
        self.assertEqual(
            "normal", parse_gcm_options({}, {"event": "remove"}))
        self.assertEqual(
            "normal", parse_gcm_options({}, {}))

    def test_explicit_priority(self) -> None:
        # An explicit priority always wins over the default.
        self.assertEqual(
            "normal", parse_gcm_options({"priority": "normal"}, {}))
        self.assertEqual(
            "high", parse_gcm_options({"priority": "high"}, {}))
|
|
2019-02-13 02:46:41 +01:00
|
|
|
@mock.patch('zerver.lib.push_notifications.gcm_client')
class GCMSendTest(PushNotificationTest):
    """send_android_push_notification_to_user with a mocked GCM client.

    The class-level patch makes mock_gcm the *last* mock argument of every
    test (class decorators are applied outside method decorators).
    """

    def setUp(self) -> None:
        super().setUp()
        self.setup_gcm_tokens()

    def get_gcm_data(self, **kwargs: Any) -> Dict[str, Any]:
        """Build a minimal GCM data payload; kwargs override/extend it."""
        data = {
            'key 1': 'Data 1',
            'key 2': 'Data 2',
        }
        data.update(kwargs)
        return data

    def get_count(self, hex_token: str) -> int:
        """Number of registered GCM device tokens matching hex_token.

        Previously re-defined as a nested function inside three separate
        tests; hoisted here to remove the duplication.
        """
        token = hex_to_b64(hex_token)
        return PushDeviceToken.objects.filter(
            token=token, kind=PushDeviceToken.GCM).count()

    @mock.patch('zerver.lib.push_notifications.logger.debug')
    def test_gcm_is_none(self, mock_debug: mock.MagicMock, mock_gcm: mock.MagicMock) -> None:
        # A falsy gcm_client means GCM is unconfigured; we log and skip.
        mock_gcm.__bool__.return_value = False
        send_android_push_notification_to_user(self.user_profile, {}, {})
        mock_debug.assert_called_with(
            "Skipping sending a GCM push notification since PUSH_NOTIFICATION_BOUNCER_URL "
            "and ANDROID_GCM_API_KEY are both unset")

    @mock.patch('zerver.lib.push_notifications.logger.warning')
    def test_json_request_raises_ioerror(self, mock_warn: mock.MagicMock,
                                         mock_gcm: mock.MagicMock) -> None:
        mock_gcm.json_request.side_effect = IOError('error')
        send_android_push_notification_to_user(self.user_profile, {}, {})
        mock_warn.assert_called_with('error')

    @mock.patch('zerver.lib.push_notifications.logger.warning')
    @mock.patch('zerver.lib.push_notifications.logger.info')
    def test_success(self, mock_info: mock.MagicMock, mock_warning: mock.MagicMock,
                     mock_gcm: mock.MagicMock) -> None:
        res = {}
        res['success'] = {token: ind for ind, token in enumerate(self.gcm_tokens)}
        mock_gcm.json_request.return_value = res

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        self.assertEqual(mock_info.call_count, 2)
        c1 = call("GCM: Sent 1111 as 0")
        c2 = call("GCM: Sent 2222 as 1")
        mock_info.assert_has_calls([c1, c2], any_order=True)
        mock_warning.assert_not_called()

    @mock.patch('zerver.lib.push_notifications.logger.warning')
    def test_canonical_equal(self, mock_warning: mock.MagicMock, mock_gcm: mock.MagicMock) -> None:
        # A canonical ref identical to our own ID is a no-op warning case.
        res = {}
        res['canonical'] = {1: 1}
        mock_gcm.json_request.return_value = res

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        mock_warning.assert_called_once_with("GCM: Got canonical ref but it "
                                             "already matches our ID 1!")

    @mock.patch('zerver.lib.push_notifications.logger.warning')
    def test_canonical_pushdevice_not_present(self, mock_warning: mock.MagicMock,
                                              mock_gcm: mock.MagicMock) -> None:
        # Canonical ref points at a token we don't have: the old token row
        # is updated in place to the new token.
        res = {}
        t1 = hex_to_b64(u'1111')
        t2 = hex_to_b64(u'3333')
        res['canonical'] = {t1: t2}
        mock_gcm.json_request.return_value = res

        self.assertEqual(self.get_count(u'1111'), 1)
        self.assertEqual(self.get_count(u'3333'), 0)

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        msg = ("GCM: Got canonical ref %s "
               "replacing %s but new ID not "
               "registered! Updating.")
        mock_warning.assert_called_once_with(msg % (t2, t1))

        self.assertEqual(self.get_count(u'1111'), 0)
        self.assertEqual(self.get_count(u'3333'), 1)

    @mock.patch('zerver.lib.push_notifications.logger.info')
    def test_canonical_pushdevice_different(self, mock_info: mock.MagicMock,
                                            mock_gcm: mock.MagicMock) -> None:
        # Canonical ref points at a token we already have: the stale token
        # is simply dropped.
        res = {}
        old_token = hex_to_b64(u'1111')
        new_token = hex_to_b64(u'2222')
        res['canonical'] = {old_token: new_token}
        mock_gcm.json_request.return_value = res

        self.assertEqual(self.get_count(u'1111'), 1)
        self.assertEqual(self.get_count(u'2222'), 1)

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        mock_info.assert_called_once_with(
            "GCM: Got canonical ref %s, dropping %s" % (new_token, old_token))

        self.assertEqual(self.get_count(u'1111'), 0)
        self.assertEqual(self.get_count(u'2222'), 1)

    @mock.patch('zerver.lib.push_notifications.logger.info')
    def test_not_registered(self, mock_info: mock.MagicMock, mock_gcm: mock.MagicMock) -> None:
        # NotRegistered errors cause the token to be deleted.
        res = {}
        token = hex_to_b64(u'1111')
        res['errors'] = {'NotRegistered': [token]}
        mock_gcm.json_request.return_value = res

        self.assertEqual(self.get_count(u'1111'), 1)

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        mock_info.assert_called_once_with("GCM: Removing %s" % (token,))
        self.assertEqual(self.get_count(u'1111'), 0)

    @mock.patch('zerver.lib.push_notifications.logger.warning')
    def test_failure(self, mock_warn: mock.MagicMock, mock_gcm: mock.MagicMock) -> None:
        # Other delivery failures are logged but leave the token intact.
        res = {}
        token = hex_to_b64(u'1111')
        res['errors'] = {'Failed': [token]}
        mock_gcm.json_request.return_value = res

        data = self.get_gcm_data()
        send_android_push_notification_to_user(self.user_profile, data, {})
        c1 = call("GCM: Delivery to %s failed: Failed" % (token,))
        mock_warn.assert_has_calls([c1], any_order=True)
|
|
2019-02-14 01:08:51 +01:00
|
|
|
class TestClearOnRead(ZulipTestCase):
    """Marking a stream read should queue "remove push notification" events,
    batched according to MAX_UNBATCHED_REMOVE_NOTIFICATIONS."""

    def test_mark_stream_as_read(self) -> None:
        n_msgs = 3
        max_unbatched = 2

        hamlet = self.example_user("hamlet")
        hamlet.enable_stream_push_notifications = True
        hamlet.save()
        stream = self.subscribe(hamlet, "Denmark")

        message_ids = [self.send_stream_message(self.example_user("iago"),
                                                stream.name,
                                                "yo {}".format(i))
                       for i in range(n_msgs)]
        # Simulate active mobile push notifications for these messages.
        UserMessage.objects.filter(
            user_profile_id=hamlet.id,
            message_id__in=message_ids,
        ).update(
            flags=F('flags').bitor(
                UserMessage.flags.active_mobile_push_notification))

        with mock.patch("zerver.lib.actions.queue_json_publish") as mock_publish:
            with override_settings(MAX_UNBATCHED_REMOVE_NOTIFICATIONS=max_unbatched):
                do_mark_stream_messages_as_read(hamlet, self.client, stream)
            queue_items = [c[0][1] for c in mock_publish.call_args_list]
            groups = [item['message_ids'] for item in queue_items]

        # The first max_unbatched-1 events are singletons; the final event
        # carries the remainder as one batch.
        self.assertEqual(len(groups), min(len(message_ids), max_unbatched))
        for g in groups[:-1]:
            self.assertEqual(len(g), 1)
        self.assertEqual(sum(len(g) for g in groups), len(message_ids))
        # Set comprehension; renamed the loop variable so it no longer
        # shadows the `id` builtin.
        self.assertEqual({message_id for g in groups for message_id in g},
                         set(message_ids))
|
2016-12-14 13:29:42 +01:00
|
|
|
class TestReceivesNotificationsFunctions(ZulipTestCase):
    """Truth tables for the receives_*_notifications helpers: bots never
    receive notifications; humans follow their per-setting flags."""

    def setUp(self) -> None:
        super().setUp()
        self.user = self.example_user('cordelia')

    def _check(self, value: bool, expected: bool) -> None:
        # Preserve assertTrue/assertFalse truthiness semantics rather than
        # requiring an exact boolean with assertEqual.
        if expected:
            self.assertTrue(value)
        else:
            self.assertFalse(value)

    def test_receivers_online_notifications_when_user_is_a_bot(self) -> None:
        self.user.is_bot = True
        for setting in (True, False):
            self.user.enable_online_push_notifications = setting
            self.assertFalse(receives_online_notifications(self.user))

    def test_receivers_online_notifications_when_user_is_not_a_bot(self) -> None:
        self.user.is_bot = False
        for setting in (True, False):
            self.user.enable_online_push_notifications = setting
            self._check(receives_online_notifications(self.user), setting)

    def test_receivers_offline_notifications_when_user_is_a_bot(self) -> None:
        self.user.is_bot = True
        for email_setting, push_setting in ((True, True), (False, False),
                                            (True, False), (False, True)):
            self.user.enable_offline_email_notifications = email_setting
            self.user.enable_offline_push_notifications = push_setting
            self.assertFalse(receives_offline_push_notifications(self.user))
            self.assertFalse(receives_offline_email_notifications(self.user))

    def test_receivers_offline_notifications_when_user_is_not_a_bot(self) -> None:
        self.user.is_bot = False
        for email_setting, push_setting in ((True, True), (False, False),
                                            (True, False), (False, True)):
            self.user.enable_offline_email_notifications = email_setting
            self.user.enable_offline_push_notifications = push_setting
            self._check(receives_offline_push_notifications(self.user), push_setting)
            self._check(receives_offline_email_notifications(self.user), email_setting)

    def test_receivers_stream_notifications_when_user_is_a_bot(self) -> None:
        self.user.is_bot = True
        for setting in (True, False):
            self.user.enable_stream_push_notifications = setting
            self.assertFalse(receives_stream_notifications(self.user))

    def test_receivers_stream_notifications_when_user_is_not_a_bot(self) -> None:
        self.user.is_bot = False
        for setting in (True, False):
            self.user.enable_stream_push_notifications = setting
            self._check(receives_stream_notifications(self.user), setting)
|
|
|
|
|
|
class TestPushNotificationsContent(ZulipTestCase):
    def test_fixtures(self) -> None:
        """Verify the shared markdown fixtures render to the expected
        plain-text mobile push content."""
        fixtures = ujson.loads(self.fixture_data("markdown_test_cases.json"))
        for test in fixtures["regular_tests"]:
            # Only fixtures that declare a text_content rendering are
            # meaningful for mobile push content; skip the rest.
            if "text_content" not in test:
                continue
            with self.subTest(markdown_test_case=test["name"]):
                output = get_mobile_push_content(test["expected_output"])
                self.assertEqual(output, test["text_content"])

    def test_backend_only_fixtures(self) -> None:
        """Cases whose rendered HTML embeds database ids (realm emoji,
        user mentions, stream links) and so cannot live in the shared
        fixture file."""
        realm = get_realm("zulip")
        cordelia = self.example_user("cordelia")
        stream = get_stream("Verona", realm)

        fixtures = [
            {
                'name': 'realm_emoji',
                'rendered_content': '<p>Testing <img alt=":green_tick:" class="emoji" src="/user_avatars/%s/emoji/green_tick.png" title="green tick"> realm emoji.</p>' % (realm.id,),
                'expected_output': 'Testing :green_tick: realm emoji.',
            },
            {
                'name': 'mentions',
                'rendered_content': '<p>Mentioning <span class="user-mention" data-user-id="%s">@Cordelia Lear</span>.</p>' % (cordelia.id,),
                'expected_output': 'Mentioning @Cordelia Lear.',
            },
            {
                'name': 'stream_names',
                'rendered_content': '<p>Testing stream names <a class="stream" data-stream-id="%s" href="/#narrow/stream/Verona">#Verona</a>.</p>' % (stream.id,),
                'expected_output': 'Testing stream names #Verona.',
            },
        ]

        for case in fixtures:
            self.assertEqual(get_mobile_push_content(case["rendered_content"]),
                             case["expected_output"])
|
2018-05-04 01:40:46 +02:00
|
|
|
|
|
|
|
class PushBouncerSignupTest(ZulipTestCase):
    def test_push_signup_invalid_host(self) -> None:
        """Registration must be rejected when the hostname fails validation."""
        request = dict(
            zulip_org_id=str(uuid.uuid4()),
            zulip_org_key=get_random_string(64),
            hostname="invalid-host",
            contact_email="server-admin@example.com",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "invalid-host is not a valid hostname")

    def test_push_signup_invalid_email(self) -> None:
        """Registration must be rejected when the contact email is malformed."""
        request = dict(
            zulip_org_id=str(uuid.uuid4()),
            zulip_org_key=get_random_string(64),
            hostname="example.com",
            contact_email="server-admin",
        )
        result = self.client_post("/api/v1/remotes/server/register", request)
        self.assert_json_error(result, "Enter a valid email address.")

    def test_push_signup_success(self) -> None:
        """End-to-end happy path for bouncer registration: initial signup,
        hostname update, key rotation, contact-email update, and finally
        the auth failure when re-registering with a mismatched key."""
        zulip_org_id = str(uuid.uuid4())
        zulip_org_key = get_random_string(64)

        def send_register_request(org_key: str, hostname: str,
                                  contact_email: str,
                                  new_org_key: Optional[str]=None) -> HttpResponse:
            # POST the registration payload for our zulip_org_id and
            # return the raw response for the caller to assert on.
            request = dict(
                zulip_org_id=zulip_org_id,
                zulip_org_key=org_key,
                hostname=hostname,
                contact_email=contact_email,
            )
            if new_org_key is not None:
                request["new_org_key"] = new_org_key
            return self.client_post("/api/v1/remotes/server/register", request)

        def check_server(hostname: str, contact_email: str) -> RemoteZulipServer:
            # Fetch the server record and verify its stored metadata.
            server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
            self.assertEqual(server.hostname, hostname)
            self.assertEqual(server.contact_email, contact_email)
            return server

        # Initial registration.
        result = send_register_request(zulip_org_key, "example.com",
                                       "server-admin@example.com")
        self.assert_json_success(result)
        check_server("example.com", "server-admin@example.com")

        # Update our hostname
        result = send_register_request(zulip_org_key, "zulip.example.com",
                                       "server-admin@example.com")
        self.assert_json_success(result)
        check_server("zulip.example.com", "server-admin@example.com")

        # Now test rotating our key
        new_key = get_random_string(64)
        result = send_register_request(zulip_org_key, "example.com",
                                       "server-admin@example.com",
                                       new_org_key=new_key)
        self.assert_json_success(result)
        server = check_server("example.com", "server-admin@example.com")
        zulip_org_key = new_key
        self.assertEqual(server.api_key, zulip_org_key)

        # Update our hostname
        result = send_register_request(zulip_org_key, "zulip.example.com",
                                       "new-server-admin@example.com")
        self.assert_json_success(result)
        check_server("zulip.example.com", "new-server-admin@example.com")

        # Now test trying to double-create with a new random key fails
        result = send_register_request(get_random_string(64), "example.com",
                                       "server-admin@example.com")
        self.assert_json_error(result, "Zulip server auth failure: key does not match role %s" %
                               (zulip_org_id,))
|