2020-12-19 03:05:20 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
|
|
|
|
|
2021-06-08 02:45:49 +02:00
|
|
|
import asyncio
|
2017-08-19 01:12:40 +02:00
|
|
|
import base64
|
|
|
|
import logging
|
2017-10-07 00:18:07 +02:00
|
|
|
import re
|
2021-06-08 02:45:49 +02:00
|
|
|
from dataclasses import dataclass
|
2021-06-08 02:02:27 +02:00
|
|
|
from functools import lru_cache
|
2023-08-29 03:03:11 +02:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Dict,
|
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
Union,
|
|
|
|
)
|
2017-10-07 00:18:07 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import gcm
|
|
|
|
import lxml.html
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2013-10-21 19:47:08 +02:00
|
|
|
from django.conf import settings
|
2018-10-11 00:53:13 +02:00
|
|
|
from django.db import IntegrityError, transaction
|
2022-02-23 20:25:30 +01:00
|
|
|
from django.db.models import F, Q
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2021-06-25 20:52:22 +02:00
|
|
|
from django.utils.translation import override as override_language
|
2023-08-02 23:53:10 +02:00
|
|
|
from typing_extensions import TypeAlias
|
2017-08-19 01:12:40 +02:00
|
|
|
|
|
|
|
from zerver.lib.avatar import absolute_avatar_url
|
2023-08-18 21:31:54 +02:00
|
|
|
from zerver.lib.emoji_utils import hex_codepoint_to_emoji
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2022-03-09 23:30:38 +01:00
|
|
|
from zerver.lib.message import access_message, huddle_users
|
2022-03-08 18:39:55 +01:00
|
|
|
from zerver.lib.outgoing_http import OutgoingSession
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.remote_server import send_json_to_push_bouncer, send_to_push_bouncer
|
2022-04-15 22:07:22 +02:00
|
|
|
from zerver.lib.soft_deactivation import soft_reactivate_if_personal_notification
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.timestamp import datetime_to_timestamp
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
2021-08-15 18:35:37 +02:00
|
|
|
AbstractPushDeviceToken,
|
2020-06-11 00:54:34 +02:00
|
|
|
ArchivedMessage,
|
|
|
|
Message,
|
2021-07-09 13:38:12 +02:00
|
|
|
NotificationTriggers,
|
2020-06-11 00:54:34 +02:00
|
|
|
PushDeviceToken,
|
|
|
|
Recipient,
|
2023-07-16 13:59:49 +02:00
|
|
|
Stream,
|
2021-12-30 01:18:46 +01:00
|
|
|
UserGroup,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
get_display_recipient,
|
|
|
|
get_user_profile_by_id,
|
|
|
|
)
|
2016-06-08 12:32:59 +02:00
|
|
|
|
2019-08-10 00:30:33 +02:00
|
|
|
if TYPE_CHECKING:
    # Only needed for type annotations; the functions that actually
    # talk to APNs import aioapns lazily at runtime.
    import aioapns

logger = logging.getLogger(__name__)

if settings.ZILENCER_ENABLED:
    from zilencer.models import RemotePushDeviceToken, RemoteZulipServer

# A device token may belong to a local user, or (when this server acts
# as the push bouncer) to a user on a remote Zulip server.
DeviceToken: TypeAlias = Union[PushDeviceToken, "RemotePushDeviceToken"]
|
2017-05-09 12:15:11 +02:00
|
|
|
|
2023-02-02 04:35:24 +01:00
|
|
|
|
2013-10-21 19:47:08 +02:00
|
|
|
# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data: str) -> str:
    """Decode a base64-encoded token and return it as a lowercase hex string."""
    raw = base64.b64decode(data)
    return raw.hex()
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def hex_to_b64(data: str) -> str:
    """Inverse of b64_to_hex: encode a hex string token as base64."""
    raw = bytes.fromhex(data)
    return base64.b64encode(raw).decode()
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-16 13:59:49 +02:00
|
|
|
def get_message_stream_name_from_database(message: Message) -> str:
    """
    Fetch the stream name for a message with a direct database query.

    Never use this function outside of the push-notifications
    codepath; most of our code knows how to get streams up front
    in a more efficient manner.
    """
    return Stream.objects.get(id=message.recipient.type_id).name
|
|
|
|
|
|
|
|
|
2023-01-02 20:50:23 +01:00
|
|
|
class UserPushIdentityCompat:
    """Compatibility class for supporting the transition from remote servers
    sending their UserProfile ids to the bouncer to sending UserProfile uuids instead.

    Until we can drop support for receiving user_id, we need this
    class, because a user's identity in the push notification context
    may be represented either by an id or uuid.
    """

    def __init__(self, user_id: Optional[int] = None, user_uuid: Optional[str] = None) -> None:
        # At least one form of identity must be supplied.
        assert user_id is not None or user_uuid is not None
        self.user_id = user_id
        self.user_uuid = user_uuid

    def filter_q(self) -> Q:
        """
        This aims to support correctly querying for RemotePushDeviceToken.
        If only one of (user_id, user_uuid) is provided, the situation is trivial.
        If both are provided, we want to query for tokens matching EITHER the
        uuid or the id - because the user may have devices with old registrations,
        so user_id-based, as well as new registration with uuid. Notifications
        naturally should be sent to both.
        """
        has_id = self.user_id is not None
        has_uuid = self.user_uuid is not None
        if has_id and has_uuid:
            return Q(user_uuid=self.user_uuid) | Q(user_id=self.user_id)
        if has_id:
            return Q(user_id=self.user_id)
        assert has_uuid
        return Q(user_uuid=self.user_uuid)

    def __str__(self) -> str:
        pieces = []
        if self.user_id is not None:
            pieces.append(f"<id:{self.user_id}>")
        if self.user_uuid is not None:
            pieces.append(f"<uuid:{self.user_uuid}>")
        return "".join(pieces)

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, UserPushIdentityCompat):
            return False
        return self.user_id == other.user_id and self.user_uuid == other.user_uuid
|
|
|
|
|
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to APNs, for iOS
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-08 02:45:49 +02:00
|
|
|
@dataclass
class APNsContext:
    """Holds the shared aioapns client plus the event loop it was created
    on; callers (see send_apple_push_notification) run the client's
    coroutines on this same loop."""

    apns: "aioapns.APNs"
    loop: asyncio.AbstractEventLoop
|
|
|
|
|
|
|
|
|
2021-06-08 02:02:27 +02:00
|
|
|
@lru_cache(maxsize=None)
def get_apns_context() -> Optional[APNsContext]:
    """Build (once, via lru_cache) the shared APNs client and its event
    loop; returns None when APNs is not configured on this server."""
    # We lazily do this import as part of optimizing Zulip's base
    # import time.
    import aioapns

    if settings.APNS_CERT_FILE is None:  # nocoverage
        return None

    # NB if called concurrently, this will make excess connections.
    # That's a little sloppy, but harmless unless a server gets
    # hammered with a ton of these all at once after startup.
    event_loop = asyncio.new_event_loop()

    # Defining a no-op error-handling function overrides the default
    # behaviour of logging at ERROR level whenever delivery fails; we
    # handle those errors by checking the result in
    # send_apple_push_notification.
    async def ignore_delivery_error(
        request: aioapns.NotificationRequest, result: aioapns.common.NotificationResult
    ) -> None:
        pass  # nocoverage

    async def construct_client() -> aioapns.APNs:
        return aioapns.APNs(
            client_cert=settings.APNS_CERT_FILE,
            topic=settings.APNS_TOPIC,
            max_connection_attempts=APNS_MAX_RETRIES,
            use_sandbox=settings.APNS_SANDBOX,
            err_func=ignore_delivery_error,
        )

    client = event_loop.run_until_complete(construct_client())
    return APNsContext(apns=client, loop=event_loop)
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def apns_enabled() -> bool:
    """Whether this server is configured to send directly to APNs."""
    cert_file = settings.APNS_CERT_FILE
    return cert_file is not None
|
2018-02-12 23:34:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-06 11:56:48 +02:00
|
|
|
def modernize_apns_payload(data: Mapping[str, Any]) -> Mapping[str, Any]:
    """Take a payload in an unknown Zulip version's format, and return in current format."""
    # TODO this isn't super robust as is -- if a buggy remote server
    # sends a malformed payload, we are likely to raise an exception.
    if "message_ids" not in data:
        # Something already compatible with the current format.
        # `alert` may be a string, or a dict with `title` and `body`.
        # In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
        # item in `custom.zulip` is `message_ids`.
        return data

    # The format sent by 1.6.0, from the earliest pre-1.6.0
    # version with bouncer support up until 613d093d7 pre-1.7.0:
    #   'alert': str, # just sender, and text about direct message/mention
    #   'message_ids': List[int], # always just one
    return {
        "alert": data["alert"],
        "badge": 0,
        "custom": {
            "zulip": {
                "message_ids": data["message_ids"],
            },
        },
    }
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-29 01:05:20 +02:00
|
|
|
# Max connection attempts passed to aioapns (see get_apns_context).
APNS_MAX_RETRIES = 3
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def send_apple_push_notification(
    user_identity: UserPushIdentityCompat,
    devices: Sequence[DeviceToken],
    payload_data: Mapping[str, Any],
    remote: Optional["RemoteZulipServer"] = None,
) -> None:
    """Send one notification, built from `payload_data`, to each of the
    given APNs device tokens, and clean up tokens APNs reports as invalid.

    `remote` is set when operating as the push bouncer on behalf of a
    remote Zulip server; it affects logging and which DeviceToken model
    invalid tokens are deleted from.
    """
    if not devices:
        return
    # We lazily do the APNS imports as part of optimizing Zulip's base
    # import time; since these are only needed in the push
    # notification queue worker, it's best to only import them in the
    # code that needs them.
    import aioapns
    import aioapns.exceptions

    apns_context = get_apns_context()
    if apns_context is None:
        logger.debug(
            "APNs: Dropping a notification because nothing configured. "
            "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE)."
        )
        return

    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass: Type[AbstractPushDeviceToken] = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    if remote:
        logger.info(
            "APNs: Sending notification for remote user %s:%s to %d devices",
            remote.uuid,
            user_identity,
            len(devices),
        )
    else:
        logger.info(
            "APNs: Sending notification for local user %s to %d devices",
            user_identity,
            len(devices),
        )
    # Normalize old-format payloads, then hoist the `custom` data to the
    # top level alongside the `aps` dictionary, as APNs expects.
    payload_data = dict(modernize_apns_payload(payload_data))
    message = {**payload_data.pop("custom", {}), "aps": payload_data}

    async def send_all_notifications() -> Iterable[
        Tuple[DeviceToken, Union[aioapns.common.NotificationResult, BaseException]]
    ]:
        # Fan out concurrently; return_exceptions=True so one failed
        # delivery doesn't mask the results for the other devices.
        requests = [
            aioapns.NotificationRequest(
                device_token=device.token, message=message, time_to_live=24 * 3600
            )
            for device in devices
        ]
        results = list(
            await asyncio.gather(
                *(apns_context.apns.send_notification(request) for request in requests),
                return_exceptions=True,
            )
        )
        return zip(devices, results)

    results = apns_context.loop.run_until_complete(send_all_notifications())

    # Per-device result handling: log successes/failures, and delete
    # tokens that APNs says are no longer valid.
    for device, result in results:
        if isinstance(result, aioapns.exceptions.ConnectionError):
            logger.error(
                "APNs: ConnectionError sending for user %s to device %s; check certificate expiration",
                user_identity,
                device.token,
            )
        elif isinstance(result, BaseException):
            logger.error(
                "APNs: Error sending for user %s to device %s",
                user_identity,
                device.token,
                exc_info=result,
            )
        elif result.is_successful:
            logger.info(
                "APNs: Success sending for user %s to device %s", user_identity, device.token
            )
        elif result.description in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
            logger.info(
                "APNs: Removing invalid/expired token %s (%s)", device.token, result.description
            )
            # We remove all entries for this token (There
            # could be multiple for different Zulip servers).
            DeviceTokenClass._default_manager.filter(
                token=device.token, kind=DeviceTokenClass.APNS
            ).delete()
        else:
            logger.warning(
                "APNs: Failed to send for user %s to device %s: %s",
                user_identity,
                device.token,
                result.description,
            )
|
|
|
|
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to GCM, for Android
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-03-08 18:39:55 +01:00
|
|
|
class FCMSession(OutgoingSession):
    """Outgoing HTTP session used for requests to FCM."""

    def __init__(self) -> None:
        # Retries are deliberately not configured on the session itself,
        # since the gcm package handles retrying for us.
        super().__init__(role="fcm", timeout=5)
|
|
|
|
|
|
|
|
|
2019-02-13 02:56:23 +01:00
|
|
|
def make_gcm_client() -> gcm.GCM:  # nocoverage
    """Construct a GCM client object pointed at the FCM endpoint.

    From GCM upstream's doc for migrating to FCM:

        FCM supports HTTP and XMPP protocols that are virtually
        identical to the GCM server protocols, so you don't need to
        update your sending logic for the migration.

    https://developers.google.com/cloud-messaging/android/android-migrate-fcm

    The one thing we're required to change on the server is the URL of
    the endpoint.  So we get to keep using the GCM client library we've
    been using (as long as we're happy with it) -- just monkey-patch in
    that one change, because the library's API doesn't anticipate that
    as a customization point.
    """
    fcm_endpoint = "https://fcm.googleapis.com/fcm/send"
    gcm.gcm.GCM_URL = fcm_endpoint
    return gcm.GCM(settings.ANDROID_GCM_API_KEY)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-05-12 10:12:38 +02:00
|
|
|
# Module-level shared GCM client; None unless this server is configured
# with credentials for sending directly to GCM/FCM.
if settings.ANDROID_GCM_API_KEY:  # nocoverage
    gcm_client = make_gcm_client()
else:
    gcm_client = None
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def gcm_enabled() -> bool:  # nocoverage
    """Whether a GCM client was configured at import time."""
    enabled = gcm_client is not None
    return enabled
|
2018-02-12 23:34:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-10-20 01:52:23 +02:00
|
|
|
# This is purely used in testing
def send_android_push_notification_to_user(
    user_profile: UserProfile, data: Dict[str, Any], options: Dict[str, Any]
) -> None:
    """Send an Android notification to every GCM device of one local user."""
    identity = UserPushIdentityCompat(user_id=user_profile.id)
    gcm_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM)
    )
    send_android_push_notification(identity, gcm_devices, data, options)
|
2017-03-06 03:11:44 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-08 02:14:45 +01:00
|
|
|
def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
    """
    Parse GCM options, supplying defaults, and raising an error if invalid.

    The options permitted here form part of the Zulip notification
    bouncer's API.  They are:

    `priority`: Passed through to GCM; see upstream doc linked below.
        Zulip servers should always set this; when unset, we guess a value
        based on the behavior of old server versions.

    Including unrecognized options is an error.

    For details on options' semantics, see this GCM upstream doc:
      https://firebase.google.com/docs/cloud-messaging/http-server-ref

    Returns `priority`.
    """
    priority = options.pop("priority", None)
    if priority is None:
        # An older server that didn't send `priority`; guess based on
        # whether this looks like an actual notification (anything
        # other than `'event': 'message'` -- presumably a `'remove'` --
        # gets normal priority).
        priority = "high" if data.get("event") == "message" else "normal"
    if priority not in ("normal", "high"):
        raise JsonableError(
            _("Invalid GCM option to bouncer: priority {priority!r}").format(priority=priority)
        )

    if options:
        # We're strict about the API; there is no use case for a newer Zulip
        # server talking to an older bouncer, so we only need to provide
        # one-way compatibility.
        raise JsonableError(
            _("Invalid GCM options to bouncer: {options}").format(
                options=orjson.dumps(options).decode()
            )
        )

    return priority  # when this grows a second option, can make it a tuple
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def send_android_push_notification(
    user_identity: UserPushIdentityCompat,
    devices: Sequence[DeviceToken],
    data: Dict[str, Any],
    options: Dict[str, Any],
    remote: Optional["RemoteZulipServer"] = None,
) -> None:
    """
    Send a GCM message to the given devices.

    See https://firebase.google.com/docs/cloud-messaging/http-server-ref
    for the GCM upstream API which this talks to.

    data: The JSON object (decoded) to send as the 'data' parameter of
        the GCM message.
    options: Additional options to control the GCM message sent.
        For details, see `parse_gcm_options`.

    `remote` is set when operating as the push bouncer on behalf of a
    remote Zulip server; it affects logging and which DeviceToken model
    stale tokens are updated or deleted from.
    """
    if not devices:
        return
    if not gcm_client:
        logger.debug(
            "Skipping sending a GCM push notification since "
            "PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset"
        )
        return

    if remote:
        logger.info(
            "GCM: Sending notification for remote user %s:%s to %d devices",
            remote.uuid,
            user_identity,
            len(devices),
        )
    else:
        logger.info(
            "GCM: Sending notification for local user %s to %d devices", user_identity, len(devices)
        )
    reg_ids = [device.token for device in devices]
    priority = parse_gcm_options(options, data)
    try:
        # See https://firebase.google.com/docs/cloud-messaging/http-server-ref .
        # Two kwargs `retries` and `session` get eaten by `json_request`;
        # the rest pass through to the GCM server.
        #
        # One initial request plus 2 retries, with 5-second timeouts,
        # and expected 1 + 2 seconds (the gcm module jitters its
        # backoff by ±50%, so worst case * 1.5) between them, totals
        # 18s expected, up to 19.5s worst case.
        res = gcm_client.json_request(
            registration_ids=reg_ids,
            priority=priority,
            data=data,
            retries=2,
            session=FCMSession(),
        )
    except OSError:
        logger.warning("Error while pushing to GCM", exc_info=True)
        return

    if res and "success" in res:
        for reg_id, msg_id in res["success"].items():
            logger.info("GCM: Sent %s as %s", reg_id, msg_id)

    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass: Type[AbstractPushDeviceToken] = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    # res.canonical will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: https://developer.android.com/google/gcm/adv.html#canonical
    # NOTE(review): `res` was guarded for falsiness above but is used
    # unguarded here; presumably json_request always returns a dict on
    # success -- verify against the gcm package.
    if "canonical" in res:
        for reg_id, new_reg_id in res["canonical"].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logger.warning("GCM: Got canonical ref but it already matches our ID %s!", reg_id)
            elif not DeviceTokenClass._default_manager.filter(
                token=new_reg_id, kind=DeviceTokenClass.GCM
            ).exists():
                # This case shouldn't happen; any time we get a canonical ref it should have been
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logger.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating.",
                    new_reg_id,
                    reg_id,
                )
                DeviceTokenClass._default_manager.filter(
                    token=reg_id, kind=DeviceTokenClass.GCM
                ).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logger.info("GCM: Got canonical ref %s, dropping %s", new_reg_id, reg_id)

                DeviceTokenClass._default_manager.filter(
                    token=reg_id, kind=DeviceTokenClass.GCM
                ).delete()

    if "errors" in res:
        for error, reg_ids in res["errors"].items():
            if error in ["NotRegistered", "InvalidRegistration"]:
                for reg_id in reg_ids:
                    logger.info("GCM: Removing %s", reg_id)
                    # We remove all entries for this token (There
                    # could be multiple for different Zulip servers).
                    DeviceTokenClass._default_manager.filter(
                        token=reg_id, kind=DeviceTokenClass.GCM
                    ).delete()
            else:
                for reg_id in reg_ids:
                    logger.warning("GCM: Delivery to %s failed: %s", reg_id, error)

    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
|
2017-03-06 03:05:04 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to a bouncer
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def uses_notification_bouncer() -> bool:
    """Whether this server forwards push notifications to a bouncer."""
    bouncer_url = settings.PUSH_NOTIFICATION_BOUNCER_URL
    return bouncer_url is not None
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def send_notifications_to_bouncer(
    user_profile_id: int,
    apns_payload: Dict[str, Any],
    gcm_payload: Dict[str, Any],
    gcm_options: Dict[str, Any],
) -> Tuple[int, int]:
    """Forward one user's push payloads to the notification bouncer.

    Returns the (total_android_devices, total_apple_devices) counts the
    bouncer reports having delivered to.
    """
    user_uuid = str(get_user_profile_by_id(user_profile_id).uuid)
    post_data = {
        # user_uuid is the intended future format, but we also need to send user_id
        # to avoid breaking old mobile registrations, which were made with user_id.
        "user_uuid": user_uuid,
        "user_id": user_profile_id,
        "apns_payload": apns_payload,
        "gcm_payload": gcm_payload,
        "gcm_options": gcm_options,
    }
    # Calls zilencer.views.remote_server_notify_push
    response_data = send_json_to_push_bouncer("POST", "push/notify", post_data)

    total_android_devices = response_data["total_android_devices"]
    total_apple_devices = response_data["total_apple_devices"]
    assert isinstance(total_android_devices, int)
    assert isinstance(total_apple_devices, int)
    return total_android_devices, total_apple_devices
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Managing device tokens
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def add_push_device_token(
    user_profile: UserProfile, token_str: str, kind: int, ios_app_id: Optional[str] = None
) -> PushDeviceToken:
    """Register a push device token for the user: create the local
    record, and, when a bouncer is configured, register it there too.

    Returns the local PushDeviceToken (the pre-existing one, if this
    token was already registered for the user).
    """
    logger.info(
        "Registering push device: %d %r %d %r", user_profile.id, token_str, kind, ios_app_id
    )

    # Regardless of whether we're using the push notifications
    # bouncer, we want to store a PushDeviceToken record locally.
    # These can be used to discern whether the user has any mobile
    # devices configured, and is also where we will store encryption
    # keys for mobile push notifications.
    try:
        with transaction.atomic():
            token = PushDeviceToken.objects.create(
                user_id=user_profile.id,
                kind=kind,
                token=token_str,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now(),
            )
    except IntegrityError:
        # The token already exists for this user; fetch it instead.
        token = PushDeviceToken.objects.get(
            user_id=user_profile.id,
            kind=kind,
            token=token_str,
        )

    # If we're sending things to the push notification bouncer
    # register this user with them here
    if uses_notification_bouncer():
        post_data = {
            "server_uuid": settings.ZULIP_ORG_ID,
            "user_uuid": str(user_profile.uuid),
            # user_id is sent so that the bouncer can delete any pre-existing registrations
            # for this user+device to avoid duplication upon adding the uuid registration.
            "user_id": str(user_profile.id),
            "token": token_str,
            "token_kind": kind,
        }

        if kind == PushDeviceToken.APNS:
            post_data["ios_app_id"] = ios_app_id

        logger.info("Sending new push device to bouncer: %r", post_data)
        # Calls zilencer.views.register_remote_push_device
        send_to_push_bouncer("POST", "push/register", post_data)

    return token
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def remove_push_device_token(user_profile: UserProfile, token_str: str, kind: int) -> None:
    """Delete a device registration, locally and (when configured) on the bouncer."""
    try:
        registration = PushDeviceToken.objects.get(
            token=token_str, kind=kind, user=user_profile
        )
        registration.delete()
    except PushDeviceToken.DoesNotExist:
        # If we are using bouncer, don't raise the exception. It will
        # be raised by the code below eventually. This is important
        # during the transition period after upgrading to a version
        # that stores local PushDeviceToken objects even when using
        # the push notifications bouncer.
        if not uses_notification_bouncer():
            raise JsonableError(_("Token does not exist"))

    # If we're sending things to the push notification bouncer
    # unregister this user with them here
    if uses_notification_bouncer():
        # TODO: Make this a remove item
        post_data = dict(
            server_uuid=settings.ZULIP_ORG_ID,
            # We don't know here if the token was registered with uuid
            # or using the legacy id format, so we need to send both.
            user_uuid=str(user_profile.uuid),
            user_id=user_profile.id,
            token=token_str,
            token_kind=kind,
        )
        # Calls zilencer.views.unregister_remote_push_device
        send_to_push_bouncer("POST", "push/unregister", post_data)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-19 03:12:54 +01:00
|
|
|
def clear_push_device_tokens(user_profile_id: int) -> None:
    """Delete every push device registration belonging to the given user."""
    if not uses_notification_bouncer():
        # Deletes all of a user's PushDeviceTokens.
        PushDeviceToken.objects.filter(user_id=user_profile_id).delete()
        return

    user_uuid = str(get_user_profile_by_id(user_profile_id).uuid)
    post_data = {
        "server_uuid": settings.ZULIP_ORG_ID,
        # We want to clear all registered tokens, and they may have
        # been registered with either uuid or id.
        "user_uuid": user_uuid,
        "user_id": user_profile_id,
    }
    send_to_push_bouncer("POST", "push/unregister/all", post_data)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Push notifications in general
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def push_notifications_enabled() -> bool:
    """True just if this server has configured a way to send push notifications."""
    bouncer_fully_configured = (
        uses_notification_bouncer()
        and settings.ZULIP_ORG_KEY is not None
        and settings.ZULIP_ORG_ID is not None
    )
    if bouncer_fully_configured:  # nocoverage
        # We have the needed configuration to send push notifications through
        # the bouncer. Better yet would be to confirm that this config actually
        # works -- e.g., that we have ever successfully sent to the bouncer --
        # but this is a good start.
        return True

    if settings.DEVELOPMENT and (apns_enabled() or gcm_enabled()):  # nocoverage
        # Since much of the notifications logic is platform-specific, the mobile
        # developers often work on just one platform at a time, so we should
        # only require one to be configured.
        return True

    if apns_enabled() and gcm_enabled():  # nocoverage
        # We have the needed configuration to send through APNs and GCM directly
        # (i.e., we are the bouncer, presumably.) Again, assume it actually works.
        return True

    return False
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-27 18:12:11 +01:00
|
|
|
def initialize_push_notifications() -> None:
    """Warn at server startup when no push notification transport is configured."""
    if push_notifications_enabled():
        return

    if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
        # Avoid unnecessary spam on development environment startup
        return

    logger.warning(
        "Mobile push notifications are not configured.\n "
        "See https://zulip.readthedocs.io/en/latest/"
        "production/mobile-push-notifications.html"
    )
|
2018-11-27 18:12:11 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_mobile_push_content(rendered_content: str) -> str:
    """Convert a message's rendered HTML into the plain text shown in a
    mobile push notification.

    Emoji spans become their Unicode characters, images are replaced by
    their alt text, ordered lists and blockquotes are re-rendered as
    Markdown-ish text. If the realm has disabled message content in
    push notifications (PUSH_NOTIFICATION_REDACT_CONTENT), a fixed
    redaction notice is returned instead.
    """

    def get_text(elem: lxml.html.HtmlElement) -> str:
        # Extract the text of this element itself (children are handled
        # by the recursive `process` below).
        # Convert default emojis to their Unicode equivalent.
        classes = elem.get("class", "")
        if "emoji" in classes:
            match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
            if match:
                emoji_code = match.group("emoji_code")
                return hex_codepoint_to_emoji(emoji_code)
        # Handles realm emojis, avatars etc.
        if elem.tag == "img":
            return elem.get("alt", "")
        if elem.tag == "blockquote":
            return ""  # To avoid empty line before quote text
        return elem.text or ""

    def format_as_quote(quote_text: str) -> str:
        # Prefix each non-empty line with "> ", Markdown-quote style.
        return "".join(
            f"> {line}\n" for line in quote_text.splitlines() if line  # Remove empty lines
        )

    def render_olist(ol: lxml.html.HtmlElement) -> str:
        # Render an <ol> as numbered lines, honoring the `start`
        # attribute and indenting nested lists one level per ancestor <ol>.
        items = []
        counter = int(ol.get("start")) if ol.get("start") else 1
        nested_levels = len(list(ol.iterancestors("ol")))
        indent = ("\n" + " " * nested_levels) if nested_levels else ""

        for li in ol:
            items.append(indent + str(counter) + ". " + process(li).strip())
            counter += 1

        return "\n".join(items)

    def render_spoiler(elem: lxml.html.HtmlElement) -> str:
        # Show only the spoiler header (if any) plus an ellipsis; the
        # hidden body is never included in a push notification.
        header = elem.find_class("spoiler-header")[0]
        text = process(header).strip()
        if len(text) == 0:
            return "(…)\n"
        return f"{text} (…)\n"

    def process(elem: lxml.html.HtmlElement) -> str:
        # Recursively flatten `elem` and its children into plain text.
        plain_text = ""
        if elem.tag == "ol":
            plain_text = render_olist(elem)
        elif "spoiler-block" in elem.get("class", ""):
            plain_text += render_spoiler(elem)
        else:
            plain_text = get_text(elem)
            sub_text = ""
            for child in elem:
                sub_text += process(child)
            if elem.tag == "blockquote":
                sub_text = format_as_quote(sub_text)
            plain_text += sub_text
            plain_text += elem.tail or ""
        return plain_text

    if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
        return (
            "*"
            + _(
                "This organization has disabled including message content in mobile push notifications"
            )
            + "*"
        )

    # create_parent=True wraps the fragment so mixed top-level content
    # parses as a single element tree.
    elem = lxml.html.fragment_fromstring(rendered_content, create_parent=True)
    plain_text = process(elem)
    return plain_text
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def truncate_content(content: str, max_length: int = 200) -> Tuple[str, bool]:
    """Truncate `content` to at most `max_length` characters.

    Returns a tuple of the (possibly truncated) content and a bool
    indicating whether truncation happened. The previous implementation
    hard-coded the 200-character limit in two places; it is now a
    backward-compatible parameter.
    """
    # We use Unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
    # of three dots as this saves two extra characters for textual
    # content. This function will need to be updated to handle Unicode
    # combining characters and tags when we start supporting them.
    if len(content) <= max_length:
        return content, False
    return content[:max_length] + "…", True
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-20 17:21:26 +02:00
|
|
|
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
    """Common fields for all notification payloads."""
    # These will let the app support logging into multiple realms and servers.
    data: Dict[str, Any] = {
        "server": settings.EXTERNAL_HOST,
        "realm_id": user_profile.realm.id,
        "realm_uri": user_profile.realm.uri,
        "user_id": user_profile.id,
    }
    return data
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-08 14:45:05 +02:00
|
|
|
def get_message_payload(
    user_profile: UserProfile,
    message: Message,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
) -> Dict[str, Any]:
    """Common fields for `message` payloads, for all platforms."""
    payload = get_base_payload(user_profile)

    # `sender_id` is preferred, but some existing versions use `sender_email`.
    payload.update(
        sender_id=message.sender.id,
        sender_email=message.sender.email,
    )

    if mentioned_user_group_id is not None:
        assert mentioned_user_group_name is not None
        payload.update(
            mentioned_user_group_id=mentioned_user_group_id,
            mentioned_user_group_name=mentioned_user_group_name,
        )

    recipient_type = message.recipient.type
    if recipient_type == Recipient.STREAM:
        payload.update(
            recipient_type="stream",
            stream=get_message_stream_name_from_database(message),
            stream_id=message.recipient.type_id,
            topic=message.topic_name(),
        )
    elif recipient_type == Recipient.HUDDLE:
        payload.update(
            recipient_type="private",
            pm_users=huddle_users(message.recipient.id),
        )
    else:  # Recipient.PERSONAL
        payload["recipient_type"] = "private"

    return payload
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-04 23:31:04 +02:00
|
|
|
def get_apns_alert_title(message: Message) -> str:
    """
    On an iOS notification, this is the first bolded line.
    """
    recipient = message.recipient
    if recipient.type == Recipient.HUDDLE:
        display_recipient = get_display_recipient(recipient)
        assert isinstance(display_recipient, list)
        names = sorted(r["full_name"] for r in display_recipient)
        return ", ".join(names)
    if message.is_stream_message():
        stream_name = get_message_stream_name_from_database(message)
        return f"#{stream_name} > {message.topic_name()}"
    # For 1:1 direct messages, we just show the sender name.
    return message.sender.full_name
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-20 10:01:15 +02:00
|
|
|
def get_apns_alert_subtitle(
    message: Message,
    trigger: str,
    mentioned_user_group_name: Optional[str] = None,
) -> str:
    """
    On an iOS notification, this is the second bolded line.
    """
    sender_name = message.sender.full_name

    if trigger == NotificationTriggers.MENTION:
        if mentioned_user_group_name is None:
            return _("{full_name} mentioned you:").format(full_name=sender_name)
        return _("{full_name} mentioned @{user_group_name}:").format(
            full_name=sender_name, user_group_name=mentioned_user_group_name
        )

    wildcard_triggers = (
        NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
        NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
        NotificationTriggers.TOPIC_WILDCARD_MENTION,
        NotificationTriggers.STREAM_WILDCARD_MENTION,
    )
    if trigger in wildcard_triggers:
        return _("{full_name} mentioned everyone:").format(full_name=sender_name)

    if message.recipient.type == Recipient.PERSONAL:
        return ""

    # For group direct messages, or regular messages to a stream,
    # just use a colon to indicate this is the sender.
    return sender_name + ":"
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_apns_badge_count(
    user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = None
) -> int:
    """Return the APNs badge count for this user.

    NOTE: We have temporarily set get_apns_badge_count to always
    return 0 until we can debug a likely mobile app side issue with
    handling notifications while the app is open.

    Both parameters are accepted (and ignored) so the signature matches
    get_apns_badge_count_future. The default for `read_messages_ids`
    was a mutable `[]`; it is now `None` to avoid the shared-mutable-
    default pitfall, with identical observable behavior.
    """
    return 0
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_apns_badge_count_future(
    user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = None
) -> int:
    """Count this user's messages with an active mobile push notification,
    excluding any in `read_messages_ids`.

    Future implementation of get_apns_badge_count; unused but
    we expect to use this once we resolve client-side bugs.
    """
    if read_messages_ids is None:
        # The default used to be a shared mutable `[]`; normalize None
        # to an empty list so the `exclude` below behaves the same.
        read_messages_ids = []
    return (
        UserMessage.objects.filter(user_profile=user_profile)
        .extra(where=[UserMessage.where_active_push_notification()])
        .exclude(
            # If we've just marked some messages as read, they're still
            # marked as having active notifications; we'll clear that flag
            # only after we've sent that update to the devices. So we need
            # to exclude them explicitly from the count.
            message_id__in=read_messages_ids
        )
        .count()
    )
|
2020-06-02 18:09:26 +02:00
|
|
|
|
2020-05-20 10:01:15 +02:00
|
|
|
def get_message_payload_apns(
    user_profile: UserProfile,
    message: Message,
    trigger: str,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
) -> Dict[str, Any]:
    """A `message` payload for iOS, via APNs."""
    zulip_data = get_message_payload(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name
    )
    zulip_data.update(message_ids=[message.id])

    assert message.rendered_content is not None
    # Build the user-visible strings in the recipient's language.
    with override_language(user_profile.default_language):
        content, _truncated = truncate_content(get_mobile_push_content(message.rendered_content))
        alert = {
            "title": get_apns_alert_title(message),
            "subtitle": get_apns_alert_subtitle(message, trigger, mentioned_user_group_name),
            "body": content,
        }
        apns_data = {
            "alert": alert,
            "sound": "default",
            "badge": get_apns_badge_count(user_profile),
            "custom": {"zulip": zulip_data},
        }
    return apns_data
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_message_payload_gcm(
    user_profile: UserProfile,
    message: Message,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `message` payload + options, for Android via GCM/FCM."""
    payload = get_message_payload(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name
    )
    assert message.rendered_content is not None
    # Render the message content in the recipient's language.
    with override_language(user_profile.default_language):
        content, was_truncated = truncate_content(get_mobile_push_content(message.rendered_content))
        payload.update(
            event="message",
            zulip_message_id=message.id,  # message_id is reserved for CCS
            time=datetime_to_timestamp(message.date_sent),
            content=content,
            content_truncated=was_truncated,
            sender_full_name=message.sender.full_name,
            sender_avatar_url=absolute_avatar_url(message.sender),
        )
    gcm_options = {"priority": "high"}
    return payload, gcm_options
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_remove_payload_gcm(
    user_profile: UserProfile,
    message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `remove` payload + options, for Android via GCM/FCM."""
    payload = get_base_payload(user_profile)
    payload.update(
        event="remove",
        zulip_message_ids=",".join(map(str, message_ids)),
        # Older clients (all clients older than 2019-02-13) look only at
        # `zulip_message_id` and ignore `zulip_message_ids`. Do our best.
        zulip_message_id=message_ids[0],
    )
    gcm_options = {"priority": "normal"}
    return payload, gcm_options
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-02 18:09:26 +02:00
|
|
|
def get_remove_payload_apns(user_profile: UserProfile, message_ids: List[int]) -> Dict[str, Any]:
    """A `remove` payload for iOS, via APNs."""
    zulip_data = get_base_payload(user_profile)
    zulip_data.update(
        event="remove",
        zulip_message_ids=",".join(map(str, message_ids)),
    )
    return {
        "badge": get_apns_badge_count(user_profile, message_ids),
        "custom": {"zulip": zulip_data},
    }
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:08:51 +01:00
|
|
|
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that previously had a
    mobile push notification executed is read. This triggers a push to the
    mobile app, when the message is read on the server, to remove the
    message from the notification.

    No-op when push notifications are not configured on this server.
    """
    if not push_notifications_enabled():
        return

    user_profile = get_user_profile_by_id(user_profile_id)

    # We may no longer have access to the message here; for example,
    # the user (1) got a message, (2) read the message in the web UI,
    # and then (3) it was deleted. When trying to send the push
    # notification for (2), after (3) has happened, there is no
    # message to fetch -- but we nonetheless want to remove the mobile
    # notification. Because of this, verification of access to
    # the messages is skipped here.
    # Because of this, no access to the Message objects should be
    # done; they are treated as a list of opaque ints.

    # APNs has a 4KB limit on the maximum size of messages, which
    # translated to several hundred message IDs in one of these
    # notifications. In rare cases, it's possible for someone to mark
    # thousands of push notification eligible messages as read at
    # once. We could handle this situation with a loop, but we choose
    # to truncate instead to avoid extra network traffic, because it's
    # very likely the user has manually cleared the notifications in
    # their mobile device's UI anyway.
    #
    # When truncating, we keep only the newest N messages in this
    # remove event. This is optimal because older messages are the
    # ones most likely to have already been manually cleared at some
    # point in the past.
    #
    # We choose 200 here because a 10-digit message ID plus a comma and
    # space consume 12 bytes, and 12 x 200 = 2400 bytes is still well
    # below the 4KB limit (leaving plenty of space for metadata).
    MAX_APNS_MESSAGE_IDS = 200
    truncated_message_ids = sorted(message_ids)[-MAX_APNS_MESSAGE_IDS:]
    # Both platform payloads are built from the truncated ID list.
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, truncated_message_ids)
    apns_payload = get_remove_payload_apns(user_profile, truncated_message_ids)

    if uses_notification_bouncer():
        # Delegate delivery to the push notification bouncer service.
        send_notifications_to_bouncer(user_profile_id, apns_payload, gcm_payload, gcm_options)
    else:
        # Deliver directly to this user's registered devices.
        user_identity = UserPushIdentityCompat(user_id=user_profile_id)
        android_devices = list(
            PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM)
        )
        apple_devices = list(
            PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS)
        )
        if android_devices:
            send_android_push_notification(user_identity, android_devices, gcm_payload, gcm_options)
        if apple_devices:
            send_apple_push_notification(user_identity, apple_devices, apns_payload)

    # We intentionally use the non-truncated message_ids here. We are
    # assuming in this very rare case that the user has manually
    # dismissed these notifications on the device side, and the server
    # should no longer track them as outstanding notifications.
    with transaction.atomic(savepoint=False):
        # Row-lock the UserMessage rows before clearing the
        # active_mobile_push_notification flag bit.
        UserMessage.select_for_update_query().filter(
            user_profile_id=user_profile_id,
            message_id__in=message_ids,
        ).update(flags=F("flags").bitand(~UserMessage.flags.active_mobile_push_notification))
|
|
2018-08-02 01:29:06 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications (APNs + FCM/GCM) for one message event.

    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.

    This is a best-effort operation: it returns silently (without raising)
    when notifications are disabled, the user is a bot, the message has been
    deleted/read, or the user has no registered devices.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)

    if user_profile.is_bot:  # nocoverage
        # We don't expect to reach here for bot users. However, this code exists
        # to find and throw away any pre-existing events in the queue while
        # upgrading from versions before our notifiability logic was implemented.
        # TODO/compatibility: This block can be removed when one can no longer
        # upgrade from versions <= 4.0 to versions >= 5.0
        logger.warning(
            "Send-push-notification event found for bot user %s. Skipping.", user_profile_id
        )
        return

    if not (
        user_profile.enable_offline_push_notifications
        or user_profile.enable_online_push_notifications
    ):
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    # Lock the message row while we check and update the UserMessage flags,
    # so that we don't race with a concurrent message deletion or another
    # notification worker for the same message.
    with transaction.atomic(savepoint=False):
        try:
            (message, user_message) = access_message(
                user_profile, missed_message["message_id"], lock_message=True
            )
        except JsonableError:
            if ArchivedMessage.objects.filter(id=missed_message["message_id"]).exists():
                # If the cause is a race with the message being deleted,
                # that's normal and we have no need to log an error.
                return
            # Use the module-level `logger` (not the root logger via
            # `logging.info`) for consistency with every other log call in
            # this function.
            logger.info(
                "Unexpected message access failure handling push notifications: %s %s",
                user_profile.id,
                missed_message["message_id"],
            )
            return

        if user_message is not None:
            # If the user has read the message already, don't push-notify.
            if user_message.flags.read or user_message.flags.active_mobile_push_notification:
                return

            # Otherwise, we mark the message as having an active mobile
            # push notification, so that we can send revocation messages
            # later.
            user_message.flags.active_mobile_push_notification = True
            user_message.save(update_fields=["flags"])
        else:
            # Users should only be getting push notifications into this
            # queue for messages they haven't received if they're
            # long-term idle; anything else is likely a bug.
            if not user_profile.long_term_idle:
                # NOTE(review): no exception is active here, so exc_info=True
                # logs "NoneType: None" rather than a useful traceback —
                # confirm whether this was intended before removing it.
                logger.error(
                    "Could not find UserMessage with message_id %s and user_id %s",
                    missed_message["message_id"],
                    user_profile_id,
                    exc_info=True,
                )
                return

    trigger = missed_message["trigger"]

    # TODO/compatibility: Translation code for the rename of
    # `wildcard_mentioned` to `stream_wildcard_mentioned`.
    # Remove this when one can no longer directly upgrade from 7.x to main.
    if trigger == "wildcard_mentioned":
        trigger = NotificationTriggers.STREAM_WILDCARD_MENTION  # nocoverage

    # TODO/compatibility: Translation code for the rename of
    # `followed_topic_wildcard_mentioned` to `stream_wildcard_mentioned_in_followed_topic`.
    # Remove this when one can no longer directly upgrade from 7.x to main.
    if trigger == "followed_topic_wildcard_mentioned":
        trigger = NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC  # nocoverage

    # TODO/compatibility: Translation code for the rename of
    # `private_message` to `direct_message`. Remove this when
    # one can no longer directly upgrade from 7.x to main.
    if trigger == "private_message":
        trigger = NotificationTriggers.DIRECT_MESSAGE  # nocoverage

    mentioned_user_group_name = None
    # mentioned_user_group_id will be None if the user is personally mentioned
    # regardless whether they are a member of the mentioned user group in the
    # message or not.
    mentioned_user_group_id = missed_message.get("mentioned_user_group_id")

    if mentioned_user_group_id is not None:
        user_group = UserGroup.objects.get(id=mentioned_user_group_id, realm=user_profile.realm)
        mentioned_user_group_name = user_group.name

    # Soft reactivate if pushing to a long_term_idle user that is personally mentioned
    soft_reactivate_if_personal_notification(user_profile, {trigger}, mentioned_user_group_name)

    # Build the per-platform payloads before deciding how to deliver them.
    apns_payload = get_message_payload_apns(
        user_profile, message, trigger, mentioned_user_group_id, mentioned_user_group_name
    )
    gcm_payload, gcm_options = get_message_payload_gcm(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name
    )
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)

    if uses_notification_bouncer():
        # Delegate delivery to the mobile push notification bouncer service,
        # which knows this user's registered devices.
        total_android_devices, total_apple_devices = send_notifications_to_bouncer(
            user_profile_id, apns_payload, gcm_payload, gcm_options
        )
        logger.info(
            "Sent mobile push notifications for user %s through bouncer: %s via FCM devices, %s via APNs devices",
            user_profile_id,
            total_android_devices,
            total_apple_devices,
        )
        return

    # Direct delivery: look up this user's registered device tokens locally.
    android_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.GCM)
    )

    apple_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS)
    )

    logger.info(
        "Sending mobile push notifications for local user %s: %s via FCM devices, %s via APNs devices",
        user_profile_id,
        len(android_devices),
        len(apple_devices),
    )
    user_identity = UserPushIdentityCompat(user_id=user_profile.id)
    send_apple_push_notification(user_identity, apple_devices, apns_payload)
    send_android_push_notification(user_identity, android_devices, gcm_payload, gcm_options)
|