2020-12-19 03:05:20 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
|
|
|
|
|
2021-06-08 02:45:49 +02:00
|
|
|
import asyncio
|
2017-08-19 01:12:40 +02:00
|
|
|
import base64
|
2023-10-05 13:53:09 +02:00
|
|
|
import copy
|
2017-08-19 01:12:40 +02:00
|
|
|
import logging
|
2017-10-07 00:18:07 +02:00
|
|
|
import re
|
2021-06-08 02:45:49 +02:00
|
|
|
from dataclasses import dataclass
|
2023-11-06 10:41:08 +01:00
|
|
|
from email.headerregistry import Address
|
2024-04-02 00:19:08 +02:00
|
|
|
from functools import cache
|
2023-08-29 03:03:11 +02:00
|
|
|
from typing import (
|
|
|
|
TYPE_CHECKING,
|
|
|
|
Any,
|
|
|
|
Dict,
|
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
Union,
|
|
|
|
)
|
2017-10-07 00:18:07 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import lxml.html
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2013-10-21 19:47:08 +02:00
|
|
|
from django.conf import settings
|
2024-04-12 18:34:37 +02:00
|
|
|
from django.db import transaction
|
2022-02-23 20:25:30 +01:00
|
|
|
from django.db.models import F, Q
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2021-06-25 20:52:22 +02:00
|
|
|
from django.utils.translation import override as override_language
|
2024-06-13 14:38:58 +02:00
|
|
|
from firebase_admin import App as FCMApp
|
|
|
|
from firebase_admin import credentials as firebase_credentials
|
|
|
|
from firebase_admin import exceptions as firebase_exceptions
|
|
|
|
from firebase_admin import initialize_app as firebase_initialize_app
|
|
|
|
from firebase_admin import messaging as firebase_messaging
|
|
|
|
from firebase_admin.messaging import UnregisteredError as FCMUnregisteredError
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import TypeAlias, override
|
2017-08-19 01:12:40 +02:00
|
|
|
|
2023-11-07 15:12:37 +01:00
|
|
|
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat
|
2023-11-23 22:07:41 +01:00
|
|
|
from zerver.actions.realm_settings import (
|
|
|
|
do_set_push_notifications_enabled_end_timestamp,
|
|
|
|
do_set_realm_property,
|
|
|
|
)
|
2023-11-06 10:41:08 +01:00
|
|
|
from zerver.lib.avatar import absolute_avatar_url, get_avatar_for_inaccessible_user
|
2023-12-15 03:04:08 +01:00
|
|
|
from zerver.lib.display_recipient import get_display_recipient
|
2023-08-18 21:31:54 +02:00
|
|
|
from zerver.lib.emoji_utils import hex_codepoint_to_emoji
|
2023-10-08 00:43:41 +02:00
|
|
|
from zerver.lib.exceptions import ErrorCode, JsonableError
|
2024-04-11 22:48:10 +02:00
|
|
|
from zerver.lib.message import access_message_and_usermessage, huddle_users
|
2024-04-22 12:44:22 +02:00
|
|
|
from zerver.lib.notification_data import get_mentioned_user_group
|
2023-11-23 22:07:41 +01:00
|
|
|
from zerver.lib.remote_server import (
|
2024-03-18 01:18:53 +01:00
|
|
|
record_push_notifications_recently_working,
|
2023-11-23 22:07:41 +01:00
|
|
|
send_json_to_push_bouncer,
|
2023-12-11 14:24:13 +01:00
|
|
|
send_server_data_to_push_bouncer,
|
2023-11-23 22:07:41 +01:00
|
|
|
send_to_push_bouncer,
|
|
|
|
)
|
2022-04-15 22:07:22 +02:00
|
|
|
from zerver.lib.soft_deactivation import soft_reactivate_if_personal_notification
|
2023-11-27 06:23:24 +01:00
|
|
|
from zerver.lib.tex import change_katex_to_raw_latex
|
2023-09-29 20:04:09 +02:00
|
|
|
from zerver.lib.timestamp import datetime_to_timestamp
|
2024-06-17 17:39:07 +02:00
|
|
|
from zerver.lib.url_decoding import is_same_server_message_link
|
2023-11-06 10:41:08 +01:00
|
|
|
from zerver.lib.users import check_can_access_user
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
2021-08-15 18:35:37 +02:00
|
|
|
AbstractPushDeviceToken,
|
2020-06-11 00:54:34 +02:00
|
|
|
ArchivedMessage,
|
|
|
|
Message,
|
|
|
|
PushDeviceToken,
|
2023-11-23 22:07:41 +01:00
|
|
|
Realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
Recipient,
|
2023-07-16 13:59:49 +02:00
|
|
|
Stream,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2023-12-15 02:14:24 +01:00
|
|
|
from zerver.models.realms import get_fake_email_domain
|
2023-12-15 20:21:59 +01:00
|
|
|
from zerver.models.scheduled_jobs import NotificationTriggers
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import get_user_profile_by_id
|
2016-06-08 12:32:59 +02:00
|
|
|
|
2019-08-10 00:30:33 +02:00
|
|
|
if TYPE_CHECKING:
|
2021-06-08 02:45:49 +02:00
|
|
|
import aioapns
|
2019-08-10 00:30:33 +02:00
|
|
|
|
2018-11-27 18:45:45 +01:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2017-05-09 12:15:11 +02:00
|
|
|
if settings.ZILENCER_ENABLED:
|
2021-10-20 08:16:10 +02:00
|
|
|
from zilencer.models import RemotePushDeviceToken, RemoteZulipServer
|
2017-05-09 12:15:11 +02:00
|
|
|
|
2023-08-02 23:53:10 +02:00
|
|
|
DeviceToken: TypeAlias = Union[PushDeviceToken, "RemotePushDeviceToken"]
|
2017-05-09 12:15:11 +02:00
|
|
|
|
2023-02-02 04:35:24 +01:00
|
|
|
|
2013-10-21 19:47:08 +02:00
|
|
|
# We store the token as b64, but apns-client wants hex strings
|
2019-11-13 06:54:30 +01:00
|
|
|
def b64_to_hex(data: str) -> str:
    """Convert a base64-encoded token into its hex-string representation."""
    raw_bytes = base64.b64decode(data)
    return raw_bytes.hex()
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def hex_to_b64(data: str) -> str:
|
2020-10-30 01:59:56 +01:00
|
|
|
return base64.b64encode(bytes.fromhex(data)).decode()
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-16 13:59:49 +02:00
|
|
|
def get_message_stream_name_from_database(message: Message) -> str:
    """
    Never use this function outside of the push-notifications
    codepath. Most of our code knows how to get streams
    up front in a more efficient manner.
    """
    # One extra database round-trip per message, acceptable only here.
    stream = Stream.objects.get(id=message.recipient.type_id)
    return stream.name
|
|
|
|
|
|
|
|
|
2023-01-02 20:50:23 +01:00
|
|
|
class UserPushIdentityCompat:
    """Compatibility class for supporting the transition from remote servers
    sending their UserProfile ids to the bouncer to sending UserProfile uuids instead.

    Until we can drop support for receiving user_id, we need this
    class, because a user's identity in the push notification context
    may be represented either by an id or uuid.
    """

    def __init__(self, user_id: Optional[int] = None, user_uuid: Optional[str] = None) -> None:
        # At least one form of the identity must be provided for the
        # object to be meaningful.
        assert user_id is not None or user_uuid is not None
        self.user_id = user_id
        self.user_uuid = user_uuid

    def filter_q(self) -> Q:
        """
        This aims to support correctly querying for RemotePushDeviceToken.
        If only one of (user_id, user_uuid) is provided, the situation is trivial,
        If both are provided, we want to query for tokens matching EITHER the
        uuid or the id - because the user may have devices with old registrations,
        so user_id-based, as well as new registration with uuid. Notifications
        naturally should be sent to both.
        """
        if self.user_id is not None and self.user_uuid is None:
            return Q(user_id=self.user_id)
        elif self.user_uuid is not None and self.user_id is None:
            return Q(user_uuid=self.user_uuid)
        else:
            assert self.user_id is not None and self.user_uuid is not None
            return Q(user_uuid=self.user_uuid) | Q(user_id=self.user_id)

    @override
    def __str__(self) -> str:
        result = ""
        if self.user_id is not None:
            result += f"<id:{self.user_id}>"
        if self.user_uuid is not None:
            result += f"<uuid:{self.user_uuid}>"

        return result

    @override
    def __eq__(self, other: object) -> bool:
        if isinstance(other, UserPushIdentityCompat):
            return self.user_id == other.user_id and self.user_uuid == other.user_uuid
        return False

    @override
    def __hash__(self) -> int:
        # Defining __eq__ without __hash__ implicitly sets __hash__ to
        # None, making instances unhashable; define a hash consistent
        # with __eq__ so instances can be used in sets/dict keys.
        return hash((self.user_id, self.user_uuid))
|
|
|
|
|
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to APNs, for iOS
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-08 02:45:49 +02:00
|
|
|
@dataclass
class APNsContext:
    # The shared state needed to talk to APNs: the aioapns client and
    # the event loop its async send operations must be run on.
    apns: "aioapns.APNs"
    loop: asyncio.AbstractEventLoop
|
|
|
|
|
|
|
|
|
2023-11-23 22:16:18 +01:00
|
|
|
def has_apns_credentials() -> bool:
    """Whether this server is configured with credentials to talk to APNs directly."""
    token_key_file = settings.APNS_TOKEN_KEY_FILE
    cert_file = settings.APNS_CERT_FILE
    return not (token_key_file is None and cert_file is None)
|
|
|
|
|
|
|
|
|
2024-04-02 00:19:08 +02:00
|
|
|
@cache
def get_apns_context() -> Optional[APNsContext]:
    """Build (once, via @cache) the APNs client and event loop used for
    sending Apple push notifications, or return None if this server has
    no APNs credentials configured."""
    # We lazily do this import as part of optimizing Zulip's base
    # import time.
    import aioapns

    if not has_apns_credentials():  # nocoverage
        return None

    # NB if called concurrently, this will make excess connections.
    # That's a little sloppy, but harmless unless a server gets
    # hammered with a ton of these all at once after startup.
    loop = asyncio.new_event_loop()

    # Defining a no-op error-handling function overrides the default
    # behaviour of logging at ERROR level whenever delivery fails; we
    # handle those errors by checking the result in
    # send_apple_push_notification.
    async def err_func(
        request: aioapns.NotificationRequest, result: aioapns.common.NotificationResult
    ) -> None:
        pass  # nocoverage

    async def make_apns() -> aioapns.APNs:
        # aioapns supports either cert-based or token-based auth;
        # whichever of these settings is unset is passed as None.
        return aioapns.APNs(
            client_cert=settings.APNS_CERT_FILE,
            key=settings.APNS_TOKEN_KEY_FILE,
            key_id=settings.APNS_TOKEN_KEY_ID,
            team_id=settings.APNS_TEAM_ID,
            max_connection_attempts=APNS_MAX_RETRIES,
            use_sandbox=settings.APNS_SANDBOX,
            err_func=err_func,
            # The actual APNs topic will vary between notifications,
            # so we set it there, overriding any value we put here.
            # We can't just leave this out, though, because then
            # the constructor attempts to guess.
            topic="invalid.nonsense",
        )

    apns = loop.run_until_complete(make_apns())
    return APNsContext(apns=apns, loop=loop)
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-06 11:56:48 +02:00
|
|
|
def modernize_apns_payload(data: Mapping[str, Any]) -> Mapping[str, Any]:
    """Take a payload in an unknown Zulip version's format, and return in current format."""
    # TODO this isn't super robust as is -- if a buggy remote server
    # sends a malformed payload, we are likely to raise an exception.
    if "message_ids" not in data:
        # Something already compatible with the current format.
        # `alert` may be a string, or a dict with `title` and `body`.
        # In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
        # item in `custom.zulip` is `message_ids`.
        return data

    # The format sent by 1.6.0, from the earliest pre-1.6.0
    # version with bouncer support up until 613d093d7 pre-1.7.0:
    # 'alert': str, # just sender, and text about direct message/mention
    # 'message_ids': List[int], # always just one
    return {
        "alert": data["alert"],
        "badge": 0,
        "custom": {"zulip": {"message_ids": data["message_ids"]}},
    }
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-29 01:05:20 +02:00
|
|
|
# Maximum number of connection attempts aioapns makes before giving up
# (passed as max_connection_attempts in get_apns_context).
APNS_MAX_RETRIES = 3
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def send_apple_push_notification(
    user_identity: UserPushIdentityCompat,
    devices: Sequence[DeviceToken],
    payload_data: Mapping[str, Any],
    remote: Optional["RemoteZulipServer"] = None,
) -> int:
    """Send an APNs notification for one user to the given device tokens.

    When `remote` is set, we are acting as the push bouncer for another
    Zulip server, so token cleanup targets RemotePushDeviceToken rows.

    Returns the number of devices to which delivery succeeded.
    """
    if not devices:
        return 0
    # We lazily do the APNS imports as part of optimizing Zulip's base
    # import time; since these are only needed in the push
    # notification queue worker, it's best to only import them in the
    # code that needs them.
    import aioapns
    import aioapns.exceptions

    apns_context = get_apns_context()
    if apns_context is None:
        logger.debug(
            "APNs: Dropping a notification because nothing configured.  "
            "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE)."
        )
        return 0

    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass: Type[AbstractPushDeviceToken] = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    if remote:
        logger.info(
            "APNs: Sending notification for remote user %s:%s to %d devices",
            remote.uuid,
            user_identity,
            len(devices),
        )
    else:
        logger.info(
            "APNs: Sending notification for local user %s to %d devices",
            user_identity,
            len(devices),
        )
    payload_data = dict(modernize_apns_payload(payload_data))
    # APNs expects its own fields under "aps" and app-custom data at the
    # top level of the message, so we hoist our "custom" dict out.
    message = {**payload_data.pop("custom", {}), "aps": payload_data}

    have_missing_app_id = False
    for device in devices:
        if device.ios_app_id is None:
            # This should be present for all APNs tokens, as an invariant maintained
            # by the views that add the token to our database.
            logger.error(
                "APNs: Missing ios_app_id for user %s device %s", user_identity, device.token
            )
            have_missing_app_id = True
    if have_missing_app_id:
        devices = [device for device in devices if device.ios_app_id is not None]

    async def send_all_notifications() -> (
        Iterable[Tuple[DeviceToken, Union[aioapns.common.NotificationResult, BaseException]]]
    ):
        # Send to all devices concurrently; return_exceptions=True means a
        # failure for one device cannot abort the whole batch.
        requests = [
            aioapns.NotificationRequest(
                apns_topic=device.ios_app_id,
                device_token=device.token,
                message=message,
                time_to_live=24 * 3600,
            )
            for device in devices
        ]
        results = await asyncio.gather(
            *(apns_context.apns.send_notification(request) for request in requests),
            return_exceptions=True,
        )
        return zip(devices, results)

    results = apns_context.loop.run_until_complete(send_all_notifications())

    successfully_sent_count = 0
    for device, result in results:
        if isinstance(result, aioapns.exceptions.ConnectionError):
            logger.error(
                "APNs: ConnectionError sending for user %s to device %s; check certificate expiration",
                user_identity,
                device.token,
            )
        elif isinstance(result, BaseException):
            logger.error(
                "APNs: Error sending for user %s to device %s",
                user_identity,
                device.token,
                exc_info=result,
            )
        elif result.is_successful:
            successfully_sent_count += 1
            logger.info(
                "APNs: Success sending for user %s to device %s", user_identity, device.token
            )
        elif result.description in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
            # APNs told us this token is no longer valid.
            logger.info(
                "APNs: Removing invalid/expired token %s (%s)", device.token, result.description
            )
            # We remove all entries for this token (There
            # could be multiple for different Zulip servers).
            DeviceTokenClass._default_manager.filter(
                token=device.token, kind=DeviceTokenClass.APNS
            ).delete()
        else:
            logger.warning(
                "APNs: Failed to send for user %s to device %s: %s",
                user_identity,
                device.token,
                result.description,
            )

    return successfully_sent_count
|
|
|
|
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
2024-06-13 21:04:40 +02:00
|
|
|
# Sending to FCM, for Android
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-06-13 14:38:58 +02:00
|
|
|
# Note: This is a timeout value (in seconds) per retry, not a total timeout.
FCM_REQUEST_TIMEOUT = 5
|
|
|
|
|
|
|
|
|
|
|
|
def make_fcm_app() -> Optional[FCMApp]:  # nocoverage
    """Initialize the Firebase app object used for sending FCM
    notifications, or return None if FCM credentials are not configured."""
    if settings.ANDROID_FCM_CREDENTIALS_PATH is None:
        return None

    fcm_credentials = firebase_credentials.Certificate(settings.ANDROID_FCM_CREDENTIALS_PATH)
    fcm_app = firebase_initialize_app(
        fcm_credentials, options=dict(httpTimeout=FCM_REQUEST_TIMEOUT)
    )

    return fcm_app
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level FCM app handle, initialized once at import time; remains
# None when this server has no FCM credentials configured.
fcm_app = make_fcm_app() if settings.ANDROID_FCM_CREDENTIALS_PATH else None  # nocoverage
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-06-13 20:56:42 +02:00
|
|
|
def has_fcm_credentials() -> bool:  # nocoverage
    """Whether an FCM app was initialized, i.e. FCM credentials are configured."""
    return not (fcm_app is None)
|
2018-02-12 23:34:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-10-20 01:52:23 +02:00
|
|
|
# This is purely used in testing
def send_android_push_notification_to_user(
    user_profile: UserProfile, data: Dict[str, Any], options: Dict[str, Any]
) -> None:
    """Send an FCM notification to every FCM device registered for the user."""
    devices = list(PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.FCM))
    send_android_push_notification(
        UserPushIdentityCompat(user_id=user_profile.id), devices, data, options
    )
|
2017-03-06 03:11:44 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-06-13 21:24:17 +02:00
|
|
|
def parse_fcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
    """
    Parse FCM options, supplying defaults, and raising an error if invalid.

    The options permitted here form part of the Zulip notification
    bouncer's API. They are:

    `priority`: Passed through to FCM; see upstream doc linked below.
    Zulip servers should always set this; when unset, we guess a value
    based on the behavior of old server versions.

    Including unrecognized options is an error.

    For details on options' semantics, see this FCM upstream doc:
      https://firebase.google.com/docs/cloud-messaging/android/message-priority

    Returns `priority`.
    """
    priority = options.pop("priority", None)
    if priority is None:
        # An older server. Identify if this seems to be an actual notification.
        priority = "high" if data.get("event") == "message" else "normal"

    if priority not in ("normal", "high"):
        raise JsonableError(
            _(
                "Invalid GCM option to bouncer: priority {priority!r}",
            ).format(priority=priority)
        )

    if options:
        # We're strict about the API; there is no use case for a newer Zulip
        # server talking to an older bouncer, so we only need to provide
        # one-way compatibility.
        raise JsonableError(
            _(
                "Invalid GCM options to bouncer: {options}",
            ).format(options=orjson.dumps(options).decode())
        )

    return priority  # when this grows a second option, can make it a tuple
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def send_android_push_notification(
    user_identity: UserPushIdentityCompat,
    devices: Sequence[DeviceToken],
    data: Dict[str, Any],
    options: Dict[str, Any],
    remote: Optional["RemoteZulipServer"] = None,
) -> int:
    """
    Send a FCM message to the given devices.

    See https://firebase.google.com/docs/cloud-messaging/http-server-ref
    for the FCM upstream API which this talks to.

    data: The JSON object (decoded) to send as the 'data' parameter of
        the FCM message.
    options: Additional options to control the FCM message sent.
        For details, see `parse_fcm_options`.

    When `remote` is set, we are acting as the push bouncer for another
    Zulip server, so token cleanup targets RemotePushDeviceToken rows.

    Returns the number of devices to which delivery succeeded.
    """
    if not devices:
        return 0
    if not fcm_app:
        logger.debug(
            "Skipping sending a FCM push notification since "
            "PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_FCM_CREDENTIALS_PATH are both unset"
        )
        return 0

    if remote:
        logger.info(
            "FCM: Sending notification for remote user %s:%s to %d devices",
            remote.uuid,
            user_identity,
            len(devices),
        )
    else:
        logger.info(
            "FCM: Sending notification for local user %s to %d devices", user_identity, len(devices)
        )

    token_list = [device.token for device in devices]
    priority = parse_fcm_options(options, data)

    # The API requires all values to be strings. Our data dict is going to have
    # things like an integer realm and user ids etc., so just convert everything
    # like that.
    data = {k: str(v) if not isinstance(v, str) else v for k, v in data.items()}
    messages = [
        firebase_messaging.Message(
            data=data, token=token, android=firebase_messaging.AndroidConfig(priority=priority)
        )
        for token in token_list
    ]

    try:
        batch_response = firebase_messaging.send_each(messages, app=fcm_app)
    except firebase_exceptions.FirebaseError:
        # A failure of the whole batch request; per-device failures are
        # reported in batch_response.responses below instead.
        logger.warning("Error while pushing to FCM", exc_info=True)
        return 0

    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass: Type[AbstractPushDeviceToken] = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    successfully_sent_count = 0
    for idx, response in enumerate(batch_response.responses):
        # We enumerate to have idx to track which token the response
        # corresponds to. send_each() preserves the order of the messages,
        # so this works.

        token = token_list[idx]
        if response.success:
            successfully_sent_count += 1
            logger.info("FCM: Sent message with ID: %s to %s", response.message_id, token)
        else:
            error = response.exception
            if isinstance(error, FCMUnregisteredError):
                # FCM told us this token is no longer valid.
                logger.info("FCM: Removing %s due to %s", token, error.code)

                # We remove all entries for this token (There
                # could be multiple for different Zulip servers).
                DeviceTokenClass._default_manager.filter(
                    token=token, kind=DeviceTokenClass.FCM
                ).delete()
            else:
                logger.warning("FCM: Delivery failed for %s: %s:%s", token, error.__class__, error)

    return successfully_sent_count
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to a bouncer
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def uses_notification_bouncer() -> bool:
    """Whether this server forwards push notifications through a push bouncer."""
    bouncer_url = settings.PUSH_NOTIFICATION_BOUNCER_URL
    return bouncer_url is not None
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-11-23 22:07:41 +01:00
|
|
|
def sends_notifications_directly() -> bool:
    """Whether this server talks to APNs and FCM itself, rather than via a bouncer."""
    if uses_notification_bouncer():
        return False
    return has_apns_credentials() and has_fcm_credentials()
|
2023-11-23 22:07:41 +01:00
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def send_notifications_to_bouncer(
    user_profile: UserProfile,
    apns_payload: Dict[str, Any],
    gcm_payload: Dict[str, Any],
    gcm_options: Dict[str, Any],
    android_devices: Sequence[DeviceToken],
    apple_devices: Sequence[DeviceToken],
) -> None:
    """Forward one user's push notification to the push bouncer, and apply
    whatever state updates the bouncer's response implies: deleting tokens
    it reports dead, logging counts, and updating the realm's
    push_notifications_enabled status."""
    if len(android_devices) + len(apple_devices) == 0:
        logger.info(
            "Skipping contacting the bouncer for user %s because there are no registered devices",
            user_profile.id,
        )
        return

    post_data = {
        "user_uuid": str(user_profile.uuid),
        # user_uuid is the intended future format, but we also need to send user_id
        # to avoid breaking old mobile registrations, which were made with user_id.
        "user_id": user_profile.id,
        "realm_uuid": str(user_profile.realm.uuid),
        "apns_payload": apns_payload,
        "gcm_payload": gcm_payload,
        "gcm_options": gcm_options,
        "android_devices": [device.token for device in android_devices],
        "apple_devices": [device.token for device in apple_devices],
    }
    # Calls zilencer.views.remote_server_notify_push

    try:
        response_data = send_json_to_push_bouncer("POST", "push/notify", post_data)
    except PushNotificationsDisallowedByBouncerError as e:
        # The bouncer rejected us outright (e.g. plan limits); record
        # locally that push notifications are not working.
        logger.warning("Bouncer refused to send push notification: %s", e.reason)
        do_set_realm_property(
            user_profile.realm,
            "push_notifications_enabled",
            False,
            acting_user=None,
        )
        do_set_push_notifications_enabled_end_timestamp(user_profile.realm, None, acting_user=None)
        return

    assert isinstance(response_data["total_android_devices"], int)
    assert isinstance(response_data["total_apple_devices"], int)

    assert isinstance(response_data["deleted_devices"], dict)
    assert isinstance(response_data["deleted_devices"]["android_devices"], list)
    assert isinstance(response_data["deleted_devices"]["apple_devices"], list)
    android_deleted_devices = response_data["deleted_devices"]["android_devices"]
    apple_deleted_devices = response_data["deleted_devices"]["apple_devices"]
    if android_deleted_devices or apple_deleted_devices:
        # The bouncer reported these tokens invalid/expired; drop our
        # local registrations for them too.
        logger.info(
            "Deleting push tokens based on response from bouncer: Android: %s, Apple: %s",
            sorted(android_deleted_devices),
            sorted(apple_deleted_devices),
        )
        PushDeviceToken.objects.filter(
            kind=PushDeviceToken.FCM, token__in=android_deleted_devices
        ).delete()
        PushDeviceToken.objects.filter(
            kind=PushDeviceToken.APNS, token__in=apple_deleted_devices
        ).delete()

    total_android_devices, total_apple_devices = (
        response_data["total_android_devices"],
        response_data["total_apple_devices"],
    )
    do_increment_logging_stat(
        user_profile.realm,
        COUNT_STATS["mobile_pushes_sent::day"],
        None,
        timezone_now(),
        increment=total_android_devices + total_apple_devices,
    )

    remote_realm_dict = response_data.get("realm")
    if remote_realm_dict is not None:
        # The server may have updated our understanding of whether
        # push notifications will work.
        assert isinstance(remote_realm_dict, dict)
        can_push = remote_realm_dict["can_push"]
        do_set_realm_property(
            user_profile.realm,
            "push_notifications_enabled",
            can_push,
            acting_user=None,
        )
        do_set_push_notifications_enabled_end_timestamp(
            user_profile.realm, remote_realm_dict["expected_end_timestamp"], acting_user=None
        )
        if can_push:
            record_push_notifications_recently_working()

    logger.info(
        "Sent mobile push notifications for user %s through bouncer: %s via FCM devices, %s via APNs devices",
        user_profile.id,
        total_android_devices,
        total_apple_devices,
    )
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Managing device tokens
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def add_push_device_token(
    user_profile: UserProfile, token_str: str, kind: int, ios_app_id: Optional[str] = None
) -> None:
    """Record a mobile device's push token for this user.

    Always stores a local PushDeviceToken row; when the push
    notification bouncer is configured, additionally registers the
    token with the bouncer service.

    Args:
        user_profile: Owner of the device.
        token_str: The device's push token.
        kind: PushDeviceToken.APNS or PushDeviceToken.FCM.
        ios_app_id: iOS bundle ID; only relevant for APNS tokens.
    """
    logger.info(
        "Registering push device: %d %r %d %r", user_profile.id, token_str, kind, ios_app_id
    )

    # Regardless of whether we're using the push notifications
    # bouncer, we want to store a PushDeviceToken record locally.
    # These can be used to discern whether the user has any mobile
    # devices configured, and is also where we will store encryption
    # keys for mobile push notifications.
    PushDeviceToken.objects.bulk_create(
        [
            PushDeviceToken(
                user_id=user_profile.id,
                token=token_str,
                kind=kind,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now(),
            ),
        ],
        # ignore_conflicts makes re-registering the same token a no-op
        # rather than an IntegrityError.
        ignore_conflicts=True,
    )

    if not uses_notification_bouncer():
        return

    # If we're sending things to the push notification bouncer
    # register this user with them here
    post_data = {
        "server_uuid": settings.ZULIP_ORG_ID,
        "user_uuid": str(user_profile.uuid),
        "realm_uuid": str(user_profile.realm.uuid),
        # user_id is sent so that the bouncer can delete any pre-existing registrations
        # for this user+device to avoid duplication upon adding the uuid registration.
        "user_id": str(user_profile.id),
        "token": token_str,
        "token_kind": kind,
    }

    if kind == PushDeviceToken.APNS:
        post_data["ios_app_id"] = ios_app_id

    logger.info("Sending new push device to bouncer: %r", post_data)
    # Calls zilencer.views.register_remote_push_device
    send_to_push_bouncer("POST", "push/register", post_data)
|
2020-06-30 03:10:29 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def remove_push_device_token(user_profile: UserProfile, token_str: str, kind: int) -> None:
    """Delete the local registration for this device token and, when the
    push notification bouncer is in use, unregister it there too.
    """
    try:
        PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile).delete()
    except PushDeviceToken.DoesNotExist:
        # If we are using bouncer, don't raise the exception. It will
        # be raised by the code below eventually. This is important
        # during the transition period after upgrading to a version
        # that stores local PushDeviceToken objects even when using
        # the push notifications bouncer.
        if not uses_notification_bouncer():
            raise JsonableError(_("Token does not exist"))

    if not uses_notification_bouncer():
        return

    # If we're sending things to the push notification bouncer,
    # unregister this user with them here.
    # TODO: Make this a remove item
    post_data = {
        "server_uuid": settings.ZULIP_ORG_ID,
        "realm_uuid": str(user_profile.realm.uuid),
        # We don't know here if the token was registered with uuid
        # or using the legacy id format, so we need to send both.
        "user_uuid": str(user_profile.uuid),
        "user_id": user_profile.id,
        "token": token_str,
        "token_kind": kind,
    }
    # Calls zilencer.views.unregister_remote_push_device
    send_to_push_bouncer("POST", "push/unregister", post_data)
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-19 03:12:54 +01:00
|
|
|
def clear_push_device_tokens(user_profile_id: int) -> None:
    """Delete every push device registration belonging to this user."""
    if not uses_notification_bouncer():
        # Local-only installation: just drop the rows.
        PushDeviceToken.objects.filter(user_id=user_profile_id).delete()
        return

    # With the bouncer in use, ask it to forget all of this user's tokens.
    user_profile = get_user_profile_by_id(user_profile_id)
    post_data = {
        "server_uuid": settings.ZULIP_ORG_ID,
        "realm_uuid": str(user_profile.realm.uuid),
        # We want to clear all registered token, and they may have
        # been registered with either uuid or id.
        "user_uuid": str(user_profile.uuid),
        "user_id": user_profile_id,
    }
    send_to_push_bouncer("POST", "push/unregister/all", post_data)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Push notifications in general
|
|
|
|
#
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-11-23 22:10:26 +01:00
|
|
|
def push_notifications_configured() -> bool:
    """True just if this server has configured a way to send push notifications."""
    bouncer_fully_configured = (
        uses_notification_bouncer()
        and settings.ZULIP_ORG_KEY is not None
        and settings.ZULIP_ORG_ID is not None
    )
    if bouncer_fully_configured:  # nocoverage
        # We have the needed configuration to send push notifications through
        # the bouncer. Better yet would be to confirm that this config actually
        # works -- e.g., that we have ever successfully sent to the bouncer --
        # but this is a good start.
        return True

    if settings.DEVELOPMENT and (has_apns_credentials() or has_fcm_credentials()):  # nocoverage
        # Since much of the notifications logic is platform-specific, the mobile
        # developers often work on just one platform at a time, so we should
        # only require one to be configured.
        return True

    if has_apns_credentials() and has_fcm_credentials():  # nocoverage
        # We have the needed configuration to send through APNs and FCM directly
        # (i.e., we are the bouncer, presumably.) Again, assume it actually works.
        return True

    return False
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-27 18:12:11 +01:00
|
|
|
def initialize_push_notifications() -> None:
    """Called during startup of the push notifications worker to check
    whether we expect mobile push notifications to work on this server
    and update state accordingly.
    """

    if sends_notifications_directly():
        # This server sends push notifications directly. Make sure we
        # are set to report to clients that push notifications are
        # enabled.
        for realm in Realm.objects.filter(push_notifications_enabled=False):
            do_set_realm_property(realm, "push_notifications_enabled", True, acting_user=None)
            do_set_push_notifications_enabled_end_timestamp(realm, None, acting_user=None)
        return

    if not push_notifications_configured():
        # No working configuration at all: mark every realm as having
        # push notifications disabled.
        for realm in Realm.objects.filter(push_notifications_enabled=True):
            do_set_realm_property(realm, "push_notifications_enabled", False, acting_user=None)
            do_set_push_notifications_enabled_end_timestamp(realm, None, acting_user=None)
        if settings.DEVELOPMENT and not settings.TEST_SUITE:
            # Avoid unnecessary spam on development environment startup
            return  # nocoverage
        logger.warning(
            "Mobile push notifications are not configured.\n "
            "See https://zulip.readthedocs.io/en/latest/"
            "production/mobile-push-notifications.html"
        )
        return

    if uses_notification_bouncer():
        # If we're using the notification bouncer, check if we can
        # actually send push notifications, and update our
        # understanding of that state for each realm accordingly.
        send_server_data_to_push_bouncer(consider_usage_statistics=False)
        return

    # Partially configured (e.g. only one of APNs/FCM credentials
    # outside of development): warn and disable for all realms.
    logger.warning(  # nocoverage
        "Mobile push notifications are not fully configured.\n "
        "See https://zulip.readthedocs.io/en/latest/production/mobile-push-notifications.html"
    )
    for realm in Realm.objects.filter(push_notifications_enabled=True):  # nocoverage
        do_set_realm_property(realm, "push_notifications_enabled", False, acting_user=None)
        do_set_push_notifications_enabled_end_timestamp(realm, None, acting_user=None)
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-27 18:12:11 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_mobile_push_content(rendered_content: str) -> str:
    """Convert a message's rendered HTML into the plain text shown in a
    mobile push notification.

    Quoted/"user said" paragraphs may be collapsed to "[…]", and the
    whole content is replaced with a generic string when the server is
    configured to redact notification content.
    """

    def get_text(elem: lxml.html.HtmlElement) -> str:
        # Extract the visible text of a single element (children are
        # handled separately by process()).
        # Convert default emojis to their Unicode equivalent.
        classes = elem.get("class", "")
        if "emoji" in classes:
            match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
            if match:
                emoji_code = match.group("emoji_code")
                return hex_codepoint_to_emoji(emoji_code)
        # Handles realm emojis, avatars etc.
        if elem.tag == "img":
            return elem.get("alt", "")
        if elem.tag == "blockquote":
            return ""  # To avoid empty line before quote text
        return elem.text or ""

    def format_as_quote(quote_text: str) -> str:
        # Prefix each non-empty line with "> ", Markdown-quote style.
        return "".join(
            f"> {line}\n"
            for line in quote_text.splitlines()
            if line  # Remove empty lines
        )

    def render_olist(ol: lxml.html.HtmlElement) -> str:
        # Render an ordered list as numbered plain-text lines,
        # honoring the list's `start` attribute and indenting nested
        # lists by their nesting depth.
        items = []
        counter = int(ol.get("start")) if ol.get("start") else 1
        nested_levels = sum(1 for ancestor in ol.iterancestors("ol"))
        indent = ("\n" + " " * nested_levels) if nested_levels else ""

        for li in ol:
            items.append(indent + str(counter) + ". " + process(li).strip())
            counter += 1

        return "\n".join(items)

    def render_spoiler(elem: lxml.html.HtmlElement) -> str:
        # Render a spoiler as its header text plus "(…)", hiding the body.
        header = elem.find_class("spoiler-header")[0]
        text = process(header).strip()
        if len(text) == 0:
            return "(…)\n"
        return f"{text} (…)\n"

    def process(elem: lxml.html.HtmlElement) -> str:
        # Recursively flatten an element and its subtree into plain text.
        plain_text = ""
        if elem.tag == "ol":
            plain_text = render_olist(elem)
        elif "spoiler-block" in elem.get("class", ""):
            plain_text += render_spoiler(elem)
        else:
            plain_text = get_text(elem)
            sub_text = ""
            for child in elem:
                sub_text += process(child)
            if elem.tag == "blockquote":
                sub_text = format_as_quote(sub_text)
            plain_text += sub_text
            plain_text += elem.tail or ""
        return plain_text

    def is_user_said_paragraph(element: lxml.html.HtmlElement) -> bool:
        # The user said paragraph has these exact elements:
        # 1. A user mention
        # 2. A same server message link ("said")
        # 3. A colon (:)
        user_mention_elements = element.find_class("user-mention")
        if len(user_mention_elements) != 1:
            return False

        message_link_elements = []
        anchor_elements = element.cssselect("a[href]")
        for elem in anchor_elements:
            href = elem.get("href")
            if is_same_server_message_link(href):
                message_link_elements.append(elem)

        if len(message_link_elements) != 1:
            return False

        # Once the mention and the message link are stripped, only the
        # colon should remain.
        remaining_text = (
            element.text_content()
            .replace(user_mention_elements[0].text_content(), "")
            .replace(message_link_elements[0].text_content(), "")
        )
        return remaining_text.strip() == ":"

    def get_collapsible_status_array(elements: List[lxml.html.HtmlElement]) -> List[bool]:
        # True for each top-level element that is quoted content
        # (a blockquote or a "user said" paragraph).
        collapsible_status: List[bool] = [
            element.tag == "blockquote" or is_user_said_paragraph(element) for element in elements
        ]
        return collapsible_status

    def potentially_collapse_quotes(element: lxml.html.HtmlElement) -> None:
        # Replace each run of quoted elements with a single "[…]"
        # placeholder, but only when the message mixes quoted and
        # non-quoted content (all-quote / no-quote messages are left
        # untouched).
        children = element.getchildren()
        collapsible_status = get_collapsible_status_array(children)

        if all(collapsible_status) or all(not x for x in collapsible_status):
            return

        collapse_element = lxml.html.Element("p")
        collapse_element.text = "[…]"
        for index, child in enumerate(children):
            if collapsible_status[index]:
                if index > 0 and collapsible_status[index - 1]:
                    # Continuation of a collapsible run: drop it.
                    child.drop_tree()
                else:
                    # Start of a run: substitute the placeholder.
                    # NOTE(review): the same placeholder element is reused
                    # across runs, so a later run moves it -- confirm this
                    # is intended for messages with multiple quote runs.
                    child.getparent().replace(child, collapse_element)

    if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
        return _("New message")

    # create_parent wraps the fragment so there is a single root to walk.
    elem = lxml.html.fragment_fromstring(rendered_content, create_parent=True)
    change_katex_to_raw_latex(elem)
    potentially_collapse_quotes(elem)
    plain_text = process(elem)
    return plain_text
|
2017-10-07 00:18:07 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def truncate_content(content: str, max_length: int = 200) -> Tuple[str, bool]:
    """Truncate `content` to at most `max_length` characters.

    Returns the (possibly truncated) content and a bool indicating
    whether truncation occurred. The default of 200 preserves the
    historical behavior for existing callers.
    """
    # We use Unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
    # of three dots as this saves two extra characters for textual
    # content. This function will need to be updated to handle Unicode
    # combining characters and tags when we start supporting them.
    if len(content) <= max_length:
        return content, False
    return content[:max_length] + "…", True
|
2017-10-07 00:12:52 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-04-20 17:21:26 +02:00
|
|
|
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
    """Common fields for all notification payloads."""
    realm = user_profile.realm

    # These will let the app support logging into multiple realms and servers.
    return {
        "server": settings.EXTERNAL_HOST,
        "realm_id": realm.id,
        "realm_uri": realm.url,
        "realm_url": realm.url,
        "realm_name": realm.name,
        "user_id": user_profile.id,
    }
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-08 14:45:05 +02:00
|
|
|
def get_message_payload(
    user_profile: UserProfile,
    message: Message,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
    can_access_sender: bool = True,
) -> Dict[str, Any]:
    """Common fields for `message` payloads, for all platforms.

    Args:
        user_profile: The notification's recipient.
        message: The message being notified about.
        mentioned_user_group_id/name: Set together when the trigger was
            a user-group mention.
        can_access_sender: False when the recipient cannot access the
            sender's profile; the sender's email is then masked.
    """
    data = get_base_payload(user_profile)

    # `sender_id` is preferred, but some existing versions use `sender_email`.
    data["sender_id"] = message.sender.id
    if not can_access_sender:
        # A guest user can only receive a stream message from an
        # inaccessible user as we allow unsubscribed users to send
        # messages to streams. For direct messages, the guest gains
        # access to the user if they where previously inaccessible.
        data["sender_email"] = Address(
            username=f"user{message.sender.id}", domain=get_fake_email_domain(message.realm.host)
        ).addr_spec
    else:
        data["sender_email"] = message.sender.email

    data["time"] = datetime_to_timestamp(message.date_sent)
    if mentioned_user_group_id is not None:
        assert mentioned_user_group_name is not None
        data["mentioned_user_group_id"] = mentioned_user_group_id
        data["mentioned_user_group_name"] = mentioned_user_group_name

    # Recipient-specific fields: stream messages carry the stream and
    # topic; group DMs carry the participant list.
    if message.recipient.type == Recipient.STREAM:
        data["recipient_type"] = "stream"
        data["stream"] = get_message_stream_name_from_database(message)
        data["stream_id"] = message.recipient.type_id
        data["topic"] = message.topic_name()
    elif message.recipient.type == Recipient.DIRECT_MESSAGE_GROUP:
        data["recipient_type"] = "private"
        data["pm_users"] = huddle_users(message.recipient.id)
    else:  # Recipient.PERSONAL
        data["recipient_type"] = "private"

    return data
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-04 23:31:04 +02:00
|
|
|
def get_apns_alert_title(message: Message) -> str:
    """
    On an iOS notification, this is the first bolded line.
    """
    if message.recipient.type == Recipient.DIRECT_MESSAGE_GROUP:
        # Group DM: a sorted, comma-separated list of participant names.
        display_recipient = get_display_recipient(message.recipient)
        assert isinstance(display_recipient, list)
        names = sorted(user["full_name"] for user in display_recipient)
        return ", ".join(names)
    if message.is_stream_message():
        stream_name = get_message_stream_name_from_database(message)
        topic = message.topic_name()
        return f"#{stream_name} > {topic}"
    # For 1:1 direct messages, we just show the sender name.
    return message.sender.full_name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-20 10:01:15 +02:00
|
|
|
def get_apns_alert_subtitle(
    message: Message,
    trigger: str,
    user_profile: UserProfile,
    mentioned_user_group_name: Optional[str] = None,
    can_access_sender: bool = True,
) -> str:
    """
    On an iOS notification, this is the second bolded line.

    The text varies by notification trigger: mentions get an explicit
    "X mentioned ..." phrasing; 1:1 DMs get no subtitle; everything
    else shows the sender's name followed by a colon.
    """
    sender_name = message.sender.full_name
    if not can_access_sender:
        # A guest user can only receive a stream message from an
        # inaccessible user as we allow unsubscribed users to send
        # messages to streams. For direct messages, the guest gains
        # access to the user if they where previously inaccessible.
        sender_name = str(UserProfile.INACCESSIBLE_USER_NAME)

    if trigger == NotificationTriggers.MENTION:
        if mentioned_user_group_name is not None:
            return _("{full_name} mentioned @{user_group_name}:").format(
                full_name=sender_name, user_group_name=mentioned_user_group_name
            )
        else:
            return _("{full_name} mentioned you:").format(full_name=sender_name)
    elif trigger in (
        NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
        NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
        NotificationTriggers.TOPIC_WILDCARD_MENTION,
        NotificationTriggers.STREAM_WILDCARD_MENTION,
    ):
        return _("{full_name} mentioned everyone:").format(full_name=sender_name)
    elif message.recipient.type == Recipient.PERSONAL:
        return ""
    # For group direct messages, or regular messages to a stream,
    # just use a colon to indicate this is the sender.
    return sender_name + ":"
|
2018-10-04 23:31:04 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_apns_badge_count(
    user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = ()
) -> int:
    """Badge count to display on the iOS app icon.

    `read_messages_ids` is accepted for interface parity with
    get_apns_badge_count_future but is currently unused.

    The default was changed from a mutable `[]` to an immutable empty
    tuple to avoid the shared-mutable-default pitfall; callers see no
    behavioral difference.
    """
    # NOTE: We have temporarily set get_apns_badge_count to always
    # return 0 until we can debug a likely mobile app side issue with
    # handling notifications while the app is open.
    return 0
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_apns_badge_count_future(
    user_profile: UserProfile, read_messages_ids: Optional[Sequence[int]] = ()
) -> int:
    """Count the user's messages with active push notifications, for the
    iOS badge.

    The default was changed from a mutable `[]` to an immutable empty
    tuple to avoid the shared-mutable-default pitfall; `__in` lookups
    accept any iterable, so callers see no behavioral difference.
    """
    # Future implementation of get_apns_badge_count; unused but
    # we expect to use this once we resolve client-side bugs.
    return (
        UserMessage.objects.filter(user_profile=user_profile)
        .extra(where=[UserMessage.where_active_push_notification()])
        .exclude(
            # If we've just marked some messages as read, they're still
            # marked as having active notifications; we'll clear that flag
            # only after we've sent that update to the devices. So we need
            # to exclude them explicitly from the count.
            message_id__in=read_messages_ids
        )
        .count()
    )
|
|
|
|
|
2020-06-02 18:09:26 +02:00
|
|
|
|
2020-05-20 10:01:15 +02:00
|
|
|
def get_message_payload_apns(
    user_profile: UserProfile,
    message: Message,
    trigger: str,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
    can_access_sender: bool = True,
) -> Dict[str, Any]:
    """A `message` payload for iOS, via APNs.

    Combines the cross-platform message payload (under the `zulip`
    custom key) with APNs-specific alert/sound/badge fields.
    """
    zulip_data = get_message_payload(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name, can_access_sender
    )
    zulip_data.update(
        message_ids=[message.id],
    )

    assert message.rendered_content is not None
    # Render the alert text in the recipient's language.
    with override_language(user_profile.default_language):
        content, _ = truncate_content(get_mobile_push_content(message.rendered_content))
        apns_data = {
            "alert": {
                "title": get_apns_alert_title(message),
                "subtitle": get_apns_alert_subtitle(
                    message, trigger, user_profile, mentioned_user_group_name, can_access_sender
                ),
                "body": content,
            },
            "sound": "default",
            "badge": get_apns_badge_count(user_profile),
            "custom": {"zulip": zulip_data},
        }
    return apns_data
|
2017-05-08 13:21:56 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_message_payload_gcm(
    user_profile: UserProfile,
    message: Message,
    mentioned_user_group_id: Optional[int] = None,
    mentioned_user_group_name: Optional[str] = None,
    can_access_sender: bool = True,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `message` payload + options, for Android via FCM.

    Returns (data, options): the flat key/value payload sent to the
    device, and the FCM delivery options.
    """
    data = get_message_payload(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name, can_access_sender
    )

    if not can_access_sender:
        # A guest user can only receive a stream message from an
        # inaccessible user as we allow unsubscribed users to send
        # messages to streams. For direct messages, the guest gains
        # access to the user if they where previously inaccessible.
        sender_avatar_url = get_avatar_for_inaccessible_user()
        sender_name = str(UserProfile.INACCESSIBLE_USER_NAME)
    else:
        sender_avatar_url = absolute_avatar_url(message.sender)
        sender_name = message.sender.full_name

    assert message.rendered_content is not None
    # Render the notification text in the recipient's language.
    with override_language(user_profile.default_language):
        content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
        data.update(
            event="message",
            zulip_message_id=message.id,  # message_id is reserved for CCS
            content=content,
            content_truncated=truncated,
            sender_full_name=sender_name,
            sender_avatar_url=sender_avatar_url,
        )
    gcm_options = {"priority": "high"}
    return data, gcm_options
|
2017-05-08 13:26:01 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_remove_payload_gcm(
    user_profile: UserProfile,
    message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `remove` payload + options, for Android via FCM."""
    payload = get_base_payload(user_profile)
    payload["event"] = "remove"
    payload["zulip_message_ids"] = ",".join(str(message_id) for message_id in message_ids)
    # Older clients (all clients older than 2019-02-13) look only at
    # `zulip_message_id` and ignore `zulip_message_ids`. Do our best.
    payload["zulip_message_id"] = message_ids[0]

    options = {"priority": "normal"}
    return payload, options
|
2019-02-14 00:54:56 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-02 18:09:26 +02:00
|
|
|
def get_remove_payload_apns(user_profile: UserProfile, message_ids: List[int]) -> Dict[str, Any]:
    """A `remove` payload for iOS, via APNs."""
    payload = get_base_payload(user_profile)
    payload["event"] = "remove"
    payload["zulip_message_ids"] = ",".join(str(message_id) for message_id in message_ids)

    return {
        "badge": get_apns_badge_count(user_profile, message_ids),
        "custom": {"zulip": payload},
    }
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-14 01:08:51 +01:00
|
|
|
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """Called when messages that previously triggered a mobile push
    notification have been read on the server.  Pushes a `remove` event
    to the user's mobile devices so those notifications can be cleared,
    then marks the messages as no longer having an active mobile push
    notification.
    """
    # Without push notification credentials (or a bouncer), nothing to do.
    if not push_notifications_configured():
        return

    user_profile = get_user_profile_by_id(user_profile_id)

    # We may no longer have access to the message here; for example,
    # the user (1) got a message, (2) read the message in the web UI,
    # and then (3) it was deleted. When trying to send the push
    # notification for (2), after (3) has happened, there is no
    # message to fetch -- but we nonetheless want to remove the mobile
    # notification. Because of this, verification of access to
    # the messages is skipped here.
    # Because of this, no access to the Message objects should be
    # done; they are treated as a list of opaque ints.

    # APNs has a 4KB limit on the maximum size of messages, which
    # translated to several hundred message IDs in one of these
    # notifications. In rare cases, it's possible for someone to mark
    # thousands of push notification eligible messages as read at
    # once. We could handle this situation with a loop, but we choose
    # to truncate instead to avoid extra network traffic, because it's
    # very likely the user has manually cleared the notifications in
    # their mobile device's UI anyway.
    #
    # When truncating, we keep only the newest N messages in this
    # remove event. This is optimal because older messages are the
    # ones most likely to have already been manually cleared at some
    # point in the past.
    #
    # We choose 200 here because a 10-digit message ID plus a comma and
    # space consume 12 bytes, and 12 x 200 = 2400 bytes is still well
    # below the 4KB limit (leaving plenty of space for metadata).
    MAX_APNS_MESSAGE_IDS = 200
    truncated_message_ids = sorted(message_ids)[-MAX_APNS_MESSAGE_IDS:]
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, truncated_message_ids)
    apns_payload = get_remove_payload_apns(user_profile, truncated_message_ids)

    # Deterministic ordering of devices makes behavior reproducible.
    android_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.FCM).order_by("id")
    )
    apple_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS).order_by("id")
    )
    if uses_notification_bouncer():
        # The bouncer handles delivery and its own success accounting.
        send_notifications_to_bouncer(
            user_profile, apns_payload, gcm_payload, gcm_options, android_devices, apple_devices
        )
    else:
        user_identity = UserPushIdentityCompat(user_id=user_profile_id)
        android_successfully_sent_count = send_android_push_notification(
            user_identity, android_devices, gcm_payload, gcm_options
        )
        apple_successfully_sent_count = send_apple_push_notification(
            user_identity, apple_devices, apns_payload
        )

        # Record delivered pushes in the realm's daily analytics counter.
        do_increment_logging_stat(
            user_profile.realm,
            COUNT_STATS["mobile_pushes_sent::day"],
            None,
            timezone_now(),
            increment=android_successfully_sent_count + apple_successfully_sent_count,
        )

    # We intentionally use the non-truncated message_ids here. We are
    # assuming in this very rare case that the user has manually
    # dismissed these notifications on the device side, and the server
    # should no longer track them as outstanding notifications.
    with transaction.atomic(savepoint=False):
        # Clear the active_mobile_push_notification bit on all affected
        # rows under a row lock, so a concurrent notification worker
        # can't race with this update.
        UserMessage.select_for_update_query().filter(
            user_profile_id=user_profile_id,
            message_id__in=message_ids,
        ).update(flags=F("flags").bitand(~UserMessage.flags.active_mobile_push_notification))
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-02 01:29:06 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications (APNs + FCM) for a single message
    the user should be notified about.

    missed_message is the event received by the
    zerver.worker.missedmessage_mobile_notifications.PushNotificationWorker.consume function.
    """
    if not push_notifications_configured():
        return
    user_profile = get_user_profile_by_id(user_profile_id)

    if user_profile.is_bot:  # nocoverage
        # We don't expect to reach here for bot users. However, this code exists
        # to find and throw away any pre-existing events in the queue while
        # upgrading from versions before our notifiability logic was implemented.
        # TODO/compatibility: This block can be removed when one can no longer
        # upgrade from versions <= 4.0 to versions >= 5.0
        logger.warning(
            "Send-push-notification event found for bot user %s. Skipping.", user_profile_id
        )
        return

    if not (
        user_profile.enable_offline_push_notifications
        or user_profile.enable_online_push_notifications
    ):
        # BUG: Investigate why it's possible to get here.
        return  # nocoverage

    with transaction.atomic(savepoint=False):
        try:
            (message, user_message) = access_message_and_usermessage(
                user_profile, missed_message["message_id"], lock_message=True
            )
        except JsonableError:
            if ArchivedMessage.objects.filter(id=missed_message["message_id"]).exists():
                # If the cause is a race with the message being deleted,
                # that's normal and we have no need to log an error.
                return
            # Use the module-level logger, consistent with every other
            # log call in this function (was the root `logging` logger).
            logger.info(
                "Unexpected message access failure handling push notifications: %s %s",
                user_profile.id,
                missed_message["message_id"],
            )
            return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        if user_message.flags.read or user_message.flags.active_mobile_push_notification:
            return

        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message["message_id"],
                user_profile_id,
                exc_info=True,
            )
            return

    trigger = missed_message["trigger"]

    # TODO/compatibility: Translation code for the rename of
    # `wildcard_mentioned` to `stream_wildcard_mentioned`.
    # Remove this when one can no longer directly upgrade from 7.x to main.
    if trigger == "wildcard_mentioned":
        trigger = NotificationTriggers.STREAM_WILDCARD_MENTION  # nocoverage

    # TODO/compatibility: Translation code for the rename of
    # `followed_topic_wildcard_mentioned` to `stream_wildcard_mentioned_in_followed_topic`.
    # Remove this when one can no longer directly upgrade from 7.x to main.
    if trigger == "followed_topic_wildcard_mentioned":
        trigger = NotificationTriggers.STREAM_WILDCARD_MENTION_IN_FOLLOWED_TOPIC  # nocoverage

    # TODO/compatibility: Translation code for the rename of
    # `private_message` to `direct_message`. Remove this when
    # one can no longer directly upgrade from 7.x to main.
    if trigger == "private_message":
        trigger = NotificationTriggers.DIRECT_MESSAGE  # nocoverage

    # mentioned_user_group will be None if the user is personally mentioned
    # regardless whether they are a member of the mentioned user group in the
    # message or not.
    mentioned_user_group_id = None
    mentioned_user_group_name = None
    mentioned_user_group_members_count = None
    mentioned_user_group = get_mentioned_user_group([missed_message], user_profile)
    if mentioned_user_group is not None:
        mentioned_user_group_id = mentioned_user_group.id
        mentioned_user_group_name = mentioned_user_group.name
        mentioned_user_group_members_count = mentioned_user_group.members_count

    # Soft reactivate if pushing to a long_term_idle user that is personally mentioned
    soft_reactivate_if_personal_notification(
        user_profile, {trigger}, mentioned_user_group_members_count
    )

    if message.is_stream_message():
        # This will almost always be True. The corner case where you
        # can be receiving a message from a user you cannot access
        # involves your being a guest user whose access is restricted
        # by a can_access_all_users_group policy, and you can't access
        # the sender because they are sending a message to a public
        # stream that you are subscribed to but they are not.

        can_access_sender = check_can_access_user(message.sender, user_profile)
    else:
        # For private messages, the recipient will gain access
        # to the sender if they did not have access previously.
        can_access_sender = True

    apns_payload = get_message_payload_apns(
        user_profile,
        message,
        trigger,
        mentioned_user_group_id,
        mentioned_user_group_name,
        can_access_sender,
    )
    gcm_payload, gcm_options = get_message_payload_gcm(
        user_profile, message, mentioned_user_group_id, mentioned_user_group_name, can_access_sender
    )
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)

    android_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.FCM).order_by("id")
    )

    apple_devices = list(
        PushDeviceToken.objects.filter(user=user_profile, kind=PushDeviceToken.APNS).order_by("id")
    )
    if uses_notification_bouncer():
        send_notifications_to_bouncer(
            user_profile, apns_payload, gcm_payload, gcm_options, android_devices, apple_devices
        )
        return

    logger.info(
        "Sending mobile push notifications for local user %s: %s via FCM devices, %s via APNs devices",
        user_profile_id,
        len(android_devices),
        len(apple_devices),
    )
    user_identity = UserPushIdentityCompat(user_id=user_profile.id)

    apple_successfully_sent_count = send_apple_push_notification(
        user_identity, apple_devices, apns_payload
    )
    android_successfully_sent_count = send_android_push_notification(
        user_identity, android_devices, gcm_payload, gcm_options
    )

    # Record delivered pushes in the realm's daily analytics counter.
    do_increment_logging_stat(
        user_profile.realm,
        COUNT_STATS["mobile_pushes_sent::day"],
        None,
        timezone_now(),
        increment=apple_successfully_sent_count + android_successfully_sent_count,
    )
|
2023-10-05 13:53:09 +02:00
|
|
|
|
|
|
|
|
|
|
|
def send_test_push_notification_directly_to_devices(
    user_identity: UserPushIdentityCompat,
    devices: Sequence[DeviceToken],
    base_payload: Dict[str, Any],
    remote: Optional["RemoteZulipServer"] = None,
) -> None:
    """Deliver a "test" event push directly to the given device tokens,
    via APNs for Apple devices and FCM for Android devices.
    """
    common_payload = copy.deepcopy(base_payload)
    common_payload["event"] = "test"

    apple_devices = []
    android_devices = []
    for device in devices:
        if device.kind == PushDeviceToken.APNS:
            apple_devices.append(device)
        elif device.kind == PushDeviceToken.FCM:
            android_devices.append(device)

    # Give each platform an independent copy, so platform-specific
    # mutations to one payload cannot leak into the other.
    apple_payload = copy.deepcopy(common_payload)
    android_payload = copy.deepcopy(common_payload)

    # TODO/compatibility: Backwards-compatibility name for realm_url.
    realm_url = base_payload.get("realm_url", base_payload["realm_uri"])
    realm_name = base_payload["realm_name"]
    alert_body = _("This is a test notification from {realm_name} ({realm_url}).").format(
        realm_name=realm_name, realm_url=realm_url
    )
    apns_data = {
        "alert": {
            "title": _("Test notification"),
            "body": alert_body,
        },
        "sound": "default",
        "custom": {"zulip": apple_payload},
    }
    send_apple_push_notification(user_identity, apple_devices, apns_data, remote=remote)

    android_payload["time"] = datetime_to_timestamp(timezone_now())
    send_android_push_notification(
        user_identity, android_devices, android_payload, {"priority": "high"}, remote=remote
    )
|
|
|
|
|
|
|
|
|
|
|
|
def send_test_push_notification(user_profile: UserProfile, devices: List[PushDeviceToken]) -> None:
    """Send a test push notification to each of the user's registered
    devices, either via the push bouncer or directly.
    """
    base_payload = get_base_payload(user_profile)

    if not uses_notification_bouncer():
        # This server doesn't need the bouncer, so we send directly to the device.
        user_identity = UserPushIdentityCompat(
            user_id=user_profile.id, user_uuid=str(user_profile.uuid)
        )
        send_test_push_notification_directly_to_devices(
            user_identity, devices, base_payload, remote=None
        )
        return

    # Bouncer path: one request per device token.
    for device in devices:
        post_data = {
            "realm_uuid": str(user_profile.realm.uuid),
            "user_uuid": str(user_profile.uuid),
            "user_id": user_profile.id,
            "token": device.token,
            "token_kind": device.kind,
            "base_payload": base_payload,
        }

        logger.info("Sending test push notification to bouncer: %r", post_data)
        send_json_to_push_bouncer("POST", "push/test_notification", post_data)
|
2023-10-08 00:43:41 +02:00
|
|
|
|
|
|
|
|
|
|
|
class InvalidPushDeviceTokenError(JsonableError):
    """Raised when a request references a push device token that this
    server does not recognize."""

    code = ErrorCode.INVALID_PUSH_DEVICE_TOKEN

    def __init__(self) -> None:
        # No per-instance data: the message comes entirely from
        # msg_format, so the parent constructor is intentionally skipped.
        pass

    @staticmethod
    @override
    def msg_format() -> str:
        return _("Device not recognized")
|
|
|
|
|
|
|
|
|
|
|
|
class InvalidRemotePushDeviceTokenError(JsonableError):
    """Raised when the push bouncer reports that a device token it was
    asked to use is not registered with it."""

    code = ErrorCode.INVALID_REMOTE_PUSH_DEVICE_TOKEN

    def __init__(self) -> None:
        # No per-instance data: the message comes entirely from
        # msg_format, so the parent constructor is intentionally skipped.
        pass

    @staticmethod
    @override
    def msg_format() -> str:
        return _("Device not recognized by the push bouncer")
|
2024-02-07 00:38:35 +01:00
|
|
|
|
|
|
|
|
|
|
|
class PushNotificationsDisallowedByBouncerError(Exception):
    """Raised when the push bouncer refuses to deliver push
    notifications for this server; ``reason`` carries the bouncer's
    explanation.
    """

    def __init__(self, reason: str) -> None:
        # Pass the reason to Exception.__init__ so that str(exc) and
        # tracebacks show it, instead of an empty message (previously
        # the reason was stored but never surfaced by str()).
        super().__init__(reason)
        self.reason = reason
|