2017-08-19 01:12:40 +02:00
|
|
|
import base64
|
|
|
|
import binascii
|
|
|
|
import logging
|
2019-01-07 22:15:46 +01:00
|
|
|
import lxml.html
|
2017-10-07 00:18:07 +02:00
|
|
|
import re
|
2017-08-19 01:12:40 +02:00
|
|
|
import time
|
2017-10-07 00:18:07 +02:00
|
|
|
|
2019-08-10 00:30:33 +02:00
|
|
|
from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
|
2013-10-21 19:47:08 +02:00
|
|
|
|
|
|
|
from django.conf import settings
|
2018-10-11 00:53:13 +02:00
|
|
|
from django.db import IntegrityError, transaction
|
2019-02-14 02:01:54 +01:00
|
|
|
from django.db.models import F
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2017-03-06 03:57:31 +01:00
|
|
|
from django.utils.translation import ugettext as _
|
2019-02-13 02:56:23 +01:00
|
|
|
import gcm
|
2018-11-29 21:37:40 +01:00
|
|
|
import ujson
|
2017-08-19 01:12:40 +02:00
|
|
|
|
|
|
|
from zerver.decorator import statsd_increment
|
|
|
|
from zerver.lib.avatar import absolute_avatar_url
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2019-02-14 02:01:54 +01:00
|
|
|
from zerver.lib.message import access_message, \
|
|
|
|
bulk_access_messages_expect_usermessage, huddle_users
|
2019-12-03 20:19:38 +01:00
|
|
|
from zerver.lib.remote_server import send_to_push_bouncer, send_json_to_push_bouncer
|
2019-02-02 23:53:55 +01:00
|
|
|
from zerver.lib.timestamp import datetime_to_timestamp
|
2019-04-20 17:21:26 +02:00
|
|
|
from zerver.models import PushDeviceToken, Message, Recipient, \
|
2019-02-14 02:01:54 +01:00
|
|
|
UserMessage, UserProfile, \
|
2019-02-02 23:53:55 +01:00
|
|
|
get_display_recipient, receives_offline_push_notifications, \
|
|
|
|
receives_online_notifications, get_user_profile_by_id, \
|
2018-12-05 19:36:58 +01:00
|
|
|
ArchivedMessage
|
2016-06-08 12:32:59 +02:00
|
|
|
|
2019-08-10 00:30:33 +02:00
|
|
|
if TYPE_CHECKING:
|
|
|
|
from apns2.client import APNsClient
|
|
|
|
|
2018-11-27 18:45:45 +01:00
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
2017-05-09 12:15:11 +02:00
|
|
|
if settings.ZILENCER_ENABLED:
|
|
|
|
from zilencer.models import RemotePushDeviceToken
|
2017-05-12 10:12:38 +02:00
|
|
|
else: # nocoverage -- Not convenient to add test for this.
|
2017-05-09 12:15:11 +02:00
|
|
|
from mock import Mock
|
2020-04-22 04:13:37 +02:00
|
|
|
RemotePushDeviceToken = Mock() # type: ignore[misc] # https://github.com/JukkaL/mypy/issues/1188
|
2017-05-09 12:15:11 +02:00
|
|
|
|
|
|
|
DeviceToken = Union[PushDeviceToken, RemotePushDeviceToken]
|
|
|
|
|
2013-10-21 19:47:08 +02:00
|
|
|
# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data: str) -> str:
    """Decode a base64 token and re-encode it as a hex string."""
    raw_bytes = base64.b64decode(data)
    return binascii.hexlify(raw_bytes).decode('utf-8')
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def hex_to_b64(data: str) -> str:
    """Inverse of b64_to_hex: hex string in, base64 string out."""
    raw_bytes = binascii.unhexlify(data)
    return base64.b64encode(raw_bytes).decode()
|
2013-10-21 19:47:08 +02:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to APNs, for iOS
|
|
|
|
#
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
_apns_client: Optional["APNsClient"] = None
|
2018-02-09 23:19:00 +01:00
|
|
|
_apns_client_initialized = False
|
2017-08-19 01:38:11 +02:00
|
|
|
|
2019-08-10 00:30:33 +02:00
|
|
|
def get_apns_client() -> 'Optional[APNsClient]':
    """Return the process-wide APNs client, constructing it on first call.

    Returns None when settings.APNS_CERT_FILE is unset (APNs disabled);
    the "initialized" flag ensures we only attempt construction once.
    """
    # We lazily do this import as part of optimizing Zulip's base
    # import time.
    from apns2.client import APNsClient
    global _apns_client, _apns_client_initialized
    if not _apns_client_initialized:
        # NB if called concurrently, this will make excess connections.
        # That's a little sloppy, but harmless unless a server gets
        # hammered with a ton of these all at once after startup.
        if settings.APNS_CERT_FILE is not None:
            _apns_client = APNsClient(credentials=settings.APNS_CERT_FILE,
                                      use_sandbox=settings.APNS_SANDBOX)
        _apns_client_initialized = True
    return _apns_client
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def apns_enabled() -> bool:
    """Report whether this server can talk to APNs directly."""
    return get_apns_client() is not None
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def modernize_apns_payload(data: Dict[str, Any]) -> Dict[str, Any]:
    '''Take a payload in an unknown Zulip version's format, and return in current format.'''
    # TODO this isn't super robust as is -- if a buggy remote server
    # sends a malformed payload, we are likely to raise an exception.
    if 'message_ids' not in data:
        # Already compatible with the current format.
        # `alert` may be a string, or a dict with `title` and `body`.
        # In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
        # item in `custom.zulip` is `message_ids`.
        return data

    # The format sent by 1.6.0, from the earliest pre-1.6.0
    # version with bouncer support up until 613d093d7 pre-1.7.0:
    #   'alert': str,              # just sender, and text about PM/group-PM/mention
    #   'message_ids': List[int],  # always just one
    return {
        'alert': data['alert'],
        'badge': 0,
        'custom': {
            'zulip': {'message_ids': data['message_ids']},
        },
    }
|
|
|
|
|
2017-08-29 01:05:20 +02:00
|
|
|
APNS_MAX_RETRIES = 3
|
|
|
|
|
2015-02-10 08:08:47 +01:00
|
|
|
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id: int, devices: List[DeviceToken],
                                 payload_data: Dict[str, Any], remote: bool=False) -> None:
    """Send an APNs notification for one user to each of their devices.

    payload_data is a payload dict in any supported historical format;
    it is normalized via modernize_apns_payload before sending.  When
    `remote` is True, tokens are looked up/cleaned in
    RemotePushDeviceToken (bouncer mode) instead of PushDeviceToken.
    Failed sends are retried; invalid tokens are deleted.
    """
    # We lazily do the APNS imports as part of optimizing Zulip's base
    # import time; since these are only needed in the push
    # notification queue worker, it's best to only import them in the
    # code that needs them.
    from apns2.payload import Payload as APNsPayload
    from hyper.http20.exceptions import HTTP20Error

    client = get_apns_client()
    if client is None:
        logger.debug("APNs: Dropping a notification because nothing configured. "
                     "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
        return

    if remote:
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    logger.info("APNs: Sending notification for user %d to %d devices",
                user_id, len(devices))
    payload = APNsPayload(**modernize_apns_payload(payload_data))
    # Notifications expire after 24 hours if not yet delivered.
    expiration = int(time.time() + 24 * 3600)
    # NOTE: this retry budget is shared across all devices in the loop
    # below, not reset per device.
    retries_left = APNS_MAX_RETRIES
    for device in devices:
        # TODO obviously this should be made to actually use the async

        # Returns the APNs result on success, or None on a transient
        # transport error (which the caller retries).
        def attempt_send() -> Optional[str]:
            try:
                stream_id = client.send_notification_async(
                    device.token, payload, topic=settings.APNS_TOPIC,
                    expiration=expiration)
                return client.get_notification_result(stream_id)
            except HTTP20Error as e:
                logger.warning("APNs: HTTP error sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except BrokenPipeError as e:
                logger.warning("APNs: BrokenPipeError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except ConnectionError as e:  # nocoverage
                logger.warning("APNs: ConnectionError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None

        result = attempt_send()
        while result is None and retries_left > 0:
            retries_left -= 1
            result = attempt_send()
        if result is None:
            result = "HTTP error, retries exhausted"

        # NOTE(review): when `result` is a plain string, result[0] is its
        # first character, so this comparison can only match the tuple form.
        if result[0] == "Unregistered":
            # For some reason, "Unregistered" result values have a
            # different format, as a tuple of the pair ("Unregistered", 12345132131).
            result = result[0]
        if result == 'Success':
            logger.info("APNs: Success sending for user %d to device %s",
                        user_id, device.token)
        elif result in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
            logger.info("APNs: Removing invalid/expired token %s (%s)", device.token, result)
            # We remove all entries for this token (There
            # could be multiple for different Zulip servers).
            DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
        else:
            logger.warning("APNs: Failed to send for user %d to device %s: %s",
                           user_id, device.token, result)
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to GCM, for Android
|
|
|
|
#
|
|
|
|
|
2019-02-13 02:56:23 +01:00
|
|
|
def make_gcm_client() -> gcm.GCM:  # nocoverage
    """Construct the GCM client, monkey-patched to point at the FCM endpoint."""
    # From GCM upstream's doc for migrating to FCM:
    #
    # FCM supports HTTP and XMPP protocols that are virtually
    # identical to the GCM server protocols, so you don't need to
    # update your sending logic for the migration.
    #
    # https://developers.google.com/cloud-messaging/android/android-migrate-fcm
    #
    # The one thing we're required to change on the server is the URL of
    # the endpoint. So we get to keep using the GCM client library we've
    # been using (as long as we're happy with it) -- just monkey-patch in
    # that one change, because the library's API doesn't anticipate that
    # as a customization point.
    gcm.gcm.GCM_URL = 'https://fcm.googleapis.com/fcm/send'
    return gcm.GCM(settings.ANDROID_GCM_API_KEY)
|
|
|
|
|
2017-05-12 10:12:38 +02:00
|
|
|
# Build the process-wide GCM client once at import time; a value of None
# means direct GCM sending is disabled (the bouncer may still be used).
if settings.ANDROID_GCM_API_KEY:  # nocoverage
    gcm_client = make_gcm_client()
else:
    gcm_client = None
|
2013-12-09 23:19:59 +01:00
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def gcm_enabled() -> bool:  # nocoverage
    """Report whether this server can talk to GCM/FCM directly."""
    client = gcm_client
    return client is not None
|
2018-02-12 23:34:59 +01:00
|
|
|
|
2018-11-29 21:37:40 +01:00
|
|
|
def send_android_push_notification_to_user(user_profile: UserProfile, data: Dict[str, Any],
                                           options: Dict[str, Any]) -> None:
    """Send a GCM message to every Android device registered for this user."""
    android_devices = PushDeviceToken.objects.filter(
        user=user_profile, kind=PushDeviceToken.GCM)
    send_android_push_notification(list(android_devices), data, options)
|
2017-03-06 03:11:44 +01:00
|
|
|
|
2019-02-08 02:14:45 +01:00
|
|
|
def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
    """
    Parse GCM options, supplying defaults, and raising an error if invalid.

    The options permitted here form part of the Zulip notification
    bouncer's API:

      `priority`: Passed through to GCM; see upstream doc linked below.
          Zulip servers should always set this; when unset, we guess a
          value based on the behavior of old server versions.

    Any unrecognized option is rejected.  For the options' semantics,
    see https://firebase.google.com/docs/cloud-messaging/http-server-ref

    Returns `priority`.
    """
    priority = options.pop('priority', None)
    if priority is None:
        # An older server that doesn't send `priority`: treat actual
        # message notifications as high priority, everything else
        # (presumably `'event': 'remove'`) as normal.
        priority = 'high' if data.get('event') == 'message' else 'normal'
    if priority not in ('normal', 'high'):
        raise JsonableError(_("Invalid GCM option to bouncer: priority %r")
                            % (priority,))

    if options:
        # We're strict about the API; there is no use case for a newer Zulip
        # server talking to an older bouncer, so we only need to provide
        # one-way compatibility.
        raise JsonableError(_("Invalid GCM options to bouncer: %s")
                            % (ujson.dumps(options),))

    return priority  # when this grows a second option, can make it a tuple
|
|
|
|
|
2017-03-06 03:11:44 +01:00
|
|
|
@statsd_increment("android_push_notification")
def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, Any],
                                   options: Dict[str, Any], remote: bool=False) -> None:
    """
    Send a GCM message to the given devices.

    See https://firebase.google.com/docs/cloud-messaging/http-server-ref
    for the GCM upstream API which this talks to.

    data: The JSON object (decoded) to send as the 'data' parameter of
        the GCM message.
    options: Additional options to control the GCM message sent.
        For details, see `parse_gcm_options`.
    remote: When True, token bookkeeping (canonical-ID updates and
        removal of dead tokens) happens in RemotePushDeviceToken
        rather than PushDeviceToken.
    """
    if not gcm_client:
        logger.debug("Skipping sending a GCM push notification since "
                     "PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset")
        return

    reg_ids = [device.token for device in devices]
    priority = parse_gcm_options(options, data)
    try:
        # See https://firebase.google.com/docs/cloud-messaging/http-server-ref .
        # Two kwargs `retries` and `session` get eaten by `json_request`;
        # the rest pass through to the GCM server.
        res = gcm_client.json_request(registration_ids=reg_ids,
                                      priority=priority,
                                      data=data,
                                      retries=10)
    except OSError as e:
        # Network-level failure talking to GCM; nothing more we can do here.
        logger.warning(str(e))
        return

    if res and 'success' in res:
        for reg_id, msg_id in res['success'].items():
            logger.info("GCM: Sent %s as %s", reg_id, msg_id)

    if remote:
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken

    # res.canonical will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: https://developer.android.com/google/gcm/adv.html#canonical
    if 'canonical' in res:
        for reg_id, new_reg_id in res['canonical'].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logger.warning("GCM: Got canonical ref but it already matches our ID %s!", reg_id)
            elif not DeviceTokenClass.objects.filter(token=new_reg_id,
                                                     kind=DeviceTokenClass.GCM).count():
                # This case shouldn't happen; any time we get a canonical ref it should have been
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logger.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating.",
                    new_reg_id, reg_id)
                DeviceTokenClass.objects.filter(
                    token=reg_id, kind=DeviceTokenClass.GCM).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logger.info("GCM: Got canonical ref %s, dropping %s", new_reg_id, reg_id)

                DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()

    if 'errors' in res:
        for error, reg_ids in res['errors'].items():
            if error in ['NotRegistered', 'InvalidRegistration']:
                # The token is dead; clean it up.
                for reg_id in reg_ids:
                    logger.info("GCM: Removing %s", reg_id)
                    # We remove all entries for this token (There
                    # could be multiple for different Zulip servers).
                    DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
            else:
                # Some other delivery error; just log it.
                for reg_id in reg_ids:
                    logger.warning("GCM: Delivery to %s failed: %s", reg_id, error)

    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
|
2017-03-06 03:05:04 +01:00
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Sending to a bouncer
|
|
|
|
#
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def uses_notification_bouncer() -> bool:
    """True if this server relays push notifications through a bouncer."""
    bouncer_url = settings.PUSH_NOTIFICATION_BOUNCER_URL
    return bouncer_url is not None
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def send_notifications_to_bouncer(user_profile_id: int,
                                  apns_payload: Dict[str, Any],
                                  gcm_payload: Dict[str, Any],
                                  gcm_options: Dict[str, Any]) -> None:
    """Forward one user's APNs and GCM payloads to the push bouncer."""
    # Calls zilencer.views.remote_server_notify_push
    send_json_to_push_bouncer('POST', 'push/notify', {
        'user_id': user_profile_id,
        'apns_payload': apns_payload,
        'gcm_payload': gcm_payload,
        'gcm_options': gcm_options,
    })
|
2017-08-19 00:42:04 +02:00
|
|
|
|
|
|
|
#
|
|
|
|
# Managing device tokens
|
|
|
|
#
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def num_push_devices_for_user(user_profile: UserProfile, kind: Optional[int]=None) -> int:
    """Return the number of push device tokens registered for this user.

    If `kind` is given (e.g. PushDeviceToken.APNS or PushDeviceToken.GCM),
    count only tokens of that kind; otherwise count all of them.

    Note: the original return annotation said PushDeviceToken, but both
    branches return `.count()`, an int — fixed to `-> int`.
    """
    queryset = PushDeviceToken.objects.filter(user=user_profile)
    if kind is not None:
        queryset = queryset.filter(kind=kind)
    return queryset.count()
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def add_push_device_token(user_profile: UserProfile,
                          token_str: str,
                          kind: int,
                          ios_app_id: Optional[str]=None) -> None:
    """Record a push device token for the user, locally or via the bouncer.

    `kind` is a PushDeviceToken kind constant; `ios_app_id` is only
    meaningful for APNS tokens.  Registering an already-known token is
    a silent no-op.
    """
    logger.info("Registering push device: %d %r %d %r",
                user_profile.id, token_str, kind, ios_app_id)

    # If we're sending things to the push notification bouncer,
    # register this token with the bouncer instead of storing it locally.
    if uses_notification_bouncer():
        post_data = dict(
            server_uuid=settings.ZULIP_ORG_ID,
            user_id=user_profile.id,
            token=token_str,
            token_kind=kind,
        )
        if kind == PushDeviceToken.APNS:
            post_data['ios_app_id'] = ios_app_id

        logger.info("Sending new push device to bouncer: %r", post_data)
        # Calls zilencer.views.register_remote_push_device
        send_to_push_bouncer('POST', 'push/register', post_data)
        return

    try:
        with transaction.atomic():
            PushDeviceToken.objects.create(
                user_id=user_profile.id,
                kind=kind,
                token=token_str,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now())
    except IntegrityError:
        # Token already registered; nothing to do.
        pass
|
2017-08-19 00:42:04 +02:00
|
|
|
|
2019-11-13 06:54:30 +01:00
|
|
|
def remove_push_device_token(user_profile: UserProfile, token_str: str, kind: int) -> None:
    """Delete a previously registered device token, locally or via the bouncer.

    Raises JsonableError if the token is not registered locally.
    """
    # If we're sending things to the push notification bouncer,
    # unregister this token with the bouncer instead.
    if uses_notification_bouncer():
        # TODO: Make this a remove item
        post_data = dict(
            server_uuid=settings.ZULIP_ORG_ID,
            user_id=user_profile.id,
            token=token_str,
            token_kind=kind,
        )
        # Calls zilencer.views.unregister_remote_push_device
        send_to_push_bouncer("POST", "push/unregister", post_data)
        return

    try:
        PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile).delete()
    except PushDeviceToken.DoesNotExist:
        raise JsonableError(_("Token does not exist"))
|
|
|
|
|
2019-11-19 03:12:54 +01:00
|
|
|
def clear_push_device_tokens(user_profile_id: int) -> None:
    """Delete all of a user's PushDeviceTokens (or unregister them via the bouncer)."""
    if uses_notification_bouncer():
        send_to_push_bouncer("POST", "push/unregister/all", {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile_id,
        })
        return

    PushDeviceToken.objects.filter(user_id=user_profile_id).delete()
|
|
|
|
|
2017-08-19 00:42:04 +02:00
|
|
|
#
|
|
|
|
# Push notifications in general
|
|
|
|
#
|
|
|
|
|
2018-02-12 23:34:59 +01:00
|
|
|
def push_notifications_enabled() -> bool:
    '''True just if this server has configured a way to send push notifications.'''
    bouncer_configured = (uses_notification_bouncer()
                          and settings.ZULIP_ORG_KEY is not None
                          and settings.ZULIP_ORG_ID is not None)
    if bouncer_configured:  # nocoverage
        # We have the needed configuration to send push notifications through
        # the bouncer.  Better yet would be to confirm that this config actually
        # works -- e.g., that we have ever successfully sent to the bouncer --
        # but this is a good start.
        return True
    if settings.DEVELOPMENT and (apns_enabled() or gcm_enabled()):  # nocoverage
        # Since much of the notifications logic is platform-specific, the mobile
        # developers often work on just one platform at a time, so we should
        # only require one to be configured.
        return True
    if apns_enabled() and gcm_enabled():  # nocoverage
        # We have the needed configuration to send through APNs and GCM directly
        # (i.e., we are the bouncer, presumably.)  Again, assume it actually works.
        return True
    return False
|
|
|
|
|
2018-11-27 18:12:11 +01:00
|
|
|
def initialize_push_notifications() -> None:
    """Warn at startup when no push notification method is configured."""
    if push_notifications_enabled():
        return
    if settings.DEVELOPMENT and not settings.TEST_SUITE:  # nocoverage
        # Avoid unnecessary spam on development environment startup
        return
    logger.warning("Mobile push notifications are not configured.\n "
                   "See https://zulip.readthedocs.io/en/latest/"
                   "production/mobile-push-notifications.html")
|
2018-11-27 18:12:11 +01:00
|
|
|
|
2018-10-04 23:31:04 +02:00
|
|
|
def get_gcm_alert(message: Message) -> str:
    """
    Determine what alert string to display based on the missed messages.
    """
    sender_str = message.sender.full_name
    recipient_type = message.recipient.type
    trigger = message.trigger
    if recipient_type == Recipient.HUDDLE and trigger == 'private_message':
        return "New private group message from %s" % (sender_str,)
    if recipient_type == Recipient.PERSONAL and trigger == 'private_message':
        return "New private message from %s" % (sender_str,)
    if message.is_stream_message() and trigger in ('mentioned', 'wildcard_mentioned'):
        return "New mention from %s" % (sender_str,)
    # Remaining case: message.is_stream_message() and trigger == 'stream_push_notify'
    return "New stream message from %s in %s" % (sender_str, get_display_recipient(message.recipient),)
|
2017-05-08 12:58:11 +02:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_mobile_push_content(rendered_content: str) -> str:
    """Convert a message's rendered HTML into plain text for a push payload.

    Emoji spans become their unicode characters, images become their alt
    text, blockquotes are rendered with "> " prefixes, and ordered lists
    are re-numbered.  Returns "***REDACTED***" when the server is
    configured to redact content.
    """
    def get_text(elem: lxml.html.HtmlElement) -> str:
        # Convert default emojis to their unicode equivalent.
        classes = elem.get("class", "")
        if "emoji" in classes:
            match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
            if match:
                emoji_code = match.group('emoji_code')
                char_repr = ""
                for codepoint in emoji_code.split('-'):
                    char_repr += chr(int(codepoint, 16))
                return char_repr
        # Handles realm emojis, avatars etc.
        if elem.tag == "img":
            return elem.get("alt", "")
        if elem.tag == 'blockquote':
            return ''  # To avoid empty line before quote text
        return elem.text or ''

    def format_as_quote(quote_text: str) -> str:
        # Prefix every non-empty line with "> ", markdown-quote style.
        quote_text_list = filter(None, quote_text.split('\n'))  # Remove empty lines
        quote_text = '\n'.join(map(lambda x: "> "+x, quote_text_list))
        quote_text += '\n'
        return quote_text

    def render_olist(ol: lxml.html.HtmlElement) -> str:
        # Render an <ol> as numbered lines, honoring the `start` attribute
        # and indenting nested lists by their depth.
        items = []
        counter = int(ol.get('start')) if ol.get('start') else 1
        nested_levels = len(list(ol.iterancestors('ol')))
        indent = ('\n' + '  ' * nested_levels) if nested_levels else ''

        for li in ol:
            items.append(indent + str(counter) + '. ' + process(li).strip())
            counter += 1

        return '\n'.join(items)

    def process(elem: lxml.html.HtmlElement) -> str:
        # Recursively flatten an element (and its children) to plain text.
        plain_text = ''
        if elem.tag == 'ol':
            plain_text = render_olist(elem)
        else:
            plain_text = get_text(elem)
            sub_text = ''
            for child in elem:
                sub_text += process(child)
            if elem.tag == 'blockquote':
                sub_text = format_as_quote(sub_text)
            plain_text += sub_text
        # Text that follows this element's closing tag belongs to the parent.
        plain_text += elem.tail or ""
        return plain_text

    if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
        return "***REDACTED***"

    elem = lxml.html.fromstring(rendered_content)
    plain_text = process(elem)
    return plain_text
|
2017-10-07 00:18:07 +02:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def truncate_content(content: str) -> Tuple[str, bool]:
    """Trim content to at most 200 characters, returning (text, truncated).

    We use unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead of
    three dots as this saves two extra characters for textual content.
    This function will need to be updated to handle unicode combining
    characters and tags when we start supporting them.
    """
    if len(content) > 200:
        return content[:200] + "…", True
    return content, False
|
2017-10-07 00:12:52 +02:00
|
|
|
|
2019-04-20 17:21:26 +02:00
|
|
|
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
    '''Common fields for all notification payloads.'''
    # These will let the app support logging into multiple realms and servers.
    return {
        'server': settings.EXTERNAL_HOST,
        'realm_id': user_profile.realm.id,
        'realm_uri': user_profile.realm.uri,
        'user_id': user_profile.id,
    }
|
|
|
|
|
2019-04-20 17:21:26 +02:00
|
|
|
def get_message_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    '''Common fields for `message` payloads, for all platforms.'''
    data = get_base_payload(user_profile)

    # `sender_id` is preferred, but some existing versions use `sender_email`.
    data['sender_id'] = message.sender.id
    data['sender_email'] = message.sender.email

    recipient_type = message.recipient.type
    if recipient_type == Recipient.STREAM:
        data['recipient_type'] = "stream"
        data['stream'] = get_display_recipient(message.recipient)
        data['topic'] = message.topic_name()
    elif recipient_type == Recipient.HUDDLE:
        data['recipient_type'] = "private"
        data['pm_users'] = huddle_users(message.recipient.id)
    else:  # Recipient.PERSONAL
        data['recipient_type'] = "private"

    return data
|
|
|
|
|
2018-10-04 23:31:04 +02:00
|
|
|
def get_apns_alert_title(message: Message) -> str:
    """
    On an iOS notification, this is the first bolded line.
    """
    if message.recipient.type == Recipient.HUDDLE:
        recipients = get_display_recipient(message.recipient)
        assert isinstance(recipients, list)
        names = sorted(r['full_name'] for r in recipients)
        return ', '.join(names)
    if message.is_stream_message():
        return "#%s > %s" % (get_display_recipient(message.recipient), message.topic_name(),)
    # For personal PMs, we just show the sender name.
    return message.sender.full_name
|
|
|
|
|
|
|
|
def get_apns_alert_subtitle(message: Message) -> str:
    """
    On an iOS notification, this is the second bolded line.
    """
    trigger = message.trigger
    if trigger == "mentioned":
        return _("%(full_name)s mentioned you:") % dict(full_name=message.sender.full_name)
    if trigger == "wildcard_mentioned":
        return _("%(full_name)s mentioned everyone:") % dict(full_name=message.sender.full_name)
    if message.recipient.type == Recipient.PERSONAL:
        return ""
    # For group PMs, or regular messages to a stream, just use a colon to indicate this is the sender.
    return message.sender.full_name + ":"
|
|
|
|
|
2019-02-14 00:54:56 +01:00
|
|
|
def get_message_payload_apns(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    '''A `message` payload for iOS, via APNs.'''
    zulip_data = get_message_payload(user_profile, message)
    zulip_data.update({
        'message_ids': [message.id],
    })

    # Take only the text; we don't need the `truncated` flag for APNs.
    # Avoid unpacking into `_`, which would shadow the ugettext alias
    # imported at module scope.
    content = truncate_content(get_mobile_push_content(message.rendered_content))[0]
    apns_data = {
        'alert': {
            'title': get_apns_alert_title(message),
            'subtitle': get_apns_alert_subtitle(message),
            'body': content,
        },
        'sound': 'default',
        'badge': 0,  # TODO: set badge count in a better way
        'custom': {'zulip': zulip_data},
    }
    return apns_data
|
2017-05-08 13:21:56 +02:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_message_payload_gcm(
        user_profile: UserProfile, message: Message,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    '''A `message` payload + options, for Android via GCM/FCM.'''
    data = get_message_payload(user_profile, message)
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data['event'] = 'message'
    data['alert'] = get_gcm_alert(message)
    data['zulip_message_id'] = message.id  # message_id is reserved for CCS
    data['time'] = datetime_to_timestamp(message.date_sent)
    data['content'] = content
    data['content_truncated'] = truncated
    data['sender_full_name'] = message.sender.full_name
    data['sender_avatar_url'] = absolute_avatar_url(message.sender)

    gcm_options = {'priority': 'high'}
    return data, gcm_options
|
2017-05-08 13:26:01 +02:00
|
|
|
|
2019-02-14 01:02:39 +01:00
|
|
|
def get_remove_payload_gcm(
        user_profile: UserProfile, message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    '''A `remove` payload + options, for Android via GCM/FCM.'''
    gcm_payload = get_base_payload(user_profile)
    gcm_payload['event'] = 'remove'
    gcm_payload['zulip_message_ids'] = ','.join(str(message_id) for message_id in message_ids)
    # Older clients (all clients older than 2019-02-13) look only at
    # `zulip_message_id` and ignore `zulip_message_ids`.  Do our best.
    gcm_payload['zulip_message_id'] = message_ids[0]

    gcm_options = {'priority': 'normal'}
    return gcm_payload, gcm_options
|
2019-02-14 00:54:56 +01:00
|
|
|
|
2019-02-14 01:08:51 +01:00
|
|
|
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that had previously had a
    mobile push notification executed is read.  This triggers a `remove`
    push event to the mobile app, when the message is read on the
    server, to remove the message from the notification.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    # Filter down to the messages this user actually has a UserMessage for.
    message_ids = bulk_access_messages_expect_usermessage(user_profile_id, message_ids)
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, message_ids)

    if uses_notification_bouncer():
        # `{}` is the (empty) APNs payload: `remove` events are
        # currently only delivered to Android clients.
        send_notifications_to_bouncer(user_profile_id,
                                      {},
                                      gcm_payload,
                                      gcm_options)
    else:
        android_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.GCM))
        if android_devices:
            send_android_push_notification(android_devices, gcm_payload, gcm_options)

    # Clear the active_mobile_push_notification flag on the affected
    # rows so we don't try to revoke these notifications again.
    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(
        flags=F('flags').bitand(
            ~UserMessage.flags.active_mobile_push_notification))
|
2018-08-02 01:29:06 +02:00
|
|
|
|
2017-03-06 03:05:04 +01:00
|
|
|
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications (APNs and GCM/FCM) for one message.

    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return

    # NOTE(review): the original fetched user_profile a second time
    # here with get_user_profile_by_id; that duplicate DB query has
    # been removed, since nothing can have changed the object above.
    try:
        (message, user_message) = access_message(user_profile, missed_message['message_id'])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message['message_id']).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        # Use the module-level `logger` for consistency with the other
        # log calls in this function (was `logging.error`).
        logger.error(
            "Unexpected message access failure handling push notifications: %s %s",
            user_profile.id, missed_message['message_id'],
        )
        return

    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        #
        # TODO: It feels like this is already handled when things are
        # put in the queue; maybe we should centralize this logic with
        # the `zerver/tornado/event_queue.py` logic?
        if user_message.flags.read:
            return

        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message['message_id'], user_profile_id,
            )
            return

    message.trigger = missed_message['trigger']

    apns_payload = get_message_payload_apns(user_profile, message)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message)
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)

    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id,
                                      apns_payload,
                                      gcm_payload,
                                      gcm_options)
        return

    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))

    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))

    if apple_devices:
        send_apple_push_notification(user_profile.id, apple_devices,
                                     apns_payload)

    if android_devices:
        send_android_push_notification(android_devices, gcm_payload, gcm_options)
|