from __future__ import absolute_import

import random
from typing import Any, Dict, List, Optional, SupportsInt, Text

from zerver.models import PushDeviceToken, UserProfile
from zerver.models import get_user_profile_by_id
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.decorator import statsd_increment
from zerver.lib.utils import generate_random_token
from zerver.lib.redis_utils import get_redis_client

from apns import APNs, Frame, Payload, SENT_BUFFER_QTY
from gcm import GCM

from django.conf import settings

import base64
import binascii
import logging
import os
import time
from functools import partial

# APNS error codes
ERROR_CODES = {
    1: 'Processing error',
    2: 'Missing device token',  # looks like the token was empty?
    3: 'Missing topic',  # topic is encoded in the certificate; looks like the certificate is wrong. bail out.
    4: 'Missing payload',  # bail out, it looks like our message was empty
    5: 'Invalid token size',  # current token has the wrong size, skip it and retry
    6: 'Invalid topic size',  # cannot happen, we do not send a topic, it is part of the certificate. bail out.
    7: 'Invalid payload size',  # our payload is probably too big. bail out.
    8: 'Invalid token',  # our device token is broken, skip it and retry
    10: 'Shutdown',  # server went into maintenance mode. reported token is the last success, skip it and retry.
    None: 'Unknown',  # unknown error, we retry anyway, but the caller should limit the number of retries
}

redis_client = get_redis_client()

# Maintain a long-lived APNs connection to avoid having to re-SSL-handshake
# for each request
connection = None

# We maintain an additional APNS connection for pushing to Zulip apps that have been signed
# by the Dropbox certs (and have an app id of com.dropbox.zulip)
dbx_connection = None

# `APNS_SANDBOX` should be a bool
assert isinstance(settings.APNS_SANDBOX, bool)

def get_apns_key(identifier):
    # type: (SupportsInt) -> str
    return 'apns:' + str(identifier)

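# How the redis bookkeeping in this module fits together (a summary, not extra
# runtime logic): for every notification we queue, APNsMessage stashes a hash
# like {'token': <hex token>, 'user_id': <id>} under the key returned by
# get_apns_key() -- e.g. get_apns_key(1234) == 'apns:1234' -- so that
# response_listener() can map an asynchronous APNs error back to the device
# and user it belongs to.
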
class APNsMessage(object):
    def __init__(self, user, tokens, alert=None, badge=None, sound=None,
                 category=None, **kwargs):
        # type: (UserProfile, List[Text], Text, int, Text, Text, **Any) -> None
        self.frame = Frame()
        self.tokens = tokens
        expiry = int(time.time() + 24 * 3600)
        priority = 10
        payload = Payload(alert=alert, badge=badge, sound=sound,
                          category=category, custom=kwargs)
        for token in tokens:
            data = {'token': token, 'user_id': user.id}
            identifier = random.getrandbits(32)
            key = get_apns_key(identifier)
            redis_client.hmset(key, data)
            # `expiry` is an absolute unix timestamp, so use expireat() (not
            # expire(), which expects a relative TTL in seconds) to make the
            # redis entry live as long as the notification itself.
            redis_client.expireat(key, expiry)
            self.frame.add_item(token, payload, identifier, expiry, priority)

    def get_frame(self):
        # type: () -> Frame
        return self.frame

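# Illustrative use of APNsMessage (a sketch with made-up values;
# send_apple_push_notification below is the real caller):
#
#     message = APNsMessage(user_profile, ['<hex device token>'],
#                           alert='New Zulip message', badge=1)
#     frame = message.get_frame()  # one notification per token, 24h expiry
#
# Any extra keyword arguments end up in the payload's `custom` field.
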
def response_listener(error_response):
    # type: (Dict[str, SupportsInt]) -> None
    identifier = error_response['identifier']
    key = get_apns_key(identifier)
    if not redis_client.exists(key):
        logging.warning("APNs key, {}, doesn't exist.".format(key))
        return

    code = error_response['status']
    assert isinstance(code, int)

    # Fall back to the 'Unknown' entry for codes we do not recognize.
    errmsg = ERROR_CODES.get(code, ERROR_CODES[None])
    data = redis_client.hgetall(key)
    token = data['token']
    user = get_user_profile_by_id(int(data['user_id']))
    b64_token = hex_to_b64(token)

    logging.warning("APNS: Failed to deliver APNS notification to %s, reason: %s" % (b64_token, errmsg))
    if code == 8:
        # Invalid token; remove it from our database.
        logging.warning("APNS: Removing token from database due to above failure")
        try:
            PushDeviceToken.objects.get(user=user, token=b64_token).delete()
        except PushDeviceToken.DoesNotExist:
            pass

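# For reference: the apns package's enhanced gateway invokes response_listener
# with a dict of roughly this shape (inferred from how it is read above):
#
#     {'status': 8, 'identifier': 1234}
#
# where 'status' is one of the ERROR_CODES keys and 'identifier' is the random
# 32-bit id attached to the notification in APNsMessage.__init__.
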
def get_connection(cert_file, key_file):
    # type: (str, str) -> APNs
    connection = APNs(use_sandbox=settings.APNS_SANDBOX,
                      cert_file=cert_file,
                      key_file=key_file,
                      enhanced=True)
    connection.gateway_server.register_response_listener(response_listener)
    return connection

if settings.APNS_CERT_FILE is not None and os.path.exists(settings.APNS_CERT_FILE):
    connection = get_connection(settings.APNS_CERT_FILE,
                                settings.APNS_KEY_FILE)

if settings.DBX_APNS_CERT_FILE is not None and os.path.exists(settings.DBX_APNS_CERT_FILE):
    dbx_connection = get_connection(settings.DBX_APNS_CERT_FILE,
                                    settings.DBX_APNS_KEY_FILE)

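# Note on the two connections above: both are built by get_connection(), so
# they share response_listener, and invalid tokens for either app are pruned
# in one place. The listener only receives anything because the connections
# are created with enhanced=True, which (as we use the apns package here)
# selects the enhanced binary format that carries per-notification error
# responses.
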
def num_push_devices_for_user(user_profile, kind=None):
    # type: (UserProfile, Optional[int]) -> int
    if kind is None:
        return PushDeviceToken.objects.filter(user=user_profile).count()
    else:
        return PushDeviceToken.objects.filter(user=user_profile, kind=kind).count()

# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data):
    # type: (bytes) -> Text
    return binascii.hexlify(base64.b64decode(data)).decode('utf-8')

def hex_to_b64(data):
    # type: (Text) -> bytes
    return base64.b64encode(binascii.unhexlify(data.encode('utf-8')))

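# Illustrative round trip for the two helpers above (values are made up):
#
#     b64_to_hex(b'3q2+7w==') == 'deadbeef'
#     hex_to_b64('deadbeef') == b'3q2+7w=='
#
# i.e. hex_to_b64(b64_to_hex(token)) returns the original base64 token.
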
def _do_push_to_apns_service(user, message, apns_connection):
    # type: (UserProfile, APNsMessage, APNs) -> None
    if not apns_connection:
        logging.info("Not delivering APNS message %s to user %s due to missing connection" % (message, user))
        return

    frame = message.get_frame()
    apns_connection.gateway_server.send_notification_multiple(frame)

# Send a push notification to the desired clients.
# extra_data is a set of keyword arguments that will be passed through to the
# mobile app.
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user, alert, **extra_data):
    # type: (UserProfile, Text, **Any) -> None
    if not connection and not dbx_connection:
        logging.error("Attempting to send push notification, but no connection was found. "
                      "This may be because we could not find the APNS Certificate file.")
        return

    devices = PushDeviceToken.objects.filter(user=user, kind=PushDeviceToken.APNS)
    # Plain b64 token kept for debugging purposes
    tokens = [(b64_to_hex(device.token), device.ios_app_id, device.token)
              for device in devices]

    for conn, app_ids in [
            (connection, [settings.ZULIP_IOS_APP_ID, None]),
            (dbx_connection, [settings.DBX_IOS_APP_ID])]:

        valid_devices = [device for device in tokens if device[1] in app_ids]
        valid_tokens = [device[0] for device in valid_devices]
        if valid_tokens:
            logging.info("APNS: Sending apple push notification "
                         "to devices: %s" % (valid_devices,))
            zulip_message = APNsMessage(user, valid_tokens, alert=alert, **extra_data)
            _do_push_to_apns_service(user, zulip_message, conn)
        else:
            logging.warning("APNS: Not sending notification because "
                            "tokens didn't match devices: %s" % (app_ids,))

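# Illustrative call (a sketch; the alert text is made up, and extra keyword
# arguments flow through APNsMessage into the APNs payload):
#
#     send_apple_push_notification(user_profile, "New message from Hamlet",
#                                  badge=1)
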
# NOTE: This is used by the check_apns_tokens manage.py command. Do not call it otherwise, as the
# feedback() call can take up to 15s
def check_apns_feedback():
    # type: () -> None
    feedback_connection = APNs(use_sandbox=settings.APNS_SANDBOX,
                               cert_file=settings.APNS_CERT_FILE,
                               key_file=settings.APNS_KEY_FILE)

    for token, since in feedback_connection.feedback_server.items():
        since_date = timestamp_to_datetime(since)
        logging.info("Found unavailable token %s, unavailable since %s" % (token, since_date))

        PushDeviceToken.objects.filter(token=hex_to_b64(token), last_updated__lt=since_date,
                                       kind=PushDeviceToken.APNS).delete()
    logging.info("Finished checking feedback for stale tokens")

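# For reference: as used above, feedback_server.items() yields
# (hex token, unix timestamp) pairs for tokens APNs reports as unavailable;
# a token is only deleted if it has not been re-registered since that
# timestamp (last_updated__lt=since_date).
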
if settings.ANDROID_GCM_API_KEY:
    gcm = GCM(settings.ANDROID_GCM_API_KEY)
else:
    gcm = None

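# send_android_push_notification() below handles a python-gcm response of
# roughly this shape (a sketch based on the keys read below, with made-up
# registration and message ids):
#
#     {'success':   {'reg_id_1': 'message_id_1'},
#      'canonical': {'old_reg_id': 'new_reg_id'},
#      'errors':    {'NotRegistered': ['reg_id_2']}}
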
@statsd_increment("android_push_notification")
def send_android_push_notification(user, data):
    # type: (UserProfile, Dict[str, Any]) -> None
    if not gcm:
        logging.error("Attempting to send a GCM push notification, but no API key was configured")
        return

    reg_ids = [device.token for device in
               PushDeviceToken.objects.filter(user=user, kind=PushDeviceToken.GCM)]

    res = gcm.json_request(registration_ids=reg_ids, data=data)

    if res and 'success' in res:
        for reg_id, msg_id in res['success'].items():
            logging.info("GCM: Sent %s as %s" % (reg_id, msg_id))

    # res['canonical'] will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: http://developer.android.com/google/gcm/adv.html#canonical
    if res and 'canonical' in res:
        for reg_id, new_reg_id in res['canonical'].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logging.warning("GCM: Got canonical ref but it already matches our ID %s!" % (reg_id,))
            elif not PushDeviceToken.objects.filter(token=new_reg_id, kind=PushDeviceToken.GCM).count():
                # This case shouldn't happen; any time we get a canonical ref it should have been
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logging.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating." %
                    (new_reg_id, reg_id))
                PushDeviceToken.objects.filter(
                    token=reg_id, kind=PushDeviceToken.GCM).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logging.info("GCM: Got canonical ref %s, dropping %s" % (new_reg_id, reg_id))

                PushDeviceToken.objects.filter(token=reg_id, kind=PushDeviceToken.GCM).delete()

    if res and 'errors' in res:
        for error, reg_ids in res['errors'].items():
            if error in ['NotRegistered', 'InvalidRegistration']:
                for reg_id in reg_ids:
                    logging.info("GCM: Removing %s" % (reg_id,))

                    device = PushDeviceToken.objects.get(token=reg_id, kind=PushDeviceToken.GCM)
                    device.delete()
            else:
                for reg_id in reg_ids:
                    logging.warning("GCM: Delivery to %s failed: %s" % (reg_id, error))

    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497