from __future__ import absolute_import

import random
import requests
from typing import Any, Dict, List, Optional, SupportsInt, Text

from version import ZULIP_VERSION
from zerver.models import PushDeviceToken, Message, Recipient, UserProfile, \
    UserMessage, get_display_recipient, receives_offline_notifications, \
    receives_online_notifications
from zerver.models import get_user_profile_by_id
from zerver.lib.avatar import avatar_url
from zerver.lib.request import JsonableError
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.decorator import statsd_increment
from zerver.lib.utils import generate_random_token
from zerver.lib.redis_utils import get_redis_client

from apns import APNs, Frame, Payload, SENT_BUFFER_QTY
from gcm import GCM

from django.conf import settings
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from six.moves import urllib

import base64
import binascii
import logging
import os
import time
import ujson
from functools import partial

# APNS error codes
ERROR_CODES = {
    1: 'Processing error',
    2: 'Missing device token', # looks like the token was empty?
    3: 'Missing topic', # topic is encoded in the certificate; looks like the certificate is wrong. bail out.
    4: 'Missing payload', # bail out, our message looks empty
    5: 'Invalid token size', # current token has the wrong size, skip it and retry
    6: 'Invalid topic size', # cannot happen, we do not send a topic, it is part of the certificate. bail out.
    7: 'Invalid payload size', # our payload is probably too big. bail out.
    8: 'Invalid token', # our device token is broken, skip it and retry
    10: 'Shutdown', # server went into maintenance mode. reported token is the last success, skip it and retry.
    None: 'Unknown', # unknown error, for sure we try again, but the user should limit the number of retries
}
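
# Example lookup (illustrative): ERROR_CODES[8] == 'Invalid token'; code 8 is the
# one case response_listener() below treats specially by deleting the stale token.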

redis_client = get_redis_client()

# Maintain a long-lived connection to avoid having to re-SSL-handshake
# for each request
connection = None

# We maintain an additional APNS connection for pushing to Zulip apps that have been signed
# by the Dropbox certs (and have an app id of com.dropbox.zulip)
dbx_connection = None

# `APNS_SANDBOX` should be a bool
assert isinstance(settings.APNS_SANDBOX, bool)

def get_apns_key(identifier):
    # type: (SupportsInt) -> str
    return 'apns:' + str(identifier)
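
# Example (illustrative): get_apns_key(1234) == 'apns:1234'. APNsMessage below uses
# these keys to stash per-notification metadata in Redis for response_listener().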

class APNsMessage(object):
    def __init__(self, user_id, tokens, alert=None, badge=None, sound=None,
                 category=None, **kwargs):
        # type: (int, List[Text], Text, int, Text, Text, **Any) -> None
        self.frame = Frame()
        self.tokens = tokens
        expiry = int(time.time() + 24 * 3600)
        priority = 10
        payload = Payload(alert=alert, badge=badge, sound=sound,
                          category=category, custom=kwargs)
        for token in tokens:
            data = {'token': token, 'user_id': user_id}
            identifier = random.getrandbits(32)
            key = get_apns_key(identifier)
            redis_client.hmset(key, data)
            redis_client.expire(key, expiry)
            self.frame.add_item(token, payload, identifier, expiry, priority)

    def get_frame(self):
        # type: () -> Frame
        return self.frame
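
# Illustrative usage (a sketch; send_apple_push_notification() below is the real
# caller): APNsMessage(user_id, ['<hex token>'], alert='New private message',
# badge=1).get_frame() returns the Frame handed to the APNs gateway connection.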

def response_listener(error_response):
    # type: (Dict[str, SupportsInt]) -> None
    identifier = error_response['identifier']
    key = get_apns_key(identifier)
    if not redis_client.exists(key):
        logging.warn("APNs key, {}, doesn't exist.".format(key))
        return

    code = error_response['status']
    assert isinstance(code, int)

    errmsg = ERROR_CODES[code]
    data = redis_client.hgetall(key)
    token = data['token']
    user = get_user_profile_by_id(int(data['user_id']))
    b64_token = hex_to_b64(token)

    logging.warn("APNS: Failed to deliver APNS notification to %s, reason: %s" % (b64_token, errmsg))
    if code == 8:
        # Invalid token; remove it from our database
        logging.warn("APNS: Removing token from database due to above failure")
        try:
            PushDeviceToken.objects.get(user=user, token=b64_token).delete()
        except PushDeviceToken.DoesNotExist:
            pass
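
# The error_response dicts passed to response_listener() look roughly like
# {'identifier': <32-bit id>, 'status': <APNs error code>}; this shape is an
# assumption based on the keys read above, and the exact payload comes from the
# apns library.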

def get_connection(cert_file, key_file):
    # type: (str, str) -> APNs
    connection = APNs(use_sandbox=settings.APNS_SANDBOX,
                      cert_file=cert_file,
                      key_file=key_file,
                      enhanced=True)
    connection.gateway_server.register_response_listener(response_listener)
    return connection

if settings.APNS_CERT_FILE is not None and os.path.exists(settings.APNS_CERT_FILE):
    connection = get_connection(settings.APNS_CERT_FILE,
                                settings.APNS_KEY_FILE)

if settings.DBX_APNS_CERT_FILE is not None and os.path.exists(settings.DBX_APNS_CERT_FILE):
    dbx_connection = get_connection(settings.DBX_APNS_CERT_FILE,
                                    settings.DBX_APNS_KEY_FILE)

def num_push_devices_for_user(user_profile, kind=None):
    # type: (UserProfile, Optional[int]) -> int
    if kind is None:
        return PushDeviceToken.objects.filter(user=user_profile).count()
    else:
        return PushDeviceToken.objects.filter(user=user_profile, kind=kind).count()

# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data):
    # type: (bytes) -> Text
    return binascii.hexlify(base64.b64decode(data)).decode('utf-8')

def hex_to_b64(data):
    # type: (Text) -> bytes
    return base64.b64encode(binascii.unhexlify(data.encode('utf-8')))
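
# Illustrative round trip (example values only):
#   b64_to_hex(b'3q2+7w==') == 'deadbeef' and hex_to_b64('deadbeef') == b'3q2+7w=='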

def _do_push_to_apns_service(user_id, message, apns_connection):
    # type: (int, APNsMessage, APNs) -> None
    if not apns_connection:
        logging.info("Not delivering APNS message %s to user %s due to missing connection" % (message, user_id))
        return

    frame = message.get_frame()
    apns_connection.gateway_server.send_notification_multiple(frame)

def send_apple_push_notification_to_user(user, alert, **extra_data):
    # type: (UserProfile, Text, **Any) -> None
    devices = PushDeviceToken.objects.filter(user=user, kind=PushDeviceToken.APNS)
    send_apple_push_notification(user.id, devices, zulip=dict(alert=alert),
                                 **extra_data)
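
# Illustrative call (a sketch, not taken from the original callers):
#   send_apple_push_notification_to_user(user_profile, "New private message from Hamlet", badge=1)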

# Send a push notification to the desired clients
# extra_data is a dict that will be passed to the
# mobile app
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id, devices, **extra_data):
    # type: (int, List[PushDeviceToken], **Any) -> None
    if not connection and not dbx_connection:
        logging.error("Attempting to send push notification, but no connection was found. "
                      "This may be because we could not find the APNS Certificate file.")
        return

    # Plain b64 token kept for debugging purposes
    tokens = [(b64_to_hex(device.token), device.ios_app_id, device.token)
              for device in devices]

    for conn, app_ids in [
            (connection, [settings.ZULIP_IOS_APP_ID, None]),
            (dbx_connection, [settings.DBX_IOS_APP_ID])]:

        valid_devices = [device for device in tokens if device[1] in app_ids]
        valid_tokens = [device[0] for device in valid_devices]
        if valid_tokens:
            logging.info("APNS: Sending apple push notification "
                         "to devices: %s" % (valid_devices,))
            zulip_message = APNsMessage(user_id, valid_tokens,
                                        alert=extra_data['zulip']['alert'],
                                        **extra_data)
            _do_push_to_apns_service(user_id, zulip_message, conn)
        else:
            logging.warn("APNS: Not sending notification because "
                         "tokens didn't match devices: %s" % (app_ids,))
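
# For reference, handle_push_notification() below invokes this as
#   send_apple_push_notification(user_profile.id, apple_devices,
#                                badge=1, zulip={'alert': ..., 'message_ids': [...]})
# so extra_data is expected to carry a 'zulip' dict with at least an 'alert' key.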

# NOTE: This is used by the check_apns_tokens manage.py command. Do not call it otherwise, as the
# feedback() call can take up to 15s
def check_apns_feedback():
    # type: () -> None
    feedback_connection = APNs(use_sandbox=settings.APNS_SANDBOX,
                               cert_file=settings.APNS_CERT_FILE,
                               key_file=settings.APNS_KEY_FILE)

    for token, since in feedback_connection.feedback_server.items():
        since_date = timestamp_to_datetime(since)
        logging.info("Found unavailable token %s, unavailable since %s" % (token, since_date))

        PushDeviceToken.objects.filter(token=hex_to_b64(token), last_updated__lt=since_date,
                                       kind=PushDeviceToken.APNS).delete()
    logging.info("Finished checking feedback for stale tokens")


if settings.ANDROID_GCM_API_KEY:
    gcm = GCM(settings.ANDROID_GCM_API_KEY)
else:
    gcm = None

def send_android_push_notification_to_user(user_profile, data):
    # type: (UserProfile, Dict[str, Any]) -> None
    devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                  kind=PushDeviceToken.GCM))
    send_android_push_notification(devices, data)

@statsd_increment("android_push_notification")
def send_android_push_notification(devices, data):
    # type: (List[PushDeviceToken], Dict[str, Any]) -> None
    if not gcm:
        logging.error("Attempting to send a GCM push notification, but no API key was configured")
        return
    reg_ids = [device.token for device in devices]

    res = gcm.json_request(registration_ids=reg_ids, data=data)
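    # Shape of res assumed by the handling below (an illustrative sketch; the exact
    # structure comes from python-gcm's json_request()):
    #   {'success':   {<reg_id>: <message_id>, ...},
    #    'canonical': {<old_reg_id>: <new_reg_id>, ...},
    #    'errors':    {<error_name>: [<reg_id>, ...], ...}}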

    if res and 'success' in res:
        for reg_id, msg_id in res['success'].items():
            logging.info("GCM: Sent %s as %s" % (reg_id, msg_id))

    # res.canonical will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: http://developer.android.com/google/gcm/adv.html#canonical
    if 'canonical' in res:
        for reg_id, new_reg_id in res['canonical'].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logging.warning("GCM: Got canonical ref but it already matches our ID %s!" % (reg_id,))
            elif not PushDeviceToken.objects.filter(token=new_reg_id, kind=PushDeviceToken.GCM).count():
                # This case shouldn't happen; any time we get a canonical ref it should have been
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logging.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating." %
                    (new_reg_id, reg_id))
                PushDeviceToken.objects.filter(
                    token=reg_id, kind=PushDeviceToken.GCM).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logging.info("GCM: Got canonical ref %s, dropping %s" % (new_reg_id, reg_id))

                PushDeviceToken.objects.filter(token=reg_id, kind=PushDeviceToken.GCM).delete()

    if 'errors' in res:
        for error, reg_ids in res['errors'].items():
            if error in ['NotRegistered', 'InvalidRegistration']:
                for reg_id in reg_ids:
                    logging.info("GCM: Removing %s" % (reg_id,))

                    device = PushDeviceToken.objects.get(token=reg_id, kind=PushDeviceToken.GCM)
                    device.delete()
            else:
                for reg_id in reg_ids:
                    logging.warning("GCM: Delivery to %s failed: %s" % (reg_id, error))

    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
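
# For reference, handle_push_notification() below builds the data payload passed to
# send_android_push_notification(); it includes 'event', 'alert', 'zulip_message_id',
# 'content', and sender fields, plus recipient details for stream vs. private messages.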

@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id, missed_message):
    # type: (int, Dict[str, Any]) -> None
    try:
        user_profile = get_user_profile_by_id(user_profile_id)
        if not (receives_offline_notifications(user_profile) or receives_online_notifications(user_profile)):
            return

        umessage = UserMessage.objects.get(user_profile=user_profile,
                                           message__id=missed_message['message_id'])
        message = umessage.message
        if umessage.flags.read:
            return
        sender_str = message.sender.full_name

        android_devices = [device for device in
                           PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM)]
        apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                            kind=PushDeviceToken.APNS))

        if apple_devices or android_devices:
            # TODO: set badge count in a better way
            # Determine what alert string to display based on the missed messages
            if message.recipient.type == Recipient.HUDDLE:
                alert = "New private group message from %s" % (sender_str,)
            elif message.recipient.type == Recipient.PERSONAL:
                alert = "New private message from %s" % (sender_str,)
            elif message.recipient.type == Recipient.STREAM:
                alert = "New mention from %s" % (sender_str,)
            else:
                alert = "New Zulip mentions and private messages from %s" % (sender_str,)

            if apple_devices:
                apple_extra_data = {
                    'alert': alert,
                    'message_ids': [message.id],
                }
                send_apple_push_notification(user_profile.id, apple_devices,
                                             badge=1, zulip=apple_extra_data)

            if android_devices:
                content = message.content
                content_truncated = (len(content) > 200)
                if content_truncated:
                    content = content[:200] + "..."

                android_data = {
                    'user': user_profile.email,
                    'event': 'message',
                    'alert': alert,
                    'zulip_message_id': message.id, # message_id is reserved for CCS
                    'time': datetime_to_timestamp(message.pub_date),
                    'content': content,
                    'content_truncated': content_truncated,
                    'sender_email': message.sender.email,
                    'sender_full_name': message.sender.full_name,
                    'sender_avatar_url': avatar_url(message.sender),
                }

                if message.recipient.type == Recipient.STREAM:
                    android_data['recipient_type'] = "stream"
                    android_data['stream'] = get_display_recipient(message.recipient)
                    android_data['topic'] = message.subject
                elif message.recipient.type in (Recipient.HUDDLE, Recipient.PERSONAL):
                    android_data['recipient_type'] = "private"

                send_android_push_notification(android_devices, android_data)

    except UserMessage.DoesNotExist:
        logging.error("Could not find UserMessage with message_id %s" % (missed_message['message_id'],))
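
# A minimal missed_message event (illustrative; 'message_id' is the only key this
# function reads):
#   handle_push_notification(user_profile.id, {'message_id': message.id})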

def add_push_device_token(user_profile, token_str, kind, ios_app_id=None):
    # type: (UserProfile, str, int, Optional[str]) -> None

    # If we're sending things to the push notification bouncer
    # register this user with them here
    if settings.PUSH_NOTIFICATION_BOUNCER_URL is not None:
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }

        if kind == PushDeviceToken.APNS:
            post_data['ios_app_id'] = ios_app_id

        send_to_push_bouncer('POST', 'register', post_data)
        return

    # If another user was previously logged in on the same device and didn't
    # properly log out, the token will still be registered to the wrong account
    PushDeviceToken.objects.filter(token=token_str).exclude(user=user_profile).delete()

    # Overwrite with the latest value
    token, created = PushDeviceToken.objects.get_or_create(user=user_profile,
                                                           token=token_str,
                                                           defaults=dict(
                                                               kind=kind,
                                                               ios_app_id=ios_app_id))
    if not created:
        token.last_updated = timezone_now()
        token.save(update_fields=['last_updated'])
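
# Illustrative registration calls (a sketch, not taken from the original callers):
#   add_push_device_token(user_profile, token, PushDeviceToken.GCM)
#   add_push_device_token(user_profile, token, PushDeviceToken.APNS,
#                         ios_app_id=settings.ZULIP_IOS_APP_ID)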

def remove_push_device_token(user_profile, token_str, kind):
    # type: (UserProfile, str, int) -> None

    # If we're sending things to the push notification bouncer
    # unregister this user's token with them here
    if settings.PUSH_NOTIFICATION_BOUNCER_URL is not None:
        # TODO: Make this a remove item
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }
        send_to_push_bouncer("POST", "unregister", post_data)
        return

    try:
        token = PushDeviceToken.objects.get(token=token_str, kind=kind)
        token.delete()
    except PushDeviceToken.DoesNotExist:
        raise JsonableError(_("Token does not exist"))


def send_to_push_bouncer(method, endpoint, post_data):
    # type: (str, str, Dict[str, Any]) -> None
    url = urllib.parse.urljoin(settings.PUSH_NOTIFICATION_BOUNCER_URL,
                               '/api/v1/remotes/push/' + endpoint)
    api_auth = requests.auth.HTTPBasicAuth(settings.ZULIP_ORG_ID,
                                           settings.ZULIP_ORG_KEY)

    res = requests.request(method,
                           url,
                           data=ujson.dumps(post_data),
                           auth=api_auth,
                           timeout=30,
                           verify=True,
                           headers={"User-agent": "ZulipServer/%s" % (ZULIP_VERSION,)})

    # TODO: Think more carefully about how this error handling should work.
    if res.status_code >= 500:
        raise JsonableError(_("Error received from push notification bouncer"))
    elif res.status_code >= 400:
        try:
            msg = ujson.loads(res.content)['msg']
        except Exception:
            raise JsonableError(_("Error received from push notification bouncer"))
        raise JsonableError(msg)
    elif res.status_code != 200:
        raise JsonableError(_("Error received from push notification bouncer"))

    # If we don't throw an exception, it's a successful bounce!