from __future__ import print_function

from typing import cast, Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Text

from confirmation.models import Confirmation
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template import loader
from django.utils.timezone import now as timezone_now
from zerver.decorator import statsd_increment
from zerver.lib.queue import queue_json_publish
from zerver.models import (
    Recipient,
    ScheduledJob,
    UserMessage,
    Stream,
    get_display_recipient,
    UserProfile,
    get_user_profile_by_email,
    get_user_profile_by_id,
    receives_offline_notifications,
    get_context_for_message,
    Message,
    Realm,
)

import datetime
import re
import subprocess
import ujson
from six.moves import urllib
from collections import defaultdict

def unsubscribe_token(user_profile):
    # type: (UserProfile) -> Text
    # Leverage the Django confirmations framework to generate and track unique
    # unsubscription tokens.
    return Confirmation.objects.get_link_for_object(user_profile).split("/")[-1]

def one_click_unsubscribe_link(user_profile, endpoint):
    # type: (UserProfile, Text) -> Text
    """
    Generate a unique link that a logged-out user can visit to unsubscribe from
    Zulip e-mails without having to first log in.
    """
    token = unsubscribe_token(user_profile)
    resource_path = "accounts/unsubscribe/%s/%s" % (endpoint, token)
    return "%s/%s" % (user_profile.realm.uri.rstrip("/"), resource_path)

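# Illustrative note (added here, not part of the original module): for a realm
# hosted at https://chat.example.com, one_click_unsubscribe_link(user_profile,
# "welcome") would produce a URL of the form
# "https://chat.example.com/accounts/unsubscribe/welcome/<confirmation-token>".
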
def hash_util_encode(string):
    # type: (Text) -> Text
    # Do the same encoding operation as hash_util.encodeHashComponent on the
    # frontend.
    # `safe` has a default value of "/", but we want those encoded, too.
    return urllib.parse.quote(
        string.encode("utf-8"), safe=b"").replace(".", "%2E").replace("%", ".")

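# Illustrative sketch (added, not part of the original module): assuming the
# behavior above, hash_util_encode(u"dev help") percent-encodes the space to
# "dev%20help" and then swaps "%" for ".", yielding u"dev.20help", matching
# what the frontend's encodeHashComponent produces.
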
def pm_narrow_url(realm, participants):
    # type: (Realm, List[Text]) -> Text
    participants.sort()
    base_url = u"%s/#narrow/pm-with/" % (realm.uri,)
    return base_url + hash_util_encode(",".join(participants))

def stream_narrow_url(realm, stream):
    # type: (Realm, Text) -> Text
    base_url = u"%s/#narrow/stream/" % (realm.uri,)
    return base_url + hash_util_encode(stream)

def topic_narrow_url(realm, stream, topic):
    # type: (Realm, Text, Text) -> Text
    base_url = u"%s/#narrow/stream/" % (realm.uri,)
    return u"%s%s/topic/%s" % (base_url, hash_util_encode(stream),
                               hash_util_encode(topic))

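# Illustrative sketch (added, not part of the original module): for a realm at
# https://chat.example.com, topic_narrow_url(realm, u"dev help", u"testing")
# would yield "https://chat.example.com/#narrow/stream/dev.20help/topic/testing".
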
def build_message_list(user_profile, messages):
    # type: (UserProfile, List[Message]) -> List[Dict[str, Any]]
    """
    Builds the message list object for the missed message email template.
    The messages are collapsed into per-recipient and per-sender blocks, like
    our web interface.
    """
    messages_to_render = []  # type: List[Dict[str, Any]]

    def sender_string(message):
        # type: (Message) -> Text
        if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
            return message.sender.full_name
        else:
            return ''

    def relative_to_full_url(content):
        # type: (Text) -> Text
        # URLs for uploaded content are of the form
        # "/user_uploads/abc.png". Make them full paths.
        #
        # There's a small chance of colliding with non-Zulip URLs containing
        # "/user_uploads/", but we don't have much information about the
        # structure of the URL to leverage.
        content = re.sub(
            r"/user_uploads/(\S*)",
            user_profile.realm.uri + r"/user_uploads/\1", content)

        # Our proxying of user-uploaded images seems to break inline images in
        # HTML emails, so scrub the image but leave the link.
        content = re.sub(
            r"<img src=(\S+)/user_uploads/(\S+)>", "", content)

        # URLs for emoji are of the form
        # "static/generated/emoji/images/emoji/snowflake.png".
        content = re.sub(
            r"/static/generated/emoji/images/emoji/",
            user_profile.realm.uri + r"/static/generated/emoji/images/emoji/",
            content)

        return content

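    # Illustrative sketch (added, not part of the original module): with
    # realm.uri set to https://chat.example.com, relative_to_full_url turns
    # "/user_uploads/abc.png" into
    # "https://chat.example.com/user_uploads/abc.png" and prefixes emoji paths
    # the same way.
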
    def fix_plaintext_image_urls(content):
        # type: (Text) -> Text
        # Replace image URLs in plaintext content of the form
        #     [image name](image url)
        # with a simple hyperlink.
        return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)

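    # Illustrative sketch (added, not part of the original module):
    # "[pic.png](https://example.com/user_uploads/pic.png)" collapses to just
    # "https://example.com/user_uploads/pic.png" in the plaintext part.
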
    def fix_emoji_sizes(html):
        # type: (Text) -> Text
        return html.replace(' class="emoji"', ' height="20px"')

    def build_message_payload(message):
        # type: (Message) -> Dict[str, Text]
        plain = message.content
        plain = fix_plaintext_image_urls(plain)
        plain = relative_to_full_url(plain)

        html = message.rendered_content
        html = relative_to_full_url(html)
        html = fix_emoji_sizes(html)

        return {'plain': plain, 'html': html}

    def build_sender_payload(message):
        # type: (Message) -> Dict[str, Any]
        sender = sender_string(message)
        return {'sender': sender,
                'content': [build_message_payload(message)]}

    def message_header(user_profile, message):
        # type: (UserProfile, Message) -> Dict[str, Any]
        disp_recipient = get_display_recipient(message.recipient)
        if message.recipient.type == Recipient.PERSONAL:
            header = u"You and %s" % (message.sender.full_name,)
            html_link = pm_narrow_url(user_profile.realm, [message.sender.email])
            header_html = u"<a style='color: #ffffff;' href='%s'>%s</a>" % (html_link, header)
        elif message.recipient.type == Recipient.HUDDLE:
            assert not isinstance(disp_recipient, Text)
            other_recipients = [r['full_name'] for r in disp_recipient
                                if r['email'] != user_profile.email]
            header = u"You and %s" % (", ".join(other_recipients),)
            html_link = pm_narrow_url(user_profile.realm, [r["email"] for r in disp_recipient
                                                           if r["email"] != user_profile.email])
            header_html = u"<a style='color: #ffffff;' href='%s'>%s</a>" % (html_link, header)
        else:
            assert isinstance(disp_recipient, Text)
            header = u"%s > %s" % (disp_recipient, message.topic_name())
            stream_link = stream_narrow_url(user_profile.realm, disp_recipient)
            topic_link = topic_narrow_url(user_profile.realm, disp_recipient, message.subject)
            header_html = u"<a href='%s'>%s</a> > <a href='%s'>%s</a>" % (
                stream_link, disp_recipient, topic_link, message.subject)
        return {"plain": header,
                "html": header_html,
                "stream_message": message.recipient.type_name() == "stream"}

    # Collapse message list to
    # [
    #    {
    #       "header": {
    #                      "plain":"header",
    #                      "html":"htmlheader"
    #                  }
    #       "senders":[
    #          {
    #             "sender":"sender_name",
    #             "content":[
    #                         {
    #                           "plain":"content",
    #                           "html":"htmlcontent"
    #                         }
    #                         {
    #                           "plain":"content",
    #                           "html":"htmlcontent"
    #                         }
    #                        ]
    #          }
    #       ]
    #    },
    # ]

    messages.sort(key=lambda message: message.pub_date)

    for message in messages:
        header = message_header(user_profile, message)

        # If we want to collapse into the previous recipient block
        if len(messages_to_render) > 0 and messages_to_render[-1]['header'] == header:
            sender = sender_string(message)
            sender_block = messages_to_render[-1]['senders']

            # Same message sender, collapse again
            if sender_block[-1]['sender'] == sender:
                sender_block[-1]['content'].append(build_message_payload(message))
            else:
                # Start a new sender block
                sender_block.append(build_sender_payload(message))
        else:
            # New recipient and sender block
            recipient_block = {'header': header,
                               'senders': [build_sender_payload(message)]}

            messages_to_render.append(recipient_block)

    return messages_to_render

@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(user_profile, missed_messages, message_count):
    # type: (UserProfile, List[Message], int) -> None
    """
    Send a reminder email to a user if she's missed some PMs by being offline.

    The email will have its reply-to address set to a limited-use email
    address that will send a Zulip message to the correct recipient. This
    allows the user to respond to missed PMs, huddles, and @-mentions directly
    from the email.

    `user_profile` is the user to send the reminder to.
    `missed_messages` is a list of Message objects to remind about; they should
                      all have the same recipient and subject.
    """
    from zerver.context_processors import common_context
    # The user has disabled missed-message email notifications.
    if not user_profile.enable_offline_email_notifications:
        return

    recipients = set((msg.recipient_id, msg.subject) for msg in missed_messages)
    if len(recipients) != 1:
        raise ValueError(
            'All missed_messages must have the same recipient and subject %r' %
            recipients
        )

    unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
    template_payload = common_context(user_profile)
    template_payload.update({
        'name': user_profile.full_name,
        'messages': build_message_list(user_profile, missed_messages),
        'message_count': message_count,
        'mention': missed_messages[0].recipient.type == Recipient.STREAM,
        'unsubscribe_link': unsubscribe_link,
    })

    # Users can reply to a missed-message email to send a message back to
    # Zulip only if the email gateway (email mirroring integration) is
    # enabled; otherwise, display a warning in the template.
    if settings.EMAIL_GATEWAY_PATTERN:
        template_payload.update({
            'reply_warning': False,
            'reply_to_zulip': True,
        })
    else:
        template_payload.update({
            'reply_warning': True,
            'reply_to_zulip': False,
        })

    headers = {}
    from zerver.lib.email_mirror import create_missed_message_address
    address = create_missed_message_address(user_profile, missed_messages[0])
    headers['Reply-To'] = address

    senders = set(m.sender.full_name for m in missed_messages)
    sender_str = ", ".join(senders)
    plural_messages = 's' if len(missed_messages) > 1 else ''

    subject = "Missed Zulip%s from %s" % (plural_messages, sender_str)
    from_email = 'Zulip <%s>' % (settings.NOREPLY_EMAIL_ADDRESS,)
    if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
        # If this setting is enabled, you can reply to the Zulip
        # missed message emails directly back to the original sender.
        # However, one must ensure the Zulip server is in the SPF
        # record for the domain, or there will be spam/deliverability
        # problems.
        headers['Sender'] = from_email
        sender = missed_messages[0].sender
        from_email = '"%s" <%s>' % (sender_str, sender.email)
        template_payload.update({
            'reply_warning': False,
            'reply_to_zulip': False,
        })

    text_content = loader.render_to_string('zerver/emails/missed_message.txt', template_payload)
    html_content = loader.render_to_string('zerver/emails/missed_message.html', template_payload)
    email_content = {
        'subject': subject,
        'text_content': text_content,
        'html_content': html_content,
        'from_email': from_email,
        'to': [user_profile.email],
        'headers': headers
    }
    queue_json_publish("missedmessage_email_senders", email_content, send_missedmessage_email)

    user_profile.last_reminder = timezone_now()
    user_profile.save(update_fields=['last_reminder'])

def send_missedmessage_email(data):
    # type: (Mapping[str, Any]) -> None
    msg = EmailMultiAlternatives(
        data.get('subject'),
        data.get('text_content'),
        data.get('from_email'),
        data.get('to'),
        headers=data.get('headers'))
    msg.attach_alternative(data.get('html_content'), "text/html")
    msg.send()

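# Illustrative note (added, not part of the original module): `data` here is the
# dict queued by do_send_missedmessage_events_reply_in_zulip above, so it is
# expected to carry 'subject', 'text_content', 'html_content', 'from_email',
# 'to', and 'headers'.
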
def handle_missedmessage_emails(user_profile_id, missed_email_events):
    # type: (int, Iterable[Dict[str, Any]]) -> None
    message_ids = [event.get('message_id') for event in missed_email_events]

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_notifications(user_profile):
        return

    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    messages_by_recipient_subject = defaultdict(list)  # type: Dict[Tuple[int, Text], List[Message]]
    for msg in messages:
        messages_by_recipient_subject[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_recipient_subject = {
        recipient_subject: len(msgs)
        for recipient_subject, msgs in messages_by_recipient_subject.items()
    }

    for msg_list in messages_by_recipient_subject.values():
        msg = min(msg_list, key=lambda msg: msg.pub_date)
        if msg.recipient.type == Recipient.STREAM:
            msg_list.extend(get_context_for_message(msg))

    # Send one email per (recipient, subject) pair
    for recipient_subject, msg_list in messages_by_recipient_subject.items():
        unique_messages = {m.id: m for m in msg_list}
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_recipient_subject[recipient_subject],
        )

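# Illustrative note (added, not part of the original module): if a user missed
# two messages in stream "errors" under topic "deploy" plus one PM, the grouping
# above yields two buckets and therefore two reminder emails; stream buckets are
# also padded with surrounding context via get_context_for_message.
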
def clear_followup_emails_queue(email):
    # type: (Text) -> None
    """
    Clear out queued emails that would otherwise be sent to a specific email address.
    """
    items = ScheduledJob.objects.filter(type=ScheduledJob.EMAIL, filter_string__iexact=email)
    items.delete()

def log_digest_event(msg):
    # type: (Text) -> None
    import logging
    logging.basicConfig(filename=settings.DIGEST_LOG_PATH, level=logging.INFO)
    logging.info(msg)

def send_future_email(template_prefix, recipients, sender=None, context={},
                      delay=datetime.timedelta(0), tags=[]):
    # type: (str, List[Dict[str, Any]], Optional[Dict[str, Text]], Dict[str, Any], datetime.timedelta, Iterable[Text]) -> None
    subject = loader.render_to_string(template_prefix + '.subject', context).strip()
    email_text = loader.render_to_string(template_prefix + '.txt', context)
    email_html = loader.render_to_string(template_prefix + '.html', context)

    # SMTP mail delivery implementation
    if sender is None:
        # This will likely be overridden by settings.DEFAULT_FROM_EMAIL
        sender = {'email': settings.NOREPLY_EMAIL_ADDRESS, 'name': 'Zulip'}
    for recipient in recipients:
        email_fields = {'email_html': email_html,
                        'email_subject': subject,
                        'email_text': email_text,
                        'recipient_email': recipient.get('email'),
                        'recipient_name': recipient.get('name'),
                        'sender_email': sender['email'],
                        'sender_name': sender['name']}
        ScheduledJob.objects.create(type=ScheduledJob.EMAIL, filter_string=recipient.get('email'),
                                    data=ujson.dumps(email_fields),
                                    scheduled_timestamp=timezone_now() + delay)

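# Illustrative sketch (added, not part of the original module): a call such as
#     send_future_email("zerver/emails/followup_day2",
#                       [{'email': 'user@example.com', 'name': 'Full Name'}],
#                       delay=datetime.timedelta(days=1))
# renders the .subject/.txt/.html templates immediately and stores one
# ScheduledJob row per recipient with scheduled_timestamp = now + delay.
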
def enqueue_welcome_emails(email, name):
    # type: (Text, Text) -> None
    from zerver.context_processors import common_context
    if settings.WELCOME_EMAIL_SENDER is not None:
        sender = settings.WELCOME_EMAIL_SENDER  # type: Dict[str, Text]
    else:
        sender = {'email': settings.ZULIP_ADMINISTRATOR, 'name': 'Zulip'}

    user_profile = get_user_profile_by_email(email)
    unsubscribe_link = one_click_unsubscribe_link(user_profile, "welcome")
    context = common_context(user_profile)
    context.update({
        'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS,
        'unsubscribe_link': unsubscribe_link
    })
    send_future_email(
        "zerver/emails/followup_day1", [{'email': email, 'name': name}],
        sender=sender, context=context, delay=datetime.timedelta(hours=1),
        tags=["followup-emails"])
    send_future_email(
        "zerver/emails/followup_day2", [{'email': email, 'name': name}],
        sender=sender, context=context, delay=datetime.timedelta(days=1),
        tags=["followup-emails"])

def convert_html_to_markdown(html):
    # type: (Text) -> Text
    # On Linux, the tool installs as html2markdown, and there's a command called
    # html2text that does something totally different. On OSX, the tool installs
    # as html2text.
    commands = ["html2markdown", "html2text"]

    for command in commands:
        try:
            # A body width of 0 means do not try to wrap the text for us.
            p = subprocess.Popen(
                [command, "--body-width=0"], stdout=subprocess.PIPE,
                stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
            break
        except OSError:
            continue

    markdown = p.communicate(input=html.encode('utf-8'))[0].decode('utf-8').strip()
    # We want images to get linked and inline previewed, but html2text will turn
    # them into links of the form `![](http://foo.com/image.png)`, which is
    # ugly. Run a regex over the resulting description, turning links of the
    # form `![](http://foo.com/image.png?12345)` into
    # `[image.png](http://foo.com/image.png)`.
    return re.sub(u"!\\[\\]\\((\\S*)/(\\S*)\\?(\\S*)\\)",
                  u"[\\2](\\1/\\2)", markdown)