2016-06-12 00:47:19 +02:00
|
|
|
|
2017-03-03 19:01:52 +01:00
|
|
|
from typing import cast, Any, Dict, Iterable, List, Mapping, Optional, Sequence, Tuple, Text
|
2016-01-25 23:42:16 +01:00
|
|
|
|
2017-07-08 04:38:13 +02:00
|
|
|
from confirmation.models import Confirmation, create_confirmation_link
|
2014-01-24 22:29:17 +01:00
|
|
|
from django.conf import settings
|
|
|
|
from django.template import loader
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2017-05-04 02:06:31 +02:00
|
|
|
from zerver.decorator import statsd_increment
|
2017-11-26 20:45:09 +01:00
|
|
|
from zerver.lib.send_email import send_future_email, FromAddress
|
2017-03-06 08:45:59 +01:00
|
|
|
from zerver.lib.queue import queue_json_publish
|
2016-06-03 22:59:19 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Recipient,
|
2017-07-02 21:10:41 +02:00
|
|
|
ScheduledEmail,
|
2016-06-03 22:59:19 +02:00
|
|
|
UserMessage,
|
|
|
|
Stream,
|
|
|
|
get_display_recipient,
|
|
|
|
UserProfile,
|
2017-07-11 12:13:35 +02:00
|
|
|
get_user,
|
2016-06-03 22:59:19 +02:00
|
|
|
get_user_profile_by_id,
|
2017-11-29 02:49:11 +01:00
|
|
|
receives_offline_email_notifications,
|
2016-06-03 22:59:19 +02:00
|
|
|
get_context_for_message,
|
2016-08-14 09:16:25 +02:00
|
|
|
Message,
|
|
|
|
Realm,
|
2016-06-03 22:59:19 +02:00
|
|
|
)
|
2014-01-24 22:29:17 +01:00
|
|
|
|
|
|
|
import datetime
|
2017-06-26 19:43:32 +02:00
|
|
|
from email.utils import formataddr
|
2017-10-07 17:27:16 +02:00
|
|
|
import lxml.html
|
2014-01-24 22:29:17 +01:00
|
|
|
import re
|
|
|
|
import subprocess
|
|
|
|
import ujson
|
2017-11-05 05:30:31 +01:00
|
|
|
import urllib
|
2014-07-15 21:03:51 +02:00
|
|
|
from collections import defaultdict
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def one_click_unsubscribe_link(user_profile: UserProfile, email_type: str) -> str:
    """Return a login-free unsubscribe URL for the given email category.

    The link embeds a one-time confirmation token, so a logged-out user
    can opt out of Zulip e-mails of kind `email_type` without having to
    authenticate first.
    """
    return create_confirmation_link(
        user_profile,
        user_profile.realm.host,
        Confirmation.UNSUBSCRIBE,
        url_args={'email_type': email_type},
    )
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def hash_util_encode(string: Text) -> Text:
    """Percent-encode a string the way the web frontend's
    hash_util.encodeHashComponent does.

    Everything is quoted (quote()'s `safe` defaults to "/", which we also
    want escaped), and then "." and "%" are swapped so the result is safe
    to embed in a URL #fragment.
    """
    quoted = urllib.parse.quote(string.encode("utf-8"), safe=b"")
    return quoted.replace(".", "%2E").replace("%", ".")
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2018-02-15 21:02:47 +01:00
|
|
|
def encode_stream(stream_id: int, stream_name: Text) -> Text:
    """Encode a stream for use in a URL, e.g. 99-Verona.

    Spaces in the stream name become dashes before hash-encoding.
    """
    dashed_name = stream_name.replace(' ', '-')
    return '%s-%s' % (stream_id, hash_util_encode(dashed_name))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def pm_narrow_url(realm: Realm, participants: List[Text]) -> Text:
    """Return an absolute #narrow URL for the PM thread with `participants`.

    `participants` is a list of email addresses; they are sorted to produce
    a canonical URL.  Previously this called `participants.sort()`, mutating
    the caller's list as a side effect; we now sort a copy via `sorted()`,
    which yields the identical URL without the surprise mutation.
    """
    base_url = "%s/#narrow/pm-with/" % (realm.uri,)
    return base_url + hash_util_encode(",".join(sorted(participants)))
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def stream_narrow_url(realm: Realm, stream: Text) -> Text:
    """Return an absolute #narrow URL for all messages on `stream`."""
    return "%s/#narrow/stream/%s" % (realm.uri, hash_util_encode(stream))
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def topic_narrow_url(realm: Realm, stream: Text, topic: Text) -> Text:
    """Return an absolute #narrow URL for a single topic within a stream."""
    encoded_stream = hash_util_encode(stream)
    encoded_topic = hash_util_encode(topic)
    return "%s/#narrow/stream/%s/topic/%s" % (realm.uri, encoded_stream, encoded_topic)
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def relative_to_full_url(base_url: Text, content: Text) -> Text:
    """Rewrite an HTML fragment so it renders correctly in an email client.

    - Converts relative URLs (including #narrow fragment links) into
      absolute URLs rooted at `base_url`.
    - Removes inline-image preview containers, which cannot be displayed
      in email.

    Returns the transformed HTML as a string.
    """
    # Convert relative URLs to absolute URLs.
    fragment = lxml.html.fromstring(content)

    # We handle narrow URLs separately because of two reasons:
    # 1: 'lxml' seems to be having an issue in dealing with URLs that begin
    # `#` due to which it doesn't add a `/` before joining the base_url to
    # the relative URL.
    # 2: We also need to update the title attribute in the narrow links which
    # is not possible with `make_links_absolute()`.
    for link_info in fragment.iterlinks():
        elem, attrib, link, pos = link_info
        match = re.match("/?#narrow/", link)
        if match is not None:
            link = re.sub(r"^/?#narrow/", base_url + "/#narrow/", link)
            elem.set(attrib, link)
            # Only manually linked narrow URLs have title attribute set.
            if elem.get('title') is not None:
                elem.set('title', link)

    # Inline images can't be displayed in the emails as the request
    # from the mail server can't be authenticated because it has no
    # user_profile object linked to it. So we scrub the inline image
    # container.
    inline_image_containers = fragment.find_class("message_inline_image")
    for container in inline_image_containers:
        container.drop_tree()

    # The previous block handles most inline images, but for messages
    # where the entire markdown input was just the URL of an image
    # (i.e. the entire body is a message_inline_image object), the
    # entire message body will be that image element; here, we need a
    # more drastic edit to the content: replace the whole fragment with
    # a plain paragraph linking to the image.
    if fragment.get('class') == 'message_inline_image':
        content_template = '<p><a href="%s" target="_blank" title="%s">%s</a></p>'
        image_link = fragment.find('a').get('href')
        image_title = fragment.find('a').get('title')
        new_content = (content_template % (image_link, image_title, image_link))
        fragment = lxml.html.fromstring(new_content)

    # Absolutize every remaining (non-#narrow) relative link in one pass.
    fragment.make_links_absolute(base_url)
    content = lxml.html.tostring(fragment).decode("utf-8")

    return content
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fix_emojis(content: Text, base_url: Text, emojiset: Text) -> Text:
    """Replace Zulip emoji <span>s in rendered HTML with <img> tags.

    Email clients cannot render the sprite-sheet-based emoji spans Zulip
    uses on the web, so each `span.emoji` is swapped for an <img> whose
    absolute URL points at the 64px image of that emoji in `emojiset`.
    Realm emoji (already <img> tags with class "emoji") just get a fixed
    height and have their class removed.
    """
    def make_emoji_img_elem(emoji_span_elem: Any) -> Dict[str, Any]:
        # Convert the emoji spans to img tags.  (NOTE(review): despite the
        # annotation, this returns an lxml element, not a dict.)
        classes = emoji_span_elem.get('class')
        # Fix: the pattern must be a raw string; '\S' in a plain string is
        # an invalid escape sequence (a DeprecationWarning, and eventually
        # an error, in newer Python versions).
        match = re.search(r'emoji-(?P<emoji_code>\S+)', classes)
        emoji_code = match.group('emoji_code')
        emoji_name = emoji_span_elem.get('title')
        alt_code = emoji_span_elem.text
        image_url = base_url + '/static/generated/emoji/images-%(emojiset)s-64/%(emoji_code)s.png' % {
            'emojiset': emojiset,
            'emoji_code': emoji_code
        }
        img_elem = lxml.html.fromstring(
            '<img alt="%(alt_code)s" src="%(image_url)s" title="%(title)s">' % {
                'alt_code': alt_code,
                'image_url': image_url,
                'title': emoji_name,
            })
        img_elem.set('style', 'height: 20px;')
        # Preserve any text that followed the original span.
        img_elem.tail = emoji_span_elem.tail
        return img_elem

    fragment = lxml.html.fromstring(content)
    for elem in fragment.cssselect('span.emoji'):
        parent = elem.getparent()
        img_elem = make_emoji_img_elem(elem)
        parent.replace(elem, img_elem)

    # Realm emoji are already <img class="emoji"> elements; normalize
    # their size and drop the class (email CSS won't apply anyway).
    for realm_emoji in fragment.cssselect('.emoji'):
        del realm_emoji.attrib['class']
        realm_emoji.set('style', 'height: 20px;')

    content = lxml.html.tostring(fragment).decode('utf-8')
    return content
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def build_message_list(user_profile: UserProfile, messages: List[Message]) -> List[Dict[str, Any]]:
    """
    Builds the message list object for the missed message email template.
    The messages are collapsed into per-recipient and per-sender blocks, like
    our web interface.

    NOTE: sorts `messages` in place (by pub_date) as a side effect.
    """
    messages_to_render = []  # type: List[Dict[str, Any]]

    def sender_string(message: Message) -> Text:
        # Show a sender name only for stream/huddle messages; 1:1 PMs
        # have an implicit sender.
        if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
            return message.sender.full_name
        else:
            return ''

    def fix_plaintext_image_urls(content: Text) -> Text:
        # Replace image URLs in plaintext content of the form
        #     [image name](image url)
        # with a simple hyperlink.
        return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)

    def build_message_payload(message: Message) -> Dict[str, Text]:
        # Returns {'plain': ..., 'html': ...} renderings of one message.
        plain = message.content
        plain = fix_plaintext_image_urls(plain)
        # There's a small chance of colliding with non-Zulip URLs containing
        # "/user_uploads/", but we don't have much information about the
        # structure of the URL to leverage. We can't use `relative_to_full_url()`
        # function here because it uses a stricter regex which will not work for
        # plain text.
        plain = re.sub(
            r"/user_uploads/(\S*)",
            user_profile.realm.uri + r"/user_uploads/\1", plain)

        assert message.rendered_content is not None
        html = message.rendered_content
        html = relative_to_full_url(user_profile.realm.uri, html)
        html = fix_emojis(html, user_profile.realm.uri, user_profile.emojiset)

        return {'plain': plain, 'html': html}

    def build_sender_payload(message: Message) -> Dict[str, Any]:
        # A sender block: the sender's display string plus a list of
        # message payloads (which later messages may be appended to).
        sender = sender_string(message)
        return {'sender': sender,
                'content': [build_message_payload(message)]}

    def message_header(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
        # Builds the plain/HTML header describing where the message was
        # sent (PM partner, huddle participants, or stream > topic).
        disp_recipient = get_display_recipient(message.recipient)
        if message.recipient.type == Recipient.PERSONAL:
            header = "You and %s" % (message.sender.full_name,)
            html_link = pm_narrow_url(user_profile.realm, [message.sender.email])
            header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (html_link, header)
        elif message.recipient.type == Recipient.HUDDLE:
            # For huddles, get_display_recipient returns a list of dicts.
            assert not isinstance(disp_recipient, Text)
            other_recipients = [r['full_name'] for r in disp_recipient
                                if r['email'] != user_profile.email]
            header = "You and %s" % (", ".join(other_recipients),)
            html_link = pm_narrow_url(user_profile.realm, [r["email"] for r in disp_recipient
                                                           if r["email"] != user_profile.email])
            header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (html_link, header)
        else:
            # Stream message: disp_recipient is the stream name.
            assert isinstance(disp_recipient, Text)
            header = "%s > %s" % (disp_recipient, message.topic_name())
            stream_link = stream_narrow_url(user_profile.realm, disp_recipient)
            topic_link = topic_narrow_url(user_profile.realm, disp_recipient, message.subject)
            header_html = "<a href='%s'>%s</a> > <a href='%s'>%s</a>" % (
                stream_link, disp_recipient, topic_link, message.subject)
        return {"plain": header,
                "html": header_html,
                "stream_message": message.recipient.type_name() == "stream"}

    # # Collapse message list to
    # [
    #    {
    #       "header": {
    #                      "plain":"header",
    #                      "html":"htmlheader"
    #                  }
    #       "senders":[
    #          {
    #             "sender":"sender_name",
    #             "content":[
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #             ]
    #          }
    #       ]
    #    },
    # ]

    messages.sort(key=lambda message: message.pub_date)

    for message in messages:
        header = message_header(user_profile, message)

        # If we want to collapse into the previous recipient block
        if len(messages_to_render) > 0 and messages_to_render[-1]['header'] == header:
            sender = sender_string(message)
            sender_block = messages_to_render[-1]['senders']

            # Same message sender, collapse again
            if sender_block[-1]['sender'] == sender:
                sender_block[-1]['content'].append(build_message_payload(message))
            else:
                # Start a new sender block
                sender_block.append(build_sender_payload(message))
        else:
            # New recipient and sender block
            recipient_block = {'header': header,
                               'senders': [build_sender_payload(message)]}

            messages_to_render.append(recipient_block)

    return messages_to_render
|
|
|
|
|
2014-08-11 14:15:16 +02:00
|
|
|
@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
                                                missed_messages: List[Message],
                                                message_count: int) -> None:
    """
    Send a reminder email to a user if she's missed some PMs by being offline.

    The email will have its reply to address set to a limited used email
    address that will send a zulip message to the correct recipient. This
    allows the user to respond to missed PMs, huddles, and @-mentions directly
    from the email.

    `user_profile` is the user to send the reminder to
    `missed_messages` is a list of Message objects to remind about they should
    all have the same recipient and subject
    """
    # Imported locally to avoid a circular import with context_processors.
    from zerver.context_processors import common_context
    # Disabled missedmessage emails internally
    if not user_profile.enable_offline_email_notifications:
        return

    recipients = set((msg.recipient_id, msg.subject) for msg in missed_messages)
    if len(recipients) != 1:
        raise ValueError(
            'All missed_messages must have the same recipient and subject %r' %
            recipients
        )

    unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
    context = common_context(user_profile)
    context.update({
        'name': user_profile.full_name,
        'messages': build_message_list(user_profile, missed_messages),
        'message_count': message_count,
        'mention': missed_messages[0].is_stream_message(),
        'unsubscribe_link': unsubscribe_link,
        'realm_name_in_notifications': user_profile.realm_name_in_notifications,
    })

    # If this setting (email mirroring integration) is enabled, only then
    # can users reply to email to send message to Zulip. Thus, one must
    # ensure to display warning in the template.
    if settings.EMAIL_GATEWAY_PATTERN:
        context.update({
            'reply_warning': False,
            'reply_to_zulip': True,
        })
    else:
        context.update({
            'reply_warning': True,
            'reply_to_zulip': False,
        })

    # Imported locally to avoid a circular import with email_mirror.
    from zerver.lib.email_mirror import create_missed_message_address
    reply_to_address = create_missed_message_address(user_profile, missed_messages[0])
    if reply_to_address == FromAddress.NOREPLY:
        reply_to_name = None
    else:
        reply_to_name = "Zulip"

    senders = list(set(m.sender for m in missed_messages))
    if (missed_messages[0].recipient.type == Recipient.HUDDLE):
        display_recipient = get_display_recipient(missed_messages[0].recipient)
        # Make sure that this is a list of strings, not a string.
        assert not isinstance(display_recipient, Text)
        other_recipients = [r['full_name'] for r in display_recipient
                            if r['id'] != user_profile.id]
        context.update({'group_pm': True})
        # Build a human-readable description of the huddle participants,
        # truncated when there are more than three others.
        if len(other_recipients) == 2:
            huddle_display_name = "%s" % (" and ".join(other_recipients))
            context.update({'huddle_display_name': huddle_display_name})
        elif len(other_recipients) == 3:
            huddle_display_name = "%s, %s, and %s" % (
                other_recipients[0], other_recipients[1], other_recipients[2])
            context.update({'huddle_display_name': huddle_display_name})
        else:
            huddle_display_name = "%s, and %s others" % (
                ', '.join(other_recipients[:2]), len(other_recipients) - 2)
            context.update({'huddle_display_name': huddle_display_name})
    elif (missed_messages[0].recipient.type == Recipient.PERSONAL):
        context.update({'private_message': True})
    else:
        # Keep only the senders who actually mentioned the user
        #
        # TODO: When we add wildcard mentions that send emails, add
        # them to the filter here.
        senders = list(set(m.sender for m in missed_messages if
                           UserMessage.objects.filter(message=m, user_profile=user_profile,
                                                      flags=UserMessage.flags.mentioned).exists()))
        context.update({'at_mention': True})

    context.update({
        'sender_str': ", ".join(sender.full_name for sender in senders),
        'realm_str': user_profile.realm.name,
    })

    from_name = "Zulip missed messages"  # type: Text
    from_address = FromAddress.NOREPLY
    if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
        # If this setting is enabled, you can reply to the Zulip
        # missed message emails directly back to the original sender.
        # However, one must ensure the Zulip server is in the SPF
        # record for the domain, or there will be spam/deliverability
        # problems.
        sender = senders[0]
        from_name, from_address = (sender.full_name, sender.email)
        context.update({
            'reply_warning': False,
            'reply_to_zulip': False,
        })

    # Hand the fully-built email off to the email-sending queue worker.
    email_dict = {
        'template_prefix': 'zerver/emails/missed_message',
        'to_user_id': user_profile.id,
        'from_name': from_name,
        'from_address': from_address,
        'reply_to_email': formataddr((reply_to_name, reply_to_address)),
        'context': context}
    queue_json_publish("email_senders", email_dict)

    # Record that we sent a reminder, so we can rate-limit future ones.
    user_profile.last_reminder = timezone_now()
    user_profile.save(update_fields=['last_reminder'])
|
|
|
|
|
2017-11-29 02:49:11 +01:00
|
|
|
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    """Group a batch of missed-message events for one user and send one
    reminder email per conversation.

    A "conversation" is (recipient, sender) for 1:1 PMs and
    (recipient, topic) for everything else.
    """
    message_ids = [event.get('message_id') for event in missed_email_events]

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    unread = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                    id__in=message_ids,
                                    usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [message for message in unread if message.content != "(deleted)"]
    if not messages:
        return

    # Bucket messages by conversation.
    messages_by_bucket = defaultdict(list)  # type: Dict[Tuple[int, Text], List[Message]]
    for message in messages:
        if message.recipient.type == Recipient.PERSONAL:
            # For PM's group using (recipient, sender).
            key = (message.recipient_id, message.sender_id)
        else:
            key = (message.recipient_id, message.topic_name())
        messages_by_bucket[key].append(message)

    # Record the true unread count per conversation before the lists are
    # padded with surrounding context below.
    unread_count_by_bucket = {
        key: len(bucket)
        for key, bucket in messages_by_bucket.items()
    }

    # For stream conversations, pull in context around the earliest
    # unread message so the email reads naturally.
    for bucket in messages_by_bucket.values():
        earliest = min(bucket, key=lambda message: message.pub_date)
        if earliest.is_stream_message():
            bucket.extend(get_context_for_message(earliest))

    # Send an email per recipient subject pair, de-duplicating any
    # messages the context fetch re-added.
    for key, bucket in messages_by_bucket.items():
        unique_messages = {message.id: message for message in bucket}
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            unread_count_by_bucket[key],
        )
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def clear_scheduled_invitation_emails(email: str) -> None:
    """Unlike most scheduled emails, invitation emails don't have an
    existing user object to key off of, so we filter by address here
    (case-insensitively)."""
    ScheduledEmail.objects.filter(
        address__iexact=email,
        type=ScheduledEmail.INVITATION_REMINDER,
    ).delete()
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def clear_scheduled_emails(user_id: int, email_type: Optional[int]=None) -> None:
    """Delete a user's pending ScheduledEmail rows, optionally restricted
    to a single email type."""
    pending = ScheduledEmail.objects.filter(user_id=user_id)
    if email_type is not None:
        pending = pending.filter(type=email_type)
    pending.delete()
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def log_digest_event(msg: Text) -> None:
    """Append a line to the digest log file (settings.DIGEST_LOG_PATH)."""
    import logging
    # NOTE(review): basicConfig is presumably a no-op once the root logger
    # is configured, so the filename only takes effect on the first call —
    # confirm if this log ever needs to move at runtime.
    logging.basicConfig(filename=settings.DIGEST_LOG_PATH, level=logging.INFO)
    logging.info(msg)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def enqueue_welcome_emails(user: UserProfile) -> None:
    """Schedule the day-1 and day-2 onboarding ("followup") emails for a
    newly registered user."""
    # Imported locally to avoid a circular import with context_processors.
    from zerver.context_processors import common_context

    if settings.WELCOME_EMAIL_SENDER is not None:
        from_name = settings.WELCOME_EMAIL_SENDER['name']
        from_address = settings.WELCOME_EMAIL_SENDER['email']
    else:
        from_name = None
        from_address = FromAddress.SUPPORT

    unsubscribe_link = one_click_unsubscribe_link(user, "welcome")
    context = common_context(user)
    context.update({
        'unsubscribe_link': unsubscribe_link,
        'organization_setup_advice_link':
            user.realm.uri + '/help/getting-your-organization-started-with-zulip',
        'is_realm_admin': user.is_realm_admin,
    })
    # First followup goes out immediately; the second a day later.
    send_future_email(
        "zerver/emails/followup_day1", user.realm, to_user_id=user.id,
        from_name=from_name, from_address=from_address, context=context)
    send_future_email(
        "zerver/emails/followup_day2", user.realm, to_user_id=user.id,
        from_name=from_name, from_address=from_address, context=context,
        delay=datetime.timedelta(days=1))
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def convert_html_to_markdown(html: Text) -> Text:
    """Convert an HTML fragment to markdown via the html2text CLI tool.

    Raises OSError if no converter binary can be found on this system
    (previously this fell through the search loop and crashed with a
    confusing NameError on the unbound process variable).
    """
    # On Linux, the tool installs as html2markdown, and there's a command called
    # html2text that does something totally different. On OSX, the tool installs
    # as html2text.
    commands = ["html2markdown", "html2text"]

    for command in commands:
        try:
            # A body width of 0 means do not try to wrap the text for us.
            p = subprocess.Popen(
                [command, "--body-width=0"], stdout=subprocess.PIPE,
                stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
            break
        except OSError:
            continue
    else:
        # None of the candidate binaries could be launched.
        raise OSError("Could not find any of the HTML-to-markdown tools: %s"
                      % (", ".join(commands),))

    markdown = p.communicate(input=html.encode('utf-8'))[0].decode('utf-8').strip()
    # We want images to get linked and inline previewed, but html2text will turn
    # them into links of the form `![](http://foo.com/image.png)`, which is
    # ugly. Run a regex over the resulting description, turning links of the
    # form `![](http://foo.com/image.png?12345)` into
    # `[image.png](http://foo.com/image.png)`.
    return re.sub("!\\[\\]\\((\\S*)/(\\S*)\\?(\\S*)\\)",
                  "[\\2](\\1/\\2)", markdown)
|