2019-06-04 14:45:42 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union
|
2016-01-25 23:42:16 +01:00
|
|
|
|
2019-02-02 23:53:55 +01:00
|
|
|
from confirmation.models import one_click_unsubscribe_link
|
2014-01-24 22:29:17 +01:00
|
|
|
from django.conf import settings
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2018-11-14 12:46:56 +01:00
|
|
|
from django.contrib.auth import get_backends
|
|
|
|
from django_auth_ldap.backend import LDAPBackend
|
2018-07-26 20:19:45 +02:00
|
|
|
|
2017-05-04 02:06:31 +02:00
|
|
|
from zerver.decorator import statsd_increment
|
2018-07-26 20:19:45 +02:00
|
|
|
from zerver.lib.message import bulk_access_messages
|
2017-03-06 08:45:59 +01:00
|
|
|
from zerver.lib.queue import queue_json_publish
|
2018-07-26 20:19:45 +02:00
|
|
|
from zerver.lib.send_email import send_future_email, FromAddress
|
2018-11-12 15:51:37 +01:00
|
|
|
from zerver.lib.url_encoding import personal_narrow_url, huddle_narrow_url, \
|
2018-11-12 15:04:03 +01:00
|
|
|
stream_narrow_url, topic_narrow_url
|
2016-06-03 22:59:19 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Recipient,
|
|
|
|
UserMessage,
|
|
|
|
Stream,
|
|
|
|
get_display_recipient,
|
|
|
|
UserProfile,
|
|
|
|
get_user_profile_by_id,
|
2017-11-29 02:49:11 +01:00
|
|
|
receives_offline_email_notifications,
|
2016-06-03 22:59:19 +02:00
|
|
|
get_context_for_message,
|
2016-08-14 09:16:25 +02:00
|
|
|
Message,
|
2016-06-03 22:59:19 +02:00
|
|
|
)
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2019-02-02 23:53:55 +01:00
|
|
|
from datetime import timedelta
|
2017-06-26 19:43:32 +02:00
|
|
|
from email.utils import formataddr
|
2019-07-24 02:49:16 +02:00
|
|
|
import html2text
|
2018-05-17 15:03:48 +02:00
|
|
|
from lxml.cssselect import CSSSelector
|
2017-10-07 17:27:16 +02:00
|
|
|
import lxml.html
|
2014-01-24 22:29:17 +01:00
|
|
|
import re
|
2014-07-15 21:03:51 +02:00
|
|
|
from collections import defaultdict
|
2017-11-02 14:11:48 +01:00
|
|
|
import pytz
|
2019-07-11 13:04:11 +02:00
|
|
|
from bs4 import BeautifulSoup
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def relative_to_full_url(base_url: str, content: str) -> str:
    """Rewrite the relative URLs in a rendered-message HTML fragment so
    the result is self-contained for email clients.

    Also scrubs inline-image previews, which cannot be fetched by mail
    servers (their requests carry no authenticated user_profile).
    """
    fragment = lxml.html.fromstring(content)

    # Narrow URLs get special treatment, for two reasons:
    # 1: 'lxml' seems to have an issue with URLs that begin with `#`,
    #    where it doesn't add a `/` before joining base_url to the
    #    relative URL.
    # 2: We also need to update the title attribute on narrow links,
    #    which `make_links_absolute()` cannot do.
    for elem, attrib, link, pos in fragment.iterlinks():
        if re.match("/?#narrow/", link) is None:
            continue
        absolute_link = re.sub(r"^/?#narrow/", base_url + "/#narrow/", link)
        elem.set(attrib, absolute_link)
        # Only manually linked narrow URLs have a title attribute set.
        if elem.get('title') is not None:
            elem.set('title', absolute_link)

    # Drop every inline-image preview container; the mail server cannot
    # authenticate to fetch the image, so the preview would be broken.
    for container in fragment.find_class("message_inline_image"):
        container.drop_tree()

    # The loop above handles most inline images, but when the entire
    # markdown input was just an image URL, the root element itself IS
    # the message_inline_image container; replace the whole fragment
    # with a plain link to the image instead.
    if fragment.get('class') == 'message_inline_image':
        anchor = fragment.find('a')
        image_link = anchor.get('href')
        image_title = anchor.get('title')
        new_content = ('<p><a href="%s" target="_blank" title="%s">%s</a></p>'
                       % (image_link, image_title, image_link))
        fragment = lxml.html.fromstring(new_content)

    # Everything else (uploads, user links, ...) can be absolutized
    # mechanically.
    fragment.make_links_absolute(base_url)
    return lxml.html.tostring(fragment).decode("utf-8")
|
2018-05-11 01:40:23 +02:00
|
|
|
def fix_emojis(content: str, base_url: str, emojiset: str) -> str:
    """Rewrite emoji markup in rendered HTML so it displays in email.

    Unicode emoji spans become <img> tags pointing at the static PNG for
    the user's chosen emoji set; realm emoji keep their <img> but lose
    the CSS class (email has no stylesheet) in favor of an inline style.
    """
    def make_emoji_img_elem(emoji_span_elem: CSSSelector) -> Dict[str, Any]:
        """Build the replacement <img> for one <span class="emoji ...">."""
        css_classes = emoji_span_elem.get('class')
        emoji_match = re.search(r'emoji-(?P<emoji_code>\S+)', css_classes)
        # re.search can return None, but this helper is only called on
        # elements selected by 'span.emoji', which always carry an
        # emoji-<code> class; assert rather than silently skip.
        assert emoji_match is not None
        emoji_code = emoji_match.group('emoji_code')
        emoji_name = emoji_span_elem.get('title')
        alt_code = emoji_span_elem.text
        image_url = base_url + '/static/generated/emoji/images-%(emojiset)s-64/%(emoji_code)s.png' % {
            'emojiset': emojiset,
            'emoji_code': emoji_code
        }
        img = lxml.html.fromstring(
            '<img alt="%(alt_code)s" src="%(image_url)s" title="%(title)s">' % {
                'alt_code': alt_code,
                'image_url': image_url,
                'title': emoji_name,
            })
        img.set('style', 'height: 20px;')
        # Preserve any text that followed the original span.
        img.tail = emoji_span_elem.tail
        return img

    fragment = lxml.html.fromstring(content)

    # Swap each unicode-emoji span for its <img> equivalent.
    for span_elem in fragment.cssselect('span.emoji'):
        span_elem.getparent().replace(span_elem, make_emoji_img_elem(span_elem))

    # Remaining '.emoji' elements are realm emoji <img> tags: strip the
    # now-useless class and size them inline instead.
    for realm_emoji in fragment.cssselect('.emoji'):
        del realm_emoji.attrib['class']
        realm_emoji.set('style', 'height: 20px;')

    return lxml.html.tostring(fragment).decode('utf-8')
|
2017-11-05 11:15:10 +01:00
|
|
|
def build_message_list(user_profile: UserProfile, messages: List[Message]) -> List[Dict[str, Any]]:
    """
    Builds the message list object for the missed message email template.
    The messages are collapsed into per-recipient and per-sender blocks, like
    our web interface
    """
    messages_to_render = []  # type: List[Dict[str, Any]]

    def sender_string(message: Message) -> str:
        """Name to display for the sender; empty for 1:1 PMs, where the
        recipient header already identifies the other party."""
        if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
            return message.sender.full_name
        else:
            return ''

    def fix_plaintext_image_urls(content: str) -> str:
        # Replace image URLs in plaintext content of the form
        #     [image name](image url)
        # with a simple hyperlink.
        return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)

    def append_sender_to_message(message_plain: str, message_html: str, sender: str) -> Tuple[str, str]:
        """Prefix both renderings of a message with its sender's name.

        For the HTML version, the bolded name is inserted inside the
        leading <p> when there is one, so it flows with the first line.
        """
        message_plain = "{}: {}".format(sender, message_plain)
        message_soup = BeautifulSoup(message_html, "html.parser")
        sender_name_soup = BeautifulSoup("<b>{}</b>: ".format(sender), "html.parser")
        first_tag = message_soup.find()
        if first_tag.name == "p":
            first_tag.insert(0, sender_name_soup)
        else:
            message_soup.insert(0, sender_name_soup)
        return message_plain, str(message_soup)

    def build_message_payload(message: Message, sender: Optional[str]=None) -> Dict[str, str]:
        """Return {'plain': ..., 'html': ...} renderings of one message,
        with URLs made absolute and emoji converted for email."""
        plain = message.content
        plain = fix_plaintext_image_urls(plain)
        # There's a small chance of colliding with non-Zulip URLs containing
        # "/user_uploads/", but we don't have much information about the
        # structure of the URL to leverage. We can't use `relative_to_full_url()`
        # function here because it uses a stricter regex which will not work for
        # plain text.
        plain = re.sub(
            r"/user_uploads/(\S*)",
            user_profile.realm.uri + r"/user_uploads/\1", plain)

        assert message.rendered_content is not None
        html = message.rendered_content
        html = relative_to_full_url(user_profile.realm.uri, html)
        html = fix_emojis(html, user_profile.realm.uri, user_profile.emojiset)

        if sender:
            plain, html = append_sender_to_message(plain, html, sender)
        return {'plain': plain, 'html': html}

    def build_sender_payload(message: Message) -> Dict[str, Any]:
        """Start a new per-sender block seeded with this message."""
        sender = sender_string(message)
        return {'sender': sender,
                'content': [build_message_payload(message, sender)]}

    def message_header(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
        """Compute the recipient header (plain + HTML link) for a message;
        messages with equal headers get collapsed into one block below."""
        if message.recipient.type == Recipient.PERSONAL:
            narrow_link = get_narrow_url(user_profile, message)
            header = "You and %s" % (message.sender.full_name,)
            header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (narrow_link, header)
        elif message.recipient.type == Recipient.HUDDLE:
            display_recipient = get_display_recipient(message.recipient)
            assert not isinstance(display_recipient, str)
            narrow_link = get_narrow_url(user_profile, message,
                                         display_recipient=display_recipient)
            other_recipients = [r['full_name'] for r in display_recipient
                                if r['id'] != user_profile.id]
            header = "You and %s" % (", ".join(other_recipients),)
            header_html = "<a style='color: #ffffff;' href='%s'>%s</a>" % (narrow_link, header)
        else:
            stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
            narrow_link = get_narrow_url(user_profile, message, stream=stream)
            header = "%s > %s" % (stream.name, message.topic_name())
            stream_link = stream_narrow_url(user_profile.realm, stream)
            header_html = "<a href='%s'>%s</a> > <a href='%s'>%s</a>" % (
                stream_link, stream.name, narrow_link, message.topic_name())
        return {"plain": header,
                "html": header_html,
                "stream_message": message.recipient.type_name() == "stream"}

    # # Collapse message list to
    # [
    #    {
    #       "header": {
    #                      "plain":"header",
    #                      "html":"htmlheader"
    #                  }
    #       "senders":[
    #          {
    #             "sender":"sender_name",
    #             "content":[
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #             ]
    #          }
    #       ]
    #    },
    # ]

    # Oldest first, so the email reads chronologically.
    messages.sort(key=lambda message: message.pub_date)

    for message in messages:
        header = message_header(user_profile, message)

        # If we want to collapse into the previous recipient block
        if len(messages_to_render) > 0 and messages_to_render[-1]['header'] == header:
            sender = sender_string(message)
            sender_block = messages_to_render[-1]['senders']

            # Same message sender, collapse again
            if sender_block[-1]['sender'] == sender:
                sender_block[-1]['content'].append(build_message_payload(message))
            else:
                # Start a new sender block
                sender_block.append(build_sender_payload(message))
        else:
            # New recipient and sender block
            recipient_block = {'header': header,
                               'senders': [build_sender_payload(message)]}

            messages_to_render.append(recipient_block)

    return messages_to_render
|
2019-06-04 14:45:42 +02:00
|
|
|
def get_narrow_url(user_profile: UserProfile, message: Message,
                   display_recipient: Optional[Union[str, List[Dict[str, Any]]]]=None,
                   stream: Optional[Stream]=None) -> str:
    """The display_recipient and stream arguments are optional.  If not
    provided, we'll compute them from the message; they exist as a
    performance optimization for cases where the caller needs those
    data too.
    """
    recipient_type = message.recipient.type
    realm = user_profile.realm

    if recipient_type == Recipient.PERSONAL:
        # 1:1 PM: narrow on the conversation with the sender.
        assert stream is None
        assert display_recipient is None
        return personal_narrow_url(
            realm=realm,
            sender=message.sender,
        )

    if recipient_type == Recipient.HUDDLE:
        # Group PM: narrow on everyone in the huddle except ourselves.
        assert stream is None
        if display_recipient is None:
            display_recipient = get_display_recipient(message.recipient)
        assert display_recipient is not None
        assert not isinstance(display_recipient, str)
        other_user_ids = [recip['id'] for recip in display_recipient
                          if recip['id'] != user_profile.id]
        return huddle_narrow_url(
            realm=realm,
            other_user_ids=other_user_ids,
        )

    # Stream message: narrow on the (stream, topic) pair.
    assert display_recipient is None
    if stream is None:
        stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
    return topic_narrow_url(realm, stream, message.topic_name())
|
2019-01-14 14:04:08 +01:00
|
|
|
def message_content_allowed_in_missedmessage_emails(user_profile: UserProfile) -> bool:
    """Whether this user's missed-message emails may include message text.

    Both the realm-level policy and the user's personal preference must
    allow it.
    """
    realm_allows = user_profile.realm.message_content_allowed_in_email_notifications
    user_allows = user_profile.message_content_in_email_notifications
    return realm_allows and user_allows
|
2014-08-11 14:15:16 +02:00
|
|
|
@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
                                                missed_messages: List[Dict[str, Any]],
                                                message_count: int) -> None:
    """
    Send a reminder email to a user if she's missed some PMs by being offline.

    The email will have its reply to address set to a limited used email
    address that will send a zulip message to the correct recipient. This
    allows the user to respond to missed PMs, huddles, and @-mentions directly
    from the email.

    `user_profile` is the user to send the reminder to
    `missed_messages` is a list of dictionaries to Message objects and other data
                      for a group of messages that share a recipient (and topic)
    """
    from zerver.context_processors import common_context
    # Disabled missedmessage emails internally
    if not user_profile.enable_offline_email_notifications:
        return

    # The template below assumes a single conversation; enforce that all
    # messages in this batch share one (recipient, topic) pair.
    recipients = set((msg['message'].recipient_id, msg['message'].topic_name()) for msg in missed_messages)
    if len(recipients) != 1:
        raise ValueError(
            'All missed_messages must have the same recipient and topic %r' %
            (recipients,)
        )

    # This link is no longer a part of the email, but keeping the code in case
    # we find a clean way to add it back in the future
    unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
    context = common_context(user_profile)
    context.update({
        'name': user_profile.full_name,
        'message_count': message_count,
        'unsubscribe_link': unsubscribe_link,
        'realm_name_in_notifications': user_profile.realm_name_in_notifications,
        'show_message_content': message_content_allowed_in_missedmessage_emails(user_profile)
    })

    # Each missed message carries the trigger ('mentioned',
    # 'stream_email_notify', ...) that caused it to be queued.
    triggers = list(message['trigger'] for message in missed_messages)
    unique_triggers = set(triggers)
    context.update({
        'mention': 'mentioned' in unique_triggers,
        'stream_email_notify': 'stream_email_notify' in unique_triggers,
        'mention_count': triggers.count('mentioned'),
    })

    # If this setting (email mirroring integration) is enabled, only then
    # can users reply to email to send message to Zulip. Thus, one must
    # ensure to display warning in the template.
    if settings.EMAIL_GATEWAY_PATTERN:
        context.update({
            'reply_to_zulip': True,
        })
    else:
        context.update({
            'reply_to_zulip': False,
        })

    from zerver.lib.email_mirror import create_missed_message_address
    reply_to_address = create_missed_message_address(user_profile, missed_messages[0]['message'])
    if reply_to_address == FromAddress.NOREPLY:
        reply_to_name = None
    else:
        reply_to_name = "Zulip"

    # All messages share a conversation (asserted above), so the first
    # message suffices to compute the narrow link.
    narrow_url = get_narrow_url(user_profile, missed_messages[0]['message'])
    context.update({
        'narrow_url': narrow_url,
    })

    senders = list(set(m['message'].sender for m in missed_messages))
    if (missed_messages[0]['message'].recipient.type == Recipient.HUDDLE):
        display_recipient = get_display_recipient(missed_messages[0]['message'].recipient)
        # Make sure that this is a list of strings, not a string.
        assert not isinstance(display_recipient, str)
        other_recipients = [r['full_name'] for r in display_recipient
                            if r['id'] != user_profile.id]
        context.update({'group_pm': True})
        # Human-readable name for the group PM, formatted by group size.
        if len(other_recipients) == 2:
            huddle_display_name = " and ".join(other_recipients)
            context.update({'huddle_display_name': huddle_display_name})
        elif len(other_recipients) == 3:
            huddle_display_name = "%s, %s, and %s" % (
                other_recipients[0], other_recipients[1], other_recipients[2])
            context.update({'huddle_display_name': huddle_display_name})
        else:
            huddle_display_name = "%s, and %s others" % (
                ', '.join(other_recipients[:2]), len(other_recipients) - 2)
            context.update({'huddle_display_name': huddle_display_name})
    elif (missed_messages[0]['message'].recipient.type == Recipient.PERSONAL):
        context.update({'private_message': True})
    elif (context['mention'] or context['stream_email_notify']):
        # Keep only the senders who actually mentioned the user
        if context['mention']:
            senders = list(set(m['message'].sender for m in missed_messages
                               if m['trigger'] == 'mentioned'))
        # TODO: When we add wildcard mentions that send emails, we
        # should make sure the right logic applies here.

        message = missed_messages[0]['message']
        stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
        stream_header = "%s > %s" % (stream.name, message.topic_name())
        context.update({
            'stream_header': stream_header,
        })
    else:
        raise AssertionError("Invalid messages!")

    # If message content is disabled, then flush all information we pass to email.
    if not message_content_allowed_in_missedmessage_emails(user_profile):
        context.update({
            'reply_to_zulip': False,
            'messages': [],
            'sender_str': "",
            'realm_str': user_profile.realm.name,
            'huddle_display_name': "",
        })
    else:
        context.update({
            'messages': build_message_list(user_profile, list(m['message'] for m in missed_messages)),
            'sender_str': ", ".join(sender.full_name for sender in senders),
            'realm_str': user_profile.realm.name,
        })

    from_name = "Zulip missed messages"  # type: str
    from_address = FromAddress.NOREPLY
    if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
        # If this setting is enabled, you can reply to the Zulip
        # missed message emails directly back to the original sender.
        # However, one must ensure the Zulip server is in the SPF
        # record for the domain, or there will be spam/deliverability
        # problems.
        sender = senders[0]
        from_name, from_address = (sender.full_name, sender.email)
        context.update({
            'reply_to_zulip': False,
        })

    # Hand the fully-built email off to the email_senders queue worker.
    email_dict = {
        'template_prefix': 'zerver/emails/missed_message',
        'to_user_ids': [user_profile.id],
        'from_name': from_name,
        'from_address': from_address,
        'reply_to_email': formataddr((reply_to_name, reply_to_address)),
        'context': context}
    queue_json_publish("email_senders", email_dict)

    # Record when we last emailed this user, for rate-limiting.
    user_profile.last_reminder = timezone_now()
    user_profile.save(update_fields=['last_reminder'])
|
2017-11-29 02:49:11 +01:00
|
|
|
def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    """Queue-worker entry point: group a user's missed-message events into
    per-conversation buckets and send one email per bucket."""
    # Map message_id -> trigger that caused the notification.
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    # Also skip anything the user has read since the event was queued.
    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket = defaultdict(list)  # type: Dict[Tuple[int, str], List[Message]]
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PM's group using (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)

    # Count per bucket BEFORE context messages are mixed in below, so the
    # email reports only genuinely-missed messages.
    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    # For stream conversations, pull in a few surrounding messages (that
    # the user is allowed to see) to give the email some context.
    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.pub_date)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups = []  # type: List[Tuple[Tuple[int, str], int]]
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        # Deduplicate by message id (context messages may overlap the
        # missed messages), pairing each with its trigger if it had one.
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id)
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )
|
2018-05-11 01:40:23 +02:00
|
|
|
def log_digest_event(msg: str) -> None:
    """Append a digest-related event to the digest log, with timestamps
    rendered in UTC.

    NOTE(review): logging.basicConfig only takes effect the first time it
    is called in a process, and both it and the Formatter.converter
    assignment affect logging globally -- confirm that is acceptable for
    callers sharing this process.
    """
    import logging
    import time
    # Render log timestamps in UTC (gmtime) rather than local time.
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(filename=settings.DIGEST_LOG_PATH, level=logging.INFO)
    logging.info(msg)
|
2017-11-02 14:11:48 +01:00
|
|
|
def followup_day2_email_delay(user: UserProfile) -> timedelta:
    """Compute how long after signup to send the followup_day2 email.

    The base delay is two days, nudged so the email never lands on a
    weekend in the user's own timezone.
    """
    tz_name = user.timezone
    if tz_name == '':
        tz_name = 'UTC'
    # isoweekday(): Monday == 1 ... Sunday == 7.
    signup_day = user.date_joined.astimezone(pytz.timezone(tz_name)).isoweekday()

    if signup_day == 5:
        # Signed up on Friday: delay until Monday.
        days_to_delay = 3
    elif signup_day == 4:
        # Signed up on Thursday: pull in to Friday.
        days_to_delay = 1
    else:
        days_to_delay = 2

    # Land one hour before the computed delay, to maximize the chance
    # that this email is near the top of the user's inbox when they sit
    # down to deal with it, or arrives while they are doing so.
    return timedelta(days=days_to_delay, hours=-1)
|
2018-06-29 09:31:00 +02:00
|
|
|
def enqueue_welcome_emails(user: UserProfile, realm_creation: bool=False) -> None:
    """Schedule the followup_day1 (and, for first accounts with this
    email address, followup_day2) onboarding emails for a new user."""
    from zerver.context_processors import common_context
    if settings.WELCOME_EMAIL_SENDER is not None:
        # line break to avoid triggering lint rule
        from_name = settings.WELCOME_EMAIL_SENDER['name']
        from_address = settings.WELCOME_EMAIL_SENDER['email']
    else:
        from_name = None
        from_address = FromAddress.SUPPORT

    # Count other accounts registered with the same email address
    # (case-insensitively), across realms; used below to skip the
    # followup_day2 email for users who already know Zulip.
    other_account_count = UserProfile.objects.filter(
        delivery_email__iexact=user.delivery_email).exclude(id=user.id).count()
    unsubscribe_link = one_click_unsubscribe_link(user, "welcome")
    context = common_context(user)
    context.update({
        'unsubscribe_link': unsubscribe_link,
        'keyboard_shortcuts_link': user.realm.uri + '/help/keyboard-shortcuts',
        'realm_name': user.realm.name,
        'realm_creation': realm_creation,
        'email': user.email,
        'is_realm_admin': user.is_realm_admin,
    })
    # Admins get the organization-setup guide; everyone else the homepage.
    if user.is_realm_admin:
        context['getting_started_link'] = (user.realm.uri +
                                           '/help/getting-your-organization-started-with-zulip')
    else:
        context['getting_started_link'] = "https://zulipchat.com"

    from zproject.backends import email_belongs_to_ldap

    # For LDAP-backed accounts, tell the template which username the
    # user should log in with.
    if email_belongs_to_ldap(user.realm, user.email):
        context["ldap"] = True
        if settings.LDAP_APPEND_DOMAIN:
            for backend in get_backends():
                if isinstance(backend, LDAPBackend):
                    context["ldap_username"] = backend.django_to_ldap_username(user.email)
        elif not settings.LDAP_EMAIL_ATTR:
            context["ldap_username"] = user.email

    send_future_email(
        "zerver/emails/followup_day1", user.realm, to_user_ids=[user.id], from_name=from_name,
        from_address=from_address, context=context)

    if other_account_count == 0:
        send_future_email(
            "zerver/emails/followup_day2", user.realm, to_user_ids=[user.id], from_name=from_name,
            from_address=from_address, context=context, delay=followup_day2_email_delay(user))
|
2018-05-11 01:40:23 +02:00
|
|
|
def convert_html_to_markdown(html: str) -> str:
    """Convert an HTML fragment into markdown text via html2text."""
    markdown = html2text.HTML2Text().handle(html).strip()

    # We want images to get linked and inline previewed, but html2text will turn
    # them into links of the form `![](http://foo.com/image.png)`, which is
    # ugly. Run a regex over the resulting description, turning links of the
    # form `![](http://foo.com/image.png?12345)` into
    # `[image.png](http://foo.com/image.png)`.
    return re.sub("!\\[\\]\\((\\S*)/(\\S*)\\?(\\S*)\\)",
                  "[\\2](\\1/\\2)", markdown)