import re
from collections import defaultdict
from datetime import timedelta
from email.headerregistry import Address
from typing import Any, Dict, Iterable, List, Optional, Tuple

import html2text
import lxml.html
import pytz
from bs4 import BeautifulSoup
from django.conf import settings
from django.contrib.auth import get_backends
from django.utils.timezone import now as timezone_now
from django.utils.translation import override as override_language
from django.utils.translation import ugettext as _
from lxml.cssselect import CSSSelector

from confirmation.models import one_click_unsubscribe_link
from zerver.decorator import statsd_increment
from zerver.lib.markdown.fenced_code import FENCE_RE
from zerver.lib.message import bulk_access_messages
from zerver.lib.queue import queue_json_publish
from zerver.lib.send_email import FromAddress, send_future_email
from zerver.lib.types import DisplayRecipientT
from zerver.lib.url_encoding import (
    huddle_narrow_url,
    personal_narrow_url,
    stream_narrow_url,
    topic_narrow_url,
)
from zerver.models import (
    Message,
    Recipient,
    Stream,
    UserMessage,
    UserProfile,
    get_context_for_message,
    get_display_recipient,
    get_user_profile_by_id,
    receives_offline_email_notifications,
)


def relative_to_full_url(base_url: str, content: str) -> str:
    # Convert relative URLs to absolute URLs.
    fragment = lxml.html.fromstring(content)

    # We handle narrow URLs separately for two reasons:
    # 1: 'lxml' seems to have an issue dealing with URLs that begin
    #    with `#`, due to which it doesn't add a `/` before joining the
    #    base_url to the relative URL.
    # 2: We also need to update the title attribute in the narrow links,
    #    which is not possible with `make_links_absolute()`.
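    # As an illustration of the rewrite below (hypothetical link), a
    # relative link of "#narrow/stream/1-general" becomes
    # base_url + "/#narrow/stream/1-general".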
    for link_info in fragment.iterlinks():
        elem, attrib, link, pos = link_info
        match = re.match("/?#narrow/", link)
        if match is not None:
            link = re.sub(r"^/?#narrow/", base_url + "/#narrow/", link)
            elem.set(attrib, link)
            # Only manually linked narrow URLs have title attribute set.
            if elem.get('title') is not None:
                elem.set('title', link)

    # Inline images can't be displayed in the emails as the request
    # from the mail server can't be authenticated because it has no
    # user_profile object linked to it. So we scrub the inline image
    # container.
    inline_image_containers = fragment.find_class("message_inline_image")
    for container in inline_image_containers:
        container.drop_tree()

    # The previous block handles most inline images, but for messages
    # where the entire Markdown input was just the URL of an image
    # (i.e. the entire body is a message_inline_image object), the
    # entire message body will be that image element; here, we need a
    # more drastic edit to the content.
    if fragment.get('class') == 'message_inline_image':
        content_template = '<p><a href="%s" target="_blank" title="%s">%s</a></p>'
        image_link = fragment.find('a').get('href')
        image_title = fragment.find('a').get('title')
        new_content = (content_template % (image_link, image_title, image_link))
        fragment = lxml.html.fromstring(new_content)

    fragment.make_links_absolute(base_url)
    content = lxml.html.tostring(fragment).decode("utf-8")

    return content

def fix_emojis(content: str, base_url: str, emojiset: str) -> str:
    def make_emoji_img_elem(emoji_span_elem: CSSSelector) -> Dict[str, Any]:
        # Convert the emoji spans to img tags.
        classes = emoji_span_elem.get('class')
        match = re.search(r'emoji-(?P<emoji_code>\S+)', classes)
        # re.search is capable of returning None,
        # but since the parent function should only be called with a valid css element
        # we assert that it does not.
        assert match is not None
        emoji_code = match.group('emoji_code')
        emoji_name = emoji_span_elem.get('title')
        alt_code = emoji_span_elem.text
        image_url = base_url + f'/static/generated/emoji/images-{emojiset}-64/{emoji_code}.png'
        img_elem = lxml.html.fromstring(
            f'<img alt="{alt_code}" src="{image_url}" title="{emoji_name}">')
        img_elem.set('style', 'height: 20px;')
        img_elem.tail = emoji_span_elem.tail
        return img_elem

    fragment = lxml.html.fromstring(content)
    for elem in fragment.cssselect('span.emoji'):
        parent = elem.getparent()
        img_elem = make_emoji_img_elem(elem)
        parent.replace(elem, img_elem)

    for realm_emoji in fragment.cssselect('.emoji'):
        del realm_emoji.attrib['class']
        realm_emoji.set('style', 'height: 20px;')

    content = lxml.html.tostring(fragment).decode('utf-8')
    return content
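
# A sketch of fix_emojis' effect, with a hypothetical input: a rendered span
# such as
#     <span class="emoji emoji-1f389" title="tada">:tada:</span>
# is replaced by an <img> pointing at the emojiset's PNG, roughly
#     <img alt=":tada:" title="tada" style="height: 20px;"
#          src="{base_url}/static/generated/emoji/images-google-64/1f389.png">
# so the emoji renders in email clients that don't load our sprite sheets.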

def fix_spoilers_in_html(content: str, language: str) -> str:
    with override_language(language):
        spoiler_title: str = _("Open Zulip to see the spoiler content")
    fragment = lxml.html.fromstring(content)
    spoilers = fragment.find_class("spoiler-block")
    for spoiler in spoilers:
        header = spoiler.find_class("spoiler-header")[0]
        spoiler_content = spoiler.find_class("spoiler-content")[0]
        header_content = header.find("p")
        if header_content is None:
            # Create a new element to append the spoiler to.
            header_content = lxml.html.fromstring("<p></p>")
            header.append(header_content)
        else:
            # Add a space. It's simpler to append a new span element than
            # to insert text after the last node ends, since neither .text
            # nor .tail does the right thing for us.
            header_content.append(lxml.html.fromstring("<span> </span>"))
        span_elem = lxml.html.fromstring(
            f'<span class="spoiler-title" title="{spoiler_title}">({spoiler_title})</span>')
        header_content.append(span_elem)
        header.drop_tag()
        spoiler_content.drop_tree()
    content = lxml.html.tostring(fragment).decode("utf-8")
    return content

def fix_spoilers_in_text(content: str, language: str) -> str:
    with override_language(language):
        spoiler_title: str = _("Open Zulip to see the spoiler content")
    lines = content.split('\n')
    output = []
    open_fence = None
    for line in lines:
        m = FENCE_RE.match(line)
        if m:
            fence = m.group('fence')
            lang = m.group('lang')
            if lang == 'spoiler':
                open_fence = fence
                output.append(line)
                output.append(f"({spoiler_title})")
            elif fence == open_fence:
                open_fence = None
                output.append(line)
        elif not open_fence:
            output.append(line)
    return '\n'.join(output)
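
# A sketch of fix_spoilers_in_text's behavior, with a hypothetical input:
# a spoiler block like
#     ```spoiler Secret plans
#     the hidden text
#     ```
# keeps its opening and closing fence lines, but the hidden body is replaced
# by the translated "(Open Zulip to see the spoiler content)" line.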

def build_message_list(user_profile: UserProfile, messages: List[Message]) -> List[Dict[str, Any]]:
    """
    Builds the message list object for the missed message email template.
    The messages are collapsed into per-recipient and per-sender blocks,
    like our web interface.
    """
    messages_to_render: List[Dict[str, Any]] = []
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def sender_string(message: Message) -> str:
|
2014-01-24 22:29:17 +01:00
|
|
|
if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
|
2016-06-13 10:32:39 +02:00
|
|
|
return message.sender.full_name
|
|
|
|
else:
|
|
|
|
return ''
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def fix_plaintext_image_urls(content: str) -> str:
|
2014-01-24 22:29:17 +01:00
|
|
|
# Replace image URLs in plaintext content of the form
|
|
|
|
# [image name](image url)
|
|
|
|
# with a simple hyperlink.
|
|
|
|
return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)
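        # e.g. (illustrative) "[photo.png](https://example.com/photo.png)"
        # becomes just "https://example.com/photo.png".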

    def append_sender_to_message(message_plain: str, message_html: str, sender: str) -> Tuple[str, str]:
        message_plain = f"{sender}: {message_plain}"
        message_soup = BeautifulSoup(message_html, "html.parser")
        sender_name_soup = BeautifulSoup(f"<b>{sender}</b>: ", "html.parser")
        first_tag = message_soup.find()
        if first_tag.name == "p":
            first_tag.insert(0, sender_name_soup)
        else:
            message_soup.insert(0, sender_name_soup)
        return message_plain, str(message_soup)

    def build_message_payload(message: Message, sender: Optional[str]=None) -> Dict[str, str]:
        plain = message.content
        plain = fix_plaintext_image_urls(plain)
        # There's a small chance of colliding with non-Zulip URLs containing
        # "/user_uploads/", but we don't have much information about the
        # structure of the URL to leverage. We can't use the
        # `relative_to_full_url()` function here because it uses a stricter
        # regex which will not work for plain text.
        plain = re.sub(
            r"/user_uploads/(\S*)",
            user_profile.realm.uri + r"/user_uploads/\1", plain)
        plain = fix_spoilers_in_text(plain, user_profile.default_language)

        assert message.rendered_content is not None
        html = message.rendered_content
        html = relative_to_full_url(user_profile.realm.uri, html)
        html = fix_emojis(html, user_profile.realm.uri, user_profile.emojiset)
        html = fix_spoilers_in_html(html, user_profile.default_language)
        if sender:
            plain, html = append_sender_to_message(plain, html, sender)
        return {'plain': plain, 'html': html}

    def build_sender_payload(message: Message) -> Dict[str, Any]:
        sender = sender_string(message)
        return {'sender': sender,
                'content': [build_message_payload(message, sender)]}

    def message_header(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
        if message.recipient.type == Recipient.PERSONAL:
            narrow_link = get_narrow_url(user_profile, message)
            header = f"You and {message.sender.full_name}"
            header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
        elif message.recipient.type == Recipient.HUDDLE:
            display_recipient = get_display_recipient(message.recipient)
            assert not isinstance(display_recipient, str)
            narrow_link = get_narrow_url(user_profile, message,
                                         display_recipient=display_recipient)
            other_recipients = [r['full_name'] for r in display_recipient
                                if r['id'] != user_profile.id]
            header = "You and {}".format(", ".join(other_recipients))
            header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
        else:
            stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
            narrow_link = get_narrow_url(user_profile, message, stream=stream)
            header = f"{stream.name} > {message.topic_name()}"
            stream_link = stream_narrow_url(user_profile.realm, stream)
            header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>"
        return {"plain": header,
                "html": header_html,
                "stream_message": message.recipient.type_name() == "stream"}

    # # Collapse message list to
    # [
    #    {
    #       "header": {
    #                   "plain":"header",
    #                   "html":"htmlheader"
    #                 },
    #       "senders":[
    #          {
    #             "sender":"sender_name",
    #             "content":[
    #                         {
    #                          "plain":"content",
    #                          "html":"htmlcontent"
    #                         },
    #                         {
    #                          "plain":"content",
    #                          "html":"htmlcontent"
    #                         }
    #                       ]
    #          }
    #       ]
    #    },
    # ]

    messages.sort(key=lambda message: message.date_sent)

    for message in messages:
        header = message_header(user_profile, message)

        # If we want to collapse into the previous recipient block
        if len(messages_to_render) > 0 and messages_to_render[-1]['header'] == header:
            sender = sender_string(message)
            sender_block = messages_to_render[-1]['senders']

            # Same message sender, collapse again
            if sender_block[-1]['sender'] == sender:
                sender_block[-1]['content'].append(build_message_payload(message))
            else:
                # Start a new sender block
                sender_block.append(build_sender_payload(message))
        else:
            # New recipient and sender block
            recipient_block = {'header': header,
                               'senders': [build_sender_payload(message)]}

            messages_to_render.append(recipient_block)

    return messages_to_render

def get_narrow_url(user_profile: UserProfile, message: Message,
                   display_recipient: Optional[DisplayRecipientT]=None,
                   stream: Optional[Stream]=None) -> str:
    """The display_recipient and stream arguments are optional.  If not
    provided, we'll compute them from the message; they exist as a
    performance optimization for cases where the caller needs those
    data too.
    """
    if message.recipient.type == Recipient.PERSONAL:
        assert stream is None
        assert display_recipient is None
        return personal_narrow_url(
            realm=user_profile.realm,
            sender=message.sender,
        )
    elif message.recipient.type == Recipient.HUDDLE:
        assert stream is None
        if display_recipient is None:
            display_recipient = get_display_recipient(message.recipient)
        assert display_recipient is not None
        assert not isinstance(display_recipient, str)
        other_user_ids = [r['id'] for r in display_recipient
                          if r['id'] != user_profile.id]
        return huddle_narrow_url(
            realm=user_profile.realm,
            other_user_ids=other_user_ids,
        )
    else:
        assert display_recipient is None
        if stream is None:
            stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
        return topic_narrow_url(user_profile.realm, stream, message.topic_name())

def message_content_allowed_in_missedmessage_emails(user_profile: UserProfile) -> bool:
    return user_profile.realm.message_content_allowed_in_email_notifications and \
        user_profile.message_content_in_email_notifications

@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
                                                missed_messages: List[Dict[str, Any]],
                                                message_count: int) -> None:
    """
    Send a reminder email to a user if she's missed some PMs by being offline.

    The email will have its reply-to address set to a limited-use email
    address that will send a Zulip message to the correct recipient. This
    allows the user to respond to missed PMs, huddles, and @-mentions directly
    from the email.

    `user_profile` is the user to send the reminder to
    `missed_messages` is a list of dictionaries containing Message objects
                      and other data for a group of messages that share a
                      recipient (and topic)
    """
    from zerver.context_processors import common_context

    # Disabled missedmessage emails internally
    if not user_profile.enable_offline_email_notifications:
        return

    recipients = {(msg['message'].recipient_id, msg['message'].topic_name()) for msg in missed_messages}
    if len(recipients) != 1:
        raise ValueError(
            f'All missed_messages must have the same recipient and topic {recipients!r}',
        )

    # This link is no longer a part of the email, but keeping the code in case
    # we find a clean way to add it back in the future
    unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
    context = common_context(user_profile)
    context.update({
        'name': user_profile.full_name,
        'message_count': message_count,
        'unsubscribe_link': unsubscribe_link,
        'realm_name_in_notifications': user_profile.realm_name_in_notifications,
    })

    triggers = list(message['trigger'] for message in missed_messages)
    unique_triggers = set(triggers)
    context.update({
        'mention': 'mentioned' in unique_triggers or 'wildcard_mentioned' in unique_triggers,
        'stream_email_notify': 'stream_email_notify' in unique_triggers,
        'mention_count': triggers.count('mentioned') + triggers.count("wildcard_mentioned"),
    })

    # Users can reply to a missed-message email to send a message to Zulip
    # only if the email gateway (email mirroring integration) is enabled;
    # otherwise, we display a warning in the template.
    if settings.EMAIL_GATEWAY_PATTERN:
        context.update({
            'reply_to_zulip': True,
        })
    else:
        context.update({
            'reply_to_zulip': False,
        })

    from zerver.lib.email_mirror import create_missed_message_address
    reply_to_address = create_missed_message_address(user_profile, missed_messages[0]['message'])
    if reply_to_address == FromAddress.NOREPLY:
        reply_to_name = ""
    else:
        reply_to_name = "Zulip"

    narrow_url = get_narrow_url(user_profile, missed_messages[0]['message'])
    context.update({
        'narrow_url': narrow_url,
    })

    senders = list({m['message'].sender for m in missed_messages})
    if (missed_messages[0]['message'].recipient.type == Recipient.HUDDLE):
        display_recipient = get_display_recipient(missed_messages[0]['message'].recipient)
        # Make sure that this is a list of dicts, not a string.
        assert not isinstance(display_recipient, str)
        other_recipients = [r['full_name'] for r in display_recipient
                            if r['id'] != user_profile.id]
        context.update({'group_pm': True})
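        # Illustrative examples of the naming below (hypothetical names):
        # two other recipients give "Alice and Bob"; three give
        # "Alice, Bob, and Carol"; more give "Alice, Bob, and N others".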
        if len(other_recipients) == 2:
            huddle_display_name = " and ".join(other_recipients)
            context.update({'huddle_display_name': huddle_display_name})
        elif len(other_recipients) == 3:
            huddle_display_name = f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}"
            context.update({'huddle_display_name': huddle_display_name})
        else:
            huddle_display_name = "{}, and {} others".format(
                ', '.join(other_recipients[:2]), len(other_recipients) - 2)
            context.update({'huddle_display_name': huddle_display_name})
    elif (missed_messages[0]['message'].recipient.type == Recipient.PERSONAL):
        context.update({'private_message': True})
    elif (context['mention'] or context['stream_email_notify']):
        # Keep only the senders who actually mentioned the user
        if context['mention']:
            senders = list({m['message'].sender for m in missed_messages
                            if m['trigger'] == 'mentioned' or
                            m['trigger'] == 'wildcard_mentioned'})
        message = missed_messages[0]['message']
        stream = Stream.objects.only('id', 'name').get(id=message.recipient.type_id)
        stream_header = f"{stream.name} > {message.topic_name()}"
        context.update({
            'stream_header': stream_header,
        })
    else:
        raise AssertionError("Invalid messages!")

    # If message content is disabled, then flush all information we pass to email.
    if not message_content_allowed_in_missedmessage_emails(user_profile):
        realm = user_profile.realm
        context.update({
            'reply_to_zulip': False,
            'messages': [],
            'sender_str': "",
            'realm_str': realm.name,
            'huddle_display_name': "",
            'show_message_content': False,
            'message_content_disabled_by_user': not user_profile.message_content_in_email_notifications,
            'message_content_disabled_by_realm': not realm.message_content_allowed_in_email_notifications,
        })
    else:
        context.update({
            'messages': build_message_list(user_profile, list(m['message'] for m in missed_messages)),
            'sender_str': ", ".join(sender.full_name for sender in senders),
            'realm_str': user_profile.realm.name,
            'show_message_content': True,
        })

    with override_language(user_profile.default_language):
        from_name: str = _("Zulip missed messages")
    from_address = FromAddress.NOREPLY
    if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
        # If this setting is enabled, you can reply to the Zulip
        # missed message emails directly back to the original sender.
        # However, one must ensure the Zulip server is in the SPF
        # record for the domain, or there will be spam/deliverability
        # problems.
        #
        # Also, this setting is not really compatible with
        # EMAIL_ADDRESS_VISIBILITY_ADMINS.
        sender = senders[0]
        from_name, from_address = (sender.full_name, sender.email)
        context.update({
            'reply_to_zulip': False,
        })

    email_dict = {
        'template_prefix': 'zerver/emails/missed_message',
        'to_user_ids': [user_profile.id],
        'from_name': from_name,
        'from_address': from_address,
        'reply_to_email': str(Address(display_name=reply_to_name, addr_spec=reply_to_address)),
        'context': context}
    queue_json_publish("email_senders", email_dict)

    user_profile.last_reminder = timezone_now()
    user_profile.save(update_fields=['last_reminder'])

def handle_missedmessage_emails(user_profile_id: int,
                                missed_email_events: Iterable[Dict[str, Any]]) -> None:
    message_ids = {event.get('message_id'): event.get('trigger') for event in missed_email_events}

    user_profile = get_user_profile_by_id(user_profile_id)
    if not receives_offline_email_notifications(user_profile):
        return

    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(usermessage__user_profile_id=user_profile,
                                      id__in=message_ids,
                                      usermessage__flags=~UserMessage.flags.read)

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient_id and sender.
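    # For example (illustrative): two messages in the same stream topic
    # share a bucket and produce one email, while PMs from two different
    # senders land in separate buckets and produce separate emails.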
    messages_by_bucket: Dict[Tuple[int, str], List[Message]] = defaultdict(list)
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PMs, group using (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)

    message_count_by_bucket = {
        bucket_tup: len(msgs)
        for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        msg = min(msg_list, key=lambda msg: msg.date_sent)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups: List[Tuple[Tuple[int, str], int]] = []
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        unique_messages = {}
        for m in messages_by_bucket[bucket_tup]:
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_ids.get(m.id),
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )

def log_digest_event(msg: str) -> None:
    import logging
    import time
    logging.Formatter.converter = time.gmtime
    logging.basicConfig(filename=settings.DIGEST_LOG_PATH, level=logging.INFO)
    logging.info(msg)

def followup_day2_email_delay(user: UserProfile) -> timedelta:
    days_to_delay = 2
    user_tz = user.timezone
    if user_tz == '':
        user_tz = 'UTC'
    signup_day = user.date_joined.astimezone(pytz.timezone(user_tz)).isoweekday()
    if signup_day == 5:
        # If the day is Friday, then the delay should be until Monday.
        days_to_delay = 3
    elif signup_day == 4:
        # If the day is Thursday, then the delay should be until Friday.
        days_to_delay = 1

    # The delay should be 1 hour before the above calculated delay, as
    # our goal is to maximize the chance that this email is near the top
    # of the user's inbox when the user sits down to deal with their inbox,
    # or comes in while they are dealing with their inbox.
    return timedelta(days=days_to_delay, hours=-1)
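
# A quick worked example of followup_day2_email_delay (hypothetical user):
# someone who signs up on a Friday (isoweekday 5) gets days_to_delay = 3,
# so timedelta(days=3, hours=-1) schedules the email for one hour before
# the same time of day on Monday.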

def enqueue_welcome_emails(user: UserProfile, realm_creation: bool=False) -> None:
    from zerver.context_processors import common_context
    if settings.WELCOME_EMAIL_SENDER is not None:
        # line break to avoid triggering lint rule
        from_name = settings.WELCOME_EMAIL_SENDER['name']
        from_address = settings.WELCOME_EMAIL_SENDER['email']
    else:
        from_name = None
        from_address = FromAddress.support_placeholder

    other_account_count = UserProfile.objects.filter(
        delivery_email__iexact=user.delivery_email).exclude(id=user.id).count()
    unsubscribe_link = one_click_unsubscribe_link(user, "welcome")
    context = common_context(user)
    context.update({
        'unsubscribe_link': unsubscribe_link,
        'keyboard_shortcuts_link': user.realm.uri + '/help/keyboard-shortcuts',
        'realm_name': user.realm.name,
        'realm_creation': realm_creation,
        'email': user.delivery_email,
        'is_realm_admin': user.role == UserProfile.ROLE_REALM_ADMINISTRATOR,
    })
    if user.is_realm_admin:
        context['getting_started_link'] = (user.realm.uri +
                                           '/help/getting-your-organization-started-with-zulip')
    else:
        context['getting_started_link'] = "https://zulip.com"

    # Imported here to avoid import cycles.
    from zproject.backends import ZulipLDAPAuthBackend, email_belongs_to_ldap

    if email_belongs_to_ldap(user.realm, user.delivery_email):
        context["ldap"] = True
        for backend in get_backends():
            # If the user is authenticating via LDAP, record their LDAP
            # username. Note that we exclude ZulipLDAPUserPopulator here,
            # since that isn't used for authentication.
            if isinstance(backend, ZulipLDAPAuthBackend):
                context["ldap_username"] = backend.django_to_ldap_username(user.delivery_email)
                break

    send_future_email(
        "zerver/emails/followup_day1", user.realm, to_user_ids=[user.id], from_name=from_name,
        from_address=from_address, context=context)

    if other_account_count == 0:
        send_future_email(
            "zerver/emails/followup_day2", user.realm, to_user_ids=[user.id], from_name=from_name,
            from_address=from_address, context=context, delay=followup_day2_email_delay(user))

def convert_html_to_markdown(html: str) -> str:
    parser = html2text.HTML2Text()
    markdown = parser.handle(html).strip()

    # We want images to get linked and inline previewed, but html2text will turn
    # them into links of the form `![](http://foo.com/image.png)`, which is
    # ugly. Run a regex over the resulting description, turning links of the
    # form `![](http://foo.com/image.png?12345)` into
    # `[image.png](http://foo.com/image.png)`.
    return re.sub("!\\[\\]\\((\\S*)/(\\S*)\\?(\\S*)\\)",
                  "[\\2](\\1/\\2)", markdown)