2020-12-19 03:05:20 +01:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/notifications.html
|
|
|
|
|
2020-05-21 14:51:36 +02:00
|
|
|
import math
|
2020-06-11 00:54:34 +02:00
|
|
|
import re
|
|
|
|
from collections import defaultdict
|
|
|
|
from datetime import timedelta
|
2020-06-05 23:26:35 +02:00
|
|
|
from email.headerregistry import Address
|
2019-08-18 00:40:35 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Optional, Tuple
|
2016-01-25 23:42:16 +01:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import html2text
|
|
|
|
import lxml.html
|
|
|
|
import pytz
|
|
|
|
from bs4 import BeautifulSoup
|
2014-01-24 22:29:17 +01:00
|
|
|
from django.conf import settings
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.contrib.auth import get_backends
|
2017-04-15 04:03:56 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2020-02-12 19:52:39 +01:00
|
|
|
from django.utils.translation import override as override_language
|
2020-06-11 00:54:34 +02:00
|
|
|
from lxml.cssselect import CSSSelector
|
2018-07-26 20:19:45 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from confirmation.models import one_click_unsubscribe_link
|
2017-05-04 02:06:31 +02:00
|
|
|
from zerver.decorator import statsd_increment
|
2020-07-15 01:21:28 +02:00
|
|
|
from zerver.lib.markdown.fenced_code import FENCE_RE
|
2018-07-26 20:19:45 +02:00
|
|
|
from zerver.lib.message import bulk_access_messages
|
2017-03-06 08:45:59 +01:00
|
|
|
from zerver.lib.queue import queue_json_publish
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.send_email import FromAddress, send_future_email
|
|
|
|
from zerver.lib.types import DisplayRecipientT
|
|
|
|
from zerver.lib.url_encoding import (
|
|
|
|
huddle_narrow_url,
|
|
|
|
personal_narrow_url,
|
|
|
|
stream_narrow_url,
|
|
|
|
topic_narrow_url,
|
|
|
|
)
|
2020-05-21 14:51:36 +02:00
|
|
|
from zerver.lib.user_groups import access_user_group_by_id, get_user_group_members
|
2016-06-03 22:59:19 +02:00
|
|
|
from zerver.models import (
|
2020-06-11 00:54:34 +02:00
|
|
|
Message,
|
2016-06-03 22:59:19 +02:00
|
|
|
Recipient,
|
|
|
|
Stream,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserMessage,
|
2016-06-03 22:59:19 +02:00
|
|
|
UserProfile,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_context_for_message,
|
|
|
|
get_display_recipient,
|
2016-06-03 22:59:19 +02:00
|
|
|
get_user_profile_by_id,
|
2017-11-29 02:49:11 +01:00
|
|
|
receives_offline_email_notifications,
|
2016-06-03 22:59:19 +02:00
|
|
|
)
|
2014-01-24 22:29:17 +01:00
|
|
|
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def relative_to_full_url(base_url: str, content: str) -> str:
    """Rewrite the relative URLs in a message's rendered HTML as absolute
    URLs rooted at base_url, and scrub inline-image previews (which can't
    be fetched by mail clients), returning the adjusted HTML string.
    """
    # Convert relative URLs to absolute URLs.
    fragment = lxml.html.fromstring(content)

    # We handle narrow URLs separately because of two reasons:
    # 1: 'lxml' seems to be having an issue in dealing with URLs that begin
    # `#` due to which it doesn't add a `/` before joining the base_url to
    # the relative URL.
    # 2: We also need to update the title attribute in the narrow links which
    # is not possible with `make_links_absolute()`.
    for link_info in fragment.iterlinks():
        elem, attrib, link, pos = link_info
        match = re.match("/?#narrow/", link)
        if match is not None:
            link = re.sub(r"^/?#narrow/", base_url + "/#narrow/", link)
            elem.set(attrib, link)
            # Only manually linked narrow URLs have title attribute set.
            if elem.get("title") is not None:
                elem.set("title", link)

    # Inline images can't be displayed in the emails as the request
    # from the mail server can't be authenticated because it has no
    # user_profile object linked to it. So we scrub the inline image
    # container.
    inline_image_containers = fragment.find_class("message_inline_image")
    for container in inline_image_containers:
        container.drop_tree()

    # The previous block handles most inline images, but for messages
    # where the entire Markdown input was just the URL of an image
    # (i.e. the entire body is a message_inline_image object), the
    # entire message body will be that image element; here, we need a
    # more drastic edit to the content.
    if fragment.get("class") == "message_inline_image":
        # Replace the whole fragment with a plain paragraph containing a
        # link to the image, preserving the original href and title.
        image_link = fragment.find("a").get("href")
        image_title = fragment.find("a").get("title")
        fragment = lxml.html.Element("p")
        a = lxml.html.Element("a")
        a.set("href", image_link)
        a.set("target", "_blank")
        a.set("title", image_title)
        a.text = image_link
        fragment.append(a)

    # Finally, absolutize every remaining link (images, uploads, etc.).
    fragment.make_links_absolute(base_url)
    content = lxml.html.tostring(fragment, encoding="unicode")

    return content
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def fix_emojis(content: str, base_url: str, emojiset: str) -> str:
    """Rewrite emoji markup in rendered-message HTML so it displays in
    email clients: Unicode emoji spans become <img> tags pointing at the
    static emoji images for the given emojiset, and emoji images lose
    their CSS class in favor of an inline height style.
    """

    def emoji_span_to_img(span: Any) -> Any:
        # Convert the emoji spans to img tags.
        css_classes = span.get("class")
        code_match = re.search(r"emoji-(?P<emoji_code>\S+)", css_classes)
        # re.search is capable of returning None,
        # but since the parent function should only be called with a valid css element
        # we assert that it does not.
        assert code_match is not None
        emoji_code = code_match.group("emoji_code")
        emoji_name = span.get("title")
        alt_code = span.text
        image_url = base_url + f"/static/generated/emoji/images-{emojiset}-64/{emoji_code}.png"
        img = lxml.html.fromstring(
            f'<img alt="{alt_code}" src="{image_url}" title="{emoji_name}">'
        )
        img.set("style", "height: 20px;")
        # Preserve any trailing text that followed the original span.
        img.tail = span.tail
        return img

    fragment = lxml.html.fromstring(content)

    # Swap each Unicode-emoji span for an equivalent <img> element.
    for span in fragment.cssselect("span.emoji"):
        span.getparent().replace(span, emoji_span_to_img(span))

    # Remaining .emoji elements (e.g. realm emoji images) keep their tag;
    # drop the class (email clients won't load our CSS) and size inline.
    for realm_emoji in fragment.cssselect(".emoji"):
        del realm_emoji.attrib["class"]
        realm_emoji.set("style", "height: 20px;")

    return lxml.html.tostring(fragment, encoding="unicode")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-15 01:21:28 +02:00
|
|
|
def fix_spoilers_in_html(content: str, language: str) -> str:
    """Replace spoiler blocks in rendered-message HTML with a localized
    "(Open Zulip to see the spoiler content)" placeholder, keeping the
    spoiler header but dropping the hidden content entirely.

    `language` is the recipient's language, used to translate the
    placeholder text.
    """
    with override_language(language):
        spoiler_title: str = _("Open Zulip to see the spoiler content")
    fragment = lxml.html.fromstring(content)
    spoilers = fragment.find_class("spoiler-block")
    for spoiler in spoilers:
        header = spoiler.find_class("spoiler-header")[0]
        spoiler_content = spoiler.find_class("spoiler-content")[0]
        header_content = header.find("p")
        if header_content is None:
            # Create a new element to append the spoiler to
            header_content = lxml.html.fromstring("<p></p>")
            header.append(header_content)
        else:
            # Add a space. Its simpler to append a new span element than
            # inserting text after the last node ends since neither .text
            # and .tail do the right thing for us.
            header_content.append(lxml.html.fromstring("<span> </span>"))
        # Bug fix: the literal previously ended with an unterminated
        # "</span" tag and only rendered correctly because lxml's parser
        # repairs broken HTML; emit well-formed markup instead.
        span_elem = lxml.html.fromstring(
            f'<span class="spoiler-title" title="{spoiler_title}">({spoiler_title})</span>'
        )
        header_content.append(span_elem)
        # Splice the header's children into the parent and remove the
        # (now-empty) hidden content subtree.
        header.drop_tag()
        spoiler_content.drop_tree()
    content = lxml.html.tostring(fragment, encoding="unicode")
    return content
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-15 01:21:28 +02:00
|
|
|
def fix_spoilers_in_text(content: str, language: str) -> str:
    """Replace spoiler-fenced blocks in plain-text message content with a
    localized placeholder, dropping the hidden lines between the opening
    and closing fences while keeping the fence lines themselves.
    """
    with override_language(language):
        spoiler_title: str = _("Open Zulip to see the spoiler content")
    result_lines = []
    active_fence = None
    for line in content.split("\n"):
        fence_match = FENCE_RE.match(line)
        if fence_match is None:
            # Ordinary line: keep it unless we're inside a spoiler block.
            if not active_fence:
                result_lines.append(line)
            continue
        fence = fence_match.group("fence")
        if fence_match.group("lang") == "spoiler":
            # Opening spoiler fence: keep the fence line, emit the
            # placeholder, and start swallowing the hidden content.
            active_fence = fence
            result_lines.append(line)
            result_lines.append(f"({spoiler_title})")
        elif fence == active_fence:
            # Matching closing fence: stop swallowing.
            active_fence = None
            result_lines.append(line)
    return "\n".join(result_lines)
|
2020-07-15 01:21:28 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-11-13 16:52:13 +01:00
|
|
|
def build_message_list(
    user: UserProfile,
    messages: List[Message],
    stream_map: Dict[int, Stream],  # only needs id, name
) -> List[Dict[str, Any]]:
    """
    Builds the message list object for the message notification email template.
    The messages are collapsed into per-recipient and per-sender blocks, like
    our web interface.

    NOTE: sorts `messages` in place (by date_sent) as a side effect on the
    caller's list.
    """
    messages_to_render: List[Dict[str, Any]] = []

    def sender_string(message: Message) -> str:
        # Stream and huddle messages show the sender's name; 1:1 PMs
        # don't need one (the header already identifies the sender).
        if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE):
            return message.sender.full_name
        else:
            return ""

    def fix_plaintext_image_urls(content: str) -> str:
        # Replace image URLs in plaintext content of the form
        #     [image name](image url)
        # with a simple hyperlink.
        return re.sub(r"\[(\S*)\]\((\S*)\)", r"\2", content)

    def append_sender_to_message(
        message_plain: str, message_html: str, sender: str
    ) -> Tuple[str, str]:
        # Prefix both the plain-text and HTML bodies with "sender: ".
        message_plain = f"{sender}: {message_plain}"
        message_soup = BeautifulSoup(message_html, "html.parser")
        sender_name_soup = BeautifulSoup(f"<b>{sender}</b>: ", "html.parser")
        first_tag = message_soup.find()
        if first_tag.name == "p":
            # Insert inside the leading paragraph so the name shares a line
            # with the start of the message text.
            first_tag.insert(0, sender_name_soup)
        else:
            message_soup.insert(0, sender_name_soup)
        return message_plain, str(message_soup)

    def build_message_payload(message: Message, sender: Optional[str] = None) -> Dict[str, str]:
        # Produce the {"plain": ..., "html": ...} pair for one message,
        # with URLs, emoji, and spoilers adjusted for email rendering.
        plain = message.content
        plain = fix_plaintext_image_urls(plain)
        # There's a small chance of colliding with non-Zulip URLs containing
        # "/user_uploads/", but we don't have much information about the
        # structure of the URL to leverage. We can't use `relative_to_full_url()`
        # function here because it uses a stricter regex which will not work for
        # plain text.
        plain = re.sub(r"/user_uploads/(\S*)", user.realm.uri + r"/user_uploads/\1", plain)
        plain = fix_spoilers_in_text(plain, user.default_language)

        assert message.rendered_content is not None
        html = message.rendered_content
        html = relative_to_full_url(user.realm.uri, html)
        html = fix_emojis(html, user.realm.uri, user.emojiset)
        html = fix_spoilers_in_html(html, user.default_language)
        if sender:
            plain, html = append_sender_to_message(plain, html, sender)
        return {"plain": plain, "html": html}

    def build_sender_payload(message: Message) -> Dict[str, Any]:
        # Start a new per-sender block seeded with this message's content.
        sender = sender_string(message)
        return {"sender": sender, "content": [build_message_payload(message, sender)]}

    def message_header(message: Message) -> Dict[str, Any]:
        # Build the per-recipient header (plain + HTML) that labels a
        # block of messages; also used as the key for collapsing below.
        if message.recipient.type == Recipient.PERSONAL:
            narrow_link = get_narrow_url(user, message)
            header = f"You and {message.sender.full_name}"
            header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
        elif message.recipient.type == Recipient.HUDDLE:
            display_recipient = get_display_recipient(message.recipient)
            assert not isinstance(display_recipient, str)
            narrow_link = get_narrow_url(user, message, display_recipient=display_recipient)
            other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user.id]
            header = "You and {}".format(", ".join(other_recipients))
            header_html = f"<a style='color: #ffffff;' href='{narrow_link}'>{header}</a>"
        else:
            stream_id = message.recipient.type_id
            stream = stream_map.get(stream_id, None)
            if stream is None:
                # Some of our callers don't populate stream_map, so
                # we just populate the stream from the database.
                stream = Stream.objects.only("id", "name").get(id=stream_id)
            narrow_link = get_narrow_url(user, message, stream=stream)
            header = f"{stream.name} > {message.topic_name()}"
            stream_link = stream_narrow_url(user.realm, stream)
            header_html = f"<a href='{stream_link}'>{stream.name}</a> > <a href='{narrow_link}'>{message.topic_name()}</a>"
        return {
            "plain": header,
            "html": header_html,
            "stream_message": message.recipient.type_name() == "stream",
        }

    # # Collapse message list to
    # [
    #    {
    #       "header": {
    #          "plain":"header",
    #          "html":"htmlheader"
    #       }
    #       "senders":[
    #          {
    #             "sender":"sender_name",
    #             "content":[
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #                {
    #                   "plain":"content",
    #                   "html":"htmlcontent"
    #                }
    #             ]
    #          }
    #       ]
    #    },
    # ]

    messages.sort(key=lambda message: message.date_sent)

    for message in messages:
        header = message_header(message)

        # If we want to collapse into the previous recipient block
        if len(messages_to_render) > 0 and messages_to_render[-1]["header"] == header:
            sender = sender_string(message)
            sender_block = messages_to_render[-1]["senders"]

            # Same message sender, collapse again
            if sender_block[-1]["sender"] == sender:
                sender_block[-1]["content"].append(build_message_payload(message))
            else:
                # Start a new sender block
                sender_block.append(build_sender_payload(message))
        else:
            # New recipient and sender block
            recipient_block = {"header": header, "senders": [build_sender_payload(message)]}

            messages_to_render.append(recipient_block)

    return messages_to_render
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_narrow_url(
    user_profile: UserProfile,
    message: Message,
    display_recipient: Optional[DisplayRecipientT] = None,
    stream: Optional[Stream] = None,
) -> str:
    """The display_recipient and stream arguments are optional. If not
    provided, we'll compute them from the message; they exist as a
    performance optimization for cases where the caller needs those
    data too.
    """
    recipient_type = message.recipient.type

    if recipient_type == Recipient.PERSONAL:
        # 1:1 PM: narrow to the sender; neither optional argument applies.
        assert stream is None
        assert display_recipient is None
        return personal_narrow_url(realm=user_profile.realm, sender=message.sender)

    if recipient_type == Recipient.HUDDLE:
        # Group PM: narrow to the set of other participants.
        assert stream is None
        if display_recipient is None:
            display_recipient = get_display_recipient(message.recipient)
        assert display_recipient is not None
        assert not isinstance(display_recipient, str)
        other_user_ids = [r["id"] for r in display_recipient if r["id"] != user_profile.id]
        return huddle_narrow_url(realm=user_profile.realm, other_user_ids=other_user_ids)

    # Stream message: narrow to the stream/topic pair.
    assert display_recipient is None
    if stream is None:
        stream = Stream.objects.only("id", "name").get(id=message.recipient.type_id)
    return topic_narrow_url(user_profile.realm, stream, message.topic_name())
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-21 14:51:36 +02:00
|
|
|
def get_mentioned_user_group_name(
    messages: List[Dict[str, Any]], user_profile: UserProfile
) -> Optional[str]:
    """Returns the user group name to display in the email notification
    if user group(s) are mentioned.

    This implements the same algorithm as get_user_group_mentions_data
    in zerver/lib/notification_data.py, but we're passed a list of
    messages instead.
    """
    for message in messages:
        if message["mentioned_user_group_id"] is None and message["trigger"] == "mentioned":
            # The user has also been personally mentioned, so that gets prioritized.
            return None

    # These IDs are those of the smallest user groups mentioned in each message.
    # De-duplicate while preserving first-seen order: repeated mentions of the
    # same group would otherwise trigger redundant database lookups below, and
    # preserving order keeps size ties resolving to the earliest-mentioned group
    # (the loop only updates on a strictly smaller size).
    mentioned_user_group_ids = list(
        dict.fromkeys(
            message["mentioned_user_group_id"]
            for message in messages
            if message["mentioned_user_group_id"] is not None
        )
    )

    # We now want to calculate the name of the smallest user group mentioned among
    # all these messages.
    smallest_user_group_size = math.inf
    smallest_user_group_name = None
    for user_group_id in mentioned_user_group_ids:
        current_user_group = access_user_group_by_id(user_group_id, user_profile)
        current_user_group_size = len(get_user_group_members(current_user_group))

        if current_user_group_size < smallest_user_group_size:
            # If multiple user groups are mentioned, we prefer the
            # user group with the least members.
            smallest_user_group_size = current_user_group_size
            smallest_user_group_name = current_user_group.name

    return smallest_user_group_name
|
|
|
|
|
|
|
|
|
2019-01-14 14:04:08 +01:00
|
|
|
def message_content_allowed_in_missedmessage_emails(user_profile: UserProfile) -> bool:
    """Whether actual message content may be embedded in this user's
    missed-message emails: both the realm-level policy and the user's
    personal setting must allow it.
    """
    realm_allows = user_profile.realm.message_content_allowed_in_email_notifications
    user_allows = user_profile.message_content_in_email_notifications
    return realm_allows and user_allows
|
|
|
|
|
2019-01-14 14:04:08 +01:00
|
|
|
|
2014-08-11 14:15:16 +02:00
|
|
|
@statsd_increment("missed_message_reminders")
def do_send_missedmessage_events_reply_in_zulip(
    user_profile: UserProfile, missed_messages: List[Dict[str, Any]], message_count: int
) -> None:
    """
    Send a reminder email to a user if she's missed some PMs by being offline.

    The email will have its reply to address set to a limited used email
    address that will send a Zulip message to the correct recipient. This
    allows the user to respond to missed PMs, huddles, and @-mentions directly
    from the email.

    `user_profile` is the user to send the reminder to
    `missed_messages` is a list of dictionaries to Message objects and other data
                      for a group of messages that share a recipient (and topic)

    Raises ValueError if the messages don't all share one (recipient, topic)
    pair. Updates user_profile.last_reminder as a side effect.
    """
    # Imported here, not at the top of the file (presumably to avoid a
    # circular import — TODO confirm).
    from zerver.context_processors import common_context

    # All messages in a single reminder email must share a recipient/topic.
    recipients = {
        (msg["message"].recipient_id, msg["message"].topic_name()) for msg in missed_messages
    }
    if len(recipients) != 1:
        raise ValueError(
            f"All missed_messages must have the same recipient and topic {recipients!r}",
        )

    # This link is no longer a part of the email, but keeping the code in case
    # we find a clean way to add it back in the future
    unsubscribe_link = one_click_unsubscribe_link(user_profile, "missed_messages")
    context = common_context(user_profile)
    context.update(
        name=user_profile.full_name,
        message_count=message_count,
        unsubscribe_link=unsubscribe_link,
        realm_name_in_notifications=user_profile.realm_name_in_notifications,
    )

    mentioned_user_group_name = get_mentioned_user_group_name(missed_messages, user_profile)
    triggers = [message["trigger"] for message in missed_messages]
    unique_triggers = set(triggers)

    context.update(
        mention="mentioned" in unique_triggers or "wildcard_mentioned" in unique_triggers,
        stream_email_notify="stream_email_notify" in unique_triggers,
        mention_count=triggers.count("mentioned") + triggers.count("wildcard_mentioned"),
        mentioned_user_group_name=mentioned_user_group_name,
    )

    # If this setting (email mirroring integration) is enabled, only then
    # can users reply to email to send message to Zulip. Thus, one must
    # ensure to display warning in the template.
    if settings.EMAIL_GATEWAY_PATTERN:
        context.update(
            reply_to_zulip=True,
        )
    else:
        context.update(
            reply_to_zulip=False,
        )

    # Local import (see note on common_context above).
    from zerver.lib.email_mirror import create_missed_message_address

    reply_to_address = create_missed_message_address(user_profile, missed_messages[0]["message"])
    if reply_to_address == FromAddress.NOREPLY:
        reply_to_name = ""
    else:
        reply_to_name = "Zulip"

    narrow_url = get_narrow_url(user_profile, missed_messages[0]["message"])
    context.update(
        narrow_url=narrow_url,
    )

    senders = list({m["message"].sender for m in missed_messages})
    if missed_messages[0]["message"].recipient.type == Recipient.HUDDLE:
        display_recipient = get_display_recipient(missed_messages[0]["message"].recipient)
        # Make sure that this is a list of strings, not a string.
        assert not isinstance(display_recipient, str)
        other_recipients = [r["full_name"] for r in display_recipient if r["id"] != user_profile.id]
        context.update(group_pm=True)
        # Format the group-PM participant list; beyond 3 others we
        # abbreviate with "..., and N others".
        if len(other_recipients) == 2:
            huddle_display_name = " and ".join(other_recipients)
            context.update(huddle_display_name=huddle_display_name)
        elif len(other_recipients) == 3:
            huddle_display_name = (
                f"{other_recipients[0]}, {other_recipients[1]}, and {other_recipients[2]}"
            )
            context.update(huddle_display_name=huddle_display_name)
        else:
            huddle_display_name = "{}, and {} others".format(
                ", ".join(other_recipients[:2]), len(other_recipients) - 2
            )
            context.update(huddle_display_name=huddle_display_name)
    elif missed_messages[0]["message"].recipient.type == Recipient.PERSONAL:
        context.update(private_message=True)
    elif context["mention"] or context["stream_email_notify"]:
        # Keep only the senders who actually mentioned the user
        if context["mention"]:
            senders = list(
                {
                    m["message"].sender
                    for m in missed_messages
                    if m["trigger"] == "mentioned" or m["trigger"] == "wildcard_mentioned"
                }
            )
        message = missed_messages[0]["message"]
        stream = Stream.objects.only("id", "name").get(id=message.recipient.type_id)
        stream_header = f"{stream.name} > {message.topic_name()}"
        context.update(
            stream_header=stream_header,
        )
    else:
        raise AssertionError("Invalid messages!")

    # If message content is disabled, then flush all information we pass to email.
    if not message_content_allowed_in_missedmessage_emails(user_profile):
        realm = user_profile.realm
        context.update(
            reply_to_zulip=False,
            messages=[],
            sender_str="",
            realm_str=realm.name,
            huddle_display_name="",
            show_message_content=False,
            message_content_disabled_by_user=not user_profile.message_content_in_email_notifications,
            message_content_disabled_by_realm=not realm.message_content_allowed_in_email_notifications,
        )
    else:
        context.update(
            messages=build_message_list(
                user=user_profile,
                messages=[m["message"] for m in missed_messages],
                stream_map={},
            ),
            sender_str=", ".join(sender.full_name for sender in senders),
            realm_str=user_profile.realm.name,
            show_message_content=True,
        )

    # Translate the From name into the recipient's language.
    with override_language(user_profile.default_language):
        from_name: str = _("Zulip notifications")
    from_address = FromAddress.NOREPLY
    if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER:
        # If this setting is enabled, you can reply to the Zulip
        # message notification emails directly back to the original sender.
        # However, one must ensure the Zulip server is in the SPF
        # record for the domain, or there will be spam/deliverability
        # problems.
        #
        # Also, this setting is not really compatible with
        # EMAIL_ADDRESS_VISIBILITY_ADMINS.
        sender = senders[0]
        from_name, from_address = (sender.full_name, sender.email)
        context.update(
            reply_to_zulip=False,
        )

    # Hand the fully-built email off to the email_senders queue worker.
    email_dict = {
        "template_prefix": "zerver/emails/missed_message",
        "to_user_ids": [user_profile.id],
        "from_name": from_name,
        "from_address": from_address,
        "reply_to_email": str(Address(display_name=reply_to_name, addr_spec=reply_to_address)),
        "context": context,
    }
    queue_json_publish("email_senders", email_dict)

    user_profile.last_reminder = timezone_now()
    user_profile.save(update_fields=["last_reminder"])
|
2014-08-11 14:15:16 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def handle_missedmessage_emails(
    user_profile_id: int, missed_email_events: Iterable[Dict[str, Any]]
) -> None:
    """Send missed-message email notifications for one user.

    Collapses the queued notification events into per-conversation
    buckets and sends one email per bucket via
    do_send_missedmessage_events_reply_in_zulip.

    Args:
        user_profile_id: id of the user to notify.
        missed_email_events: queued events; each carries a message_id
            plus the trigger and (optionally) the mentioned_user_group_id
            that caused the notification.
    """
    # Map each message id to its notification metadata.  If several
    # events reference the same message, the last event wins.
    message_ids = {
        event.get("message_id"): {
            "trigger": event.get("trigger"),
            "mentioned_user_group_id": event.get("mentioned_user_group_id"),
        }
        for event in missed_email_events
    }

    user_profile = get_user_profile_by_id(user_profile_id)
    # Respect the user's notification settings; they may have changed
    # since the events were enqueued.
    if not receives_offline_email_notifications(user_profile):
        return

    # Only notify about messages the user still hasn't read.
    # Note: This query structure automatically filters out any
    # messages that were permanently deleted, since those would now be
    # in the ArchivedMessage table, not the Message table.
    messages = Message.objects.filter(
        usermessage__user_profile_id=user_profile,
        id__in=message_ids,
        usermessage__flags=~UserMessage.flags.read,
    )

    # Cancel missed-message emails for deleted messages
    messages = [um for um in messages if um.content != "(deleted)"]

    if not messages:
        return

    # We bucket messages by tuples that identify similar messages.
    # For streams it's recipient_id and topic.
    # For PMs it's recipient id and sender.
    messages_by_bucket: Dict[Tuple[int, str], List[Message]] = defaultdict(list)
    for msg in messages:
        if msg.recipient.type == Recipient.PERSONAL:
            # For PM's group using (recipient, sender).
            messages_by_bucket[(msg.recipient_id, msg.sender_id)].append(msg)
        else:
            messages_by_bucket[(msg.recipient_id, msg.topic_name())].append(msg)

    # Record the number of notifiable messages per bucket before any
    # surrounding context messages are mixed in below.
    message_count_by_bucket = {
        bucket_tup: len(msgs) for bucket_tup, msgs in messages_by_bucket.items()
    }

    for msg_list in messages_by_bucket.values():
        # Use the oldest message in the bucket as the anchor for
        # fetching surrounding conversation context.
        msg = min(msg_list, key=lambda msg: msg.date_sent)
        if msg.is_stream_message():
            context_messages = get_context_for_message(msg)
            # Only include context messages this user is allowed to access.
            filtered_context_messages = bulk_access_messages(user_profile, context_messages)
            msg_list.extend(filtered_context_messages)

    # Sort emails by least recently-active discussion.
    bucket_tups: List[Tuple[Tuple[int, str], int]] = []
    for bucket_tup, msg_list in messages_by_bucket.items():
        max_message_id = max(msg_list, key=lambda msg: msg.id).id
        bucket_tups.append((bucket_tup, max_message_id))

    bucket_tups = sorted(bucket_tups, key=lambda x: x[1])

    # Send an email per bucket.
    for bucket_tup, ignored_max_id in bucket_tups:
        # Deduplicate by message id: context messages appended above can
        # overlap with the originally-notified messages.
        unique_messages: Dict[int, Dict[str, Any]] = {}
        for m in messages_by_bucket[bucket_tup]:
            # Context-only messages have no entry in message_ids, so
            # their trigger/mentioned_user_group_id fall back to None.
            message_info = message_ids.get(m.id)
            unique_messages[m.id] = dict(
                message=m,
                trigger=message_info["trigger"] if message_info else None,
                mentioned_user_group_id=message_info.get("mentioned_user_group_id")
                if message_info is not None
                else None,
            )
        do_send_missedmessage_events_reply_in_zulip(
            user_profile,
            list(unique_messages.values()),
            message_count_by_bucket[bucket_tup],
        )
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-02 14:11:48 +01:00
|
|
|
def followup_day2_email_delay(user: UserProfile) -> timedelta:
    """Return how long to wait after signup before the followup_day2 email.

    The delay is computed against the user's own timezone so that a
    signup late in the work week skips the weekend: Friday signups wait
    until Monday, Thursday signups until Friday, everyone else two days.
    """
    tz_name = user.timezone
    if tz_name == "":
        tz_name = "UTC"
    signup_weekday = user.date_joined.astimezone(pytz.timezone(tz_name)).isoweekday()

    # isoweekday(): Thursday == 4, Friday == 5.
    if signup_weekday == 5:
        delay_days = 3  # Friday -> Monday
    elif signup_weekday == 4:
        delay_days = 1  # Thursday -> Friday
    else:
        delay_days = 2

    # Shave an hour off the delay so the email is likely to be near the
    # top of the user's inbox when they next sit down to deal with it,
    # or arrives while they are already processing their inbox.
    return timedelta(days=delay_days, hours=-1)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def enqueue_welcome_emails(user: UserProfile, realm_creation: bool = False) -> None:
    """Schedule the onboarding email sequence for a newly created user.

    Always enqueues the followup_day1 email; the followup_day2 email is
    enqueued (with a timezone-aware delay) only when no other account
    shares this user's delivery email address.
    """
    # Imported here to avoid import cycles.
    from zerver.context_processors import common_context

    # Pick the sender identity for the onboarding emails.
    if settings.WELCOME_EMAIL_SENDER is not None:
        from_name = settings.WELCOME_EMAIL_SENDER["name"]
        from_address = settings.WELCOME_EMAIL_SENDER["email"]
    else:
        from_name = None
        from_address = FromAddress.support_placeholder

    # Used below to decide whether the followup_day2 email is wanted.
    other_account_count = (
        UserProfile.objects.filter(delivery_email__iexact=user.delivery_email)
        .exclude(id=user.id)
        .count()
    )

    context = common_context(user)
    context.update(
        unsubscribe_link=one_click_unsubscribe_link(user, "welcome"),
        keyboard_shortcuts_link=user.realm.uri + "/help/keyboard-shortcuts",
        realm_name=user.realm.name,
        realm_creation=realm_creation,
        email=user.delivery_email,
        is_realm_admin=user.is_realm_admin,
    )

    # Administrators get the organization-setup guide; everyone else is
    # pointed at the product homepage.
    if user.is_realm_admin:
        getting_started_link = (
            user.realm.uri + "/help/getting-your-organization-started-with-zulip"
        )
    else:
        getting_started_link = "https://zulip.com"
    context["getting_started_link"] = getting_started_link

    # Imported here to avoid import cycles.
    from zproject.backends import ZulipLDAPAuthBackend, email_belongs_to_ldap

    if email_belongs_to_ldap(user.realm, user.delivery_email):
        context["ldap"] = True
        # Look up the LDAP username via the authentication backend.
        # Note that ZulipLDAPUserPopulator is deliberately not matched
        # here, since it isn't used for authentication.
        for backend in get_backends():
            if isinstance(backend, ZulipLDAPAuthBackend):
                context["ldap_username"] = backend.django_to_ldap_username(user.delivery_email)
                break

    send_future_email(
        "zerver/emails/followup_day1",
        user.realm,
        to_user_ids=[user.id],
        from_name=from_name,
        from_address=from_address,
        context=context,
    )

    if other_account_count == 0:
        send_future_email(
            "zerver/emails/followup_day2",
            user.realm,
            to_user_ids=[user.id],
            from_name=from_name,
            from_address=from_address,
            context=context,
            delay=followup_day2_email_delay(user),
        )
|
|
|
|
|
2014-01-24 22:29:17 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def convert_html_to_markdown(html: str) -> str:
    """Convert rendered HTML into Markdown suitable for email bodies.

    Args:
        html: an HTML fragment (e.g. a rendered message).

    Returns:
        The html2text Markdown rendering, stripped of surrounding
        whitespace, with bare image embeds rewritten as named links.
    """
    parser = html2text.HTML2Text()
    markdown = parser.handle(html).strip()

    # We want images to get linked and inline previewed, but html2text will turn
    # them into links of the form `![](http://foo.com/image.png)`, which is
    # ugly. Run a regex over the resulting description, turning links of the
    # form `![](http://foo.com/image.png?12345)` into
    # `[image.png](http://foo.com/image.png)`.
    # Raw strings keep the regex readable; the pattern and replacement
    # are byte-identical to the previously double-escaped versions.
    return re.sub(r"!\[\]\((\S*)/(\S*)\?(\S*)\)", r"[\2](\1/\2)", markdown)
|