import datetime
import hashlib
import logging
import os
import smtplib
from contextlib import suppress
from email.headerregistry import Address
from email.parser import Parser
from email.policy import default
from email.utils import formataddr, parseaddr
from typing import Any, Callable, Dict, List, Mapping, Optional, Sequence, Tuple, Union

import backoff
import css_inline
import orjson
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail.backends.smtp import EmailBackend
from django.core.mail.message import sanitize_address
from django.core.management import CommandError
from django.db import transaction
from django.db.models import QuerySet
from django.http import HttpRequest
from django.template import loader
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import override as override_language

from confirmation.models import generate_key
from zerver.lib.logging_util import log_to_file
from zerver.models import EMAIL_TYPES, Realm, ScheduledEmail, UserProfile, get_user_profile_by_id
from zproject.email_backends import EmailLogBackEnd, get_forward_address

MAX_CONNECTION_TRIES = 3

## Logging setup ##

logger = logging.getLogger("zulip.send_email")
log_to_file(logger, settings.EMAIL_LOG_PATH)


def get_inliner_instance() -> css_inline.CSSInliner:
    return css_inline.CSSInliner()


class FromAddress:
    SUPPORT = parseaddr(settings.ZULIP_ADMINISTRATOR)[1]
    NOREPLY = parseaddr(settings.NOREPLY_EMAIL_ADDRESS)[1]

    support_placeholder = "SUPPORT"
    no_reply_placeholder = "NO_REPLY"
    tokenized_no_reply_placeholder = "TOKENIZED_NO_REPLY"
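    # The placeholder strings above can be passed (and stored, e.g. in
    # ScheduledEmail data) in place of a concrete from_address; build_email()
    # below resolves them to SUPPORT, NOREPLY, or a freshly tokenized noreply
    # address at send time.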

    # Generates an unpredictable noreply address.
    @staticmethod
    def tokenized_no_reply_address() -> str:
        if settings.ADD_TOKENS_TO_NOREPLY_ADDRESS:
            return parseaddr(settings.TOKENIZED_NOREPLY_EMAIL_ADDRESS)[1].format(
                token=generate_key()
            )
        return FromAddress.NOREPLY

    @staticmethod
    def security_email_from_name(
        language: Optional[str] = None, user_profile: Optional[UserProfile] = None
    ) -> str:
        if language is None:
            assert user_profile is not None
            language = user_profile.default_language

        with override_language(language):
            return _("Zulip Account Security")


def build_email(
    template_prefix: str,
    to_user_ids: Optional[List[int]] = None,
    to_emails: Optional[List[str]] = None,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    reply_to_email: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
    realm: Optional[Realm] = None,
) -> EmailMultiAlternatives:
    # Callers should pass exactly one of to_user_ids and to_emails.
    assert (to_user_ids is None) ^ (to_emails is None)
    if to_user_ids is not None:
        to_users = [get_user_profile_by_id(to_user_id) for to_user_id in to_user_ids]
        if realm is None:
            assert len({to_user.realm_id for to_user in to_users}) == 1
            realm = to_users[0].realm
        to_emails = []
        for to_user in to_users:
            stringified = str(
                Address(display_name=to_user.full_name, addr_spec=to_user.delivery_email)
            )
            # Check ASCII encoding length. Amazon SES rejects emails
            # with From or To values longer than 320 characters (which
            # appears to be a misinterpretation of the RFC); in that
            # case we drop the name part from the address, under the
            # theory that it's better to send the email with a
            # simplified field than not at all.
            if len(sanitize_address(stringified, "utf-8")) > 320:
                stringified = str(Address(addr_spec=to_user.delivery_email))
            to_emails.append(stringified)

    # Attempt to suppress all auto-replies. This header originally
    # came out of Microsoft Outlook and friends, but seems reasonably
    # commonly-recognized.
    extra_headers = {"X-Auto-Response-Suppress": "All"}

    if realm is not None:
        # formataddr is meant for formatting a (display_name, email_address) pair for headers like "To",
        # but we can use its utility for formatting the List-Id header, as it follows the same format,
        # except having just a domain instead of an email address.
        extra_headers["List-Id"] = formataddr((realm.name, realm.host))

    assert settings.STATIC_URL is not None
    context = {
        **context,
        "support_email": FromAddress.SUPPORT,
        # Emails use unhashed image URLs so that those continue to
        # work over time, even if the prod-static directory is cleaned
        # out; as such, they just use a STATIC_URL prefix.
        "email_images_base_url": settings.STATIC_URL + "images/emails",
        "physical_address": settings.PHYSICAL_ADDRESS,
    }

    def get_inlined_template(template: str) -> str:
        inliner = get_inliner_instance()
        return inliner.inline(template)

    def render_templates() -> Tuple[str, str, str]:
        email_subject = (
            loader.render_to_string(
                template_prefix + ".subject.txt", context=context, using="Jinja2_plaintext"
            )
            .strip()
            .replace("\n", "")
        )
        message = loader.render_to_string(
            template_prefix + ".txt", context=context, using="Jinja2_plaintext"
        )

        html_message = loader.render_to_string(template_prefix + ".html", context)
        return (get_inlined_template(html_message), message, email_subject)

    # The i18n story for emails is a bit complicated. For emails
    # going to a single user, we want to use the language that user
    # has configured for their Zulip account. For emails going to
    # multiple users or to email addresses without a known Zulip
    # account (e.g. invitations), we want to use the default language
    # configured for the Zulip organization.
    #
    # See our i18n documentation for some high-level details:
    # https://zulip.readthedocs.io/en/latest/translating/internationalization.html

    if not language and to_user_ids is not None:
        language = to_users[0].default_language
    if language:
        with override_language(language):
            # Make sure that we render the email using the target's native language
            (html_message, message, email_subject) = render_templates()
    else:
        (html_message, message, email_subject) = render_templates()
        logger.warning("Missing language for email template '%s'", template_prefix)

    if from_name is None:
        from_name = "Zulip"
    if from_address is None:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.tokenized_no_reply_placeholder:
        from_address = FromAddress.tokenized_no_reply_address()
    if from_address == FromAddress.no_reply_placeholder:
        from_address = FromAddress.NOREPLY
    if from_address == FromAddress.support_placeholder:
        from_address = FromAddress.SUPPORT

    # Set the "From" that is displayed separately from the envelope-from.
    extra_headers["From"] = str(Address(display_name=from_name, addr_spec=from_address))
    # As above, with the "To" line, we drop the name part if it would
    # result in an address which is longer than 320 bytes.
    if len(sanitize_address(extra_headers["From"], "utf-8")) > 320:
        extra_headers["From"] = str(Address(addr_spec=from_address))

    # If we have an unsubscribe link for this email, configure it for
    # "Unsubscribe" buttons in email clients via the List-Unsubscribe header.
    #
    # Note that Microsoft ignores URLs in List-Unsubscribe headers, as
    # they only support the alternative `mailto:` format, which we
    # have not implemented.
    if "unsubscribe_link" in context:
        extra_headers["List-Unsubscribe"] = f"<{context['unsubscribe_link']}>"
        extra_headers["List-Unsubscribe-Post"] = "List-Unsubscribe=One-Click"

    reply_to = None
    if reply_to_email is not None:
        reply_to = [reply_to_email]
    # Remove the from_name in the reply-to for noreply emails, so that users
    # see "noreply@..." rather than "Zulip" or whatever the from_name is
    # when they reply in their email client.
    elif from_address == FromAddress.NOREPLY:
        reply_to = [FromAddress.NOREPLY]

    envelope_from = FromAddress.NOREPLY
    mail = EmailMultiAlternatives(
        email_subject, message, envelope_from, to_emails, reply_to=reply_to, headers=extra_headers
    )
    if html_message is not None:
        mail.attach_alternative(html_message, "text/html")
    return mail


class EmailNotDeliveredError(Exception):
    pass


class DoubledEmailArgumentError(CommandError):
    def __init__(self, argument_name: str) -> None:
        msg = (
            f"Argument '{argument_name}' is ambiguously present in both options and email template."
        )
        super().__init__(msg)


class NoEmailArgumentError(CommandError):
    def __init__(self, argument_name: str) -> None:
        msg = f"Argument '{argument_name}' is required in either options or email template."
        super().__init__(msg)


# When changing the arguments to this function, you may need to write a
# migration to change or remove any emails in ScheduledEmail.
def send_email(
    template_prefix: str,
    to_user_ids: Optional[List[int]] = None,
    to_emails: Optional[List[str]] = None,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    reply_to_email: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
    realm: Optional[Realm] = None,
    connection: Optional[BaseEmailBackend] = None,
    dry_run: bool = False,
    request: Optional[HttpRequest] = None,
) -> None:
    mail = build_email(
        template_prefix,
        to_user_ids=to_user_ids,
        to_emails=to_emails,
        from_name=from_name,
        from_address=from_address,
        reply_to_email=reply_to_email,
        language=language,
        context=context,
        realm=realm,
    )
    template = template_prefix.split("/")[-1]

    log_email_config_errors()

    if dry_run:
        print(mail.message().get_payload()[0])
        return

    if connection is None:
        connection = get_connection()

    cause = ""
    if request is not None:
        cause = f" (triggered from {request.META['REMOTE_ADDR']})"

    logging_recipient: Union[str, List[str]] = mail.to
    if realm is not None:
        logging_recipient = f"{mail.to} in {realm.string_id}"

    logger.info("Sending %s email to %s%s", template, logging_recipient, cause)

    try:
        # This will call .open() for us, which is a no-op if it's already open;
        # it will only call .close() if it was not open to begin with
        if connection.send_messages([mail]) == 0:
            logger.error("Unknown error sending %s email to %s", template, mail.to)
            raise EmailNotDeliveredError
    except smtplib.SMTPResponseException as e:
        logger.exception(
            "Error sending %s email to %s with error code %s: %s",
            template,
            mail.to,
            e.smtp_code,
            e.smtp_error,
            stack_info=True,
        )
        raise EmailNotDeliveredError
    except smtplib.SMTPException as e:
        logger.exception("Error sending %s email to %s: %s", template, mail.to, e, stack_info=True)
        raise EmailNotDeliveredError
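

# A hedged usage sketch (the template prefix and addresses below are
# illustrative; "unsubscribe_link" is the context key checked in build_email):
#
#     send_email(
#         "zerver/emails/example_notification",
#         to_emails=["someone@example.com"],
#         from_address=FromAddress.tokenized_no_reply_placeholder,
#         context={"unsubscribe_link": "https://example.com/unsubscribe"},
#     )
#
# build_email() resolves the placeholder from_address and, because the context
# contains "unsubscribe_link", adds List-Unsubscribe headers.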


@backoff.on_exception(backoff.expo, OSError, max_tries=MAX_CONNECTION_TRIES, logger=None)
def initialize_connection(connection: Optional[BaseEmailBackend] = None) -> BaseEmailBackend:
    if not connection:
        connection = get_connection()
        assert connection is not None

    if connection.open():
        # If it's a new connection, no need to no-op to check connectivity
        return connection

    if isinstance(connection, EmailLogBackEnd) and not get_forward_address():
        # With the development environment backend and without a
        # configured forwarding address, we don't actually send emails.
        #
        # As a result, the connection cannot be closed by the server
        # (as there is none), and `connection.noop` is not
        # implemented, so we need to return the connection early.
        return connection

    # No-op to ensure that we don't return a connection that has been
    # closed by the mail server.
    if isinstance(connection, EmailBackend):
        try:
            assert connection.connection is not None
            status = connection.connection.noop()[0]
        except Exception:
            status = -1
        if status != 250:
            # Close and connect again.
            connection.close()
            connection.open()

    return connection
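

# A hedged usage sketch: a long-running caller can establish one verified
# connection up front and reuse it across many sends, e.g.
#
#     connection = initialize_connection()
#     send_email(..., connection=connection)
#
# rather than letting each send_email() call open its own connection.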


def send_future_email(
    template_prefix: str,
    realm: Realm,
    to_user_ids: Optional[List[int]] = None,
    to_emails: Optional[List[str]] = None,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
    delay: datetime.timedelta = datetime.timedelta(0),
) -> None:
    template_name = template_prefix.split("/")[-1]
    email_fields = {
        "template_prefix": template_prefix,
        "from_name": from_name,
        "from_address": from_address,
        "language": language,
        "context": context,
    }

    if settings.DEVELOPMENT_LOG_EMAILS:
        # For logging the email in development, send it immediately as well.
        send_email(
            template_prefix,
            to_user_ids=to_user_ids,
            to_emails=to_emails,
            from_name=from_name,
            from_address=from_address,
            language=language,
            context=context,
        )

    assert (to_user_ids is None) ^ (to_emails is None)
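    # Create the ScheduledEmail and attach its recipients inside a single
    # transaction, so that deliver_scheduled_emails cannot observe (or race
    # with) a ScheduledEmail that has no recipients attached yet, and so that
    # a late save() cannot re-create a row the delivery worker has already
    # deleted.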
    with transaction.atomic():
        email = ScheduledEmail.objects.create(
            type=EMAIL_TYPES[template_name],
            scheduled_timestamp=timezone_now() + delay,
            realm=realm,
            data=orjson.dumps(email_fields).decode(),
        )

        # We store the recipients in the ScheduledEmail object itself,
        # rather than the JSON data object, so that we can find and clear
        # them using clear_scheduled_emails.
        try:
            if to_user_ids is not None:
                email.users.add(*to_user_ids)
            else:
                assert to_emails is not None
                assert len(to_emails) == 1
                email.address = parseaddr(to_emails[0])[1]
                email.save()
        except Exception as e:
            email.delete()
            raise e


def send_email_to_admins(
    template_prefix: str,
    realm: Realm,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
) -> None:
    admins = realm.get_human_admin_users()
    admin_user_ids = [admin.id for admin in admins]
    send_email(
        template_prefix,
        to_user_ids=admin_user_ids,
        from_name=from_name,
        from_address=from_address,
        language=language,
        context=context,
    )


def send_email_to_billing_admins_and_realm_owners(
    template_prefix: str,
    realm: Realm,
    from_name: Optional[str] = None,
    from_address: Optional[str] = None,
    language: Optional[str] = None,
    context: Mapping[str, Any] = {},
) -> None:
    send_email(
        template_prefix,
        to_user_ids=[user.id for user in realm.get_human_billing_admin_and_realm_owner_users()],
        from_name=from_name,
        from_address=from_address,
        language=language,
        context=context,
    )


def clear_scheduled_invitation_emails(email: str) -> None:
    """Unlike most scheduled emails, invitation emails don't have an
    existing user object to key off of, so we filter by address here."""
    items = ScheduledEmail.objects.filter(
        address__iexact=email, type=ScheduledEmail.INVITATION_REMINDER
    )
    items.delete()


@transaction.atomic(savepoint=False)
def clear_scheduled_emails(user_id: int, email_type: Optional[int] = None) -> None:
    # We need to obtain a FOR UPDATE lock on the selected rows to keep a concurrent
    # execution of this function (or something else) from deleting them before we access
    # the .users attribute.
    items = (
        ScheduledEmail.objects.filter(users__in=[user_id])
        .prefetch_related("users")
        .select_for_update()
    )
    if email_type is not None:
        items = items.filter(type=email_type)

    for item in items:
        item.users.remove(user_id)
        if not item.users.all().exists():
            # Because our transaction holds the row lock, this check is
            # guaranteed to be accurate, so we can reliably use it to decide
            # whether to delete the ScheduledEmail row.
            item.delete()


def handle_send_email_format_changes(job: Dict[str, Any]) -> None:
    # Reformat any jobs that used the old singular to_email
    # and to_user_id argument formats.
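    # For example (illustrative values), a legacy job payload like
    #     {"template_prefix": "zerver/emails/invite", "to_email": "invitee@example.com"}
    # is rewritten in place to
    #     {"template_prefix": "zerver/emails/invite", "to_emails": ["invitee@example.com"]}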
    if "to_email" in job:
        if job["to_email"] is not None:
            job["to_emails"] = [job["to_email"]]
        del job["to_email"]
    if "to_user_id" in job:
        if job["to_user_id"] is not None:
            job["to_user_ids"] = [job["to_user_id"]]
        del job["to_user_id"]


def deliver_scheduled_emails(email: ScheduledEmail) -> None:
    data = orjson.loads(email.data)
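    # `data` is the JSON-encoded `email_fields` dict written by
    # send_future_email above (template_prefix, from_name, from_address,
    # language, context); we add the recipients below before passing it
    # through to send_email().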
    user_ids = list(email.users.values_list("id", flat=True))
    if not user_ids and not email.address:
        # This state doesn't make sense, so something must have mutated the object
        logger.warning(
            "ScheduledEmail %s at %s had empty users and address attributes: %r",
            email.id,
            email.scheduled_timestamp,
            data,
        )
        email.delete()
        return

    if user_ids:
        data["to_user_ids"] = user_ids
    if email.address is not None:
        data["to_emails"] = [email.address]
    handle_send_email_format_changes(data)
    send_email(**data)
    email.delete()


def get_header(option: Optional[str], header: Optional[str], name: str) -> str:
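    # Exactly one source must provide the value: `option` (from command-line
    # options) or `header` (parsed from the email template). Illustrative
    # examples:
    #     get_header("CLI subject", None, "subject")  -> "CLI subject"
    #     get_header(None, None, "subject")           -> raises NoEmailArgumentError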
    if option and header:
        raise DoubledEmailArgumentError(name)
    if not option and not header:
        raise NoEmailArgumentError(name)
    return str(option or header)


def send_custom_email(
    users: QuerySet[UserProfile],
    *,
    target_emails: Sequence[str] = [],
    options: Dict[str, Any],
    add_context: Optional[Callable[[Dict[str, object], UserProfile], None]] = None,
) -> None:
    """
    Helper for `manage.py send_custom_email`.

    Can be used directly from a management shell with
    send_custom_email(user_profile_list, dict(
        markdown_template_path="/path/to/markdown/file.md",
        subject="Email subject",
        from_name="Sender Name")
    )
    """

    with open(options["markdown_template_path"]) as f:
        text = f.read()
        parsed_email_template = Parser(policy=default).parsestr(text)
        email_template_hash = hashlib.sha256(text.encode()).hexdigest()[0:32]

    email_id = f"zerver/emails/custom/custom_email_{email_template_hash}"
    markdown_email_base_template_path = "templates/zerver/emails/custom_email_base.pre.html"
    html_template_path = f"templates/{email_id}.html"
    plain_text_template_path = f"templates/{email_id}.txt"
    subject_path = f"templates/{email_id}.subject.txt"
    os.makedirs(os.path.dirname(html_template_path), exist_ok=True)

    # First, we render the Markdown input file just like our
    # user-facing docs with render_markdown_path.
    with open(plain_text_template_path, "w") as f:
        f.write(parsed_email_template.get_payload())

    from zerver.lib.templates import render_markdown_path

    rendered_input = render_markdown_path(plain_text_template_path.replace("templates/", ""))

    # And then extend it with our standard email headers.
    with open(html_template_path, "w") as f:
        with open(markdown_email_base_template_path) as base_template:
            # We use an ugly string substitution here, because we want to:
            #   1. Only run Jinja once on the supplied content
            #   2. Allow the supplied content to have jinja interpolation in it
            #   3. Have that interpolation happen in the context of
            #      each individual email we send, so the contents can
            #      vary user-to-user
            f.write(base_template.read().replace("{{ rendered_input }}", rendered_input))

    with open(subject_path, "w") as f:
        f.write(get_header(options.get("subject"), parsed_email_template.get("subject"), "subject"))

    # Finally, we send the actual emails.
    for user_profile in users.select_related("realm").order_by("id"):
        if options.get("admins_only") and not user_profile.is_realm_admin:
            continue
        context: Dict[str, object] = {
            "realm": user_profile.realm,
            "realm_string_id": user_profile.realm.string_id,
            "realm_uri": user_profile.realm.uri,
            "realm_name": user_profile.realm.name,
        }
        if add_context is not None:
            add_context(context, user_profile)
        with suppress(EmailNotDeliveredError):
            send_email(
                email_id,
                to_user_ids=[user_profile.id],
                from_address=FromAddress.SUPPORT,
                reply_to_email=options.get("reply_to"),
                from_name=get_header(
                    options.get("from_name"), parsed_email_template.get("from"), "from_name"
                ),
                context=context,
                dry_run=options["dry_run"],
            )

        if options["dry_run"]:
            break

    # Now send emails to any recipients without a user account.
    # This code path is intended for rare RemoteZulipServer emails.
    for email_address in target_emails:
        send_email(
            email_id,
            to_emails=[email_address],
            from_address=FromAddress.SUPPORT,
            reply_to_email=options.get("reply_to"),
            from_name=get_header(
                options.get("from_name"), parsed_email_template.get("from"), "from_name"
            ),
            context={"remote_server_email": True},
            dry_run=options["dry_run"],
        )

        if options["dry_run"]:
            break


def log_email_config_errors() -> None:
    """
    The purpose of this function is to log (potential) config errors,
    but without raising an exception.
    """
    if settings.EMAIL_HOST_USER and settings.EMAIL_HOST_PASSWORD is None:
        logger.error(
            "An SMTP username was set (EMAIL_HOST_USER), but password is unset (EMAIL_HOST_PASSWORD). "
            "To disable SMTP authentication, set EMAIL_HOST_USER to an empty string."
        )