import logging
from collections import Counter
from datetime import datetime, timezone
from email.headerregistry import Address
from typing import Any, Dict, List, Optional, Type, TypedDict, TypeVar, Union
from uuid import UUID

import orjson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator, validate_email
from django.db import IntegrityError, transaction
from django.db.models import Model
from django.http import HttpRequest, HttpResponse
from django.utils.crypto import constant_time_compare
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext as err_
from django.views.decorators.csrf import csrf_exempt
from dns import resolver as dns_resolver
from dns.exception import DNSException
from pydantic import BaseModel, ConfigDict, Json, StringConstraints
from pydantic.functional_validators import AfterValidator
from typing_extensions import Annotated

from analytics.lib.counts import (
    BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES,
    COUNT_STATS,
    LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER,
    REMOTE_INSTALLATION_COUNT_STATS,
    do_increment_logging_stat,
)
from corporate.lib.stripe import (
    BILLING_SUPPORT_EMAIL,
    RemoteRealmBillingSession,
    RemoteServerBillingSession,
    do_deactivate_remote_server,
    get_push_status_for_remote_request,
)
from corporate.models import (
    CustomerPlan,
    get_current_plan_by_customer,
    get_customer_by_remote_realm,
)
from zerver.decorator import require_post
from zerver.lib.email_validation import validate_is_not_disposable
from zerver.lib.exceptions import (
    ErrorCode,
    JsonableError,
    RemoteRealmServerMismatchError,
    RemoteServerDeactivatedError,
)
from zerver.lib.push_notifications import (
    InvalidRemotePushDeviceTokenError,
    UserPushIdentityCompat,
    send_android_push_notification,
    send_apple_push_notification,
    send_test_push_notification_directly_to_devices,
)
from zerver.lib.queue import queue_json_publish
from zerver.lib.remote_server import (
    InstallationCountDataForAnalytics,
    RealmAuditLogDataForAnalytics,
    RealmCountDataForAnalytics,
    RealmDataForAnalytics,
)
from zerver.lib.request import RequestNotes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.send_email import FromAddress
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.lib.typed_endpoint import (
    ApnsAppId,
    JsonBodyPayload,
    RequiredStringConstraint,
    typed_endpoint,
)
from zerver.lib.typed_endpoint_validators import check_string_fixed_length
from zerver.lib.types import RemoteRealmDictValue
from zerver.models.realms import DisposableEmailError
from zerver.views.push_notifications import validate_token
from zilencer.auth import InvalidZulipServerKeyError
from zilencer.models import (
    RemoteInstallationCount,
    RemotePushDeviceToken,
    RemoteRealm,
    RemoteRealmAuditLog,
    RemoteRealmCount,
    RemoteZulipServer,
    RemoteZulipServerAuditLog,
)

logger = logging.getLogger(__name__)


def validate_uuid(uuid: str) -> None:
    try:
        uuid_object = UUID(uuid, version=4)
        # Under some circumstances, UUID initialization will modify the uuid
        # string to produce a valid UUIDv4 instead of raising a ValueError.
        # If the submitted uuid had to be modified to become valid, it was
        # invalid as submitted, so we need to check for that condition.
        if str(uuid_object) != uuid:
            raise ValidationError(err_("Invalid UUID"))
    except ValueError:
        raise ValidationError(err_("Invalid UUID"))
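

# Illustration of the normalization check in validate_uuid above (a hedged
# example, not exercised by this module): the stdlib UUID constructor
# force-sets the version and variant bits, so
#     str(UUID("12345678-1234-5678-1234-567812345678", version=4))
# yields "12345678-1234-4678-9234-567812345678", which differs from the
# input; validate_uuid therefore rejects that submission as invalid.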


def validate_bouncer_token_request(token: str, kind: int) -> None:
    if kind not in [RemotePushDeviceToken.APNS, RemotePushDeviceToken.FCM]:
        raise JsonableError(err_("Invalid token type"))
    validate_token(token, kind)


@csrf_exempt
@require_post
@has_request_variables
def deactivate_remote_server(
    request: HttpRequest,
    remote_server: RemoteZulipServer,
) -> HttpResponse:
    billing_session = RemoteServerBillingSession(remote_server)
    do_deactivate_remote_server(remote_server, billing_session)
    return json_success(request)


@csrf_exempt
@require_post
@typed_endpoint
def register_remote_server(
    request: HttpRequest,
    *,
    zulip_org_id: Annotated[
        str,
        RequiredStringConstraint,
        AfterValidator(lambda s: check_string_fixed_length(s, RemoteZulipServer.UUID_LENGTH)),
    ],
    zulip_org_key: Annotated[
        str,
        RequiredStringConstraint,
        AfterValidator(lambda s: check_string_fixed_length(s, RemoteZulipServer.API_KEY_LENGTH)),
    ],
    hostname: Annotated[str, StringConstraints(max_length=RemoteZulipServer.HOSTNAME_MAX_LENGTH)],
    contact_email: str,
    new_org_key: Annotated[
        Optional[str],
        RequiredStringConstraint,
        AfterValidator(lambda s: check_string_fixed_length(s, RemoteZulipServer.API_KEY_LENGTH)),
    ] = None,
) -> HttpResponse:
    # StringConstraints validated the field lengths, but we still need to
    # validate the format of these fields.
    try:
        # TODO: Ideally we'd not abuse the URL validator this way
        url_validator = URLValidator()
        url_validator("http://" + hostname)
    except ValidationError:
        raise JsonableError(_("{hostname} is not a valid hostname").format(hostname=hostname))

    try:
        validate_email(contact_email)
    except ValidationError as e:
        raise JsonableError(e.message)

    # We don't want to allow disposable domains for contact_email either
    try:
        validate_is_not_disposable(contact_email)
    except DisposableEmailError:
        raise JsonableError(_("Please use your real email address."))

    contact_email_domain = Address(addr_spec=contact_email).domain.lower()
    if contact_email_domain == "example.com":
        raise JsonableError(_("Invalid email address."))

    # Check if the domain has an MX record
    resolver = dns_resolver.Resolver()
    resolver.timeout = 3
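    # Note (an assumption about dnspython semantics): `timeout` bounds each
    # individual query to a single nameserver; the total time spent on the
    # lookup is separately capped by `resolver.lifetime` (5 seconds by
    # default), which is left unchanged here.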
    dns_mx_check_successful = False
    try:
        if resolver.resolve(contact_email_domain, "MX"):
            dns_mx_check_successful = True
    except DNSException:
        pass
    if not dns_mx_check_successful:
        # Check if A/AAAA records exist, for better error reporting
        try:
            resolver.resolve_name(contact_email_domain)
            raise JsonableError(
                _("{domain} is invalid because it does not have any MX records").format(
                    domain=contact_email_domain
                )
            )
        except DNSException:
            raise JsonableError(_("{domain} does not exist").format(domain=contact_email_domain))

    try:
        validate_uuid(zulip_org_id)
    except ValidationError as e:
        raise JsonableError(e.message)

    try:
        remote_server = RemoteZulipServer.objects.get(uuid=zulip_org_id)
    except RemoteZulipServer.DoesNotExist:
        remote_server = None

    if remote_server is not None:
        if not constant_time_compare(remote_server.api_key, zulip_org_key):
            raise InvalidZulipServerKeyError(zulip_org_id)

        if remote_server.deactivated:
            raise RemoteServerDeactivatedError

    if remote_server is None and RemoteZulipServer.objects.filter(hostname=hostname).exists():
        raise JsonableError(
            _("A server with hostname {hostname} already exists").format(hostname=hostname)
        )

    with transaction.atomic():
        if remote_server is None:
            created = True
            remote_server = RemoteZulipServer.objects.create(
                uuid=zulip_org_id,
                hostname=hostname,
                contact_email=contact_email,
                api_key=zulip_org_key,
                last_request_datetime=timezone_now(),
            )
            RemoteZulipServerAuditLog.objects.create(
                event_type=RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED,
                server=remote_server,
                event_time=remote_server.last_updated,
            )
        else:
            created = False
            remote_server.hostname = hostname
            remote_server.contact_email = contact_email
            if new_org_key is not None:
                remote_server.api_key = new_org_key

            remote_server.last_request_datetime = timezone_now()
            remote_server.save()

    return json_success(request, data={"created": created})


@typed_endpoint
def register_remote_push_device(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    user_id: Optional[Json[int]] = None,
    user_uuid: Optional[str] = None,
    realm_uuid: Optional[str] = None,
    token: Annotated[str, RequiredStringConstraint],
    token_kind: Json[int],
    ios_app_id: Optional[ApnsAppId] = None,
) -> HttpResponse:
    validate_bouncer_token_request(token, token_kind)
    if token_kind == RemotePushDeviceToken.APNS and ios_app_id is None:
        raise JsonableError(_("Missing ios_app_id"))

    if user_id is None and user_uuid is None:
        raise JsonableError(_("Missing user_id or user_uuid"))
    if user_id is not None and user_uuid is not None:
        kwargs: Dict[str, object] = {"user_uuid": user_uuid, "user_id": None}
        # Delete any pre-existing user_id registration for this user+device to
        # avoid duplication. Further down, a uuid-based registration will be
        # created.
        RemotePushDeviceToken.objects.filter(
            server=server, token=token, kind=token_kind, user_id=user_id
        ).delete()
    else:
        # One of these is None, so these kwargs will lead to a proper registration
        # of either user_id or user_uuid type
        kwargs = {"user_id": user_id, "user_uuid": user_uuid}
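
    # Worked example (values are illustrative): a server that knows both
    # identifiers sends user_id=5 and user_uuid="abc...". The filter above
    # deletes any legacy (user_id=5, token) row, and kwargs becomes
    # {"user_uuid": "abc...", "user_id": None}, so the bulk_create below
    # recreates the registration keyed by uuid alone. When only one
    # identifier is sent, kwargs just passes it through unchanged.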
    if realm_uuid is not None:
        # Servers 8.0+ also send the realm.uuid of the user.
        assert isinstance(
            user_uuid, str
        ), "Servers new enough to send realm_uuid should also send user_uuid"
        remote_realm = get_remote_realm_helper(request, server, realm_uuid, user_uuid)
        if remote_realm is not None:
            # We want to associate the RemotePushDeviceToken with the RemoteRealm.
            kwargs["remote_realm_id"] = remote_realm.id

            remote_realm.last_request_datetime = timezone_now()
            remote_realm.save(update_fields=["last_request_datetime"])

    RemotePushDeviceToken.objects.bulk_create(
        [
            RemotePushDeviceToken(
                server=server,
                kind=token_kind,
                token=token,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now(),
                **kwargs,
            ),
        ],
        ignore_conflicts=True,
    )

    return json_success(request)


@typed_endpoint
def unregister_remote_push_device(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    token: Annotated[str, RequiredStringConstraint],
    token_kind: Json[int],
    user_id: Optional[Json[int]] = None,
    user_uuid: Optional[str] = None,
    realm_uuid: Optional[str] = None,
) -> HttpResponse:
    validate_bouncer_token_request(token, token_kind)
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    update_remote_realm_last_request_datetime_helper(request, server, realm_uuid, user_uuid)

    (num_deleted, ignored) = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(), token=token, kind=token_kind, server=server
    ).delete()
    if num_deleted == 0:
        raise JsonableError(err_("Token does not exist"))

    return json_success(request)


@typed_endpoint
def unregister_all_remote_push_devices(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    user_id: Optional[Json[int]] = None,
    user_uuid: Optional[str] = None,
    realm_uuid: Optional[str] = None,
) -> HttpResponse:
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    update_remote_realm_last_request_datetime_helper(request, server, realm_uuid, user_uuid)

    RemotePushDeviceToken.objects.filter(user_identity.filter_q(), server=server).delete()
    return json_success(request)


def update_remote_realm_last_request_datetime_helper(
    request: HttpRequest,
    server: RemoteZulipServer,
    realm_uuid: Optional[str],
    user_uuid: Optional[str],
) -> None:
    if realm_uuid is not None:
        assert user_uuid is not None
        remote_realm = get_remote_realm_helper(request, server, realm_uuid, user_uuid)
        if remote_realm is not None:
            remote_realm.last_request_datetime = timezone_now()
            remote_realm.save(update_fields=["last_request_datetime"])


def delete_duplicate_registrations(
    registrations: List[RemotePushDeviceToken], server_id: int, user_id: int, user_uuid: str
) -> List[RemotePushDeviceToken]:
    """
    When migrating to support registration by UUID, we introduced a bug where
    duplicate registrations for the same device+user could be created -- one by
    user_id and one by user_uuid. Given no good way of detecting these
    duplicates at the database level, we take advantage of the fact that when a
    remote server sends a push notification request to us, it sends both the
    user_id and user_uuid of the user.
    See https://github.com/zulip/zulip/issues/24969 for reference.

    This function, knowing the user_id and user_uuid of the user, can detect
    duplicates and delete the legacy user_id registration if appropriate.

    Return the list of registrations with the user_id-based duplicates removed.
    """

    # All registrations passed here should be of the same kind (apple vs android).
    assert len({registration.kind for registration in registrations}) == 1
    kind = registrations[0].kind

    tokens_counter = Counter(device.token for device in registrations)
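
    # Illustrative example: tokens ["t1", "t1", "t2"] (a user_id row and a
    # user_uuid row for "t1", plus a unique "t2") produce
    # Counter({"t1": 2, "t2": 1}), so only "t1" is deduplicated below.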
    tokens_to_deduplicate = []
    for key in tokens_counter:
        if tokens_counter[key] <= 1:
            continue
        if tokens_counter[key] > 2:
            raise AssertionError(
                f"More than two registrations for token {key} for user id:{user_id} uuid:{user_uuid}, shouldn't be possible"
            )
        assert tokens_counter[key] == 2
        tokens_to_deduplicate.append(key)

    if not tokens_to_deduplicate:
        return registrations

    logger.info(
        "Deduplicating push registrations for server id:%s user id:%s uuid:%s and tokens:%s",
        server_id,
        user_id,
        user_uuid,
        sorted(tokens_to_deduplicate),
    )
    RemotePushDeviceToken.objects.filter(
        token__in=tokens_to_deduplicate, kind=kind, server_id=server_id, user_id=user_id
    ).delete()

    deduplicated_registrations_to_return = []
    for registration in registrations:
        if registration.token in tokens_to_deduplicate and registration.user_id is not None:
            # user_id registrations are the ones we deleted
            continue
        deduplicated_registrations_to_return.append(registration)

    return deduplicated_registrations_to_return


class TestNotificationPayload(BaseModel):
    token: str
    token_kind: int
    user_id: int
    user_uuid: str
    realm_uuid: Optional[str] = None
    base_payload: Dict[str, Any]

    model_config = ConfigDict(extra="forbid")


@typed_endpoint
def remote_server_send_test_notification(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    payload: JsonBodyPayload[TestNotificationPayload],
) -> HttpResponse:
    token = payload.token
    token_kind = payload.token_kind

    user_id = payload.user_id
    user_uuid = payload.user_uuid
    realm_uuid = payload.realm_uuid

    # The remote server only sends the base payload with basic user and server info,
    # and the actual format of the test notification is defined on the bouncer, as that
    # gives us the flexibility to modify it freely, without relying on other servers
    # upgrading.
    base_payload = payload.base_payload

    # This is a new endpoint, so it can assume it will only be used by newer
    # servers, which will send both the user's UUID and ID.
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    update_remote_realm_last_request_datetime_helper(request, server, realm_uuid, user_uuid)

    try:
        device = RemotePushDeviceToken.objects.get(
            user_identity.filter_q(), token=token, kind=token_kind, server=server
        )
    except RemotePushDeviceToken.DoesNotExist:
        raise InvalidRemotePushDeviceTokenError

    send_test_push_notification_directly_to_devices(
        user_identity, [device], base_payload, remote=server
    )
    return json_success(request)
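

# For reference, a request body matching TestNotificationPayload above might
# look like (illustrative values only; token_kind must be one of the
# RemotePushDeviceToken kind constants):
#     {"token": "...", "token_kind": 1, "user_id": 5, "user_uuid": "...",
#      "base_payload": {...}}
# Because of extra="forbid", any unknown key fails validation.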


def get_remote_realm_helper(
    request: HttpRequest, server: RemoteZulipServer, realm_uuid: str, user_uuid: str
) -> Optional[RemoteRealm]:
    """
    Tries to fetch the RemoteRealm for the given realm_uuid and server.
    If that fails, returns None and logs what happened, using the request
    and user_uuid args to make the output more informative.
    """

    try:
        remote_realm = RemoteRealm.objects.get(uuid=realm_uuid)
    except RemoteRealm.DoesNotExist:
        logger.info(
            "%s: Received request for unknown realm %s, server %s, user %s",
            request.path,
            realm_uuid,
            server.id,
            user_uuid,
        )
        return None

    if remote_realm.server_id != server.id:
        logger.warning(
            "%s: Realm %s exists, but not registered to server %s",
            request.path,
            realm_uuid,
            server.id,
        )
        raise RemoteRealmServerMismatchError

    return remote_realm


class OldZulipServerError(JsonableError):
    code = ErrorCode.INVALID_ZULIP_SERVER

    def __init__(self, msg: str) -> None:
        self._msg: str = msg


class PushNotificationsDisallowedError(JsonableError):
    code = ErrorCode.PUSH_NOTIFICATIONS_DISALLOWED

    def __init__(self, reason: str) -> None:
        msg = _(
            "Your plan doesn't allow sending push notifications. Reason provided by the server: {reason}"
        ).format(reason=reason)
        super().__init__(msg)


class RemoteServerNotificationPayload(BaseModel):
    user_id: Optional[int] = None
    user_uuid: Optional[str] = None
    realm_uuid: Optional[str] = None
    gcm_payload: Dict[str, Any] = {}
    apns_payload: Dict[str, Any] = {}
    gcm_options: Dict[str, Any] = {}

    android_devices: List[str] = []
    apple_devices: List[str] = []
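

# Note on the mutable defaults above: pydantic deep-copies field defaults per
# model instance, so the `{}` and `[]` literals in
# RemoteServerNotificationPayload are safe, unlike default values in a plain
# function signature.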


@typed_endpoint
def remote_server_notify_push(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    payload: JsonBodyPayload[RemoteServerNotificationPayload],
) -> HttpResponse:
    user_id = payload.user_id
    user_uuid = payload.user_uuid
    user_identity = UserPushIdentityCompat(user_id, user_uuid)

    gcm_payload = payload.gcm_payload
    apns_payload = payload.apns_payload
    gcm_options = payload.gcm_options

    realm_uuid = payload.realm_uuid
    remote_realm = None
    if realm_uuid is not None:
        assert isinstance(
            user_uuid, str
        ), "Servers new enough to send realm_uuid should also send user_uuid"
        remote_realm = get_remote_realm_helper(request, server, realm_uuid, user_uuid)

    push_status = get_push_status_for_remote_request(server, remote_realm)
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = f"[can_push={push_status.can_push}/{push_status.message}]"
    if not push_status.can_push:
        if server.last_api_feature_level is None:
            raise OldZulipServerError(_("Your plan doesn't allow sending push notifications."))
        else:
            reason = push_status.message
            raise PushNotificationsDisallowedError(reason=reason)

    android_devices = list(
        RemotePushDeviceToken.objects.filter(
            user_identity.filter_q(),
            kind=RemotePushDeviceToken.FCM,
            server=server,
        ).order_by("id")
    )
    if android_devices and user_id is not None and user_uuid is not None:
        android_devices = delete_duplicate_registrations(
            android_devices, server.id, user_id, user_uuid
        )

    apple_devices = list(
        RemotePushDeviceToken.objects.filter(
            user_identity.filter_q(),
            kind=RemotePushDeviceToken.APNS,
            server=server,
        ).order_by("id")
    )
    if apple_devices and user_id is not None and user_uuid is not None:
        apple_devices = delete_duplicate_registrations(apple_devices, server.id, user_id, user_uuid)

    remote_queue_latency: Optional[str] = None
    sent_time: Optional[Union[float, int]] = gcm_payload.get(
        # TODO/compatibility: This could be a lot simpler if not for pre-5.0 Zulip servers
        # that had an older format. Future implementation:
        # "time", apns_payload["custom"]["zulip"].get("time")
        "time",
        apns_payload.get("custom", {}).get("zulip", {}).get("time"),
    )
    if sent_time is not None:
        if isinstance(sent_time, int):
            # The 'time' field only used to have whole-integer
            # granularity, so for integer values we only report
            # with whole-second granularity.
            remote_queue_latency = str(int(timezone_now().timestamp()) - sent_time)
        else:
            remote_queue_latency = f"{timezone_now().timestamp() - sent_time:.3f}"
        logger.info(
            "Remote queuing latency for %s:%s is %s seconds",
            server.uuid,
            user_identity,
            remote_queue_latency,
        )
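
    # For instance (illustrative numbers), the latency computed above for a
    # float sent_time processed 1.2 seconds after queuing logs as "1.200",
    # while an integer sent_time from an old server logs as "1".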

    logger.info(
        "Sending mobile push notifications for remote user %s:%s: %s via FCM devices, %s via APNs devices",
        server.uuid,
        user_identity,
        len(android_devices),
        len(apple_devices),
    )
    do_increment_logging_stat(
        server,
        REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_received::day"],
        None,
        timezone_now(),
        increment=len(android_devices) + len(apple_devices),
    )
    if remote_realm is not None:
        ensure_devices_set_remote_realm(
            android_devices=android_devices, apple_devices=apple_devices, remote_realm=remote_realm
        )
        do_increment_logging_stat(
            remote_realm,
            COUNT_STATS["mobile_pushes_received::day"],
            None,
            timezone_now(),
            increment=len(android_devices) + len(apple_devices),
        )

        remote_realm.last_request_datetime = timezone_now()
        remote_realm.save(update_fields=["last_request_datetime"])

    # Truncate incoming pushes to 200, due to APNs maximum message
    # sizes; see handle_remove_push_notification for the version of
    # this for notifications generated natively on the server. We
    # apply this to remote-server pushes in case they predate that
    # commit.
    def truncate_payload(payload: Dict[str, Any]) -> Dict[str, Any]:
        MAX_MESSAGE_IDS = 200
        if payload and payload.get("event") == "remove" and payload.get("zulip_message_ids"):
            ids = [int(id) for id in payload["zulip_message_ids"].split(",")]
            truncated_ids = sorted(ids)[-MAX_MESSAGE_IDS:]
            payload["zulip_message_ids"] = ",".join(str(id) for id in truncated_ids)
        return payload
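
    # Illustrative example: with MAX_MESSAGE_IDS = 2 for brevity, a payload of
    # {"event": "remove", "zulip_message_ids": "3,1,2"} would be truncated to
    # {"event": "remove", "zulip_message_ids": "2,3"} -- the newest
    # (highest-id) messages are the ones kept.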

    # The full request must complete within 30s, the timeout set by
    # Zulip remote hosts for push notification requests (see
    # PushBouncerSession). The timeouts in the FCM and APNS codepaths
    # must be set accordingly; see send_android_push_notification and
    # send_apple_push_notification.

    # TODO: This limit can be slightly exceeded now after changing the library
    # used for sending FCM notifications. This is pending adjustment after
    # getting some data on the behavior of the new API.

    gcm_payload = truncate_payload(gcm_payload)
    android_successfully_delivered = send_android_push_notification(
        user_identity, android_devices, gcm_payload, gcm_options, remote=server
    )

    if isinstance(apns_payload.get("custom"), dict) and isinstance(
        apns_payload["custom"].get("zulip"), dict
    ):
        apns_payload["custom"]["zulip"] = truncate_payload(apns_payload["custom"]["zulip"])
    apple_successfully_delivered = send_apple_push_notification(
        user_identity, apple_devices, apns_payload, remote=server
    )

    do_increment_logging_stat(
        server,
        REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_forwarded::day"],
        None,
        timezone_now(),
        increment=android_successfully_delivered + apple_successfully_delivered,
    )

    remote_realm_dict: Optional[RemoteRealmDictValue] = None
    if remote_realm is not None:
        do_increment_logging_stat(
            remote_realm,
            COUNT_STATS["mobile_pushes_forwarded::day"],
            None,
            timezone_now(),
            increment=android_successfully_delivered + apple_successfully_delivered,
        )
        remote_realm_dict = {
            "can_push": push_status.can_push,
            "expected_end_timestamp": push_status.expected_end_timestamp,
        }

    deleted_devices = get_deleted_devices(
        user_identity,
        server,
        android_devices=payload.android_devices,
        apple_devices=payload.apple_devices,
    )

    return json_success(
        request,
        data={
            "total_android_devices": len(android_devices),
            "total_apple_devices": len(apple_devices),
            "deleted_devices": deleted_devices,
            "realm": remote_realm_dict,
        },
    )


class DevicesToCleanUpDict(TypedDict):
    android_devices: List[str]
    apple_devices: List[str]


def get_deleted_devices(
    user_identity: UserPushIdentityCompat,
    server: RemoteZulipServer,
    android_devices: List[str],
    apple_devices: List[str],
) -> DevicesToCleanUpDict:
    """The remote server sends us a list of (tokens of) devices that it
    believes it has registered. However, some of them may have been
    deleted by us due to errors received in the low-level code
    responsible for directly sending push notifications.

    Query the database for the RemotePushDeviceTokens from these lists
    that we do indeed have, and return a list of the ones that we don't
    have and thus presumably have already deleted; the remote server
    will want to delete them too.
    """

    android_devices_we_have = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(),
        token__in=android_devices,
        kind=RemotePushDeviceToken.FCM,
        server=server,
    ).values_list("token", flat=True)
    apple_devices_we_have = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(),
        token__in=apple_devices,
        kind=RemotePushDeviceToken.APNS,
        server=server,
    ).values_list("token", flat=True)

    return DevicesToCleanUpDict(
        android_devices=list(set(android_devices) - set(android_devices_we_have)),
        apple_devices=list(set(apple_devices) - set(apple_devices_we_have)),
    )
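

# Illustrative example for get_deleted_devices above: if the server reports
# android_devices=["a", "b", "c"] but we only have rows for {"a", "c"}, the
# result includes android_devices=["b"] -- a token we presumably already
# deleted, which the remote server should now delete as well.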


def validate_incoming_table_data(
    server: RemoteZulipServer,
    model: Any,
    rows: List[Dict[str, Any]],
    *,
    is_count_stat: bool,
) -> None:
    last_id = get_last_id_from_server(server, model)
    for row in rows:
        # We are silent about stats not in COUNT_STATS which are
        # in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER --
        # these are stats we stopped recording, but old versions
        # may still have and report.
        if (
            is_count_stat
            and (
                row["property"] not in COUNT_STATS
                or row["property"] in BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES
            )
            and row["property"] not in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
        ):
            raise JsonableError(_("Invalid property {property}").format(property=row["property"]))

        if not is_count_stat and row["event_type"] not in RemoteRealmAuditLog.SYNCED_BILLING_EVENTS:
            raise JsonableError(_("Invalid event type."))

        if row.get("id") is None:
            # This shouldn't be possible, as submitting data like this should be
            # prevented by our param validators.
            raise AssertionError(f"Missing id field in row {row}")
        if row["id"] <= last_id:
            raise JsonableError(_("Data is out of order."))
        last_id = row["id"]
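

# Illustrative example for validate_incoming_table_data above: given a stored
# last_id of 6, incoming rows with ids [7, 9, 12] pass the ordering check
# (each id must exceed the previous one), while [7, 9, 9] or [9, 7] would
# raise "Data is out of order."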


ModelT = TypeVar("ModelT", bound=Model)


def batch_create_table_data(
    server: RemoteZulipServer,
    model: Type[ModelT],
    row_objects: List[ModelT],
) -> None:
    # We ignore previously-existing data, in case it was truncated and
    # re-created on the remote server. `ignore_conflicts=True`
    # cannot return the ids, or count thereof, of the new inserts
    # (see https://code.djangoproject.com/ticket/30138), so we rely on
    # having a lock to accurately count them before and after. This
    # query is also well-indexed.
    before_count = model._default_manager.filter(server=server).count()
    model._default_manager.bulk_create(row_objects, batch_size=1000, ignore_conflicts=True)
    after_count = model._default_manager.filter(server=server).count()
    inserted_count = after_count - before_count
    if inserted_count < len(row_objects):
        logging.warning(
            "Dropped %d duplicated rows while saving %d rows of %s for server %s/%s",
            len(row_objects) - inserted_count,
            len(row_objects),
            model._meta.db_table,
            server.hostname,
            server.uuid,
        )


def ensure_devices_set_remote_realm(
    android_devices: List[RemotePushDeviceToken],
    apple_devices: List[RemotePushDeviceToken],
    remote_realm: RemoteRealm,
) -> None:
    devices_to_update = []
    for device in android_devices + apple_devices:
        if device.remote_realm_id is None:
            device.remote_realm = remote_realm
            devices_to_update.append(device)

    RemotePushDeviceToken.objects.bulk_update(devices_to_update, ["remote_realm"])


def update_remote_realm_data_for_server(
    server: RemoteZulipServer, server_realms_info: List[RealmDataForAnalytics]
) -> None:
    reported_uuids = [realm.uuid for realm in server_realms_info]
    all_registered_remote_realms_for_server = list(RemoteRealm.objects.filter(server=server))
    already_registered_remote_realms = [
        remote_realm
        for remote_realm in all_registered_remote_realms_for_server
        if remote_realm.uuid in reported_uuids
    ]
    # RemoteRealm registrations that we have for this server, but aren't
    # present in the data sent to us. We assume this to mean the server
    # must have deleted those realms from the database.
    remote_realms_missing_from_server_data = [
        remote_realm
        for remote_realm in all_registered_remote_realms_for_server
        if remote_realm.uuid not in reported_uuids
    ]

    already_registered_uuids = {
        remote_realm.uuid for remote_realm in already_registered_remote_realms
    }
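
    # Illustrative example: if the server reports realms {A, B} and we have
    # RemoteRealm rows for {B, C}, then B is already registered, C is missing
    # from the server data (presumably deleted locally on the server), and a
    # row for A gets created below.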

    new_remote_realms = [
        RemoteRealm(
            server=server,
            uuid=realm.uuid,
            uuid_owner_secret=realm.uuid_owner_secret,
            host=realm.host,
            realm_deactivated=realm.deactivated,
            realm_date_created=timestamp_to_datetime(realm.date_created),
            org_type=realm.org_type,
            name=realm.name,
            authentication_methods=realm.authentication_methods,
            is_system_bot_realm=realm.is_system_bot_realm,
        )
        for realm in server_realms_info
        if realm.uuid not in already_registered_uuids
    ]

    try:
        RemoteRealm.objects.bulk_create(new_remote_realms)
    except IntegrityError:
        raise JsonableError(_("Duplicate registration detected."))

    uuid_to_realm_dict = {str(realm.uuid): realm for realm in server_realms_info}
    remote_realms_to_update = []
    remote_realm_audit_logs = []
    now = timezone_now()

    # Update RemoteRealm entries whose corresponding realm's info has changed
    # (for the attributes that make sense to sync like this).
    for remote_realm in already_registered_remote_realms:
        modified = False
        realm = uuid_to_realm_dict[str(remote_realm.uuid)]
        for remote_realm_attr, realm_dict_key in [
            ("host", "host"),
            ("org_type", "org_type"),
            ("name", "name"),
            ("authentication_methods", "authentication_methods"),
            ("realm_deactivated", "deactivated"),
            ("is_system_bot_realm", "is_system_bot_realm"),
        ]:
            old_value = getattr(remote_realm, remote_realm_attr)
            new_value = getattr(realm, realm_dict_key)

            if old_value == new_value:
                continue

            setattr(remote_realm, remote_realm_attr, new_value)
            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_id=None,
                    remote_realm=remote_realm,
                    realm_id=realm.id,
                    event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                    event_time=now,
                    extra_data={
                        "attr_name": remote_realm_attr,
                        "old_value": old_value,
                        "new_value": new_value,
                    },
                )
            )
            modified = True

        if remote_realm.realm_locally_deleted and remote_realm.uuid in reported_uuids:
            remote_realm.realm_locally_deleted = False
            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_id=None,
                    remote_realm=remote_realm,
                    realm_id=uuid_to_realm_dict[str(remote_realm.uuid)].id,
                    event_type=RemoteRealmAuditLog.REMOTE_REALM_LOCALLY_DELETED_RESTORED,
                    event_time=now,
                )
            )
            modified = True

        if modified:
            remote_realms_to_update.append(remote_realm)

    RemoteRealm.objects.bulk_update(
        remote_realms_to_update,
        [
            "host",
            "realm_deactivated",
            "name",
            "authentication_methods",
            "org_type",
            "is_system_bot_realm",
            "realm_locally_deleted",
        ],
    )
    RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)
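
    # Second pass, with fresh lists: mark registrations whose realms the
    # server no longer reports as locally deleted, and collect email contexts
    # so billing support is notified about any of them that were on a paid
    # plan.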
    remote_realms_to_update = []
    remote_realm_audit_logs = []
    new_locally_deleted_remote_realms_on_paid_plan_contexts = []
    for remote_realm in remote_realms_missing_from_server_data:
        if not remote_realm.realm_locally_deleted:
            # Otherwise we already knew about this, so there's nothing to do.
            remote_realm.realm_locally_deleted = True

            ## Temporarily disabled deactivating the registration for
            ## locally deleted realms pending further work on how to
            ## handle test upgrades to 8.0.
            # remote_realm.registration_deactivated = True

            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_id=None,
                    remote_realm=remote_realm,
                    realm_id=None,
                    event_type=RemoteRealmAuditLog.REMOTE_REALM_LOCALLY_DELETED,
                    event_time=now,
                )
            )
            remote_realms_to_update.append(remote_realm)

            billing_session = RemoteRealmBillingSession(remote_realm=remote_realm)
            if billing_session.on_paid_plan():
                context = {
                    "billing_entity": billing_session.billing_entity_display_name,
                    "support_url": billing_session.support_url(),
                    "notice_reason": "locally_deleted_realm_on_paid_plan",
                }
                new_locally_deleted_remote_realms_on_paid_plan_contexts.append(context)

    RemoteRealm.objects.bulk_update(
        remote_realms_to_update,
        ["realm_locally_deleted"],
    )
    RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)

    email_dict: Dict[str, Any] = {
        "template_prefix": "zerver/emails/internal_billing_notice",
        "to_emails": [BILLING_SUPPORT_EMAIL],
        "from_address": FromAddress.tokenized_no_reply_address(),
    }
    for context in new_locally_deleted_remote_realms_on_paid_plan_contexts:
        email_dict["context"] = context
        queue_json_publish("email_senders", email_dict)


def get_human_user_realm_uuids(
    server: RemoteZulipServer,
) -> List[UUID]:
    query = RemoteRealm.objects.filter(
        server=server,
        realm_deactivated=False,
        realm_locally_deleted=False,
        registration_deactivated=False,
        is_system_bot_realm=False,
    ).exclude(
        host__startswith="zulipinternal.",
    )
    if settings.DEVELOPMENT:  # nocoverage
        query = query.exclude(host__startswith="analytics.")

    billable_realm_uuids = list(query.values_list("uuid", flat=True))

    return billable_realm_uuids
@transaction.atomic
|
2024-02-21 04:06:14 +01:00
|
|
|
def handle_customer_migration_from_server_to_realm(
|
2024-01-12 19:05:57 +01:00
|
|
|
server: RemoteZulipServer,
|
2023-12-14 23:05:11 +01:00
|
|
|
) -> None:
|
2023-12-11 18:00:42 +01:00
|
|
|
server_billing_session = RemoteServerBillingSession(server)
|
|
|
|
server_customer = server_billing_session.get_customer()
|
|
|
|
if server_customer is None:
|
|
|
|
return
|
2023-12-14 02:28:46 +01:00
|
|
|
|
2024-01-05 01:58:52 +01:00
|
|
|
if server_customer.sponsorship_pending:
|
|
|
|
# If we have a pending sponsorship request, defer moving any
|
|
|
|
# data until the sponsorship request has been processed. This
|
|
|
|
# avoids a race where a sponsorship request made at the server
|
|
|
|
# level gets approved after the legacy plan has already been
|
|
|
|
# moved to the sole human RemoteRealm, which would violate
|
|
|
|
# invariants.
|
|
|
|
return
|
|
|
|
|
2023-12-11 18:00:42 +01:00
|
|
|
server_plan = get_current_plan_by_customer(server_customer)
|
2023-12-14 02:28:46 +01:00
|
|
|
if server_plan is None:
|
|
|
|
# If the server has no current plan, either because it never
|
|
|
|
# had one or because a previous legacy plan was migrated to
|
|
|
|
# the RemoteRealm object, there's nothing to potentially
|
|
|
|
# migrate.
|
|
|
|
return
|
|
|
|
|
2024-01-12 19:05:57 +01:00
|
|
|
realm_uuids = get_human_user_realm_uuids(server)
|
2023-12-11 18:00:42 +01:00
|
|
|
if not realm_uuids:
|
|
|
|
return
|
|
|
|
|
|
|
|
event_time = timezone_now()
|
|
|
|
remote_realm_audit_logs = []
|
2023-12-14 02:28:46 +01:00
|
|
|
|
2024-02-21 19:45:43 +01:00
|
|
|
if len(realm_uuids) != 1:
|
|
|
|
return
|
2024-02-19 18:49:41 +01:00
|
|
|
|
2024-02-21 19:45:43 +01:00
|
|
|
# Here, we have exactly one non-system-bot realm, and some
|
|
|
|
# sort of plan on the server; move it to the realm.
|
|
|
|
remote_realm = RemoteRealm.objects.get(uuid=realm_uuids[0], server=server)
|
|
|
|
remote_realm_customer = get_customer_by_remote_realm(remote_realm)
|
|
|
|
|
|
|
|
# Migrate customer from server to remote realm if there is only one realm.
|
|
|
|
if remote_realm_customer is None:
|
|
|
|
# In this case the migration is easy, since we can just move the customer
|
|
|
|
# object directly.
|
|
|
|
server_customer.remote_realm = remote_realm
|
|
|
|
server_customer.remote_server = None
|
|
|
|
server_customer.save(update_fields=["remote_realm", "remote_server"])
|
|
|
|
else:
|
|
|
|
# If there's a Customer object for the realm already, things are harder,
|
|
|
|
# because it's an unusual state and there may be a plan already active
|
|
|
|
# for the realm, or there may have been.
|
|
|
|
# In the simplest case, where the realm doesn't have an active plan and the
|
|
|
|
# server's plan state can easily be moved, we proceed with the migrations.
|
|
|
|
remote_realm_plan = get_current_plan_by_customer(remote_realm_customer)
|
|
|
|
if (
|
|
|
|
remote_realm_plan is None
|
|
|
|
and server_plan.status != CustomerPlan.SWITCH_PLAN_TIER_AT_PLAN_END
|
2024-03-29 01:34:00 +01:00
|
|
|
and remote_realm_customer.stripe_customer_id is None
|
2024-02-21 19:45:43 +01:00
|
|
|
):
|
2024-03-29 01:34:00 +01:00
|
|
|
# This is a simple case where we don't have to worry about the realm
|
|
|
|
# having an active plan or an already configured stripe_customer_id,
|
|
|
|
# or the server having a next plan scheduled that we'd need
|
2024-02-21 19:45:43 +01:00
|
|
|
# to figure out how to migrate correctly as well.
|
|
|
|
# Any other case is too complex to handle here, and should be handled manually,
|
|
|
|
# especially since that should be extremely rare.
|
|
|
|
server_plan.customer = remote_realm_customer
|
|
|
|
server_plan.save(update_fields=["customer"])
|
2024-03-29 01:34:00 +01:00
|
|
|
|
|
|
|
# The realm's customer does not have .stripe_customer_id set by assumption.
|
|
|
|
# This situation happens e.g. if the Customer was created by a sponsorship request,
|
|
|
|
# so we need to move the value over from the server.
|
|
|
|
# That's because the plan we're transferring might be paid or a free trial and
|
|
|
|
# therefore need a stripe_customer_id to generate invoices.
|
|
|
|
# Hypothetically if the server's customer didn't have a stripe_customer_id set,
|
|
|
|
# that would imply the plan doesn't require it (e.g. this might be a Community plan)
|
|
|
|
# so we don't have to worry about whether we're copying over a valid value or None here.
|
|
|
|
stripe_customer_id = server_customer.stripe_customer_id
|
|
|
|
server_customer.stripe_customer_id = None
|
|
|
|
server_customer.save(update_fields=["stripe_customer_id"])
|
|
|
|
|
|
|
|
remote_realm_customer.stripe_customer_id = stripe_customer_id
|
|
|
|
remote_realm_customer.save(update_fields=["stripe_customer_id"])
|
2024-02-21 19:45:43 +01:00
|
|
|
else:
|
|
|
|
logger.warning(
|
|
|
|
"Failed to migrate customer from server (id: %s) to realm (id: %s): RemoteRealm customer already exists "
|
|
|
|
"and plans can't be migrated automatically.",
|
|
|
|
server.id,
|
|
|
|
remote_realm.id,
|
|
|
|
)
|
|
|
|
raise JsonableError(
|
|
|
|
_(
|
|
|
|
"Couldn't reconcile billing data between server and realm. Please contact {support_email}"
|
|
|
|
).format(support_email=FromAddress.SUPPORT)
|
2023-12-11 18:00:42 +01:00
|
|
|
)
|
2024-02-21 19:45:43 +01:00
|
|
|
|
|
|
|
# TODO: Might be better to call do_change_plan_type here.
|
|
|
|
remote_realm.plan_type = server.plan_type
|
|
|
|
remote_realm.save(update_fields=["plan_type"])
|
|
|
|
server.plan_type = RemoteZulipServer.PLAN_TYPE_SELF_MANAGED
|
|
|
|
server.save(update_fields=["plan_type"])
|
|
|
|
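# Record the plan_type transfer in the audit log; the accumulated rows are
# written in bulk below.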
remote_realm_audit_logs.append(
|
|
|
|
RemoteRealmAuditLog(
|
|
|
|
server=server,
|
|
|
|
remote_realm=remote_realm,
|
|
|
|
event_type=RemoteRealmAuditLog.REMOTE_PLAN_TRANSFERRED_SERVER_TO_REALM,
|
|
|
|
event_time=event_time,
|
|
|
|
extra_data={
|
|
|
|
"attr_name": "plan_type",
|
|
|
|
"old_value": RemoteRealm.PLAN_TYPE_SELF_MANAGED,
|
|
|
|
"new_value": remote_realm.plan_type,
|
|
|
|
},
|
2023-12-11 18:00:42 +01:00
|
|
|
)
|
2024-02-21 19:45:43 +01:00
|
|
|
)
|
2023-12-11 18:00:42 +01:00
|
|
|
|
|
|
|
RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)
|
|
|
|
|
|
|
|
|
2023-11-17 14:07:41 +01:00
|
|
|
@typed_endpoint
|
2023-11-21 17:55:46 +01:00
|
|
|
@transaction.atomic
|
2021-02-12 08:19:30 +01:00
|
|
|
def remote_server_post_analytics(
|
|
|
|
request: HttpRequest,
|
2022-08-01 23:51:10 +02:00
|
|
|
server: RemoteZulipServer,
|
2023-11-17 14:07:41 +01:00
|
|
|
*,
|
|
|
|
realm_counts: Json[List[RealmCountDataForAnalytics]],
|
|
|
|
installation_counts: Json[List[InstallationCountDataForAnalytics]],
|
|
|
|
realmauditlog_rows: Optional[Json[List[RealmAuditLogDataForAnalytics]]] = None,
|
|
|
|
realms: Optional[Json[List[RealmDataForAnalytics]]] = None,
|
2023-11-20 21:40:43 +01:00
|
|
|
version: Optional[Json[str]] = None,
|
2024-06-23 02:50:05 +02:00
|
|
|
merge_base: Optional[Json[str]] = None,
|
2023-12-08 21:38:01 +01:00
|
|
|
api_feature_level: Optional[Json[int]] = None,
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> HttpResponse:
|
2023-11-21 17:55:46 +01:00
|
|
|
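# Illustrative sketch of a single incoming analytics row (fields per the
# *DataForAnalytics models used below; not an exhaustive schema):
#   {"property": "messages_sent:is_bot:hour", "realm": 1, "id": 42,
#    "end_time": 1700000000, "subgroup": null, "value": 10}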
# Lock the server, preventing this from racing with other
|
|
|
|
# duplicate submissions of the data.
|
|
|
|
server = RemoteZulipServer.objects.select_for_update().get(id=server.id)
|
|
|
|
|
2023-12-05 20:28:36 +01:00
|
|
|
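# Track whether the server's reported version information changed, so we
# know to fix up stale RemoteRealm foreign keys further below.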
remote_server_version_updated = False
|
2023-11-20 21:40:43 +01:00
|
|
|
if version is not None:
|
|
|
|
version = version[0 : RemoteZulipServer.VERSION_MAX_LENGTH]
|
2024-06-23 02:50:05 +02:00
|
|
|
if (
|
|
|
|
version != server.last_version
|
|
|
|
or merge_base != server.last_merge_base
|
|
|
|
or api_feature_level != server.last_api_feature_level
|
|
|
|
):
|
2023-11-20 21:40:43 +01:00
|
|
|
server.last_version = version
|
2024-06-23 02:50:05 +02:00
|
|
|
server.last_merge_base = merge_base
|
2023-12-08 21:38:01 +01:00
|
|
|
server.last_api_feature_level = api_feature_level
|
2024-06-23 02:50:05 +02:00
|
|
|
server.save(update_fields=["last_version", "last_merge_base", "last_api_feature_level"])
|
2023-12-05 20:28:36 +01:00
|
|
|
remote_server_version_updated = True
|
2023-11-20 21:40:43 +01:00
|
|
|
|
2023-11-17 14:07:41 +01:00
|
|
|
validate_incoming_table_data(
|
2023-12-09 02:10:05 +01:00
|
|
|
server,
|
|
|
|
RemoteRealmCount,
|
|
|
|
[dict(count) for count in realm_counts],
|
|
|
|
is_count_stat=True,
|
2023-11-17 14:07:41 +01:00
|
|
|
)
|
|
|
|
validate_incoming_table_data(
|
2023-12-09 02:10:05 +01:00
|
|
|
server,
|
|
|
|
RemoteInstallationCount,
|
|
|
|
[dict(count) for count in installation_counts],
|
|
|
|
is_count_stat=True,
|
2023-11-17 14:07:41 +01:00
|
|
|
)
|
2023-11-20 21:40:43 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
if realmauditlog_rows is not None:
|
2023-11-17 14:07:41 +01:00
|
|
|
validate_incoming_table_data(
|
2023-12-09 02:10:05 +01:00
|
|
|
server,
|
|
|
|
RemoteRealmAuditLog,
|
|
|
|
[dict(row) for row in realmauditlog_rows],
|
|
|
|
is_count_stat=False,
|
2023-11-17 14:07:41 +01:00
|
|
|
)
|
2019-10-03 01:54:36 +02:00
|
|
|
|
2023-10-30 23:50:53 +01:00
|
|
|
if realms is not None:
|
2023-11-27 01:53:08 +01:00
|
|
|
update_remote_realm_data_for_server(server, realms)
|
2023-12-05 20:28:36 +01:00
|
|
|
if remote_server_version_updated:
|
|
|
|
fix_remote_realm_foreign_keys(server, realms)
|
|
|
|
|
|
|
|
realm_id_to_remote_realm = build_realm_id_to_remote_realm_dict(server, realms)
|
2023-10-30 23:50:53 +01:00
|
|
|
|
2024-05-31 22:06:19 +02:00
|
|
|
# Note that due to skipping rows from the remote server which
|
|
|
|
# match LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER, we may
|
|
|
|
# theoretically choose to omit the last RemoteRealmCount (or
|
|
|
|
# InstallationCount, below) row sent by the remote server, causing
|
|
|
|
# them to attempt to re-send that row repeatedly. Since the last
|
|
|
|
# CountStat is not currently a skipped type, this is, in practice,
|
|
|
|
# unlikely to occur.
|
|
|
|
#
|
|
|
|
# TODO: Record the high-water RealmCount and InstallationCount's
|
|
|
|
# `remote_id` values on the RemoteServer, rather than computing
|
|
|
|
# them via get_last_id_from_server.
|
2022-07-01 03:12:07 +02:00
|
|
|
remote_realm_counts = [
|
2021-02-12 08:19:30 +01:00
|
|
|
RemoteRealmCount(
|
2023-12-05 20:28:36 +01:00
|
|
|
remote_realm=realm_id_to_remote_realm.get(row.realm),
|
2023-11-17 14:07:41 +01:00
|
|
|
property=row.property,
|
|
|
|
realm_id=row.realm,
|
|
|
|
remote_id=row.id,
|
2021-02-12 08:19:30 +01:00
|
|
|
server=server,
|
2023-11-19 19:45:19 +01:00
|
|
|
end_time=datetime.fromtimestamp(row.end_time, tz=timezone.utc),
|
2023-11-17 14:07:41 +01:00
|
|
|
subgroup=row.subgroup,
|
|
|
|
value=row.value,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
for row in realm_counts
|
2024-05-31 22:06:19 +02:00
|
|
|
if row.property not in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2022-07-01 03:12:07 +02:00
|
|
|
batch_create_table_data(server, RemoteRealmCount, remote_realm_counts)
|
2019-10-03 01:54:36 +02:00
|
|
|
|
2022-07-01 03:12:07 +02:00
|
|
|
remote_installation_counts = [
|
2021-02-12 08:19:30 +01:00
|
|
|
RemoteInstallationCount(
|
2023-11-17 14:07:41 +01:00
|
|
|
property=row.property,
|
|
|
|
remote_id=row.id,
|
2021-02-12 08:19:30 +01:00
|
|
|
server=server,
|
2023-11-19 19:45:19 +01:00
|
|
|
end_time=datetime.fromtimestamp(row.end_time, tz=timezone.utc),
|
2023-11-17 14:07:41 +01:00
|
|
|
subgroup=row.subgroup,
|
|
|
|
value=row.value,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
for row in installation_counts
|
2024-05-31 22:06:19 +02:00
|
|
|
if row.property not in LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2022-07-01 03:12:07 +02:00
|
|
|
batch_create_table_data(server, RemoteInstallationCount, remote_installation_counts)
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
if realmauditlog_rows is not None:
|
2023-12-12 10:47:06 +01:00
|
|
|
# Creating audit logs, syncing license ledger, and updating
|
|
|
|
# 'last_audit_log_update' needs to be an atomic operation.
|
|
|
|
# This lets us rely on 'last_audit_log_update' to know that
|
|
|
|
# RemoteRealmAuditLog and LicenseLedger are up-to-date.
|
|
|
|
with transaction.atomic():
|
|
|
|
# Important: Do not return early if we receive 0 rows; we must
|
|
|
|
# update last_audit_log_update even if there are no new rows,
|
|
|
|
# to help identify servers whose ability to connect to this
|
|
|
|
# endpoint is broken by a networking problem.
|
|
|
|
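# Track which remote realms had audit log rows in this submission, so we
# can sync their license ledgers after the rows are created below.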
remote_realms_set = set()
|
|
|
|
remote_realm_audit_logs = []
|
|
|
|
for row in realmauditlog_rows:
|
|
|
|
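# row.extra_data may be a JSON-encoded string or already a dict;
# normalize it to a dict before storing.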
extra_data = {}
|
|
|
|
if isinstance(row.extra_data, str):
|
|
|
|
try:
|
|
|
|
extra_data = orjson.loads(row.extra_data)
|
|
|
|
except orjson.JSONDecodeError:
|
|
|
|
raise JsonableError(_("Malformed audit log data"))
|
|
|
|
elif row.extra_data is not None:
|
|
|
|
assert isinstance(row.extra_data, dict)
|
|
|
|
extra_data = row.extra_data
|
|
|
|
remote_realms_set.add(realm_id_to_remote_realm.get(row.realm))
|
|
|
|
remote_realm_audit_logs.append(
|
|
|
|
RemoteRealmAuditLog(
|
|
|
|
remote_realm=realm_id_to_remote_realm.get(row.realm),
|
|
|
|
realm_id=row.realm,
|
|
|
|
remote_id=row.id,
|
|
|
|
server=server,
|
|
|
|
event_time=datetime.fromtimestamp(row.event_time, tz=timezone.utc),
|
|
|
|
backfilled=row.backfilled,
|
|
|
|
extra_data=extra_data,
|
|
|
|
event_type=row.event_type,
|
|
|
|
)
|
2023-06-03 06:03:43 +02:00
|
|
|
)
|
2023-12-12 10:47:06 +01:00
|
|
|
batch_create_table_data(server, RemoteRealmAuditLog, remote_realm_audit_logs)
|
|
|
|
|
|
|
|
# We need to update 'last_audit_log_update' before calling the
|
|
|
|
# 'sync_license_ledger_if_needed' method to avoid 'MissingDataError'
|
|
|
|
# due to 'has_stale_audit_log' being True.
|
2023-12-14 17:59:15 +01:00
|
|
|
server.last_audit_log_update = timezone_now()
|
|
|
|
server.save(update_fields=["last_audit_log_update"])
|
2023-12-12 10:47:06 +01:00
|
|
|
|
2023-12-12 10:24:03 +01:00
|
|
|
# Update LicenseLedger for remote_realm customers using logs in RemoteRealmAuditLog.
|
2023-12-12 10:47:06 +01:00
|
|
|
for remote_realm in remote_realms_set:
|
|
|
|
if remote_realm:
|
|
|
|
billing_session = RemoteRealmBillingSession(remote_realm=remote_realm)
|
|
|
|
billing_session.sync_license_ledger_if_needed()
|
2019-10-03 02:01:36 +02:00
|
|
|
|
2023-12-12 10:24:03 +01:00
|
|
|
# Update LicenseLedger for the remote_server customer using logs in RemoteRealmAuditLog.
|
|
|
|
remote_server_billing_session = RemoteServerBillingSession(remote_server=server)
|
|
|
|
remote_server_billing_session.sync_license_ledger_if_needed()
|
|
|
|
|
2023-12-14 17:28:53 +01:00
|
|
|
log_data = RequestNotes.get_notes(request).log_data
|
|
|
|
assert log_data is not None
|
|
|
|
can_push_values = set()
|
|
|
|
|
2024-01-05 02:57:24 +01:00
|
|
|
# Return details on exactly the set of remote realms the client told us about.
|
2023-12-14 17:28:53 +01:00
|
|
|
remote_realm_dict: Dict[str, RemoteRealmDictValue] = {}
|
2024-01-05 02:57:24 +01:00
|
|
|
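# Count the non-system-bot realms; used to decide how to summarize push
# status in the request log below.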
remote_human_realm_count = len(
|
|
|
|
[
|
|
|
|
remote_realm
|
|
|
|
for remote_realm in realm_id_to_remote_realm.values()
|
|
|
|
if not remote_realm.is_system_bot_realm
|
|
|
|
]
|
|
|
|
)
|
|
|
|
for remote_realm in realm_id_to_remote_realm.values():
|
2023-11-29 17:00:19 +01:00
|
|
|
uuid = str(remote_realm.uuid)
|
2023-12-11 09:32:44 +01:00
|
|
|
status = get_push_status_for_remote_request(server, remote_realm)
|
2023-12-17 18:28:27 +01:00
|
|
|
if remote_realm.is_system_bot_realm:
|
|
|
|
# Ignore system bot realms for computing log_data
|
|
|
|
pass
|
|
|
|
elif remote_human_realm_count == 1: # nocoverage
|
2023-12-14 17:28:53 +01:00
|
|
|
log_data["extra"] = f"[can_push={status.can_push}/{status.message}]"
|
2023-12-17 18:28:27 +01:00
|
|
|
else:
|
2023-12-14 17:28:53 +01:00
|
|
|
can_push_values.add(status.can_push)
|
2023-12-11 09:32:44 +01:00
|
|
|
remote_realm_dict[uuid] = {
|
|
|
|
"can_push": status.can_push,
|
|
|
|
"expected_end_timestamp": status.expected_end_timestamp,
|
|
|
|
}
|
2023-11-29 17:00:19 +01:00
|
|
|
|
2023-12-14 17:28:53 +01:00
|
|
|
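# Summarize the can_push results across all human realms for this
# server's request log line.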
if len(can_push_values) == 1:
|
|
|
|
can_push_value = next(iter(can_push_values))
|
|
|
|
log_data["extra"] = f"[can_push={can_push_value}/{remote_human_realm_count} realms]"
|
|
|
|
elif can_push_values == {True, False}:
|
|
|
|
log_data["extra"] = f"[can_push=mixed/{remote_human_realm_count} realms]"
|
|
|
|
elif remote_human_realm_count == 0:
|
|
|
|
log_data["extra"] = "[0 realms]"
|
2023-11-29 17:00:19 +01:00
|
|
|
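# Illustrative response shape (values are examples, not a fixed schema):
# {"realms": {"<realm uuid>": {"can_push": <bool>,
#                              "expected_end_timestamp": <unix timestamp or null>}}}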
return json_success(request, data={"realms": remote_realm_dict})
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-12-05 20:28:36 +01:00
|
|
|
def build_realm_id_to_remote_realm_dict(
|
|
|
|
server: RemoteZulipServer, realms: Optional[List[RealmDataForAnalytics]]
|
2024-01-05 02:57:24 +01:00
|
|
|
) -> Dict[int, RemoteRealm]:
|
2023-12-05 20:28:36 +01:00
|
|
|
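"""
Builds a mapping from the realm IDs reported by the remote server to the
corresponding RemoteRealm rows registered for that server on this bouncer.
"""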
if realms is None:
|
|
|
|
return {}
|
|
|
|
|
|
|
|
realm_uuids = [realm.uuid for realm in realms]
|
|
|
|
remote_realms = RemoteRealm.objects.filter(uuid__in=realm_uuids, server=server)
|
|
|
|
|
|
|
|
uuid_to_remote_realm_dict = {
|
|
|
|
str(remote_realm.uuid): remote_realm for remote_realm in remote_realms
|
|
|
|
}
|
|
|
|
return {realm.id: uuid_to_remote_realm_dict[str(realm.uuid)] for realm in realms}
|
|
|
|
|
|
|
|
|
|
|
|
def fix_remote_realm_foreign_keys(
|
|
|
|
server: RemoteZulipServer, realms: List[RealmDataForAnalytics]
|
|
|
|
) -> None:
|
|
|
|
"""
|
|
|
|
Finds the RemoteRealmCount and RemoteRealmAuditLog entries without .remote_realm
|
|
|
|
set and sets them based on the "realms" data received from the remote server,
|
|
|
|
if possible.
|
|
|
|
"""
|
|
|
|
|
|
|
|
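# Fast path: if every row for this server already has remote_realm set,
# there's nothing to fix.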
if (
|
|
|
|
not RemoteRealmCount.objects.filter(server=server, remote_realm=None).exists()
|
|
|
|
and not RemoteRealmAuditLog.objects.filter(server=server, remote_realm=None).exists()
|
|
|
|
):
|
|
|
|
return
|
|
|
|
|
|
|
|
realm_id_to_remote_realm = build_realm_id_to_remote_realm_dict(server, realms)
|
|
|
|
for realm_id in realm_id_to_remote_realm:
|
|
|
|
RemoteRealmCount.objects.filter(server=server, remote_realm=None, realm_id=realm_id).update(
|
|
|
|
remote_realm=realm_id_to_remote_realm[realm_id]
|
|
|
|
)
|
|
|
|
RemoteRealmAuditLog.objects.filter(
|
|
|
|
server=server, remote_realm=None, realm_id=realm_id
|
|
|
|
).update(remote_realm=realm_id_to_remote_realm[realm_id])
|
|
|
|
|
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
def get_last_id_from_server(server: RemoteZulipServer, model: Any) -> int:
|
2023-10-23 22:29:42 +02:00
|
|
|
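# Returns the largest remote_id among the given model's rows already
# received from this server, or 0 if none exist; this tells the remote
# server which rows it still needs to send.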
last_count = (
|
|
|
|
model.objects.filter(server=server)
|
|
|
|
# Rows with remote_id=None are managed by the bouncer service itself,
|
|
|
|
# and thus aren't meant for syncing and should be ignored here.
|
|
|
|
.exclude(remote_id=None)
|
|
|
|
.order_by("remote_id")
|
|
|
|
.only("remote_id")
|
|
|
|
.last()
|
|
|
|
)
|
2019-01-31 00:39:02 +01:00
|
|
|
if last_count is not None:
|
|
|
|
return last_count.remote_id
|
|
|
|
return 0
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
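# Reports the last row IDs received for each synced analytics table, so
# the remote server knows where to resume sending data.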
@has_request_variables
|
2022-08-01 23:51:10 +02:00
|
|
|
def remote_server_check_analytics(request: HttpRequest, server: RemoteZulipServer) -> HttpResponse:
|
2019-01-31 00:39:02 +01:00
|
|
|
result = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"last_realm_count_id": get_last_id_from_server(server, RemoteRealmCount),
|
|
|
|
"last_installation_count_id": get_last_id_from_server(server, RemoteInstallationCount),
|
|
|
|
"last_realmauditlog_id": get_last_id_from_server(server, RemoteRealmAuditLog),
|
2019-01-31 00:39:02 +01:00
|
|
|
}
|
2022-01-31 13:44:02 +01:00
|
|
|
return json_success(request, data=result)
|