import logging
from collections import Counter
from datetime import datetime, timezone
from typing import Any, Dict, List, Optional, Type, TypedDict, TypeVar, Union
from uuid import UUID

import orjson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator, validate_email
from django.db import IntegrityError, transaction
from django.db.models import Model
from django.http import HttpRequest, HttpResponse
from django.utils.crypto import constant_time_compare
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext as err_
from django.views.decorators.csrf import csrf_exempt
from pydantic import BaseModel, ConfigDict, Json

from analytics.lib.counts import (
    BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES,
    COUNT_STATS,
    REMOTE_INSTALLATION_COUNT_STATS,
    do_increment_logging_stat,
)
from corporate.lib.stripe import (
    RemoteRealmBillingSession,
    RemoteServerBillingSession,
    do_deactivate_remote_server,
    get_push_status_for_remote_request,
)
from corporate.models import CustomerPlan, get_current_plan_by_customer
from zerver.decorator import require_post
from zerver.lib.exceptions import (
    ErrorCode,
    JsonableError,
    RemoteRealmServerMismatchError,
    RemoteServerDeactivatedError,
)
from zerver.lib.push_notifications import (
    InvalidRemotePushDeviceTokenError,
    UserPushIdentityCompat,
    send_android_push_notification,
    send_apple_push_notification,
    send_test_push_notification_directly_to_devices,
)
from zerver.lib.remote_server import (
    InstallationCountDataForAnalytics,
    RealmAuditLogDataForAnalytics,
    RealmCountDataForAnalytics,
    RealmDataForAnalytics,
)
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.lib.typed_endpoint import JsonBodyPayload, typed_endpoint
from zerver.lib.types import RemoteRealmDictValue
from zerver.lib.validator import check_capped_string, check_int, check_string_fixed_length
from zerver.views.push_notifications import check_app_id, validate_token
from zilencer.auth import InvalidZulipServerKeyError
from zilencer.models import (
    RemoteInstallationCount,
    RemotePushDeviceToken,
    RemoteRealm,
    RemoteRealmAuditLog,
    RemoteRealmCount,
    RemoteZulipServer,
    RemoteZulipServerAuditLog,
)

logger = logging.getLogger(__name__)


def validate_uuid(uuid: str) -> None:
    try:
        uuid_object = UUID(uuid, version=4)
        # The UUID initialization under some circumstances will modify the uuid
        # string to create a valid UUIDv4, instead of raising a ValueError.
        # The submitted uuid needing to be modified means it's invalid, so
        # we need to check for that condition.
        if str(uuid_object) != uuid:
            raise ValidationError(err_("Invalid UUID"))
    except ValueError:
        raise ValidationError(err_("Invalid UUID"))
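
# Illustrative detail behind the str() round-trip above: UUID(..., version=4)
# coerces the version and variant bits instead of raising, so a non-v4 hex
# string "succeeds" but round-trips to a different string:
#
#     >>> str(UUID("12345678123456781234567812345678", version=4))
#     '12345678-1234-4678-9234-567812345678'
#
# Since the output differs from the input, validate_uuid() rejects it.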


def validate_bouncer_token_request(token: str, kind: int) -> None:
    if kind not in [RemotePushDeviceToken.APNS, RemotePushDeviceToken.GCM]:
        raise JsonableError(err_("Invalid token type"))
    validate_token(token, kind)


@csrf_exempt
@require_post
@has_request_variables
def deactivate_remote_server(
    request: HttpRequest,
    remote_server: RemoteZulipServer,
) -> HttpResponse:
    billing_session = RemoteServerBillingSession(remote_server)
    do_deactivate_remote_server(remote_server, billing_session)
    return json_success(request)


@csrf_exempt
@require_post
@has_request_variables
def register_remote_server(
    request: HttpRequest,
    zulip_org_id: str = REQ(str_validator=check_string_fixed_length(RemoteZulipServer.UUID_LENGTH)),
    zulip_org_key: str = REQ(
        str_validator=check_string_fixed_length(RemoteZulipServer.API_KEY_LENGTH)
    ),
    hostname: str = REQ(str_validator=check_capped_string(RemoteZulipServer.HOSTNAME_MAX_LENGTH)),
    contact_email: str = REQ(),
    new_org_key: Optional[str] = REQ(
        str_validator=check_string_fixed_length(RemoteZulipServer.API_KEY_LENGTH), default=None
    ),
) -> HttpResponse:
    # REQ validated the field lengths, but we still need to
    # validate the format of these fields.
    try:
        # TODO: Ideally we'd not abuse the URL validator this way
        url_validator = URLValidator()
        url_validator("http://" + hostname)
    except ValidationError:
        raise JsonableError(_("{hostname} is not a valid hostname").format(hostname=hostname))

    try:
        validate_email(contact_email)
    except ValidationError as e:
        raise JsonableError(e.message)

    try:
        validate_uuid(zulip_org_id)
    except ValidationError as e:
        raise JsonableError(e.message)

    with transaction.atomic():
        remote_server, created = RemoteZulipServer.objects.get_or_create(
            uuid=zulip_org_id,
            defaults={
                "hostname": hostname,
                "contact_email": contact_email,
                "api_key": zulip_org_key,
            },
        )
        if created:
            RemoteZulipServerAuditLog.objects.create(
                event_type=RemoteZulipServerAuditLog.REMOTE_SERVER_CREATED,
                server=remote_server,
                event_time=remote_server.last_updated,
            )
        else:
            if not constant_time_compare(remote_server.api_key, zulip_org_key):
                raise InvalidZulipServerKeyError(zulip_org_id)

            if remote_server.deactivated:
                raise RemoteServerDeactivatedError

            remote_server.hostname = hostname
            remote_server.contact_email = contact_email
            if new_org_key is not None:
                remote_server.api_key = new_org_key
            remote_server.save()

    return json_success(request, data={"created": created})
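
# A minimal client-side sketch of this endpoint (illustrative only; the URL
# path below is an assumption, since routing lives outside this module):
#
#     import requests
#     requests.post(
#         "https://push.example.com/api/v1/remotes/server/register",
#         data={
#             "zulip_org_id": "<uuid4 string>",
#             "zulip_org_key": "<api key>",
#             "hostname": "zulip.example.com",
#             "contact_email": "admin@example.com",
#         },
#     )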


@has_request_variables
def register_remote_push_device(
    request: HttpRequest,
    server: RemoteZulipServer,
    user_id: Optional[int] = REQ(json_validator=check_int, default=None),
    user_uuid: Optional[str] = REQ(default=None),
    realm_uuid: Optional[str] = REQ(default=None),
    token: str = REQ(),
    token_kind: int = REQ(json_validator=check_int),
    ios_app_id: Optional[str] = REQ(str_validator=check_app_id, default=None),
) -> HttpResponse:
    validate_bouncer_token_request(token, token_kind)
    if token_kind == RemotePushDeviceToken.APNS and ios_app_id is None:
        raise JsonableError(_("Missing ios_app_id"))

    if user_id is None and user_uuid is None:
        raise JsonableError(_("Missing user_id or user_uuid"))
    if user_id is not None and user_uuid is not None:
        kwargs: Dict[str, object] = {"user_uuid": user_uuid, "user_id": None}
        # Delete any pre-existing user_id registration for this user+device to
        # avoid duplication. Further down, a uuid registration will be created.
        RemotePushDeviceToken.objects.filter(
            server=server, token=token, kind=token_kind, user_id=user_id
        ).delete()
    else:
        # One of these is None, so these kwargs will lead to a proper registration
        # of either user_id or user_uuid type
        kwargs = {"user_id": user_id, "user_uuid": user_uuid}

    if realm_uuid is not None:
        # Servers 8.0+ also send the realm.uuid of the user.
        assert isinstance(
            user_uuid, str
        ), "Servers new enough to send realm_uuid should also send user_uuid"
        remote_realm = get_remote_realm_helper(request, server, realm_uuid, user_uuid)
        if remote_realm is not None:
            # We want to associate the RemotePushDeviceToken with the RemoteRealm.
            kwargs["remote_realm_id"] = remote_realm.id

    try:
        with transaction.atomic():
            RemotePushDeviceToken.objects.create(
                server=server,
                kind=token_kind,
                token=token,
                ios_app_id=ios_app_id,
                # last_updated is to be renamed to date_created.
                last_updated=timezone_now(),
                **kwargs,
            )
    except IntegrityError:
        pass

    return json_success(request)


@has_request_variables
def unregister_remote_push_device(
    request: HttpRequest,
    server: RemoteZulipServer,
    token: str = REQ(),
    token_kind: int = REQ(json_validator=check_int),
    user_id: Optional[int] = REQ(json_validator=check_int, default=None),
    user_uuid: Optional[str] = REQ(default=None),
) -> HttpResponse:
    validate_bouncer_token_request(token, token_kind)
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    (num_deleted, ignored) = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(), token=token, kind=token_kind, server=server
    ).delete()
    if num_deleted == 0:
        raise JsonableError(err_("Token does not exist"))

    return json_success(request)


@has_request_variables
def unregister_all_remote_push_devices(
    request: HttpRequest,
    server: RemoteZulipServer,
    user_id: Optional[int] = REQ(json_validator=check_int, default=None),
    user_uuid: Optional[str] = REQ(default=None),
) -> HttpResponse:
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    RemotePushDeviceToken.objects.filter(user_identity.filter_q(), server=server).delete()
    return json_success(request)


def delete_duplicate_registrations(
    registrations: List[RemotePushDeviceToken], server_id: int, user_id: int, user_uuid: str
) -> List[RemotePushDeviceToken]:
    """
    When migrating to support registration by UUID, we introduced a bug where
    duplicate registrations for the same device+user could be created - one by
    user_id and one by user_uuid. Given no good way of detecting these
    duplicates at the database level, we need to take advantage of the fact
    that when a remote server sends a push notification request to us, it
    sends both the user_id and user_uuid of the user.
    See https://github.com/zulip/zulip/issues/24969 for reference.

    This function, knowing the user_id and user_uuid of the user, can detect duplicates
    and delete the legacy user_id registration if appropriate.

    Return the list of registrations with the user_id-based duplicates removed.
    """

    # All registrations passed here should be of the same kind (apple vs android).
    assert len({registration.kind for registration in registrations}) == 1
    kind = registrations[0].kind

    tokens_counter = Counter(device.token for device in registrations)

    tokens_to_deduplicate = []
    for key in tokens_counter:
        if tokens_counter[key] <= 1:
            continue
        if tokens_counter[key] > 2:
            raise AssertionError(
                f"More than two registrations for token {key} for user id:{user_id} uuid:{user_uuid}, shouldn't be possible"
            )
        assert tokens_counter[key] == 2
        tokens_to_deduplicate.append(key)

    if not tokens_to_deduplicate:
        return registrations

    logger.info(
        "Deduplicating push registrations for server id:%s user id:%s uuid:%s and tokens:%s",
        server_id,
        user_id,
        user_uuid,
        sorted(tokens_to_deduplicate),
    )
    RemotePushDeviceToken.objects.filter(
        token__in=tokens_to_deduplicate, kind=kind, server_id=server_id, user_id=user_id
    ).delete()

    deduplicated_registrations_to_return = []
    for registration in registrations:
        if registration.token in tokens_to_deduplicate and registration.user_id is not None:
            # user_id registrations are the ones we deleted
            continue
        deduplicated_registrations_to_return.append(registration)

    return deduplicated_registrations_to_return
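
# A minimal sketch of the dedup rule above on plain data (DedupToken is a
# hypothetical stand-in for RemotePushDeviceToken):
#
#     from collections import Counter, namedtuple
#     DedupToken = namedtuple("DedupToken", ["token", "user_id"])
#     regs = [DedupToken("abc", 1), DedupToken("abc", None), DedupToken("xyz", None)]
#     dupes = [t for t, n in Counter(r.token for r in regs).items() if n == 2]
#     kept = [r for r in regs if not (r.token in dupes and r.user_id is not None)]
#     # kept drops DedupToken("abc", 1): the user_id-based duplicate is removed,
#     # mirroring the database delete performed above.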


class TestNotificationPayload(BaseModel):
    token: str
    token_kind: int
    user_id: int
    user_uuid: str
    base_payload: Dict[str, Any]

    model_config = ConfigDict(extra="forbid")


@typed_endpoint
def remote_server_send_test_notification(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    payload: JsonBodyPayload[TestNotificationPayload],
) -> HttpResponse:
    token = payload.token
    token_kind = payload.token_kind

    user_id = payload.user_id
    user_uuid = payload.user_uuid

    # The remote server only sends the base payload with basic user and server info,
    # and the actual format of the test notification is defined on the bouncer, as that
    # gives us the flexibility to modify it freely, without relying on other servers
    # upgrading.
    base_payload = payload.base_payload

    # This is a new endpoint, so it can assume it will only be used by newer
    # servers that will send both the user's UUID and ID.
    user_identity = UserPushIdentityCompat(user_id=user_id, user_uuid=user_uuid)

    try:
        device = RemotePushDeviceToken.objects.get(
            user_identity.filter_q(), token=token, kind=token_kind, server=server
        )
    except RemotePushDeviceToken.DoesNotExist:
        raise InvalidRemotePushDeviceTokenError

    send_test_push_notification_directly_to_devices(
        user_identity, [device], base_payload, remote=server
    )
    return json_success(request)


def get_remote_realm_helper(
    request: HttpRequest, server: RemoteZulipServer, realm_uuid: str, user_uuid: str
) -> Optional[RemoteRealm]:
    """
    Tries to fetch the RemoteRealm for the given realm_uuid and server. If it
    doesn't exist, returns None and logs what happened, using the request and
    user_uuid args to make the output more informative.
    """

    try:
        remote_realm = RemoteRealm.objects.get(uuid=realm_uuid)
    except RemoteRealm.DoesNotExist:
        logger.info(
            "%s: Received request for unknown realm %s, server %s, user %s",
            request.path,
            realm_uuid,
            server.id,
            user_uuid,
        )
        return None

    if remote_realm.server_id != server.id:
        logger.warning(
            "%s: Realm %s exists, but not registered to server %s",
            request.path,
            realm_uuid,
            server.id,
        )
        raise RemoteRealmServerMismatchError

    return remote_realm


class OldZulipServerError(JsonableError):
    code = ErrorCode.INVALID_ZULIP_SERVER

    def __init__(self, msg: str) -> None:
        self._msg: str = msg


@has_request_variables
def remote_server_notify_push(
    request: HttpRequest,
    server: RemoteZulipServer,
    payload: Dict[str, Any] = REQ(argument_type="body"),
) -> HttpResponse:
    user_id = payload.get("user_id")
    user_uuid = payload.get("user_uuid")
    user_identity = UserPushIdentityCompat(user_id, user_uuid)

    gcm_payload = payload["gcm_payload"]
    apns_payload = payload["apns_payload"]
    gcm_options = payload.get("gcm_options", {})

    realm_uuid = payload.get("realm_uuid")
    remote_realm = None
    if realm_uuid is not None:
        assert isinstance(
            user_uuid, str
        ), "Servers new enough to send realm_uuid should also send user_uuid"
        remote_realm = get_remote_realm_helper(request, server, realm_uuid, user_uuid)

    push_status = get_push_status_for_remote_request(server, remote_realm)
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = f"[can_push={push_status.can_push}/{push_status.message}]"
    if not push_status.can_push:
        if server.last_api_feature_level is None:
            raise OldZulipServerError(_("Your plan doesn't allow sending push notifications."))
        else:
            raise JsonableError(_("Your plan doesn't allow sending push notifications."))

    android_devices = list(
        RemotePushDeviceToken.objects.filter(
            user_identity.filter_q(),
            kind=RemotePushDeviceToken.GCM,
            server=server,
        )
    )
    if android_devices and user_id is not None and user_uuid is not None:
        android_devices = delete_duplicate_registrations(
            android_devices, server.id, user_id, user_uuid
        )

    apple_devices = list(
        RemotePushDeviceToken.objects.filter(
            user_identity.filter_q(),
            kind=RemotePushDeviceToken.APNS,
            server=server,
        )
    )
    if apple_devices and user_id is not None and user_uuid is not None:
        apple_devices = delete_duplicate_registrations(apple_devices, server.id, user_id, user_uuid)

    remote_queue_latency: Optional[str] = None
    sent_time: Optional[Union[float, int]] = gcm_payload.get(
        # TODO/compatibility: This could be a lot simpler if not for pre-5.0 Zulip servers
        # that had an older format. Future implementation:
        #     "time", apns_payload["custom"]["zulip"].get("time")
        "time",
        apns_payload.get("custom", {}).get("zulip", {}).get("time"),
    )
    if sent_time is not None:
        if isinstance(sent_time, int):
            # The 'time' field used to have only whole-integer granularity,
            # so if it's an int, we only report with whole-second
            # granularity.
            remote_queue_latency = str(int(timezone_now().timestamp()) - sent_time)
        else:
            remote_queue_latency = f"{timezone_now().timestamp() - sent_time:.3f}"
        logger.info(
            "Remote queuing latency for %s:%s is %s seconds",
            server.uuid,
            user_identity,
            remote_queue_latency,
        )

    logger.info(
        "Sending mobile push notifications for remote user %s:%s: %s via FCM devices, %s via APNs devices",
        server.uuid,
        user_identity,
        len(android_devices),
        len(apple_devices),
    )
    do_increment_logging_stat(
        server,
        REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_received::day"],
        None,
        timezone_now(),
        increment=len(android_devices) + len(apple_devices),
    )
    if remote_realm is not None:
        ensure_devices_set_remote_realm(
            android_devices=android_devices, apple_devices=apple_devices, remote_realm=remote_realm
        )
        do_increment_logging_stat(
            remote_realm,
            COUNT_STATS["mobile_pushes_received::day"],
            None,
            timezone_now(),
            increment=len(android_devices) + len(apple_devices),
        )

    # Truncate incoming pushes to 200, due to APNs maximum message
    # sizes; see handle_remove_push_notification for the version of
    # this for notifications generated natively on the server. We
    # apply this to remote-server pushes in case they predate that
    # commit.
    def truncate_payload(payload: Dict[str, Any]) -> Dict[str, Any]:
        MAX_MESSAGE_IDS = 200
        if payload and payload.get("event") == "remove" and payload.get("zulip_message_ids"):
            ids = [int(id) for id in payload["zulip_message_ids"].split(",")]
            truncated_ids = sorted(ids)[-MAX_MESSAGE_IDS:]
            payload["zulip_message_ids"] = ",".join(str(id) for id in truncated_ids)
        return payload
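
    # Illustrative run of the truncation above, with a hypothetical cap of 3
    # instead of MAX_MESSAGE_IDS = 200: a "remove" event carrying ids
    # "5,1,4,2,3" keeps only the three newest (largest) ids:
    #     {"event": "remove", "zulip_message_ids": "5,1,4,2,3"}
    #     # -> {"event": "remove", "zulip_message_ids": "3,4,5"}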

    # The full request must complete within 30s, the timeout set by
    # Zulip remote hosts for push notification requests (see
    # PushBouncerSession). The timeouts in the FCM and APNS codepaths
    # must be set accordingly; see send_android_push_notification and
    # send_apple_push_notification.

    gcm_payload = truncate_payload(gcm_payload)
    android_successfully_delivered = send_android_push_notification(
        user_identity, android_devices, gcm_payload, gcm_options, remote=server
    )

    if isinstance(apns_payload.get("custom"), dict) and isinstance(
        apns_payload["custom"].get("zulip"), dict
    ):
        apns_payload["custom"]["zulip"] = truncate_payload(apns_payload["custom"]["zulip"])
    apple_successfully_delivered = send_apple_push_notification(
        user_identity, apple_devices, apns_payload, remote=server
    )

    do_increment_logging_stat(
        server,
        REMOTE_INSTALLATION_COUNT_STATS["mobile_pushes_forwarded::day"],
        None,
        timezone_now(),
        increment=android_successfully_delivered + apple_successfully_delivered,
    )

    remote_realm_dict: Optional[RemoteRealmDictValue] = None
    if remote_realm is not None:
        do_increment_logging_stat(
            remote_realm,
            COUNT_STATS["mobile_pushes_forwarded::day"],
            None,
            timezone_now(),
            increment=android_successfully_delivered + apple_successfully_delivered,
        )
        remote_realm_dict = {
            "can_push": push_status.can_push,
            "expected_end_timestamp": push_status.expected_end_timestamp,
        }

    deleted_devices = get_deleted_devices(
        user_identity,
        server,
        android_devices=payload.get("android_devices", []),
        apple_devices=payload.get("apple_devices", []),
    )

    return json_success(
        request,
        data={
            "total_android_devices": len(android_devices),
            "total_apple_devices": len(apple_devices),
            "deleted_devices": deleted_devices,
            "realm": remote_realm_dict,
        },
    )


class DevicesToCleanUpDict(TypedDict):
    android_devices: List[str]
    apple_devices: List[str]


def get_deleted_devices(
    user_identity: UserPushIdentityCompat,
    server: RemoteZulipServer,
    android_devices: List[str],
    apple_devices: List[str],
) -> DevicesToCleanUpDict:
    """The remote server sends us a list of (tokens of) devices that it
    believes it has registered. However, some of them may have been
    deleted by us due to errors received in the low-level code
    responsible for directly sending push notifications.

    Query the database for the RemotePushDeviceTokens from these lists
    that we do indeed have and return a list of the ones that we don't
    have and thus presumably have already deleted - the remote server
    will want to delete them too.
    """

    android_devices_we_have = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(),
        token__in=android_devices,
        kind=RemotePushDeviceToken.GCM,
        server=server,
    ).values_list("token", flat=True)
    apple_devices_we_have = RemotePushDeviceToken.objects.filter(
        user_identity.filter_q(),
        token__in=apple_devices,
        kind=RemotePushDeviceToken.APNS,
        server=server,
    ).values_list("token", flat=True)

    return DevicesToCleanUpDict(
        android_devices=list(set(android_devices) - set(android_devices_we_have)),
        apple_devices=list(set(apple_devices) - set(apple_devices_we_have)),
    )
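
# A quick sketch of the set difference above: if the remote server believes it
# registered tokens {"a", "b", "c"} but we only have {"a", "c"}, the reply
# tells it to clean up "b":
#
#     sorted(set(["a", "b", "c"]) - set(["a", "c"]))  # -> ["b"]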


def validate_incoming_table_data(
    server: RemoteZulipServer,
    model: Any,
    rows: List[Dict[str, Any]],
    *,
    is_count_stat: bool,
) -> None:
    last_id = get_last_id_from_server(server, model)
    for row in rows:
        if is_count_stat and (
            row["property"] not in COUNT_STATS
            or row["property"] in BOUNCER_ONLY_REMOTE_COUNT_STAT_PROPERTIES
        ):
            raise JsonableError(_("Invalid property {property}").format(property=row["property"]))

        if not is_count_stat and row["event_type"] not in RemoteRealmAuditLog.SYNCED_BILLING_EVENTS:
            raise JsonableError(_("Invalid event type."))

        if row.get("id") is None:
            # This shouldn't be possible, as submitting data like this should be
            # prevented by our param validators.
            raise AssertionError(f"Missing id field in row {row}")
        if row["id"] <= last_id:
            raise JsonableError(_("Data is out of order."))
        last_id = row["id"]


ModelT = TypeVar("ModelT", bound=Model)


def batch_create_table_data(
    server: RemoteZulipServer,
    model: Type[ModelT],
    row_objects: List[ModelT],
) -> None:
    # We ignore previously-existing data, in case it was truncated and
    # re-created on the remote server. `ignore_conflicts=True`
    # cannot return the ids, or count thereof, of the new inserts
    # (see https://code.djangoproject.com/ticket/30138), so we rely on
    # having a lock to accurately count them before and after. This
    # query is also well-indexed.
    before_count = model._default_manager.filter(server=server).count()
    model._default_manager.bulk_create(row_objects, batch_size=1000, ignore_conflicts=True)
    after_count = model._default_manager.filter(server=server).count()
    inserted_count = after_count - before_count
    if inserted_count < len(row_objects):
        logging.warning(
            "Dropped %d duplicated rows while saving %d rows of %s for server %s/%s",
            len(row_objects) - inserted_count,
            len(row_objects),
            model._meta.db_table,
            server.hostname,
            server.uuid,
        )
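
# Sketch of the counting trick above (illustrative numbers): bulk_create with
# ignore_conflicts=True silently skips duplicates, so the insert count is
# recovered by diffing row counts while the caller holds the server lock:
#
#     before = 10   # rows already stored for this server
#     # bulk_create() submits 4 rows, 1 of which already exists
#     after = 13
#     inserted = after - before   # 3; the 1 dropped duplicate gets logged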


def ensure_devices_set_remote_realm(
    android_devices: List[RemotePushDeviceToken],
    apple_devices: List[RemotePushDeviceToken],
    remote_realm: RemoteRealm,
) -> None:
    devices_to_update = []
    for device in android_devices + apple_devices:
        if device.remote_realm_id is None:
            device.remote_realm = remote_realm
            devices_to_update.append(device)

    RemotePushDeviceToken.objects.bulk_update(devices_to_update, ["remote_realm"])


def update_remote_realm_data_for_server(
    server: RemoteZulipServer, server_realms_info: List[RealmDataForAnalytics]
) -> None:
    uuids = [realm.uuid for realm in server_realms_info]
    all_registered_remote_realms_for_server = list(RemoteRealm.objects.filter(server=server))
    already_registered_remote_realms = [
        remote_realm
        for remote_realm in all_registered_remote_realms_for_server
        if remote_realm.uuid in uuids
    ]
    # RemoteRealm registrations that we have for this server, but aren't
    # present in the data sent to us. We assume this to mean the server
    # must have deleted those realms from the database.
    remote_realms_missing_from_server_data = [
        remote_realm
        for remote_realm in all_registered_remote_realms_for_server
        if remote_realm.uuid not in uuids
    ]

    already_registered_uuids = {
        remote_realm.uuid for remote_realm in already_registered_remote_realms
    }

    new_remote_realms = [
        RemoteRealm(
            server=server,
            uuid=realm.uuid,
            uuid_owner_secret=realm.uuid_owner_secret,
            host=realm.host,
            realm_deactivated=realm.deactivated,
            realm_date_created=timestamp_to_datetime(realm.date_created),
            org_type=realm.org_type,
            name=realm.name,
            authentication_methods=realm.authentication_methods,
            is_system_bot_realm=realm.is_system_bot_realm,
        )
        for realm in server_realms_info
        if realm.uuid not in already_registered_uuids
    ]

    try:
        RemoteRealm.objects.bulk_create(new_remote_realms)
    except IntegrityError:
        raise JsonableError(_("Duplicate registration detected."))

    uuid_to_realm_dict = {str(realm.uuid): realm for realm in server_realms_info}
    remote_realms_to_update = []
    remote_realm_audit_logs = []
    now = timezone_now()

    # Update RemoteRealm entries whose corresponding realm's info has changed
    # (for the attributes that make sense to sync like this).
    for remote_realm in already_registered_remote_realms:
        # TODO: We'll also want to check if .realm_locally_deleted is True, and if so,
        # toggle it off (and potentially restore registration_deactivated=True too),
        # since the server is now sending us data for this realm again.

        modified = False
        realm = uuid_to_realm_dict[str(remote_realm.uuid)]
        for remote_realm_attr, realm_dict_key in [
            ("host", "host"),
            ("org_type", "org_type"),
            ("name", "name"),
            ("authentication_methods", "authentication_methods"),
            ("realm_deactivated", "deactivated"),
            ("is_system_bot_realm", "is_system_bot_realm"),
        ]:
            old_value = getattr(remote_realm, remote_realm_attr)
            new_value = getattr(realm, realm_dict_key)

            if old_value == new_value:
                continue

            setattr(remote_realm, remote_realm_attr, new_value)
            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_id=None,
                    remote_realm=remote_realm,
                    realm_id=realm.id,
                    event_type=RemoteRealmAuditLog.REMOTE_REALM_VALUE_UPDATED,
                    event_time=now,
                    extra_data={
                        "attr_name": remote_realm_attr,
                        "old_value": old_value,
                        "new_value": new_value,
                    },
                )
            )
            modified = True

        if modified:
            remote_realms_to_update.append(remote_realm)

    RemoteRealm.objects.bulk_update(
        remote_realms_to_update,
        [
            "host",
            "realm_deactivated",
            "name",
            "authentication_methods",
            "org_type",
            "is_system_bot_realm",
        ],
    )
    RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)

    remote_realms_to_update = []
    remote_realm_audit_logs = []
    for remote_realm in remote_realms_missing_from_server_data:
        if not remote_realm.realm_locally_deleted:
            # Otherwise we already knew about this, so nothing to do.
            remote_realm.realm_locally_deleted = True
            remote_realm.registration_deactivated = True

            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_id=None,
                    remote_realm=remote_realm,
                    realm_id=None,
                    event_type=RemoteRealmAuditLog.REMOTE_REALM_LOCALLY_DELETED,
                    event_time=now,
                )
            )
            remote_realms_to_update.append(remote_realm)

    RemoteRealm.objects.bulk_update(
        remote_realms_to_update,
        ["realm_locally_deleted", "registration_deactivated"],
    )
    RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)


def get_human_user_realm_uuids(realms: List[RealmDataForAnalytics]) -> List[UUID]:  # nocoverage
    billable_realm_uuids = []
    for realm in realms:
        # TODO: Remove the `zulipinternal` string_id check once no server is on 8.0-beta.
        if (
            realm.is_system_bot_realm
            or realm.deactivated
            or realm.host.startswith("zulipinternal.")
            or (settings.DEVELOPMENT and realm.host.startswith("analytics."))
        ):
            continue
        billable_realm_uuids.append(realm.uuid)

    return billable_realm_uuids


@transaction.atomic
def handle_customer_migration_from_server_to_realms(
    server: RemoteZulipServer, realms: List[RealmDataForAnalytics]
) -> None:
    server_billing_session = RemoteServerBillingSession(server)
    server_customer = server_billing_session.get_customer()
    if server_customer is None:
        return

    server_plan = get_current_plan_by_customer(server_customer)
    if server_plan is None:
        # If the server has no current plan, either because it never
        # had one or because a previous legacy plan was migrated to
        # the RemoteRealm object, there's nothing to potentially
        # migrate.
        return

    realm_uuids = get_human_user_realm_uuids(realms)
    if not realm_uuids:
        return

    event_time = timezone_now()
    remote_realm_audit_logs = []

    if (
        server_plan.tier == CustomerPlan.TIER_SELF_HOSTED_LEGACY
        and server_plan.status == CustomerPlan.ACTIVE
    ):
        assert server.plan_type == RemoteZulipServer.PLAN_TYPE_SELF_MANAGED_LEGACY
        assert server_plan.end_date is not None
        remote_realms = RemoteRealm.objects.filter(
            uuid__in=realm_uuids, server=server, plan_type=RemoteRealm.PLAN_TYPE_SELF_MANAGED
        )

        # Verify that all the realms are on the self-managed plan.
        assert remote_realms.count() == len(realm_uuids)

        # End the existing plan for the server.
        server_plan.status = CustomerPlan.ENDED
        server_plan.save(update_fields=["status"])

        server.plan_type = RemoteZulipServer.PLAN_TYPE_SELF_MANAGED
        server.save(update_fields=["plan_type"])

        # Create a new legacy plan for each remote realm.
        for remote_realm in remote_realms:
            RemoteRealmBillingSession(remote_realm).migrate_customer_to_legacy_plan(
                server_plan.billing_cycle_anchor, server_plan.end_date
            )
            remote_realm_audit_logs.append(
                RemoteRealmAuditLog(
                    server=server,
                    remote_realm=remote_realm,
                    event_type=RemoteRealmAuditLog.REMOTE_PLAN_TRANSFERRED_SERVER_TO_REALM,
                    event_time=event_time,
                    # No extra_data since there was no real change in any RemoteRealm attribute.
                )
            )

    elif len(realm_uuids) == 1:
        # Here, we have exactly one non-system-bot realm, and some
        # sort of plan on the server; move it to the realm.
        remote_realm = RemoteRealm.objects.get(
            uuid=realm_uuids[0], plan_type=RemoteRealm.PLAN_TYPE_SELF_MANAGED
        )
        # Migrate the customer from the server to the remote realm, since
        # there is only one realm.
        server_customer.remote_realm = remote_realm
        server_customer.remote_server = None
        server_customer.save(update_fields=["remote_realm", "remote_server"])
        # TODO: Might be better to call do_change_plan_type here.
        remote_realm.plan_type = server.plan_type
        remote_realm.save(update_fields=["plan_type"])
        server.plan_type = RemoteZulipServer.PLAN_TYPE_SELF_MANAGED
        server.save(update_fields=["plan_type"])
        remote_realm_audit_logs.append(
            RemoteRealmAuditLog(
                server=server,
                remote_realm=remote_realm,
                event_type=RemoteRealmAuditLog.REMOTE_PLAN_TRANSFERRED_SERVER_TO_REALM,
                event_time=event_time,
                extra_data={
                    "attr_name": "plan_type",
                    "old_value": RemoteRealm.PLAN_TYPE_SELF_MANAGED,
                    "new_value": remote_realm.plan_type,
                },
            )
        )

    RemoteRealmAuditLog.objects.bulk_create(remote_realm_audit_logs)


@typed_endpoint
@transaction.atomic
def remote_server_post_analytics(
    request: HttpRequest,
    server: RemoteZulipServer,
    *,
    realm_counts: Json[List[RealmCountDataForAnalytics]],
    installation_counts: Json[List[InstallationCountDataForAnalytics]],
    realmauditlog_rows: Optional[Json[List[RealmAuditLogDataForAnalytics]]] = None,
    realms: Optional[Json[List[RealmDataForAnalytics]]] = None,
    version: Optional[Json[str]] = None,
    api_feature_level: Optional[Json[int]] = None,
) -> HttpResponse:
    # Lock the server, preventing this from racing with other
    # duplicate submissions of the data
    server = RemoteZulipServer.objects.select_for_update().get(id=server.id)

    remote_server_version_updated = False
    if version is not None:
        version = version[0 : RemoteZulipServer.VERSION_MAX_LENGTH]
    if version != server.last_version or api_feature_level != server.last_api_feature_level:
        server.last_version = version
        server.last_api_feature_level = api_feature_level
        server.save(update_fields=["last_version", "last_api_feature_level"])
        remote_server_version_updated = True

    validate_incoming_table_data(
        server,
        RemoteRealmCount,
        [dict(count) for count in realm_counts],
        is_count_stat=True,
    )
    validate_incoming_table_data(
        server,
        RemoteInstallationCount,
        [dict(count) for count in installation_counts],
        is_count_stat=True,
    )

    if realmauditlog_rows is not None:
        validate_incoming_table_data(
            server,
            RemoteRealmAuditLog,
            [dict(row) for row in realmauditlog_rows],
            is_count_stat=False,
        )

    if realms is not None:
        update_remote_realm_data_for_server(server, realms)
        if remote_server_version_updated:
            fix_remote_realm_foreign_keys(server, realms)

        try:
            handle_customer_migration_from_server_to_realms(server, realms)
        except Exception:  # nocoverage
            logger.exception(
                "%s: Failed to migrate customer from server (id: %s) to realms",
                request.path,
                server.id,
                stack_info=True,
            )
            raise JsonableError(
                _(
                    "Failed to migrate customer from server to realms. Please contact support for assistance."
                )
            )

    realm_id_to_remote_realm = build_realm_id_to_remote_realm_dict(server, realms)

    remote_realm_counts = [
        RemoteRealmCount(
            remote_realm=realm_id_to_remote_realm.get(row.realm),
            property=row.property,
            realm_id=row.realm,
            remote_id=row.id,
            server=server,
            end_time=datetime.fromtimestamp(row.end_time, tz=timezone.utc),
            subgroup=row.subgroup,
            value=row.value,
        )
        for row in realm_counts
    ]
    batch_create_table_data(server, RemoteRealmCount, remote_realm_counts)

    remote_installation_counts = [
        RemoteInstallationCount(
            property=row.property,
            remote_id=row.id,
            server=server,
            end_time=datetime.fromtimestamp(row.end_time, tz=timezone.utc),
            subgroup=row.subgroup,
            value=row.value,
        )
        for row in installation_counts
    ]
    batch_create_table_data(server, RemoteInstallationCount, remote_installation_counts)

    if realmauditlog_rows is not None:
        # Creating audit logs, syncing the license ledger, and updating
        # 'last_audit_log_update' needs to be an atomic operation.
        # This lets us rely on 'last_audit_log_update' to assume
        # RemoteRealmAuditLog and LicenseLedger are up-to-date.
        with transaction.atomic():
            # Important: Do not return early if we receive 0 rows; we must
            # update last_audit_log_update even if there are no new rows,
            # to help identify servers whose ability to connect to this
            # endpoint is broken by a networking problem.
            remote_realms_set = set()
            remote_realm_audit_logs = []
            for row in realmauditlog_rows:
                extra_data = {}
                if isinstance(row.extra_data, str):
                    try:
                        extra_data = orjson.loads(row.extra_data)
                    except orjson.JSONDecodeError:
                        raise JsonableError(_("Malformed audit log data"))
                elif row.extra_data is not None:
                    assert isinstance(row.extra_data, dict)
                    extra_data = row.extra_data
                remote_realms_set.add(realm_id_to_remote_realm.get(row.realm))
                remote_realm_audit_logs.append(
                    RemoteRealmAuditLog(
                        remote_realm=realm_id_to_remote_realm.get(row.realm),
                        realm_id=row.realm,
                        remote_id=row.id,
                        server=server,
                        event_time=datetime.fromtimestamp(row.event_time, tz=timezone.utc),
                        backfilled=row.backfilled,
                        extra_data=extra_data,
                        event_type=row.event_type,
                    )
                )
            batch_create_table_data(server, RemoteRealmAuditLog, remote_realm_audit_logs)

            # We need to update 'last_audit_log_update' before calling the
            # 'sync_license_ledger_if_needed' method to avoid 'MissingDataError'
            # due to 'has_stale_audit_log' being True.
            server.last_audit_log_update = timezone_now()
            server.save(update_fields=["last_audit_log_update"])

            # Update the LicenseLedger for remote_realm customers using logs
            # in RemoteRealmAuditLog.
            for remote_realm in remote_realms_set:
                if remote_realm:
                    billing_session = RemoteRealmBillingSession(remote_realm=remote_realm)
                    billing_session.sync_license_ledger_if_needed()

            # Update the LicenseLedger for the remote_server customer using
            # logs in RemoteRealmAuditLog.
            remote_server_billing_session = RemoteServerBillingSession(remote_server=server)
            remote_server_billing_session.sync_license_ledger_if_needed()

    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    can_push_values = set()

    remote_realms = RemoteRealm.objects.filter(server=server, realm_locally_deleted=False)
    remote_realm_dict: Dict[str, RemoteRealmDictValue] = {}
    remote_human_realm_count = remote_realms.filter(is_system_bot_realm=False).count()
    for remote_realm in remote_realms:
        uuid = str(remote_realm.uuid)
        status = get_push_status_for_remote_request(server, remote_realm)
        if remote_realm.is_system_bot_realm:
            # Ignore system bot realms for computing log_data
            pass
        elif remote_human_realm_count == 1:  # nocoverage
            log_data["extra"] = f"[can_push={status.can_push}/{status.message}]"
        else:
            can_push_values.add(status.can_push)
        remote_realm_dict[uuid] = {
            "can_push": status.can_push,
            "expected_end_timestamp": status.expected_end_timestamp,
        }

    if len(can_push_values) == 1:
        can_push_value = next(iter(can_push_values))
        log_data["extra"] = f"[can_push={can_push_value}/{remote_human_realm_count} realms]"
    elif can_push_values == {True, False}:
        log_data["extra"] = f"[can_push=mixed/{remote_human_realm_count} realms]"
    elif remote_human_realm_count == 0:
        log_data["extra"] = "[0 realms]"
    return json_success(request, data={"realms": remote_realm_dict})
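
# Illustrative shape of a successful response from the endpoint above (uuid
# and values hypothetical):
#
#     {
#         "result": "success",
#         "msg": "",
#         "realms": {
#             "<realm uuid>": {"can_push": True, "expected_end_timestamp": None},
#         },
#     }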


def build_realm_id_to_remote_realm_dict(
    server: RemoteZulipServer, realms: Optional[List[RealmDataForAnalytics]]
) -> Dict[int, Optional[RemoteRealm]]:
    if realms is None:
        return {}

    realm_uuids = [realm.uuid for realm in realms]
    remote_realms = RemoteRealm.objects.filter(uuid__in=realm_uuids, server=server)

    uuid_to_remote_realm_dict = {
        str(remote_realm.uuid): remote_realm for remote_realm in remote_realms
    }
    return {realm.id: uuid_to_remote_realm_dict[str(realm.uuid)] for realm in realms}
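
# Illustrative result shape (values hypothetical): for a server whose payload
# includes a realm with remote-side id 7 and a uuid we have registered, the
# mapping keys are the *remote server's* realm ids:
#
#     build_realm_id_to_remote_realm_dict(server, realms)
#     # -> {7: <RemoteRealm object for that uuid>}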


def fix_remote_realm_foreign_keys(
    server: RemoteZulipServer, realms: List[RealmDataForAnalytics]
) -> None:
    """
    Finds the RemoteRealmCount and RemoteRealmAuditLog entries without .remote_realm
    set and sets it based on the "realms" data received from the remote server,
    if possible.
    """

    if (
        not RemoteRealmCount.objects.filter(server=server, remote_realm=None).exists()
        and not RemoteRealmAuditLog.objects.filter(server=server, remote_realm=None).exists()
    ):
        return

    realm_id_to_remote_realm = build_realm_id_to_remote_realm_dict(server, realms)
    for realm_id in realm_id_to_remote_realm:
        RemoteRealmCount.objects.filter(server=server, remote_realm=None, realm_id=realm_id).update(
            remote_realm=realm_id_to_remote_realm[realm_id]
        )
        RemoteRealmAuditLog.objects.filter(
            server=server, remote_realm=None, realm_id=realm_id
        ).update(remote_realm=realm_id_to_remote_realm[realm_id])


def get_last_id_from_server(server: RemoteZulipServer, model: Any) -> int:
    last_count = (
        model.objects.filter(server=server)
        # Rows with remote_id=None are managed by the bouncer service itself,
        # and thus aren't meant for syncing and should be ignored here.
        .exclude(remote_id=None)
        .order_by("remote_id")
        .only("remote_id")
        .last()
    )
    if last_count is not None:
        return last_count.remote_id
    return 0


@has_request_variables
def remote_server_check_analytics(request: HttpRequest, server: RemoteZulipServer) -> HttpResponse:
    result = {
        "last_realm_count_id": get_last_id_from_server(server, RemoteRealmCount),
        "last_installation_count_id": get_last_id_from_server(server, RemoteInstallationCount),
        "last_realmauditlog_id": get_last_id_from_server(server, RemoteRealmAuditLog),
    }
    return json_success(request, data=result)