2019-02-12 06:16:10 +01:00
|
|
|
import logging
|
2023-11-15 22:44:24 +01:00
|
|
|
from typing import Any, Dict, List, Mapping, Optional, Tuple, Union
|
2023-12-05 21:14:17 +01:00
|
|
|
from urllib.parse import urljoin
|
2019-01-31 00:44:02 +01:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2020-06-11 00:54:34 +02:00
|
|
|
import requests
|
2019-01-31 00:44:02 +01:00
|
|
|
from django.conf import settings
|
2023-12-09 00:09:01 +01:00
|
|
|
from django.db.models import QuerySet
|
2024-03-18 01:18:53 +01:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2023-12-09 00:09:01 +01:00
|
|
|
from pydantic import UUID4, BaseModel, ConfigDict, Field, Json, field_validator
|
2019-01-31 00:44:02 +01:00
|
|
|
|
2024-02-26 21:14:52 +01:00
|
|
|
from analytics.lib.counts import LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
|
2019-01-31 00:39:02 +01:00
|
|
|
from analytics.models import InstallationCount, RealmCount
|
2023-12-08 21:38:01 +01:00
|
|
|
from version import API_FEATURE_LEVEL, ZULIP_VERSION
|
2023-12-09 13:29:59 +01:00
|
|
|
from zerver.actions.realm_settings import (
|
|
|
|
do_set_push_notifications_enabled_end_timestamp,
|
|
|
|
do_set_realm_property,
|
|
|
|
)
|
2024-03-18 01:18:53 +01:00
|
|
|
from zerver.lib import redis_utils
|
2023-12-10 11:59:28 +01:00
|
|
|
from zerver.lib.exceptions import (
|
|
|
|
JsonableError,
|
|
|
|
MissingRemoteRealmError,
|
|
|
|
RemoteRealmServerMismatchError,
|
|
|
|
)
|
2021-05-07 03:54:25 +02:00
|
|
|
from zerver.lib.outgoing_http import OutgoingSession
|
2023-12-11 22:12:22 +01:00
|
|
|
from zerver.lib.queue import queue_event_on_commit
|
2024-03-18 01:18:53 +01:00
|
|
|
from zerver.lib.redis_utils import get_redis_client
|
2023-12-15 02:14:24 +01:00
|
|
|
from zerver.models import Realm, RealmAuditLog
|
|
|
|
from zerver.models.realms import OrgTypeEnum
|
2019-01-31 00:44:02 +01:00
|
|
|
|
2024-03-18 01:18:53 +01:00
|
|
|
# Module-level redis connection, used to record/check when push
# notifications were last observed working.
redis_client = get_redis_client()
|
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2021-05-07 03:54:25 +02:00
|
|
|
class PushBouncerSession(OutgoingSession):
    """Outgoing HTTP session used for all requests to the push notification
    bouncer service."""

    def __init__(self, timeout: int = 15) -> None:
        # The "push_bouncer" role lets outgoing-request instrumentation
        # identify this traffic; callers can raise the timeout for
        # expensive endpoints (see send_to_push_bouncer).
        super().__init__(role="push_bouncer", timeout=timeout)
|
2021-05-07 03:54:25 +02:00
|
|
|
|
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class PushNotificationBouncerError(Exception):
    # Unexpected error talking to the push bouncer (bad credentials or an
    # unexpected status code); raising this is intended to notify this
    # server's admins rather than be shown to end users.
    pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-02 19:46:11 +01:00
|
|
|
class PushNotificationBouncerRetryLaterError(JsonableError):
    # Transient failure reaching the bouncer (e.g. network errors);
    # callers are expected to retry the operation later.
    http_status_code = 502
|
2019-12-02 19:46:11 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-12-07 21:02:35 +01:00
|
|
|
class PushNotificationBouncerServerError(PushNotificationBouncerRetryLaterError):
    # The bouncer itself returned a 5xx; the failure is on the bouncer
    # operators' side, not this server's.
    http_status_code = 502
|
|
|
|
|
|
|
|
|
2023-12-09 00:09:01 +01:00
|
|
|
class RealmCountDataForAnalytics(BaseModel):
    """Serialized form of a RealmCount row, as uploaded to the push bouncer
    (see build_analytics_data)."""

    property: str
    # ID of the realm the statistic belongs to.
    realm: int
    id: int
    # Unix timestamp of the end of the counting period.
    end_time: float
    subgroup: Optional[str]
    value: int
|
|
|
|
|
|
|
|
|
|
|
|
class InstallationCountDataForAnalytics(BaseModel):
    """Serialized form of an InstallationCount row, as uploaded to the push
    bouncer (see build_analytics_data)."""

    property: str
    id: int
    # Unix timestamp of the end of the counting period.
    end_time: float
    subgroup: Optional[str]
    value: int
|
|
|
|
|
|
|
|
|
|
|
|
class RealmAuditLogDataForAnalytics(BaseModel):
    """Serialized form of a RealmAuditLog row, as uploaded to the push
    bouncer (see build_analytics_data)."""

    id: int
    # ID of the realm the audit log entry belongs to.
    realm: int
    # Unix timestamp of the event.
    event_time: float
    backfilled: bool
    # Either a JSON-encoded string or an already-parsed dict; the bouncer
    # view normalizes this into extra_data_json.
    extra_data: Optional[Union[str, Dict[str, Any]]]
    event_type: int
|
|
|
|
|
|
|
|
|
2023-11-17 14:07:41 +01:00
|
|
|
class RealmDataForAnalytics(BaseModel):
    """Per-realm metadata uploaded to the push bouncer; populated by
    get_realms_info_for_push_bouncer."""

    # Reject unknown fields rather than silently dropping them.
    model_config = ConfigDict(extra="forbid")

    id: int
    host: str
    url: str
    name: str = ""
    org_type: int = 0
    # Unix timestamp of the realm's creation.
    date_created: float
    deactivated: bool
    is_system_bot_realm: bool = False

    authentication_methods: Dict[str, bool] = Field(default_factory=dict)

    uuid: UUID4
    uuid_owner_secret: str

    @field_validator("org_type")
    @classmethod
    def check_is_allowed_value(cls, value: int) -> int:
        # Restrict org_type to the values declared in OrgTypeEnum.
        if value not in [org_type.value for org_type in OrgTypeEnum]:
            raise ValueError("Not a valid org_type value")

        return value
|
|
|
|
|
2023-11-17 14:07:41 +01:00
|
|
|
|
2023-12-09 00:09:01 +01:00
|
|
|
class AnalyticsRequest(BaseModel):
    """Shape of the payload POSTed to the bouncer's server/analytics
    endpoint. The Json[...] wrappers mean each field is transmitted as a
    JSON-encoded string."""

    realm_counts: Json[List[RealmCountDataForAnalytics]]
    installation_counts: Json[List[InstallationCountDataForAnalytics]]
    realmauditlog_rows: Optional[Json[List[RealmAuditLogDataForAnalytics]]] = None
    realms: Json[List[RealmDataForAnalytics]]
    # Zulip version and API feature level of the uploading server.
    version: Optional[Json[str]]
    api_feature_level: Optional[Json[int]]
|
2023-12-09 00:09:01 +01:00
|
|
|
|
|
|
|
|
2023-11-15 22:44:24 +01:00
|
|
|
class UserDataForRemoteBilling(BaseModel):
    """Identifying details of a user, for the remote billing system."""

    uuid: UUID4
    email: str
    full_name: str
|
|
|
|
|
|
|
|
|
2020-07-05 20:46:41 +02:00
|
|
|
def send_to_push_bouncer(
    method: str,
    endpoint: str,
    post_data: Union[bytes, Mapping[str, Union[str, int, None, bytes]]],
    extra_headers: Mapping[str, str] = {},
) -> Dict[str, object]:
    """While it does actually send the notice, this function has a lot of
    code and comments around error handling for the push notifications
    bouncer. There are several classes of failures, each with its own
    potential solution:

    * Network errors with requests.request. We raise an exception to signal
      it to the callers.

    * 500 errors from the push bouncer or other unexpected responses;
      we don't try to parse the response, but do make clear the cause.

    * 400 errors from the push bouncer. Here there are 2 categories:
      Our server failed to connect to the push bouncer (should throw)
      vs. client-side errors like an invalid token.

    Returns the parsed JSON body of a successful (2xx) bouncer response.
    """
    # These settings must all be configured before a request is attempted.
    assert settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
    assert settings.ZULIP_ORG_ID is not None
    assert settings.ZULIP_ORG_KEY is not None
    url = urljoin(settings.PUSH_NOTIFICATION_BOUNCER_URL, "/api/v1/remotes/" + endpoint)
    # The server authenticates to the bouncer with its org ID/key pair.
    api_auth = requests.auth.HTTPBasicAuth(settings.ZULIP_ORG_ID, settings.ZULIP_ORG_KEY)

    headers = {"User-agent": f"ZulipServer/{ZULIP_VERSION}"}
    headers.update(extra_headers)

    if endpoint == "server/analytics":
        # Uploading audit log and/or analytics data can require the
        # bouncer to do a significant chunk of work in a few
        # situations; since this occurs in background jobs, set a long
        # timeout.
        session = PushBouncerSession(timeout=90)
    else:
        session = PushBouncerSession()

    try:
        res = session.request(
            method,
            url,
            data=post_data,
            auth=api_auth,
            verify=True,
            headers=headers,
        )
    except (
        requests.exceptions.Timeout,
        requests.exceptions.SSLError,
        requests.exceptions.ConnectionError,
    ) as e:
        # Network-level failures are presumed transient; signal the
        # caller to retry later.
        raise PushNotificationBouncerRetryLaterError(
            f"{type(e).__name__} while trying to connect to push notification bouncer"
        )

    if res.status_code >= 500:
        # 5xx's should be resolved by the people who run the push
        # notification bouncer service, and they'll get an appropriate
        # error notification from the server. We raise an exception to signal
        # to the callers that the attempt failed and they can retry.
        error_msg = f"Received {res.status_code} from push notification bouncer"
        logging.warning(error_msg)
        raise PushNotificationBouncerServerError(error_msg)
    elif res.status_code >= 400:
        # If JSON parsing errors, just let that exception happen
        result_dict = orjson.loads(res.content)
        msg = result_dict["msg"]
        if "code" in result_dict and result_dict["code"] == "INVALID_ZULIP_SERVER":
            # Invalid Zulip server credentials should email this server's admins
            raise PushNotificationBouncerError(
                _("Push notifications bouncer error: {error}").format(error=msg)
            )
        elif "code" in result_dict and result_dict["code"] == "PUSH_NOTIFICATIONS_DISALLOWED":
            # Imported here to avoid an import cycle with
            # zerver.lib.push_notifications.
            from zerver.lib.push_notifications import PushNotificationsDisallowedByBouncerError

            raise PushNotificationsDisallowedByBouncerError(reason=msg)
        elif (
            endpoint == "push/test_notification"
            and "code" in result_dict
            and result_dict["code"] == "INVALID_REMOTE_PUSH_DEVICE_TOKEN"
        ):
            # This error from the notification debugging endpoint should just be directly
            # communicated to the device.
            # TODO: Extend this to use a more general mechanism when we add more such error responses.
            from zerver.lib.push_notifications import InvalidRemotePushDeviceTokenError

            raise InvalidRemotePushDeviceTokenError
        elif (
            endpoint == "server/billing"
            and "code" in result_dict
            and result_dict["code"] == "MISSING_REMOTE_REALM"
        ):  # nocoverage
            # The callers requesting this endpoint want the exception to propagate
            # so they can catch it.
            raise MissingRemoteRealmError
        elif (
            endpoint == "server/billing"
            and "code" in result_dict
            and result_dict["code"] == "REMOTE_REALM_SERVER_MISMATCH_ERROR"
        ):  # nocoverage
            # The callers requesting this endpoint want the exception to propagate
            # so they can catch it.
            raise RemoteRealmServerMismatchError
        else:
            # But most other errors coming from the push bouncer
            # server are client errors (e.g. never-registered token)
            # and should be handled as such.
            raise JsonableError(msg)
    elif res.status_code != 200:
        # Anything else is unexpected and likely suggests a bug in
        # this version of Zulip, so we throw an exception that will
        # email the server admins.
        raise PushNotificationBouncerError(
            f"Push notification bouncer returned unexpected status code {res.status_code}"
        )

    # If we don't throw an exception, it's a successful bounce!
    return orjson.loads(res.content)
|
2019-01-31 00:44:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-09-28 14:17:16 +02:00
|
|
|
def send_json_to_push_bouncer(
    method: str, endpoint: str, post_data: Mapping[str, object]
) -> Dict[str, object]:
    """Convenience wrapper around send_to_push_bouncer that serializes
    post_data as a JSON request body."""
    json_body = orjson.dumps(post_data)
    return send_to_push_bouncer(
        method,
        endpoint,
        json_body,
        extra_headers={"Content-type": "application/json"},
    )
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2019-10-03 02:01:36 +02:00
|
|
|
|
2024-03-18 01:18:53 +01:00
|
|
|
# Redis key suffix (combined with REDIS_KEY_PREFIX) under which we store
# the timestamp of the last time push notifications were observed working.
PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY = "push_notifications_recently_working_ts"
|
|
|
|
|
|
|
|
|
|
|
|
def record_push_notifications_recently_working() -> None:
    """Record in redis that push notifications were working as of right now."""
    full_key = redis_utils.REDIS_KEY_PREFIX + PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY
    current_timestamp = timezone_now().timestamp()
    # Expire after 24h; keeping the record around that long may be
    # useful for debugging.
    redis_client.set(full_key, str(current_timestamp), ex=60 * 60 * 24)
|
|
|
|
|
|
|
|
|
|
|
|
def check_push_notifications_recently_working() -> bool:
    """Return whether push notifications were recorded as working within
    the last hour (see record_push_notifications_recently_working)."""
    full_key = redis_utils.REDIS_KEY_PREFIX + PUSH_NOTIFICATIONS_RECENTLY_WORKING_REDIS_KEY
    recorded_timestamp = redis_client.get(full_key)
    if recorded_timestamp is None:
        # No record at all (or it expired): not recently working.
        return False

    age_seconds = timezone_now().timestamp() - float(recorded_timestamp)
    return age_seconds < 60 * 60
|
|
|
|
|
|
|
|
|
2023-12-14 17:33:35 +01:00
|
|
|
def maybe_mark_pushes_disabled(
    e: Union[JsonableError, orjson.JSONDecodeError], logger: logging.Logger
) -> None:
    """Handle an exception raised while talking to the push bouncer, and
    unless the failure looks transient, mark push notifications as
    disabled on every realm.

    :param e: the exception raised by send_to_push_bouncer (or JSON parsing).
    :param logger: logger to record the failure on.
    """
    if isinstance(e, PushNotificationBouncerServerError):
        # We don't fall through and deactivate the flag, since this is
        # not under the control of the caller.
        return

    if isinstance(e, JsonableError):
        logger.warning(e.msg)
    else:
        logger.exception("Exception communicating with %s", settings.PUSH_NOTIFICATION_BOUNCER_URL)

    # An exception was thrown talking to the push bouncer. There may
    # be certain transient failures that we could ignore here -
    # therefore we check whether push notifications were recently working
    # and if so, the error can be treated as transient.
    # Otherwise, the assumed explanation is that there is something wrong
    # either with our credentials being corrupted or our ability to reach the
    # bouncer service over the network, so we move to
    # reporting push notifications as likely not working.
    if check_push_notifications_recently_working():
        # Push notifications were recently observed working, so we
        # assume this is likely a transient failure.
        return

    # Flip the flag (and clear the end timestamp) on every realm that
    # currently has push notifications enabled.
    for realm in Realm.objects.filter(push_notifications_enabled=True):
        do_set_realm_property(realm, "push_notifications_enabled", False, acting_user=None)
        do_set_push_notifications_enabled_end_timestamp(realm, None, acting_user=None)
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def build_analytics_data(
    realm_count_query: QuerySet[RealmCount],
    installation_count_query: QuerySet[InstallationCount],
    realmauditlog_query: QuerySet[RealmAuditLog],
) -> Tuple[
    List[RealmCountDataForAnalytics],
    List[InstallationCountDataForAnalytics],
    List[RealmAuditLogDataForAnalytics],
]:
    """Convert queryset rows into the serializable shapes uploaded to the
    push bouncer, capped at a fixed batch size per table."""
    # We limit the batch size on the client side to avoid OOM kills timeouts, etc.
    MAX_CLIENT_BATCH_SIZE = 10000

    realm_counts = [
        RealmCountDataForAnalytics(
            property=record.property,
            realm=record.realm.id,
            id=record.id,
            end_time=record.end_time.timestamp(),
            subgroup=record.subgroup,
            value=record.value,
        )
        for record in realm_count_query.order_by("id")[:MAX_CLIENT_BATCH_SIZE]
    ]

    installation_counts = [
        InstallationCountDataForAnalytics(
            property=record.property,
            id=record.id,
            end_time=record.end_time.timestamp(),
            subgroup=record.subgroup,
            value=record.value,
        )
        for record in installation_count_query.order_by("id")[:MAX_CLIENT_BATCH_SIZE]
    ]

    audit_log_rows = [
        RealmAuditLogDataForAnalytics(
            id=record.id,
            realm=record.realm.id,
            event_time=record.event_time.timestamp(),
            backfilled=record.backfilled,
            # Note that we don't need to add extra_data_json here because
            # the view remote_server_post_analytics populates extra_data_json
            # from the provided extra_data.
            extra_data=record.extra_data,
            event_type=record.event_type,
        )
        for record in realmauditlog_query.order_by("id")[:MAX_CLIENT_BATCH_SIZE]
    ]

    return realm_counts, installation_counts, audit_log_rows
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-31 00:39:02 +01:00
|
|
|
|
2023-11-15 22:44:24 +01:00
|
|
|
def get_realms_info_for_push_bouncer(realm_id: Optional[int] = None) -> List[RealmDataForAnalytics]:
    """Build the RealmDataForAnalytics payload for every realm, ordered by
    id — or for just one realm if realm_id is given."""
    realm_query = Realm.objects.order_by("id")
    if realm_id is not None:  # nocoverage
        realm_query = realm_query.filter(id=realm_id)

    return [
        RealmDataForAnalytics(
            id=realm.id,
            uuid=realm.uuid,
            uuid_owner_secret=realm.uuid_owner_secret,
            host=realm.host,
            url=realm.url,
            deactivated=realm.deactivated,
            date_created=realm.date_created.timestamp(),
            org_type=realm.org_type,
            name=realm.name,
            authentication_methods=realm.authentication_methods_dict(),
            is_system_bot_realm=realm.string_id == settings.SYSTEM_BOT_REALM,
        )
        for realm in realm_query
    ]
|
2023-10-30 23:50:53 +01:00
|
|
|
|
|
|
|
|
2023-12-11 14:24:13 +01:00
|
|
|
def send_server_data_to_push_bouncer(consider_usage_statistics: bool = True) -> None:
    """Upload this server's analytics/audit-log data and realm info to the
    push bouncer, then apply the per-realm push-notification status the
    bouncer reports back.

    :param consider_usage_statistics: when False (or when
        SUBMIT_USAGE_STATISTICS is off), skip uploading the relatively
        expensive usage-statistics tables.
    """
    logger = logging.getLogger("zulip.analytics")
    # first, check what's latest
    try:
        result = send_to_push_bouncer("GET", "server/analytics/status", {})
    except (JsonableError, orjson.JSONDecodeError) as e:
        # Failure here may mean push notifications aren't working at all.
        maybe_mark_pushes_disabled(e, logger)
        return

    # Gather only entries with IDs greater than the last ID received by the push bouncer.
    # We don't re-send old data that's already been submitted.
    last_acked_realm_count_id = result["last_realm_count_id"]
    last_acked_installation_count_id = result["last_installation_count_id"]
    last_acked_realmauditlog_id = result["last_realmauditlog_id"]

    if settings.SUBMIT_USAGE_STATISTICS and consider_usage_statistics:
        # Only upload usage statistics, which is relatively expensive,
        # if called from the analytics cron job and the server has
        # uploading such statistics enabled.
        installation_count_query = InstallationCount.objects.filter(
            id__gt=last_acked_installation_count_id
        ).exclude(property__in=LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER)
        realm_count_query = RealmCount.objects.filter(id__gt=last_acked_realm_count_id).exclude(
            property__in=LOGGING_COUNT_STAT_PROPERTIES_NOT_SENT_TO_BOUNCER
        )
    else:
        installation_count_query = InstallationCount.objects.none()
        realm_count_query = RealmCount.objects.none()

    (realm_count_data, installation_count_data, realmauditlog_data) = build_analytics_data(
        realm_count_query=realm_count_query,
        installation_count_query=installation_count_query,
        realmauditlog_query=RealmAuditLog.objects.filter(
            event_type__in=RealmAuditLog.SYNCED_BILLING_EVENTS, id__gt=last_acked_realmauditlog_id
        ),
    )

    record_count = len(realm_count_data) + len(installation_count_data) + len(realmauditlog_data)
    request = AnalyticsRequest.model_construct(
        realm_counts=realm_count_data,
        installation_counts=installation_count_data,
        realmauditlog_rows=realmauditlog_data,
        realms=get_realms_info_for_push_bouncer(),
        version=ZULIP_VERSION,
        api_feature_level=API_FEATURE_LEVEL,
    )

    # Send the actual request, and process the response.
    try:
        response = send_to_push_bouncer(
            "POST", "server/analytics", request.model_dump(round_trip=True)
        )
    except (JsonableError, orjson.JSONDecodeError) as e:
        maybe_mark_pushes_disabled(e, logger)
        return

    assert isinstance(response["realms"], dict)  # for mypy
    realms = response["realms"]
    # Apply the bouncer's per-realm push notification status.
    for realm_uuid, data in realms.items():
        try:
            realm = Realm.objects.get(uuid=realm_uuid)
        except Realm.DoesNotExist:
            # This occurs if the installation's database was rebuilt
            # from scratch or a realm was hard-deleted from the local
            # database, after generating secrets and talking to the
            # bouncer.
            logger.warning("Received unexpected realm UUID from bouncer %s", realm_uuid)
            continue

        do_set_realm_property(
            realm, "push_notifications_enabled", data["can_push"], acting_user=None
        )
        do_set_push_notifications_enabled_end_timestamp(
            realm, data["expected_end_timestamp"], acting_user=None
        )

    logger.info("Reported %d records", record_count)
|
2023-12-01 14:52:44 +01:00
|
|
|
|
|
|
|
|
2023-12-11 04:59:00 +01:00
|
|
|
def maybe_enqueue_audit_log_upload(realm: Realm) -> None:
    """Schedule a bouncer sync for this realm, if a bouncer is configured.

    Update the push notifications service, either with the fact that
    the realm now exists or updates to its audit log of users.

    Done via a queue worker so that networking failures cannot have
    any impact on the success operation of the local server's
    ability to do operations that trigger these updates.
    """
    # Imported here to avoid an import cycle with zerver.lib.push_notifications.
    from zerver.lib.push_notifications import uses_notification_bouncer

    if not uses_notification_bouncer():
        return

    queue_event_on_commit(
        "deferred_work",
        {"type": "push_bouncer_update_for_realm", "realm_id": realm.id},
    )
|