from collections.abc import Iterator
from contextlib import AbstractContextManager, ExitStack, contextmanager
from datetime import datetime, timedelta, timezone
from typing import Any
from unittest import mock

import time_machine
from django.apps import apps
from django.db import models
from django.db.models import Sum
from django.test import override_settings
from django.utils.timezone import now as timezone_now
from psycopg2.sql import SQL, Literal
from typing_extensions import override

from analytics.lib.counts import (
    COUNT_STATS,
    CountStat,
    DependentCountStat,
    LoggingCountStat,
    do_aggregate_to_summary_table,
    do_drop_all_analytics_tables,
    do_drop_single_stat,
    do_fill_count_stat_at_hour,
    do_increment_logging_stat,
    get_count_stats,
    process_count_stat,
    sql_data_collector,
)
from analytics.models import (
    BaseCount,
    FillState,
    InstallationCount,
    RealmCount,
    StreamCount,
    UserCount,
    installation_epoch,
)
from zerver.actions.create_realm import do_create_realm
from zerver.actions.create_user import (
    do_activate_mirror_dummy_user,
    do_create_user,
    do_reactivate_user,
)
from zerver.actions.invites import do_invite_users, do_revoke_user_invite, do_send_user_invite_email
from zerver.actions.message_flags import (
    do_mark_all_as_read,
    do_mark_stream_messages_as_read,
    do_update_message_flags,
)
from zerver.actions.user_activity import update_user_activity_interval
from zerver.actions.users import do_deactivate_user
from zerver.lib.create_user import create_user
from zerver.lib.exceptions import InvitationError
from zerver.lib.push_notifications import (
    get_message_payload_apns,
    get_message_payload_gcm,
    hex_to_b64,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import TimeZoneNotUTCError, ceiling_to_day, floor_to_day
from zerver.lib.topic import DB_TOPIC_NAME
from zerver.lib.user_counts import realm_user_count_by_role
from zerver.lib.utils import assert_is_not_none
from zerver.models import (
    Client,
    DirectMessageGroup,
    Message,
    NamedUserGroup,
    PreregistrationUser,
    Realm,
    RealmAuditLog,
    Recipient,
    Stream,
    UserActivityInterval,
    UserProfile,
)
from zerver.models.clients import get_client
from zerver.models.groups import SystemGroups
from zerver.models.messages import Attachment
from zerver.models.scheduled_jobs import NotificationTriggers
from zerver.models.users import get_user, is_cross_realm_bot_email
from zilencer.models import (
    RemoteInstallationCount,
    RemotePushDeviceToken,
    RemoteRealm,
    RemoteRealmCount,
    RemoteZulipServer,
)
from zilencer.views import get_last_id_from_server


class AnalyticsTestCase(ZulipTestCase):
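    """Shared fixtures and helpers for the analytics count tests.

    Provides a fixed TIME_ZERO reference point, lightweight create_* helpers
    for users, streams, direct message groups, messages, and attachments, and
    assertions (assert_table_count, assertTableState) over the *Count tables.
    """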
    MINUTE = timedelta(seconds=60)
    HOUR = MINUTE * 60
    DAY = HOUR * 24
    TIME_ZERO = datetime(1988, 3, 14, tzinfo=timezone.utc)
    TIME_LAST_HOUR = TIME_ZERO - HOUR

    @override
    def setUp(self) -> None:
        super().setUp()
        self.default_realm = do_create_realm(
            string_id="realmtest", name="Realm Test", date_created=self.TIME_ZERO - 2 * self.DAY
        )
        self.administrators_user_group = NamedUserGroup.objects.get(
            name=SystemGroups.ADMINISTRATORS,
            realm=self.default_realm,
            is_system_group=True,
        )

        # used to generate unique names in self.create_*
        self.name_counter = 100
        # used as defaults in self.assert_table_count
        self.current_property: str | None = None

        # Delete RemoteRealm registrations to have a clean slate - the relevant
        # tests want to construct this from scratch.
        RemoteRealm.objects.all().delete()

    # Lightweight creation of users, streams, and messages
    def create_user(self, skip_auditlog: bool = False, **kwargs: Any) -> UserProfile:
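        # Creates a realm-administrator user with a unique email, joined at
        # TIME_LAST_HOUR by default; records a USER_CREATED RealmAuditLog entry
        # unless skip_auditlog=True. Field defaults can be overridden via kwargs.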
        self.name_counter += 1
        defaults = {
            "email": f"user{self.name_counter}@domain.tld",
            "date_joined": self.TIME_LAST_HOUR,
            "full_name": "full_name",
            "is_active": True,
            "is_bot": False,
            "realm": self.default_realm,
        }
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        kwargs["delivery_email"] = kwargs["email"]
        with time_machine.travel(kwargs["date_joined"], tick=False):
            pass_kwargs: dict[str, Any] = {}
            if kwargs["is_bot"]:
                pass_kwargs["bot_type"] = UserProfile.DEFAULT_BOT
                pass_kwargs["bot_owner"] = None
            user = create_user(
                kwargs["email"],
                "password",
                kwargs["realm"],
                active=kwargs["is_active"],
                full_name=kwargs["full_name"],
                role=UserProfile.ROLE_REALM_ADMINISTRATOR,
                **pass_kwargs,
            )
            if not skip_auditlog:
                RealmAuditLog.objects.create(
                    realm=kwargs["realm"],
                    acting_user=None,
                    modified_user=user,
                    event_type=RealmAuditLog.USER_CREATED,
                    event_time=kwargs["date_joined"],
                    extra_data={
                        RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(kwargs["realm"])
                    },
                )
            return user

    def create_stream_with_recipient(self, **kwargs: Any) -> tuple[Stream, Recipient]:
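        # Creates a uniquely named Stream (in self.default_realm by default)
        # plus its Recipient row, and links the two.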
        self.name_counter += 1
        defaults = {
            "name": f"stream name {self.name_counter}",
            "realm": self.default_realm,
            "date_created": self.TIME_LAST_HOUR,
            "can_remove_subscribers_group": self.administrators_user_group,
        }
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        stream = Stream.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        stream.recipient = recipient
        stream.save(update_fields=["recipient"])
        return stream, recipient

    def create_huddle_with_recipient(self, **kwargs: Any) -> tuple[DirectMessageGroup, Recipient]:
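        # Creates a DirectMessageGroup with a unique huddle_hash plus its
        # Recipient row, and links the two.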
        self.name_counter += 1
        defaults = {"huddle_hash": f"hash{self.name_counter}"}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        huddle = DirectMessageGroup.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.DIRECT_MESSAGE_GROUP)
        huddle.recipient = recipient
        huddle.save(update_fields=["recipient"])
        return huddle, recipient

    def create_message(self, sender: UserProfile, recipient: Recipient, **kwargs: Any) -> Message:
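        # Creates a Message from sender to recipient, defaulting to topic
        # "subject", content "hi", the "website" client, and TIME_LAST_HOUR.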
        defaults = {
            "sender": sender,
            "recipient": recipient,
            DB_TOPIC_NAME: "subject",
            "content": "hi",
            "date_sent": self.TIME_LAST_HOUR,
            "sending_client": get_client("website"),
            "realm_id": sender.realm_id,
        }
        # For simplicity, this helper doesn't support creating cross-realm messages
        # since it'd require adding an additional realm argument.
        assert not is_cross_realm_bot_email(sender.delivery_email)

        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return Message.objects.create(**kwargs)

    def create_attachment(
        self,
        user_profile: UserProfile,
        filename: str,
        size: int,
        create_time: datetime,
        content_type: str,
    ) -> Attachment:
        return Attachment.objects.create(
            file_name=filename,
            path_id=f"foo/bar/{filename}",
            owner=user_profile,
            realm=user_profile.realm,
            size=size,
            create_time=create_time,
            content_type=content_type,
        )

    # kwargs should only ever be a UserProfile or Stream.
    def assert_table_count(
        self,
        table: type[BaseCount],
        value: int,
        property: str | None = None,
        subgroup: str | None = None,
        end_time: datetime = TIME_ZERO,
        realm: Realm | None = None,
        **kwargs: models.Model,
    ) -> None:
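        # Asserts that the row of <table> matching property/subgroup/end_time
        # (and any extra filters in kwargs), scoped to `realm` unless the table
        # is InstallationCount, has the given value.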
        if property is None:
            property = self.current_property
        queryset = table._default_manager.filter(property=property, end_time=end_time).filter(
            **kwargs
        )
        if table is not InstallationCount:
            if realm is None:
                realm = self.default_realm
            queryset = queryset.filter(realm=realm)
        if subgroup is not None:
            queryset = queryset.filter(subgroup=subgroup)
        self.assertEqual(queryset.values_list("value", flat=True)[0], value)

    def assertTableState(
        self, table: type[BaseCount], arg_keys: list[str], arg_values: list[list[object]]
    ) -> None:
        """Assert that the state of a *Count table is what it should be.

        Example usage:
            self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
                                  [['p1', 4], ['p2', 10, self.alt_realm]])

        table -- A *Count table.
        arg_keys -- List of columns of <table>.
        arg_values -- List of "rows" of <table>.
            Each entry of arg_values (e.g. ['p1', 4]) represents a row of <table>.
            The i'th value of the entry corresponds to the i'th arg_key, so e.g.
            the first arg_values entry here corresponds to a row of RealmCount
            with property='p1' and subgroup=4.
            Any columns not specified (in this case, every column of RealmCount
            other than property and subgroup) are either set to default values,
            or are ignored.

        The function checks that every entry of arg_values matches exactly one
        row of <table>, and that no additional rows exist. Note that this means
        checking a table with duplicate rows is not supported.
        """
        defaults = {
            "property": self.current_property,
            "subgroup": None,
            "end_time": self.TIME_ZERO,
            "value": 1,
        }
        for values in arg_values:
            kwargs: dict[str, Any] = {}
            for i in range(len(values)):
                kwargs[arg_keys[i]] = values[i]
            for key, value in defaults.items():
                kwargs[key] = kwargs.get(key, value)
            if (
                table not in [InstallationCount, RemoteInstallationCount, RemoteRealmCount]
                and "realm" not in kwargs
            ):
                if "user" in kwargs:
                    kwargs["realm"] = kwargs["user"].realm
                elif "stream" in kwargs:
                    kwargs["realm"] = kwargs["stream"].realm
                else:
                    kwargs["realm"] = self.default_realm
            self.assertEqual(table._default_manager.filter(**kwargs).count(), 1)
        self.assert_length(arg_values, table._default_manager.count())


class TestProcessCountStat(AnalyticsTestCase):
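    # Tests process_count_stat's fill machinery: FillState transitions, time
    # bound validation, the LoggingCountStat path, and DependentCountStat
    # dependency ordering.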
    def make_dummy_count_stat(self, property: str) -> CountStat:
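        # Returns a CountStat whose collector inserts a single RealmCount row
        # of value 1 for self.default_realm at the hour being filled.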
        query = lambda kwargs: SQL(
            """
            INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
            VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
            """
        ).format(
            default_realm_id=Literal(self.default_realm.id),
            property=Literal(property),
        )
        return CountStat(property, sql_data_collector(RealmCount, query, None), CountStat.HOUR)

    def assertFillStateEquals(
        self, stat: CountStat, end_time: datetime, state: int = FillState.DONE
    ) -> None:
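        # Asserts that the FillState row for `stat` exists with the expected
        # end_time and state (DONE by default).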
        fill_state = FillState.objects.filter(property=stat.property).first()
        assert fill_state is not None
        self.assertEqual(fill_state.end_time, end_time)
        self.assertEqual(fill_state.state, state)

    def test_process_stat(self) -> None:
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat("test stat")
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=stat.property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat("test stat")
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 2)

    def test_bad_fill_to_time(self) -> None:
        stat = self.make_dummy_count_stat("test stat")
        with self.assertRaises(ValueError):
            process_count_stat(stat, installation_epoch() + 65 * self.MINUTE)
        with self.assertRaises(TimeZoneNotUTCError):
            process_count_stat(stat, installation_epoch().replace(tzinfo=None))

    # This tests the LoggingCountStat branch of the code in do_delete_counts_at_hour.
    # It is important that do_delete_counts_at_hour not delete any of the collected
    # logging data!
    def test_process_logging_stat(self) -> None:
        end_time = self.TIME_ZERO

        user_stat = LoggingCountStat("user stat", UserCount, CountStat.DAY)
        stream_stat = LoggingCountStat("stream stat", StreamCount, CountStat.DAY)
        realm_stat = LoggingCountStat("realm stat", RealmCount, CountStat.DAY)
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        realm = self.default_realm
        UserCount.objects.create(
            user=user, realm=realm, property=user_stat.property, end_time=end_time, value=5
        )
        StreamCount.objects.create(
            stream=stream, realm=realm, property=stream_stat.property, end_time=end_time, value=5
        )
        RealmCount.objects.create(
            realm=realm, property=realm_stat.property, end_time=end_time, value=5
        )

        # Normal run of process_count_stat
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ["property", "value"], [[user_stat.property, 5]])
        self.assertTableState(StreamCount, ["property", "value"], [[stream_stat.property, 5]])
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [[user_stat.property, 5], [stream_stat.property, 5], [realm_stat.property, 5]],
        )
        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [[user_stat.property, 5], [stream_stat.property, 5], [realm_stat.property, 5]],
        )

        # Change the logged data and mark FillState as dirty
        UserCount.objects.update(value=6)
        StreamCount.objects.update(value=6)
        RealmCount.objects.filter(property=realm_stat.property).update(value=6)
        FillState.objects.update(state=FillState.STARTED)

        # Check that the change propagated (and the collected data wasn't deleted)
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ["property", "value"], [[user_stat.property, 6]])
        self.assertTableState(StreamCount, ["property", "value"], [[stream_stat.property, 6]])
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [[user_stat.property, 6], [stream_stat.property, 6], [realm_stat.property, 6]],
        )
        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [[user_stat.property, 6], [stream_stat.property, 6], [realm_stat.property, 6]],
        )

    def test_process_dependent_stat(self) -> None:
        stat1 = self.make_dummy_count_stat("stat1")
        stat2 = self.make_dummy_count_stat("stat2")
        query = lambda kwargs: SQL(
            """
            INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
            VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
            """
        ).format(
            default_realm_id=Literal(self.default_realm.id),
            property=Literal("stat3"),
        )
        stat3 = DependentCountStat(
            "stat3",
            sql_data_collector(RealmCount, query, None),
            CountStat.HOUR,
            dependencies=["stat1", "stat2"],
        )

        query = lambda kwargs: SQL(
            """
            INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
            VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
            """
        ).format(
            default_realm_id=Literal(self.default_realm.id),
            property=Literal("stat4"),
        )
        stat4 = DependentCountStat(
            "stat4",
            sql_data_collector(RealmCount, query, None),
            CountStat.DAY,
            dependencies=["stat1", "stat2"],
        )

        dummy_count_stats = {
            "stat1": stat1,
            "stat2": stat2,
            "stat3": stat3,
            "stat4": stat4,
        }
        with mock.patch("analytics.lib.counts.COUNT_STATS", dummy_count_stats):
            hour = [installation_epoch() + i * self.HOUR for i in range(5)]

            # test when one dependency has been run, and the other hasn't
            process_count_stat(stat1, hour[2])
            process_count_stat(stat3, hour[1])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [["stat1", hour[1]], ["stat1", hour[2]]],
            )
            self.assertFillStateEquals(stat3, hour[0])

            # test that we don't fill past the fill_to_time argument, even if
            # dependencies have later last_successful_fill
            process_count_stat(stat2, hour[3])
            process_count_stat(stat3, hour[1])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [
                    ["stat1", hour[1]],
                    ["stat1", hour[2]],
                    ["stat2", hour[1]],
                    ["stat2", hour[2]],
                    ["stat2", hour[3]],
                    ["stat3", hour[1]],
                ],
            )
            self.assertFillStateEquals(stat3, hour[1])

            # test that we don't fill past the dependency last_successful_fill times,
            # even if fill_to_time is later
            process_count_stat(stat3, hour[4])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [
                    ["stat1", hour[1]],
                    ["stat1", hour[2]],
                    ["stat2", hour[1]],
                    ["stat2", hour[2]],
                    ["stat2", hour[3]],
                    ["stat3", hour[1]],
                    ["stat3", hour[2]],
                ],
            )
            self.assertFillStateEquals(stat3, hour[2])

            # test daily dependent stat with hourly dependencies
            hour24 = installation_epoch() + 24 * self.HOUR
            hour25 = installation_epoch() + 25 * self.HOUR
            process_count_stat(stat1, hour25)
            process_count_stat(stat2, hour25)
            process_count_stat(stat4, hour25)
            self.assertEqual(InstallationCount.objects.filter(property="stat4").count(), 1)
            self.assertFillStateEquals(stat4, hour24)


class TestCountStats(AnalyticsTestCase):
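    # Exercises the individual COUNT_STATS fill queries; setUp builds a second
    # realm (with "hourly" and "daily" users/streams/messages) and a no-message
    # realm to check realm separation and time bounds.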
    @override
    def setUp(self) -> None:
        super().setUp()
        # This tests two things for each of the queries/CountStats: Handling
        # more than 1 realm, and the time bounds (time_start and time_end in
        # the queries).
        self.second_realm = do_create_realm(
            string_id="second-realm",
            name="Second Realm",
            date_created=self.TIME_ZERO - 2 * self.DAY,
        )

        for minutes_ago in [0, 1, 61, 60 * 24 + 1]:
            creation_time = self.TIME_ZERO - minutes_ago * self.MINUTE
            user = self.create_user(
                email=f"user-{minutes_ago}@second.analytics",
                realm=self.second_realm,
                date_joined=creation_time,
            )
            recipient = self.create_stream_with_recipient(
                name=f"stream {minutes_ago}", realm=self.second_realm, date_created=creation_time
            )[1]
            self.create_message(user, recipient, date_sent=creation_time)
        self.hourly_user = get_user("user-1@second.analytics", self.second_realm)
        self.daily_user = get_user("user-61@second.analytics", self.second_realm)

        # This realm should not show up in the *Count tables for any of the
        # messages_* CountStats
        self.no_message_realm = do_create_realm(
            string_id="no-message-realm",
            name="No Message Realm",
            date_created=self.TIME_ZERO - 2 * self.DAY,
        )

        self.create_user(realm=self.no_message_realm)
        self.create_stream_with_recipient(realm=self.no_message_realm)
        # This huddle should not show up anywhere
        self.create_huddle_with_recipient()

    def test_upload_quota_used_bytes(self) -> None:
        stat = COUNT_STATS["upload_quota_used_bytes::day"]
        self.current_property = stat.property

        user1 = self.create_user()
        user2 = self.create_user()
        user_second_realm = self.create_user(realm=self.second_realm)

        self.create_attachment(user1, "file1", 100, self.TIME_LAST_HOUR, "text/plain")
        attachment2 = self.create_attachment(user2, "file2", 200, self.TIME_LAST_HOUR, "text/plain")
        self.create_attachment(user_second_realm, "file3", 10, self.TIME_LAST_HOUR, "text/plain")

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[300, None, self.default_realm], [10, None, self.second_realm]],
        )

        # Delete an attachment and run the CountStat job again the next day.
        attachment2.delete()
        do_fill_count_stat_at_hour(stat, self.TIME_ZERO + self.DAY)

        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm", "end_time"],
            [
                [300, None, self.default_realm, self.TIME_ZERO],
                [10, None, self.second_realm, self.TIME_ZERO],
                [100, None, self.default_realm, self.TIME_ZERO + self.DAY],
                [10, None, self.second_realm, self.TIME_ZERO + self.DAY],
            ],
        )

    def test_messages_sent_by_is_bot(self) -> None:
        stat = COUNT_STATS["messages_sent:is_bot:hour"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "false", human1],
                [1, "false", human2],
                [3, "true", bot],
                [1, "false", self.hourly_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, "false"], [3, "true"], [1, "false", self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[3, "false"], [3, "true"]])
        self.assertTableState(StreamCount, [], [])
    def test_messages_sent_by_is_bot_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:is_bot:hour"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        # To be included
        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        # To be excluded
        self.create_message(self.hourly_user, recipient_human1)
        self.create_message(self.hourly_user, recipient_stream)
        self.create_message(self.hourly_user, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [[1, "false", human1], [1, "false", human2], [3, "true", bot]],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, "false", self.default_realm], [3, "true", self.default_realm]],
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        # Nothing currently in this stat that is bot related, but so many of
        # the rest of our stats make the human/bot distinction that one can
        # imagine a later refactoring that will intentionally or
        # unintentionally change this. So make one of our users a bot.
        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        user3 = self.create_user()

        # private streams
        recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
        recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
        self.create_message(user1, recipient_stream1)
        self.create_message(user2, recipient_stream1)
        self.create_message(user2, recipient_stream2)

        # public streams
        recipient_stream3 = self.create_stream_with_recipient()[1]
        recipient_stream4 = self.create_stream_with_recipient()[1]
        self.create_message(user1, recipient_stream3)
        self.create_message(user1, recipient_stream4)
        self.create_message(user2, recipient_stream3)

        # huddles
        recipient_huddle1 = self.create_huddle_with_recipient()[1]
        recipient_huddle2 = self.create_huddle_with_recipient()[1]
        self.create_message(user1, recipient_huddle1)
        self.create_message(user2, recipient_huddle2)

        # direct messages
        recipient_user1 = Recipient.objects.get(type_id=user1.id, type=Recipient.PERSONAL)
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)
        recipient_user3 = Recipient.objects.get(type_id=user3.id, type=Recipient.PERSONAL)
        self.create_message(user1, recipient_user2)
        self.create_message(user2, recipient_user1)
        self.create_message(user3, recipient_user3)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "private_stream", user1],
                [2, "private_stream", user2],
                [2, "public_stream", user1],
                [1, "public_stream", user2],
                [1, "private_message", user1],
                [1, "private_message", user2],
                [1, "private_message", user3],
                [1, "huddle_message", user1],
                [1, "huddle_message", user2],
                [1, "public_stream", self.hourly_user],
                [1, "public_stream", self.daily_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [
                [3, "private_stream"],
                [3, "public_stream"],
                [3, "private_message"],
                [2, "huddle_message"],
                [2, "public_stream", self.second_realm],
            ],
        )
        self.assertTableState(
            InstallationCount,
            ["value", "subgroup"],
            [
                [3, "private_stream"],
                [5, "public_stream"],
                [3, "private_message"],
                [2, "huddle_message"],
            ],
        )
        self.assertTableState(StreamCount, [], [])
    def test_messages_sent_by_message_type_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        user = self.create_user()
        user_recipient = Recipient.objects.get(type_id=user.id, type=Recipient.PERSONAL)
        private_stream_recipient = self.create_stream_with_recipient(invite_only=True)[1]
        stream_recipient = self.create_stream_with_recipient()[1]
        huddle_recipient = self.create_huddle_with_recipient()[1]

        # To be included
        self.create_message(user, user_recipient)
        self.create_message(user, private_stream_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        # To be excluded
        self.create_message(self.hourly_user, user_recipient)
        self.create_message(self.hourly_user, private_stream_recipient)
        self.create_message(self.hourly_user, stream_recipient)
        self.create_message(self.hourly_user, huddle_recipient)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "private_message", user],
                [1, "private_stream", user],
                [1, "huddle_message", user],
                [1, "public_stream", user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup"],
            [
                [1, "private_message"],
                [1, "private_stream"],
                [1, "public_stream"],
                [1, "huddle_message"],
            ],
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_recipients_with_same_id(self) -> None:
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        user = self.create_user(id=1000)
        user_recipient = Recipient.objects.get(type_id=user.id, type=Recipient.PERSONAL)
        stream_recipient = self.create_stream_with_recipient(id=1000)[1]
        huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]

        self.create_message(user, user_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assert_table_count(UserCount, 1, subgroup="private_message")
        self.assert_table_count(UserCount, 1, subgroup="huddle_message")
        self.assert_table_count(UserCount, 1, subgroup="public_stream")
    def test_messages_sent_by_client(self) -> None:
        stat = COUNT_STATS["messages_sent:client:day"]
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        client2 = Client.objects.create(name="client2")

        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user1, recipient_stream)
        self.create_message(user1, recipient_huddle)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        client2_id = str(client2.id)
        website_client_id = str(get_client("website").id)  # default for self.create_message
        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [2, website_client_id, user1],
                [1, client2_id, user1],
                [2, client2_id, user2],
                [1, website_client_id, self.hourly_user],
                [1, website_client_id, self.daily_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, website_client_id], [3, client2_id], [2, website_client_id, self.second_realm]],
        )
        self.assertTableState(
            InstallationCount, ["value", "subgroup"], [[4, website_client_id], [3, client2_id]]
        )
        self.assertTableState(StreamCount, [], [])
    def test_messages_sent_by_client_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:client:day"]
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)

        client2 = Client.objects.create(name="client2")

        # To be included
        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2)

        # To be excluded
        self.create_message(self.hourly_user, recipient_user2, sending_client=client2)
        self.create_message(self.hourly_user, recipient_user2, sending_client=client2)
        self.create_message(self.hourly_user, recipient_user2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        client2_id = str(client2.id)
        website_client_id = str(get_client("website").id)  # default for self.create_message
        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [[1, client2_id, user1], [1, client2_id, user2], [1, website_client_id, user2]],
        )
        self.assertTableState(
            RealmCount, ["value", "subgroup"], [[1, website_client_id], [2, client2_id]]
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])
    def test_messages_sent_to_stream_by_is_bot(self) -> None:
        stat = COUNT_STATS["messages_in_stream:is_bot:day"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient()

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(human2, recipient_stream1)
        self.create_message(human1, recipient_stream2)
        self.create_message(bot, recipient_stream2)
        self.create_message(bot, recipient_stream2)

        # To be excluded
        self.create_message(human2, recipient_human1)
        self.create_message(bot, recipient_human1)
        recipient_huddle = self.create_huddle_with_recipient()[1]
        self.create_message(human1, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            StreamCount,
            ["value", "subgroup", "stream"],
            [
                [2, "false", stream1],
                [1, "false", stream2],
                [2, "true", stream2],
                # "hourly" and "daily" stream, from TestCountStats.setUp
                [1, "false", Stream.objects.get(name="stream 1")],
                [1, "false", Stream.objects.get(name="stream 61")],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[3, "false"], [2, "true"], [2, "false", self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[5, "false"], [2, "true"]])
        self.assertTableState(UserCount, [], [])
    def test_messages_sent_to_stream_by_is_bot_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_in_stream:is_bot:day"]
        self.current_property = stat.property

        human1 = self.create_user()
        bot = self.create_user(is_bot=True)

        realm = {"realm": self.second_realm}
        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient(**realm)

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(bot, recipient_stream1)

        # To be excluded
        self.create_message(self.hourly_user, recipient_stream2)
        self.create_message(self.daily_user, recipient_stream2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(
            StreamCount,
            ["value", "subgroup", "stream"],
            [[1, "false", stream1], [1, "true", stream1]],
        )
        self.assertTableState(
            RealmCount, ["value", "subgroup", "realm"], [[1, "false"], [1, "true"]]
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(UserCount, [], [])
    def create_interval(
        self, user: UserProfile, start_offset: timedelta, end_offset: timedelta
    ) -> None:
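        # Records a UserActivityInterval for `user` spanning
        # [TIME_ZERO - start_offset, TIME_ZERO - end_offset].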
        UserActivityInterval.objects.create(
            user_profile=user, start=self.TIME_ZERO - start_offset, end=self.TIME_ZERO - end_offset
        )

    def test_1day_actives(self) -> None:
        stat = COUNT_STATS["1day_actives::day"]
        self.current_property = stat.property

        _1day = 1 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _1day + self.DAY, _1day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _1day + self.DAY, _1day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 2 * self.DAY, 1 * self.DAY)
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 1.5 * self.DAY, 0.5 * self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 2 * self.DAY, -2 * self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount,
            ["value", "user"],
            [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]],
        )
        self.assertTableState(
            RealmCount, ["value", "realm"], [[5, self.default_realm], [1, self.second_realm]]
        )
        self.assertTableState(InstallationCount, ["value"], [[6]])
        self.assertTableState(StreamCount, [], [])
    def test_1day_actives_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["1day_actives::day"]
        self.current_property = stat.property

        _1day = 1 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH
        user1 = self.create_user()
        user2 = self.create_user()

        # To be included
        self.create_interval(user1, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, _1day + self.DAY, _1day)

        # To be excluded
        user3 = self.create_user(realm=self.second_realm)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)
        self.assertTableState(UserCount, ["value", "user"], [[1, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[2, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])
|
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
    def test_15day_actives(self) -> None:
        stat = COUNT_STATS["15day_actives::day"]
        self.current_property = stat.property

        _15day = 15 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _15day + self.DAY, _15day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _15day + self.DAY, _15day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 20 * self.DAY, 19 * self.DAY)
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 20 * self.DAY, 10 * self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 20 * self.DAY, -2 * self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

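        # Users 2-6 were active in the default realm (hence the realm value of 5);
        # user7 was active in the second realm.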
        self.assertTableState(
            UserCount,
            ["value", "user"],
            [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]],
        )
        self.assertTableState(
            RealmCount, ["value", "realm"], [[5, self.default_realm], [1, self.second_realm]]
        )
        self.assertTableState(InstallationCount, ["value"], [[6]])
        self.assertTableState(StreamCount, [], [])

    def test_15day_actives_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["15day_actives::day"]
        self.current_property = stat.property

        _15day = 15 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user(realm=self.second_realm)

        # To be included
        self.create_interval(user1, _15day + self.DAY, _15day)
        self.create_interval(user2, 20 * self.HOUR, 19 * self.HOUR)

        # To be excluded
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(UserCount, ["value", "user"], [[1, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[2, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])

    def test_minutes_active(self) -> None:
        stat = COUNT_STATS["minutes_active::day"]
        self.current_property = stat.property

        # Outside time range, should not appear. Also testing for intervals
        # starting and ending on boundary
        user1 = self.create_user()
        self.create_interval(user1, 25 * self.HOUR, self.DAY)
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # Multiple intervals, including one outside boundary
        user2 = self.create_user()
        self.create_interval(user2, 20 * self.DAY, 19 * self.DAY)
        self.create_interval(user2, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user3 = self.create_user()
        self.create_interval(user3, 25 * self.HOUR, 22 * self.HOUR)
        self.create_interval(user3, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user4 = self.create_user()
        self.create_interval(user4, 2 * self.DAY, -2 * self.DAY)

        # Less than 60 seconds, should not appear
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, timedelta(seconds=30))
        self.create_interval(user5, timedelta(seconds=20), timedelta(seconds=10))

        # Second realm
        user6 = self.create_user(realm=self.second_realm)
        self.create_interval(user6, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

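        # user2 is credited 60 + 1 minutes inside the window; user3's boundary-crossing
        # intervals clip to 120 + 1 minutes; user4's interval covers the full 24 * 60
        # minutes; user6 contributes 1 minute in the second realm.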
        self.assertTableState(
            UserCount, ["value", "user"], [[61, user2], [121, user3], [24 * 60, user4], [1, user6]]
        )
        self.assertTableState(
            RealmCount,
            ["value", "realm"],
            [[61 + 121 + 24 * 60, self.default_realm], [1, self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value"], [[61 + 121 + 24 * 60 + 1]])
        self.assertTableState(StreamCount, [], [])

    def test_minutes_active_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["minutes_active::day"]
        self.current_property = stat.property

        # Outside time range, should not appear. Also testing for intervals
        # starting and ending on boundary
        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user(realm=self.second_realm)

        # To be included
        self.create_interval(user1, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, 20 * self.MINUTE, 19 * self.MINUTE)

        # To be excluded
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)
        self.assertTableState(UserCount, ["value", "user"], [[60, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[60 + 1, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])

    def test_last_successful_fill(self) -> None:
        self.assertIsNone(COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill())

        a_time = datetime(2016, 3, 14, 19, tzinfo=timezone.utc)
        one_hour_before = datetime(2016, 3, 14, 18, tzinfo=timezone.utc)
        one_day_before = datetime(2016, 3, 13, 19, tzinfo=timezone.utc)

        fillstate = FillState.objects.create(
            property=COUNT_STATS["messages_sent:is_bot:hour"].property,
            end_time=a_time,
            state=FillState.DONE,
        )
        self.assertEqual(COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill(), a_time)

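        # A fill that is only STARTED does not count as successful, so
        # last_successful_fill falls back to one frequency interval before end_time
        # (an hour for hourly stats, a day for daily stats).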
        fillstate.state = FillState.STARTED
        fillstate.save(update_fields=["state"])
        self.assertEqual(
            COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill(), one_hour_before
        )

        fillstate.property = COUNT_STATS["7day_actives::day"].property
        fillstate.save(update_fields=["property"])
        self.assertEqual(COUNT_STATS["7day_actives::day"].last_successful_fill(), one_day_before)


class TestDoAggregateToSummaryTable(AnalyticsTestCase):
    # do_aggregate_to_summary_table is mostly tested by the end to end
    # nature of the tests in TestCountStats. But want to highlight one
    # feature important for keeping the size of the analytics tables small,
    # which is that if there is no relevant data in the table being
    # aggregated, the aggregation table doesn't get a row with value 0.
    def test_no_aggregated_zeros(self) -> None:
        stat = LoggingCountStat("test stat", UserCount, CountStat.HOUR)
        do_aggregate_to_summary_table(stat, self.TIME_ZERO)
        self.assertFalse(RealmCount.objects.exists())
        self.assertFalse(InstallationCount.objects.exists())


class TestDoIncrementLoggingStat(AnalyticsTestCase):
    def test_table_and_id_args(self) -> None:
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = "test"
        second_realm = do_create_realm(string_id="moo", name="moo")

        stat = LoggingCountStat("test", RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["realm"], [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat("test", UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ["user"], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat("test", StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ["stream"], [[stream1], [stream2]])

    def test_frequency(self) -> None:
        times = [self.TIME_ZERO - self.MINUTE * i for i in [0, 1, 61, 24 * 60 + 1]]

        stat = LoggingCountStat("day test", RealmCount, CountStat.DAY)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)
        stat = LoggingCountStat("hour test", RealmCount, CountStat.HOUR)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)

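        # The daily stat buckets the first three times into the day ending at
        # TIME_ZERO and the last one into the previous day; the hourly stat splits
        # them across TIME_ZERO, the previous hour, and the previous day.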
        self.assertTableState(
            RealmCount,
            ["value", "property", "end_time"],
            [
                [3, "day test", self.TIME_ZERO],
                [1, "day test", self.TIME_ZERO - self.DAY],
                [2, "hour test", self.TIME_ZERO],
                [1, "hour test", self.TIME_LAST_HOUR],
                [1, "hour test", self.TIME_ZERO - self.DAY],
            ],
        )

    def test_get_or_create(self) -> None:
        stat = LoggingCountStat("test", RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in
        # test_table_and_id_args, so this only tests a new subgroup and end_time
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, "subgroup2", self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_LAST_HOUR)
        self.current_property = "test"
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "end_time"],
            [
                [1, "subgroup1", self.TIME_ZERO],
                [1, "subgroup2", self.TIME_ZERO],
                [1, "subgroup1", self.TIME_LAST_HOUR],
            ],
        )
        # This should trigger the get part of get_or_create
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_ZERO)
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "end_time"],
            [
                [2, "subgroup1", self.TIME_ZERO],
                [1, "subgroup2", self.TIME_ZERO],
                [1, "subgroup1", self.TIME_LAST_HOUR],
            ],
        )

    def test_increment(self) -> None:
        stat = LoggingCountStat("test", RealmCount, CountStat.DAY)
        self.current_property = "test"
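        # Successive increments accumulate on the same row: -1, then +3, then the
        # default increment of 1.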
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
        self.assertTableState(RealmCount, ["value"], [[-1]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
        self.assertTableState(RealmCount, ["value"], [[2]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["value"], [[3]])

    def test_do_increment_logging_stat_query_count(self) -> None:
        stat = LoggingCountStat("test", RealmCount, CountStat.DAY)
        with self.assert_database_query_count(1):
            do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)


class TestLoggingCountStats(AnalyticsTestCase):
    def test_aggregation(self) -> None:
        stat = LoggingCountStat("realm test", RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat("user test", UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat("stream test", StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

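        # Each logging stat is rolled up its hierarchy: the UserCount and StreamCount
        # rows aggregate into RealmCount, and every stat aggregates into
        # InstallationCount.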
        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [["realm test", 1], ["user test", 1], ["stream test", 1]],
        )
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [["realm test", 1], ["user test", 1], ["stream test", 1]],
        )
        self.assertTableState(UserCount, ["property", "value"], [["user test", 1]])
        self.assertTableState(StreamCount, ["property", "value"], [["stream test", 1]])

    @override_settings(PUSH_NOTIFICATION_BOUNCER_URL="https://push.zulip.org.example.com")
    def test_mobile_pushes_received_count(self) -> None:
        self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
        self.server = RemoteZulipServer.objects.create(
            uuid=self.server_uuid,
            api_key="magic_secret_api_key",
            hostname="demo.example.com",
            last_updated=timezone_now(),
        )

        hamlet = self.example_user("hamlet")
        token = "aaaa"

        RemotePushDeviceToken.objects.create(
            kind=RemotePushDeviceToken.FCM,
            token=hex_to_b64(token),
            user_uuid=(hamlet.uuid),
            server=self.server,
        )
        RemotePushDeviceToken.objects.create(
            kind=RemotePushDeviceToken.FCM,
            token=hex_to_b64(token + "aa"),
            user_uuid=(hamlet.uuid),
            server=self.server,
        )
        RemotePushDeviceToken.objects.create(
            kind=RemotePushDeviceToken.APNS,
            token=hex_to_b64(token),
            user_uuid=str(hamlet.uuid),
            server=self.server,
        )

        message = Message(
            sender=hamlet,
            recipient=self.example_user("othello").recipient,
            realm_id=hamlet.realm_id,
            content="This is test content",
            rendered_content="This is test content",
            date_sent=timezone_now(),
            sending_client=get_client("test"),
        )
        message.set_topic_name("Test topic")
        message.save()
        gcm_payload, gcm_options = get_message_payload_gcm(hamlet, message)
        apns_payload = get_message_payload_apns(
            hamlet, message, NotificationTriggers.DIRECT_MESSAGE
        )

        # First we'll make a request without providing realm_uuid. That means
        # the bouncer can't increment the RemoteRealmCount stat, and only
        # RemoteInstallationCount will be incremented.
        payload = {
            "user_id": hamlet.id,
            "user_uuid": str(hamlet.uuid),
            "gcm_payload": gcm_payload,
            "apns_payload": apns_payload,
            "gcm_options": gcm_options,
        }
        now = timezone_now()
        with (
            time_machine.travel(now, tick=False),
            mock.patch("zilencer.views.send_android_push_notification", return_value=1),
            mock.patch("zilencer.views.send_apple_push_notification", return_value=1),
            mock.patch(
                "corporate.lib.stripe.RemoteServerBillingSession.current_count_for_billed_licenses",
                return_value=10,
            ),
            self.assertLogs("zilencer.views", level="INFO"),
        ):
            result = self.uuid_post(
                self.server_uuid,
                "/api/v1/remotes/push/notify",
                payload,
                content_type="application/json",
                subdomain="",
            )
        self.assert_json_success(result)

        # There are 3 devices we created for the user:
        # 1. The mobile_pushes_received increment should match that number.
        # 2. mobile_pushes_forwarded only counts successful deliveries, and we've set up
        #    the mocks above to simulate 1 successful android and 1 successful apple delivery.
        #    Thus the increment should be just 2.
        self.assertTableState(
            RemoteInstallationCount,
            ["property", "value", "subgroup", "server", "remote_id", "end_time"],
            [
                [
                    "mobile_pushes_received::day",
                    3,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
                [
                    "mobile_pushes_forwarded::day",
                    2,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
            ],
        )
        self.assertFalse(
            RemoteRealmCount.objects.filter(property="mobile_pushes_received::day").exists()
        )
        self.assertFalse(
            RemoteRealmCount.objects.filter(property="mobile_pushes_forwarded::day").exists()
        )

        # Now provide the realm_uuid. However, the RemoteRealm record doesn't exist yet, so it'll
        # still be ignored.
        payload = {
            "user_id": hamlet.id,
            "user_uuid": str(hamlet.uuid),
            "realm_uuid": str(hamlet.realm.uuid),
            "gcm_payload": gcm_payload,
            "apns_payload": apns_payload,
            "gcm_options": gcm_options,
        }
        with (
            time_machine.travel(now, tick=False),
            mock.patch("zilencer.views.send_android_push_notification", return_value=1),
            mock.patch("zilencer.views.send_apple_push_notification", return_value=1),
            mock.patch(
                "corporate.lib.stripe.RemoteServerBillingSession.current_count_for_billed_licenses",
                return_value=10,
            ),
            self.assertLogs("zilencer.views", level="INFO"),
        ):
            result = self.uuid_post(
                self.server_uuid,
                "/api/v1/remotes/push/notify",
                payload,
                content_type="application/json",
                subdomain="",
            )
        self.assert_json_success(result)

        # The RemoteInstallationCount records get incremented again, but the RemoteRealmCount
        # remains ignored due to missing RemoteRealm record.
        self.assertTableState(
            RemoteInstallationCount,
            ["property", "value", "subgroup", "server", "remote_id", "end_time"],
            [
                [
                    "mobile_pushes_received::day",
                    6,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
                [
                    "mobile_pushes_forwarded::day",
                    4,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
            ],
        )
        self.assertFalse(
            RemoteRealmCount.objects.filter(property="mobile_pushes_received::day").exists()
        )
        self.assertFalse(
            RemoteRealmCount.objects.filter(property="mobile_pushes_forwarded::day").exists()
        )

        # Create the RemoteRealm registration and repeat the above. This time RemoteRealmCount
        # stats should be collected.
        realm = hamlet.realm
        remote_realm = RemoteRealm.objects.create(
            server=self.server,
            uuid=realm.uuid,
            uuid_owner_secret=realm.uuid_owner_secret,
            host=realm.host,
            realm_deactivated=realm.deactivated,
            realm_date_created=realm.date_created,
        )

        with (
            time_machine.travel(now, tick=False),
            mock.patch("zilencer.views.send_android_push_notification", return_value=1),
            mock.patch("zilencer.views.send_apple_push_notification", return_value=1),
            mock.patch(
                "corporate.lib.stripe.RemoteRealmBillingSession.current_count_for_billed_licenses",
                return_value=10,
            ),
            self.assertLogs("zilencer.views", level="INFO"),
        ):
            result = self.uuid_post(
                self.server_uuid,
                "/api/v1/remotes/push/notify",
                payload,
                content_type="application/json",
                subdomain="",
            )
        self.assert_json_success(result)

        # The RemoteInstallationCount records get incremented again, and the RemoteRealmCount
        # gets collected.
        self.assertTableState(
            RemoteInstallationCount,
            ["property", "value", "subgroup", "server", "remote_id", "end_time"],
            [
                [
                    "mobile_pushes_received::day",
                    9,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
                [
                    "mobile_pushes_forwarded::day",
                    6,
                    None,
                    self.server,
                    None,
                    ceiling_to_day(now),
                ],
            ],
        )
        self.assertTableState(
            RemoteRealmCount,
            ["property", "value", "subgroup", "server", "remote_realm", "remote_id", "end_time"],
            [
                [
                    "mobile_pushes_received::day",
                    3,
                    None,
                    self.server,
                    remote_realm,
                    None,
                    ceiling_to_day(now),
                ],
                [
                    "mobile_pushes_forwarded::day",
                    2,
                    None,
                    self.server,
                    remote_realm,
                    None,
                    ceiling_to_day(now),
                ],
            ],
        )

    def test_invites_sent(self) -> None:
        property = "invites_sent::day"

        @contextmanager
        def invite_context(
            too_many_recent_realm_invites: bool = False, failure: bool = False
        ) -> Iterator[None]:
            managers: list[AbstractContextManager[Any]] = [
                mock.patch(
                    "zerver.actions.invites.too_many_recent_realm_invites",
                    return_value=too_many_recent_realm_invites,
                ),
                self.captureOnCommitCallbacks(execute=True),
            ]
            if failure:
                managers.append(self.assertRaises(InvitationError))
            with ExitStack() as stack:
                for mgr in managers:
                    stack.enter_context(mgr)
                yield

        def assertInviteCountEquals(count: int) -> None:
            self.assertEqual(
                count,
                RealmCount.objects.filter(property=property, subgroup=None).aggregate(Sum("value"))[
                    "value__sum"
                ],
            )

        user = self.create_user(email="first@domain.tld")
        stream, _ = self.create_stream_with_recipient()

        invite_expires_in_minutes = 2 * 24 * 60
        with invite_context():
            do_invite_users(
                user,
                ["user1@domain.tld", "user2@domain.tld"],
                [stream],
                include_realm_default_subscriptions=False,
                invite_expires_in_minutes=invite_expires_in_minutes,
            )
        assertInviteCountEquals(2)

        # We currently send emails when re-inviting users that haven't
        # turned into accounts, so count them towards the total
        with invite_context():
            do_invite_users(
                user,
                ["user1@domain.tld", "user2@domain.tld"],
                [stream],
                include_realm_default_subscriptions=False,
                invite_expires_in_minutes=invite_expires_in_minutes,
            )
        assertInviteCountEquals(4)

        # Test mix of good and malformed invite emails
        with invite_context(failure=True):
            do_invite_users(
                user,
                ["user3@domain.tld", "malformed"],
                [stream],
                include_realm_default_subscriptions=False,
                invite_expires_in_minutes=invite_expires_in_minutes,
            )
        assertInviteCountEquals(4)

        # Test inviting existing users
        with invite_context():
            skipped = do_invite_users(
                user,
                ["first@domain.tld", "user4@domain.tld"],
                [stream],
                include_realm_default_subscriptions=False,
                invite_expires_in_minutes=invite_expires_in_minutes,
            )
        self.assert_length(skipped, 1)
        assertInviteCountEquals(5)

        # Revoking invite should not give you credit
        do_revoke_user_invite(
            assert_is_not_none(PreregistrationUser.objects.filter(realm=user.realm).first())
        )
        assertInviteCountEquals(5)

        # Resending invite should cost you
        with invite_context():
            do_send_user_invite_email(assert_is_not_none(PreregistrationUser.objects.first()))
        assertInviteCountEquals(6)

    def test_messages_read_hour(self) -> None:
        read_count_property = "messages_read::hour"
        interactions_property = "messages_read_interactions::hour"
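        # messages_read counts individual messages marked as read, while
        # messages_read_interactions counts the number of mark-as-read operations.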
        user1 = self.create_user()
        user2 = self.create_user()
        stream, recipient = self.create_stream_with_recipient()
        self.subscribe(user1, stream.name)
        self.subscribe(user2, stream.name)

        self.send_personal_message(user1, user2)
        do_mark_all_as_read(user2)
        self.assertEqual(
            1,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            1,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )

        self.send_stream_message(user1, stream.name)
        self.send_stream_message(user1, stream.name)
        do_mark_stream_messages_as_read(user2, assert_is_not_none(stream.recipient_id))
        self.assertEqual(
            3,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            2,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )

        message = self.send_stream_message(user2, stream.name)
        do_update_message_flags(user1, "add", "read", [message])
        self.assertEqual(
            4,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            3,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )


class TestDeleteStats(AnalyticsTestCase):
    def test_do_drop_all_analytics_tables(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args = {"property": "test", "end_time": self.TIME_ZERO, "value": 10}

        UserCount.objects.create(user=user, realm=user.realm, **count_args)
        StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
        RealmCount.objects.create(realm=user.realm, **count_args)
        InstallationCount.objects.create(**count_args)
        FillState.objects.create(property="test", end_time=self.TIME_ZERO, state=FillState.DONE)

        analytics = apps.get_app_config("analytics")
        for table in analytics.models.values():
            self.assertTrue(table._default_manager.exists())

        do_drop_all_analytics_tables()
        for table in analytics.models.values():
            self.assertFalse(table._default_manager.exists())

    def test_do_drop_single_stat(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args_to_delete = {"property": "to_delete", "end_time": self.TIME_ZERO, "value": 10}
        count_args_to_save = {"property": "to_save", "end_time": self.TIME_ZERO, "value": 10}

        for count_args in [count_args_to_delete, count_args_to_save]:
            UserCount.objects.create(user=user, realm=user.realm, **count_args)
            StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
            RealmCount.objects.create(realm=user.realm, **count_args)
            InstallationCount.objects.create(**count_args)
        FillState.objects.create(
            property="to_delete", end_time=self.TIME_ZERO, state=FillState.DONE
        )
        FillState.objects.create(property="to_save", end_time=self.TIME_ZERO, state=FillState.DONE)

        analytics = apps.get_app_config("analytics")
        for table in analytics.models.values():
            self.assertTrue(table._default_manager.exists())

        do_drop_single_stat("to_delete")
        for table in analytics.models.values():
            self.assertFalse(table._default_manager.filter(property="to_delete").exists())
            self.assertTrue(table._default_manager.filter(property="to_save").exists())


class TestActiveUsersAudit(AnalyticsTestCase):
    @override
    def setUp(self) -> None:
        super().setUp()
        self.user = self.create_user(skip_auditlog=True)
        self.stat = COUNT_STATS["active_users_audit:is_bot:day"]
        self.current_property = self.stat.property

    def add_event(
        self, event_type: int, days_offset: float, user: UserProfile | None = None
    ) -> None:
        hours_offset = int(24 * days_offset)
        if user is None:
            user = self.user
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=event_type,
            event_time=self.TIME_ZERO - hours_offset * self.HOUR,
        )

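    # The stat counts a user as active iff their most recent relevant RealmAuditLog
    # event (created/activated/reactivated vs. deactivated) leaves them active at the
    # end of the day, subgrouped by is_bot.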
    def test_user_deactivated_in_future(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["subgroup"], [["false"]])

    def test_user_reactivated_in_future(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_user_active_then_deactivated_same_day(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_user_unactive_then_activated_same_day(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["subgroup"], [["false"]])

    # Arguably these next two tests are duplicates of the _in_future tests, but are
    # a guard against future refactorings where they may no longer be duplicates
    def test_user_active_then_deactivated_with_day_gap(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 2)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(
            RealmCount, ["subgroup", "end_time"], [["false", self.TIME_ZERO - self.DAY]]
        )

    def test_user_deactivated_then_reactivated_with_day_gap(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 2)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["subgroup"], [["false"]])

    def test_event_types(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 4)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 3)
        self.add_event(RealmAuditLog.USER_ACTIVATED, 2)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
        for i in range(4):
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO - i * self.DAY)
        self.assertTableState(
            RealmCount,
            ["subgroup", "end_time"],
            [["false", self.TIME_ZERO - i * self.DAY] for i in [3, 1, 0]],
        )

    # Also tests that aggregation to RealmCount and InstallationCount is
    # being done, and that we're storing the user correctly in UserCount
    def test_multiple_users_realms_and_bots(self) -> None:
        user1 = self.create_user(skip_auditlog=True)
        user2 = self.create_user(skip_auditlog=True)
        second_realm = do_create_realm(string_id="moo", name="moo")
        user3 = self.create_user(skip_auditlog=True, realm=second_realm)
        user4 = self.create_user(skip_auditlog=True, realm=second_realm, is_bot=True)
        for user in [user1, user2, user3, user4]:
            self.add_event(RealmAuditLog.USER_CREATED, 1, user=user)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [
                [2, "false", self.default_realm],
                [1, "false", second_realm],
                [1, "true", second_realm],
            ],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[3, "false"], [1, "true"]])
        self.assertTableState(StreamCount, [], [])

    # Not that interesting a test if you look at the SQL query at hand, but
    # almost all other CountStats have a start_date, so guarding against a
    # refactoring that adds that in.
    # Also tests the slightly more end-to-end process_count_stat rather than
    # do_fill_count_stat_at_hour. E.g. if one changes self.stat.frequency to
    # CountStat.HOUR from CountStat.DAY, this will fail, while many of the
    # tests above will not.
    def test_update_from_two_days_ago(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 2)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(
            RealmCount,
            ["subgroup", "end_time"],
            [["false", self.TIME_ZERO], ["false", self.TIME_ZERO - self.DAY]],
        )

    # User with no relevant activity could happen e.g. for a system bot that
    # doesn't go through do_create_user. Mainly just want to make sure that
    # that situation doesn't throw an error.
    def test_empty_realm_or_user_with_no_relevant_activity(self) -> None:
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 1)
        self.create_user(skip_auditlog=True)  # also test a user with no RealmAuditLog entries
        do_create_realm(string_id="moo", name="moo")
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_max_audit_entry_is_unrelated(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["subgroup"], [["false"]])

    # Simultaneous related audit entries should not be allowed, and so not testing for that.
    def test_simultaneous_unrelated_audit_entry(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 1)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["subgroup"], [["false"]])

    def test_simultaneous_max_audit_entries_of_different_users(self) -> None:
        user1 = self.create_user(skip_auditlog=True)
        user2 = self.create_user(skip_auditlog=True)
        user3 = self.create_user(skip_auditlog=True)
        self.add_event(RealmAuditLog.USER_CREATED, 0.5, user=user1)
        self.add_event(RealmAuditLog.USER_CREATED, 0.5, user=user2)
        self.add_event(RealmAuditLog.USER_CREATED, 1, user=user3)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0.5, user=user3)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["value", "subgroup"], [[2, "false"]])

    def test_end_to_end_with_actions_dot_py(self) -> None:
        do_create_user("email1", "password", self.default_realm, "full_name", acting_user=None)
        user2 = do_create_user(
            "email2", "password", self.default_realm, "full_name", acting_user=None
        )
        user3 = do_create_user(
            "email3", "password", self.default_realm, "full_name", acting_user=None
        )
        user4 = do_create_user(
            "email4", "password", self.default_realm, "full_name", acting_user=None
        )
        do_deactivate_user(user2, acting_user=None)
        do_activate_mirror_dummy_user(user3, acting_user=None)
        do_reactivate_user(user4, acting_user=None)
        end_time = floor_to_day(timezone_now()) + self.DAY
        do_fill_count_stat_at_hour(self.stat, end_time)
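        # user2 was deactivated, so only the three remaining human accounts count
        # as active.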
        self.assertTrue(
            RealmCount.objects.filter(
                realm=self.default_realm,
                property=self.current_property,
                subgroup="false",
                end_time=end_time,
                value=3,
            ).exists()
        )


class TestRealmActiveHumans(AnalyticsTestCase):
    @override
    def setUp(self) -> None:
        super().setUp()
        self.stat = COUNT_STATS["realm_active_humans::day"]
        self.current_property = self.stat.property

    def mark_15day_active(self, user: UserProfile, end_time: datetime | None = None) -> None:
        if end_time is None:
            end_time = self.TIME_ZERO
        UserCount.objects.create(
            user=user, realm=user.realm, property="15day_actives::day", end_time=end_time, value=1
        )

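    # realm_active_humans::day is derived from the 15day_actives::day UserCount rows
    # (excluding bots); mark_15day_active seeds those rows directly.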
    def test_basic_logic(self) -> None:
        user = self.create_user()
        self.mark_15day_active(user, end_time=self.TIME_ZERO)
        self.mark_15day_active(user, end_time=self.TIME_ZERO + self.DAY)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
        self.assertTableState(
            RealmCount, ["value", "end_time"], [[1, self.TIME_ZERO], [1, self.TIME_ZERO + self.DAY]]
        )

    def test_bots_not_counted(self) -> None:
        bot = self.create_user(is_bot=True)
        self.mark_15day_active(bot)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_multiple_users_realms_and_times(self) -> None:
        user1 = self.create_user(date_joined=self.TIME_ZERO - 2 * self.DAY)
        user2 = self.create_user(date_joined=self.TIME_ZERO - 2 * self.DAY)
        second_realm = do_create_realm(string_id="second", name="second")
        user3 = self.create_user(date_joined=self.TIME_ZERO - 2 * self.DAY, realm=second_realm)
        user4 = self.create_user(date_joined=self.TIME_ZERO - 2 * self.DAY, realm=second_realm)
        user5 = self.create_user(date_joined=self.TIME_ZERO - 2 * self.DAY, realm=second_realm)

        for user in [user1, user3, user4]:
            self.mark_15day_active(user, end_time=self.TIME_ZERO - self.DAY)
        for user in [user1, user2, user3, user4, user5]:
            self.mark_15day_active(user)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
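        # For the day before TIME_ZERO, user1 plus users 3-4 were active; at
        # TIME_ZERO, users 1-2 plus users 3-5 were.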
        self.assertTableState(
            RealmCount,
            ["value", "realm", "end_time"],
            [
                [1, self.default_realm, self.TIME_ZERO - self.DAY],
                [2, second_realm, self.TIME_ZERO - self.DAY],
                [2, self.default_realm, self.TIME_ZERO],
                [3, second_realm, self.TIME_ZERO],
            ],
        )

        # Check that adding spurious entries doesn't make a difference
        self.create_user()
        third_realm = do_create_realm(string_id="third", name="third")
        self.create_user(realm=third_realm)

        RealmCount.objects.all().delete()
        InstallationCount.objects.all().delete()
        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
        self.assertTableState(
            RealmCount,
            ["value", "realm", "end_time"],
            [
                [1, self.default_realm, self.TIME_ZERO - self.DAY],
                [2, second_realm, self.TIME_ZERO - self.DAY],
                [2, self.default_realm, self.TIME_ZERO],
                [3, second_realm, self.TIME_ZERO],
            ],
        )

    def test_end_to_end(self) -> None:
        user1 = do_create_user(
            "email1", "password", self.default_realm, "full_name", acting_user=None
        )
        user2 = do_create_user(
            "email2", "password", self.default_realm, "full_name", acting_user=None
        )
        do_create_user("email3", "password", self.default_realm, "full_name", acting_user=None)
        time_zero = floor_to_day(timezone_now()) + self.DAY
        update_user_activity_interval(user1, time_zero)
        update_user_activity_interval(user2, time_zero)
        do_deactivate_user(user2, acting_user=None)
        for property in [
            "active_users_audit:is_bot:day",
            "15day_actives::day",
            "realm_active_humans::day",
        ]:
            FillState.objects.create(property=property, state=FillState.DONE, end_time=time_zero)
            process_count_stat(COUNT_STATS[property], time_zero + self.DAY)
        self.assertEqual(
            RealmCount.objects.filter(
                property="realm_active_humans::day", end_time=time_zero + self.DAY, value=1
            ).count(),
            1,
        )
        self.assertEqual(RealmCount.objects.filter(property="realm_active_humans::day").count(), 1)


class GetLastIdFromServerTest(ZulipTestCase):
    def test_get_last_id_from_server_ignores_null(self) -> None:
        """
        Verifies that get_last_id_from_server ignores null remote_ids, since this goes
        against the default Postgres ordering behavior, which treats nulls as the largest value.
        """
        self.server_uuid = "6cde5f7a-1f7e-4978-9716-49f69ebfc9fe"
        self.server = RemoteZulipServer.objects.create(
            uuid=self.server_uuid,
            api_key="magic_secret_api_key",
            hostname="demo.example.com",
            last_updated=timezone_now(),
        )
        first = RemoteInstallationCount.objects.create(
            end_time=timezone_now(), server=self.server, property="test", value=1, remote_id=1
        )
        RemoteInstallationCount.objects.create(
            end_time=timezone_now(), server=self.server, property="test2", value=1, remote_id=None
        )
        result = get_last_id_from_server(self.server, RemoteInstallationCount)
        self.assertEqual(result, first.remote_id)