from datetime import datetime, timedelta, timezone
from typing import Any, Dict, List, Optional, Tuple, Type
from unittest import mock

import orjson
from django.apps import apps
from django.db import models
from django.db.models import Sum
from django.utils.timezone import now as timezone_now
from psycopg2.sql import SQL, Literal

from analytics.lib.counts import (
    COUNT_STATS,
    CountStat,
    DependentCountStat,
    LoggingCountStat,
    do_aggregate_to_summary_table,
    do_drop_all_analytics_tables,
    do_drop_single_stat,
    do_fill_count_stat_at_hour,
    do_increment_logging_stat,
    get_count_stats,
    process_count_stat,
    sql_data_collector,
)
from analytics.models import (
    BaseCount,
    FillState,
    InstallationCount,
    RealmCount,
    StreamCount,
    UserCount,
    installation_epoch,
)
from zerver.lib.actions import (
    do_activate_user,
    do_create_realm,
    do_create_user,
    do_deactivate_user,
    do_invite_users,
    do_mark_all_as_read,
    do_mark_stream_messages_as_read,
    do_reactivate_user,
    do_resend_user_invite_email,
    do_revoke_user_invite,
    do_update_message_flags,
    update_user_activity_interval,
)
from zerver.lib.create_user import create_user
from zerver.lib.exceptions import InvitationError
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day
from zerver.lib.topic import DB_TOPIC_NAME
from zerver.models import (
    Client,
    Huddle,
    Message,
    PreregistrationUser,
    Realm,
    RealmAuditLog,
    Recipient,
    Stream,
    UserActivityInterval,
    UserProfile,
    get_client,
    get_user,
)
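

# Shared fixtures and assertion helpers for the analytics count tests below.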
class AnalyticsTestCase(ZulipTestCase):
    MINUTE = timedelta(seconds=60)
    HOUR = MINUTE * 60
    DAY = HOUR * 24
    TIME_ZERO = datetime(1988, 3, 14, tzinfo=timezone.utc)
    TIME_LAST_HOUR = TIME_ZERO - HOUR
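    # All test data is pinned relative to the fixed TIME_ZERO above, so the
    # hour/day bucket boundaries used by the CountStats are deterministic
    # across runs.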

    def setUp(self) -> None:
        super().setUp()
        self.default_realm = do_create_realm(
            string_id="realmtest", name="Realm Test", date_created=self.TIME_ZERO - 2 * self.DAY
        )

        # used to generate unique names in self.create_*
        self.name_counter = 100
        # used as defaults in self.assertCountEquals
        self.current_property: Optional[str] = None

    # Lightweight creation of users, streams, and messages
    def create_user(self, **kwargs: Any) -> UserProfile:
        self.name_counter += 1
        defaults = {
            "email": f"user{self.name_counter}@domain.tld",
            "date_joined": self.TIME_LAST_HOUR,
            "full_name": "full_name",
            "is_active": True,
            "is_bot": False,
            "realm": self.default_realm,
        }
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        kwargs["delivery_email"] = kwargs["email"]
        with mock.patch("zerver.lib.create_user.timezone_now", return_value=kwargs["date_joined"]):
            pass_kwargs: Dict[str, Any] = {}
            if kwargs["is_bot"]:
                pass_kwargs["bot_type"] = UserProfile.DEFAULT_BOT
                pass_kwargs["bot_owner"] = None
            return create_user(
                kwargs["email"],
                "password",
                kwargs["realm"],
                active=kwargs["is_active"],
                full_name=kwargs["full_name"],
                role=UserProfile.ROLE_REALM_ADMINISTRATOR,
                **pass_kwargs,
            )

    def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
        self.name_counter += 1
        defaults = {
            "name": f"stream name {self.name_counter}",
            "realm": self.default_realm,
            "date_created": self.TIME_LAST_HOUR,
        }
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        stream = Stream.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        stream.recipient = recipient
        stream.save(update_fields=["recipient"])
        return stream, recipient

    def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[Huddle, Recipient]:
        self.name_counter += 1
        defaults = {"huddle_hash": f"hash{self.name_counter}"}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        huddle = Huddle.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
        huddle.recipient = recipient
        huddle.save(update_fields=["recipient"])
        return huddle, recipient

    def create_message(self, sender: UserProfile, recipient: Recipient, **kwargs: Any) -> Message:
        defaults = {
            "sender": sender,
            "recipient": recipient,
            DB_TOPIC_NAME: "subject",
            "content": "hi",
            "date_sent": self.TIME_LAST_HOUR,
            "sending_client": get_client("website"),
        }
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return Message.objects.create(**kwargs)
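
    # Assertion helpers: assertCountEquals below checks the value of a single
    # matching row, while assertTableState checks the entire contents of a
    # *Count table.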
    # kwargs should only ever be a UserProfile or Stream.
    def assertCountEquals(
        self,
        table: Type[BaseCount],
        value: int,
        property: Optional[str] = None,
        subgroup: Optional[str] = None,
        end_time: datetime = TIME_ZERO,
        realm: Optional[Realm] = None,
        **kwargs: models.Model,
    ) -> None:
        if property is None:
            property = self.current_property
        queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs)
        if table is not InstallationCount:
            if realm is None:
                realm = self.default_realm
            queryset = queryset.filter(realm=realm)
        if subgroup is not None:
            queryset = queryset.filter(subgroup=subgroup)
        self.assertEqual(queryset.values_list("value", flat=True)[0], value)

    def assertTableState(
        self, table: Type[BaseCount], arg_keys: List[str], arg_values: List[List[object]]
    ) -> None:
        """Assert that the state of a *Count table is what it should be.

        Example usage:
            self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
                                  [['p1', 4], ['p2', 10, self.alt_realm]])

        table -- A *Count table.
        arg_keys -- List of columns of <table>.
        arg_values -- List of "rows" of <table>.

        Each entry of arg_values (e.g. ['p1', 4]) represents a row of <table>.
        The i'th value of the entry corresponds to the i'th arg_key, so e.g.
        the first arg_values entry here corresponds to a row of RealmCount
        with property='p1' and subgroup=4.

        Any columns not specified (in this case, every column of RealmCount
        other than property and subgroup) are either set to default values,
        or are ignored.

        The function checks that every entry of arg_values matches exactly one
        row of <table>, and that no additional rows exist. Note that this means
        checking a table with duplicate rows is not supported.
        """
        defaults = {
            "property": self.current_property,
            "subgroup": None,
            "end_time": self.TIME_ZERO,
            "value": 1,
        }
        for values in arg_values:
            kwargs: Dict[str, Any] = {}
            for i in range(len(values)):
                kwargs[arg_keys[i]] = values[i]
            for key, value in defaults.items():
                kwargs[key] = kwargs.get(key, value)
            if table is not InstallationCount:
                if "realm" not in kwargs:
                    if "user" in kwargs:
                        kwargs["realm"] = kwargs["user"].realm
                    elif "stream" in kwargs:
                        kwargs["realm"] = kwargs["stream"].realm
                    else:
                        kwargs["realm"] = self.default_realm
            self.assertEqual(table.objects.filter(**kwargs).count(), 1)
        self.assertEqual(table.objects.count(), len(arg_values))


class TestProcessCountStat(AnalyticsTestCase):
|
|
|
def make_dummy_count_stat(self, property: str) -> CountStat:
|
2021-02-12 08:19:30 +01:00
|
|
|
query = lambda kwargs: SQL(
|
|
|
|
"""
|
2020-06-09 10:46:28 +02:00
|
|
|
INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
|
|
|
|
VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
|
2021-02-12 08:19:30 +01:00
|
|
|
"""
|
|
|
|
).format(
|
2020-06-09 10:46:28 +02:00
|
|
|
default_realm_id=Literal(self.default_realm.id),
|
|
|
|
property=Literal(property),
|
|
|
|
)
|
2017-04-07 02:55:29 +02:00
|
|
|
return CountStat(property, sql_data_collector(RealmCount, query, None), CountStat.HOUR)
|
|
|
|
|

    def assertFillStateEquals(
        self, stat: CountStat, end_time: datetime, state: int = FillState.DONE
    ) -> None:
        fill_state = FillState.objects.filter(property=stat.property).first()
        self.assertEqual(fill_state.end_time, end_time)
        self.assertEqual(fill_state.state, state)

    def test_process_stat(self) -> None:
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat("test stat")
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=stat.property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat("test stat")
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 2)

    def test_bad_fill_to_time(self) -> None:
        stat = self.make_dummy_count_stat("test stat")
        with self.assertRaises(ValueError):
            process_count_stat(stat, installation_epoch() + 65 * self.MINUTE)
        with self.assertRaises(TimezoneNotUTCException):
            process_count_stat(stat, installation_epoch().replace(tzinfo=None))

    # This tests the LoggingCountStat branch of the code in do_delete_counts_at_hour.
    # It is important that do_delete_counts_at_hour not delete any of the collected
    # logging data!
    def test_process_logging_stat(self) -> None:
        end_time = self.TIME_ZERO

        user_stat = LoggingCountStat("user stat", UserCount, CountStat.DAY)
        stream_stat = LoggingCountStat("stream stat", StreamCount, CountStat.DAY)
        realm_stat = LoggingCountStat("realm stat", RealmCount, CountStat.DAY)
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        realm = self.default_realm
        UserCount.objects.create(
            user=user, realm=realm, property=user_stat.property, end_time=end_time, value=5
        )
        StreamCount.objects.create(
            stream=stream, realm=realm, property=stream_stat.property, end_time=end_time, value=5
        )
        RealmCount.objects.create(
            realm=realm, property=realm_stat.property, end_time=end_time, value=5
        )

        # Normal run of process_count_stat
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ["property", "value"], [[user_stat.property, 5]])
        self.assertTableState(StreamCount, ["property", "value"], [[stream_stat.property, 5]])
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [[user_stat.property, 5], [stream_stat.property, 5], [realm_stat.property, 5]],
        )
        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [[user_stat.property, 5], [stream_stat.property, 5], [realm_stat.property, 5]],
        )

        # Change the logged data and mark FillState as dirty
        UserCount.objects.update(value=6)
        StreamCount.objects.update(value=6)
        RealmCount.objects.filter(property=realm_stat.property).update(value=6)
        FillState.objects.update(state=FillState.STARTED)

        # Check that the change propagated (and the collected data wasn't deleted)
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ["property", "value"], [[user_stat.property, 6]])
        self.assertTableState(StreamCount, ["property", "value"], [[stream_stat.property, 6]])
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [[user_stat.property, 6], [stream_stat.property, 6], [realm_stat.property, 6]],
        )
        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [[user_stat.property, 6], [stream_stat.property, 6], [realm_stat.property, 6]],
        )
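
    # A DependentCountStat is only filled up to the earlier of fill_to_time and
    # its dependencies' last successful fills; the test below exercises both
    # bounds, plus a daily dependent stat with hourly dependencies.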
    def test_process_dependent_stat(self) -> None:
        stat1 = self.make_dummy_count_stat("stat1")
        stat2 = self.make_dummy_count_stat("stat2")
        query = lambda kwargs: SQL(
            """
            INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
            VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
        """
        ).format(
            default_realm_id=Literal(self.default_realm.id),
            property=Literal("stat3"),
        )
        stat3 = DependentCountStat(
            "stat3",
            sql_data_collector(RealmCount, query, None),
            CountStat.HOUR,
            dependencies=["stat1", "stat2"],
        )

        query = lambda kwargs: SQL(
            """
            INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
            VALUES ({default_realm_id}, 1, {property}, %(time_end)s)
        """
        ).format(
            default_realm_id=Literal(self.default_realm.id),
            property=Literal("stat4"),
        )
        stat4 = DependentCountStat(
            "stat4",
            sql_data_collector(RealmCount, query, None),
            CountStat.DAY,
            dependencies=["stat1", "stat2"],
        )

        dummy_count_stats = {
            "stat1": stat1,
            "stat2": stat2,
            "stat3": stat3,
            "stat4": stat4,
        }
        with mock.patch("analytics.lib.counts.COUNT_STATS", dummy_count_stats):
            hour = [installation_epoch() + i * self.HOUR for i in range(5)]

            # test when one dependency has been run, and the other hasn't
            process_count_stat(stat1, hour[2])
            process_count_stat(stat3, hour[1])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [["stat1", hour[1]], ["stat1", hour[2]]],
            )
            self.assertFillStateEquals(stat3, hour[0])

            # test that we don't fill past the fill_to_time argument, even if
            # dependencies have later last_successful_fill
            process_count_stat(stat2, hour[3])
            process_count_stat(stat3, hour[1])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [
                    ["stat1", hour[1]],
                    ["stat1", hour[2]],
                    ["stat2", hour[1]],
                    ["stat2", hour[2]],
                    ["stat2", hour[3]],
                    ["stat3", hour[1]],
                ],
            )
            self.assertFillStateEquals(stat3, hour[1])

            # test that we don't fill past the dependency last_successful_fill times,
            # even if fill_to_time is later
            process_count_stat(stat3, hour[4])
            self.assertTableState(
                InstallationCount,
                ["property", "end_time"],
                [
                    ["stat1", hour[1]],
                    ["stat1", hour[2]],
                    ["stat2", hour[1]],
                    ["stat2", hour[2]],
                    ["stat2", hour[3]],
                    ["stat3", hour[1]],
                    ["stat3", hour[2]],
                ],
            )
            self.assertFillStateEquals(stat3, hour[2])

            # test daily dependent stat with hourly dependencies
            hour24 = installation_epoch() + 24 * self.HOUR
            hour25 = installation_epoch() + 25 * self.HOUR
            process_count_stat(stat1, hour25)
            process_count_stat(stat2, hour25)
            process_count_stat(stat4, hour25)
            self.assertEqual(InstallationCount.objects.filter(property="stat4").count(), 1)
            self.assertFillStateEquals(stat4, hour24)
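

# Tests for the individual CountStats in COUNT_STATS. Most stats get a plain
# test plus a *_realm_constraint variant that passes a realm to
# do_fill_count_stat_at_hour and checks that nothing is aggregated into
# InstallationCount in that case.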
class TestCountStats(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        # This tests two things for each of the queries/CountStats: Handling
        # more than 1 realm, and the time bounds (time_start and time_end in
        # the queries).
        self.second_realm = do_create_realm(
            string_id="second-realm",
            name="Second Realm",
            date_created=self.TIME_ZERO - 2 * self.DAY,
        )

        for minutes_ago in [0, 1, 61, 60 * 24 + 1]:
            creation_time = self.TIME_ZERO - minutes_ago * self.MINUTE
            user = self.create_user(
                email=f"user-{minutes_ago}@second.analytics",
                realm=self.second_realm,
                date_joined=creation_time,
            )
            recipient = self.create_stream_with_recipient(
                name=f"stream {minutes_ago}", realm=self.second_realm, date_created=creation_time
            )[1]
            self.create_message(user, recipient, date_sent=creation_time)
        self.hourly_user = get_user("user-1@second.analytics", self.second_realm)
        self.daily_user = get_user("user-61@second.analytics", self.second_realm)

        # This realm should not show up in the *Count tables for any of the
        # messages_* CountStats
        self.no_message_realm = do_create_realm(
            string_id="no-message-realm",
            name="No Message Realm",
            date_created=self.TIME_ZERO - 2 * self.DAY,
        )

        self.create_user(realm=self.no_message_realm)
        self.create_stream_with_recipient(realm=self.no_message_realm)
        # This huddle should not show up anywhere
        self.create_huddle_with_recipient()
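
    # The setUp loop above seeds self.second_realm with a user, a stream, and a
    # message at 0, 1, 61, and 60 * 24 + 1 minutes before TIME_ZERO, i.e. just
    # inside and just outside the hour and day boundaries; self.hourly_user and
    # self.daily_user are the users created 1 and 61 minutes ago.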
    def test_active_users_by_is_bot(self) -> None:
        stat = COUNT_STATS["active_users:is_bot:day"]
        self.current_property = stat.property

        # To be included
        self.create_user(is_bot=True)
        self.create_user(is_bot=True, date_joined=self.TIME_ZERO - 25 * self.HOUR)
        self.create_user(is_bot=False)

        # To be excluded
        self.create_user(is_active=False)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [
                [2, "true"],
                [1, "false"],
                [3, "false", self.second_realm],
                [1, "false", self.no_message_realm],
            ],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[2, "true"], [5, "false"]])
        self.assertTableState(UserCount, [], [])
        self.assertTableState(StreamCount, [], [])
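
    # get_count_stats(realm) returns realm-parameterized versions of the stats;
    # when do_fill_count_stat_at_hour is called with a realm, nothing is
    # aggregated into InstallationCount, which the assertions below verify.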
    def test_active_users_by_is_bot_for_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["active_users:is_bot:day"]
        self.current_property = stat.property

        # To be included
        self.create_user(is_bot=True, date_joined=self.TIME_ZERO - 25 * self.HOUR)
        self.create_user(is_bot=False)

        # To be excluded
        self.create_user(
            email="test@second.analytics",
            realm=self.second_realm,
            date_joined=self.TIME_ZERO - 2 * self.DAY,
        )

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)
        self.assertTableState(RealmCount, ["value", "subgroup"], [[1, "true"], [1, "false"]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(UserCount, [], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_is_bot(self) -> None:
        stat = COUNT_STATS["messages_sent:is_bot:hour"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "false", human1],
                [1, "false", human2],
                [3, "true", bot],
                [1, "false", self.hourly_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, "false"], [3, "true"], [1, "false", self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[3, "false"], [3, "true"]])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_is_bot_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:is_bot:hour"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        # To be included
        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        # To be excluded
        self.create_message(self.hourly_user, recipient_human1)
        self.create_message(self.hourly_user, recipient_stream)
        self.create_message(self.hourly_user, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [[1, "false", human1], [1, "false", human2], [3, "true", bot]],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, "false", self.default_realm], [3, "true", self.default_realm]],
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        # Nothing currently in this stat that is bot related, but so many of
        # the rest of our stats make the human/bot distinction that one can
        # imagine a later refactoring that will intentionally or
        # unintentionally change this. So make one of our users a bot.
        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        user3 = self.create_user()

        # private streams
        recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
        recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
        self.create_message(user1, recipient_stream1)
        self.create_message(user2, recipient_stream1)
        self.create_message(user2, recipient_stream2)

        # public streams
        recipient_stream3 = self.create_stream_with_recipient()[1]
        recipient_stream4 = self.create_stream_with_recipient()[1]
        self.create_message(user1, recipient_stream3)
        self.create_message(user1, recipient_stream4)
        self.create_message(user2, recipient_stream3)

        # huddles
        recipient_huddle1 = self.create_huddle_with_recipient()[1]
        recipient_huddle2 = self.create_huddle_with_recipient()[1]
        self.create_message(user1, recipient_huddle1)
        self.create_message(user2, recipient_huddle2)

        # private messages
        recipient_user1 = Recipient.objects.get(type_id=user1.id, type=Recipient.PERSONAL)
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)
        recipient_user3 = Recipient.objects.get(type_id=user3.id, type=Recipient.PERSONAL)
        self.create_message(user1, recipient_user2)
        self.create_message(user2, recipient_user1)
        self.create_message(user3, recipient_user3)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "private_stream", user1],
                [2, "private_stream", user2],
                [2, "public_stream", user1],
                [1, "public_stream", user2],
                [1, "private_message", user1],
                [1, "private_message", user2],
                [1, "private_message", user3],
                [1, "huddle_message", user1],
                [1, "huddle_message", user2],
                [1, "public_stream", self.hourly_user],
                [1, "public_stream", self.daily_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [
                [3, "private_stream"],
                [3, "public_stream"],
                [3, "private_message"],
                [2, "huddle_message"],
                [2, "public_stream", self.second_realm],
            ],
        )
        self.assertTableState(
            InstallationCount,
            ["value", "subgroup"],
            [
                [3, "private_stream"],
                [5, "public_stream"],
                [3, "private_message"],
                [2, "huddle_message"],
            ],
        )
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_message_type_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        user = self.create_user()
        user_recipient = Recipient.objects.get(type_id=user.id, type=Recipient.PERSONAL)
        private_stream_recipient = self.create_stream_with_recipient(invite_only=True)[1]
        stream_recipient = self.create_stream_with_recipient()[1]
        huddle_recipient = self.create_huddle_with_recipient()[1]

        # To be included
        self.create_message(user, user_recipient)
        self.create_message(user, private_stream_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        # To be excluded
        self.create_message(self.hourly_user, user_recipient)
        self.create_message(self.hourly_user, private_stream_recipient)
        self.create_message(self.hourly_user, stream_recipient)
        self.create_message(self.hourly_user, huddle_recipient)

        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [1, "private_message", user],
                [1, "private_stream", user],
                [1, "huddle_message", user],
                [1, "public_stream", user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup"],
            [
                [1, "private_message"],
                [1, "private_stream"],
                [1, "public_stream"],
                [1, "huddle_message"],
            ],
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])
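
    # The test below creates a user, a stream, and a huddle sharing id 1000, so
    # their Recipient rows share the same type_id; it checks that each message
    # is still counted under the correct message_type subgroup.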
    def test_messages_sent_to_recipients_with_same_id(self) -> None:
        stat = COUNT_STATS["messages_sent:message_type:day"]
        self.current_property = stat.property

        user = self.create_user(id=1000)
        user_recipient = Recipient.objects.get(type_id=user.id, type=Recipient.PERSONAL)
        stream_recipient = self.create_stream_with_recipient(id=1000)[1]
        huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]

        self.create_message(user, user_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertCountEquals(UserCount, 1, subgroup="private_message")
        self.assertCountEquals(UserCount, 1, subgroup="huddle_message")
        self.assertCountEquals(UserCount, 1, subgroup="public_stream")

    def test_messages_sent_by_client(self) -> None:
        stat = COUNT_STATS["messages_sent:client:day"]
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        client2 = Client.objects.create(name="client2")

        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user1, recipient_stream)
        self.create_message(user1, recipient_huddle)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        client2_id = str(client2.id)
        website_client_id = str(get_client("website").id)  # default for self.create_message
        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [
                [2, website_client_id, user1],
                [1, client2_id, user1],
                [2, client2_id, user2],
                [1, website_client_id, self.hourly_user],
                [1, website_client_id, self.daily_user],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[2, website_client_id], [3, client2_id], [2, website_client_id, self.second_realm]],
        )
        self.assertTableState(
            InstallationCount, ["value", "subgroup"], [[4, website_client_id], [3, client2_id]]
        )
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_client_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_sent:client:day"]
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.get(type_id=user2.id, type=Recipient.PERSONAL)

        client2 = Client.objects.create(name="client2")

        # To be included
        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2)

        # To be excluded
        self.create_message(self.hourly_user, recipient_user2, sending_client=client2)
        self.create_message(self.hourly_user, recipient_user2, sending_client=client2)
        self.create_message(self.hourly_user, recipient_user2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        client2_id = str(client2.id)
        website_client_id = str(get_client("website").id)  # default for self.create_message
        self.assertTableState(
            UserCount,
            ["value", "subgroup", "user"],
            [[1, client2_id, user1], [1, client2_id, user2], [1, website_client_id, user2]],
        )
        self.assertTableState(
            RealmCount, ["value", "subgroup"], [[1, website_client_id], [2, client2_id]]
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_stream_by_is_bot(self) -> None:
        stat = COUNT_STATS["messages_in_stream:is_bot:day"]
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.get(type_id=human1.id, type=Recipient.PERSONAL)

        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient()

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(human2, recipient_stream1)
        self.create_message(human1, recipient_stream2)
        self.create_message(bot, recipient_stream2)
        self.create_message(bot, recipient_stream2)

        # To be excluded
        self.create_message(human2, recipient_human1)
        self.create_message(bot, recipient_human1)
        recipient_huddle = self.create_huddle_with_recipient()[1]
        self.create_message(human1, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(
            StreamCount,
            ["value", "subgroup", "stream"],
            [
                [2, "false", stream1],
                [1, "false", stream2],
                [2, "true", stream2],
                # "hourly" and "daily" stream, from TestCountStats.setUp
                [1, "false", Stream.objects.get(name="stream 1")],
                [1, "false", Stream.objects.get(name="stream 61")],
            ],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [[3, "false"], [2, "true"], [2, "false", self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[5, "false"], [2, "true"]])
        self.assertTableState(UserCount, [], [])

    def test_messages_sent_to_stream_by_is_bot_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["messages_in_stream:is_bot:day"]
        self.current_property = stat.property

        human1 = self.create_user()
        bot = self.create_user(is_bot=True)

        realm = {"realm": self.second_realm}
        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient(**realm)

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(bot, recipient_stream1)

        # To be excluded
        self.create_message(self.hourly_user, recipient_stream2)
        self.create_message(self.daily_user, recipient_stream2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(
            StreamCount,
            ["value", "subgroup", "stream"],
            [[1, "false", stream1], [1, "true", stream1]],
        )
        self.assertTableState(
            RealmCount, ["value", "subgroup", "realm"], [[1, "false"], [1, "true"]]
        )
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value", "subgroup"], [])
        self.assertTableState(UserCount, [], [])
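
    # Helper for the N-day active tests below: records a UserActivityInterval
    # whose start and end are the given offsets measured backwards from
    # TIME_ZERO.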
    def create_interval(
        self, user: UserProfile, start_offset: timedelta, end_offset: timedelta
    ) -> None:
        UserActivityInterval.objects.create(
            user_profile=user, start=self.TIME_ZERO - start_offset, end=self.TIME_ZERO - end_offset
        )

    def test_1day_actives(self) -> None:
        stat = COUNT_STATS["1day_actives::day"]
        self.current_property = stat.property

        _1day = 1 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _1day + self.DAY, _1day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _1day + self.DAY, _1day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 2 * self.DAY, 1 * self.DAY)
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 1.5 * self.DAY, 0.5 * self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 2 * self.DAY, -2 * self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount,
            ["value", "user"],
            [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]],
        )
        self.assertTableState(
            RealmCount, ["value", "realm"], [[5, self.default_realm], [1, self.second_realm]]
        )
        self.assertTableState(InstallationCount, ["value"], [[6]])
        self.assertTableState(StreamCount, [], [])
2020-01-16 02:43:51 +01:00
|
|
|
    def test_1day_actives_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["1day_actives::day"]
        self.current_property = stat.property

        _1day = 1 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH
        user1 = self.create_user()
        user2 = self.create_user()

        # To be included
        self.create_interval(user1, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, _1day + self.DAY, _1day)

        # To be excluded
        user3 = self.create_user(realm=self.second_realm)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)
        self.assertTableState(UserCount, ["value", "user"], [[1, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[2, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])

    def test_15day_actives(self) -> None:
        stat = COUNT_STATS["15day_actives::day"]
        self.current_property = stat.property

        _15day = 15 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _15day + self.DAY, _15day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _15day + self.DAY, _15day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 20 * self.DAY, 19 * self.DAY)
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 20 * self.DAY, 10 * self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 20 * self.DAY, -2 * self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount,
            ["value", "user"],
            [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]],
        )
        self.assertTableState(
            RealmCount, ["value", "realm"], [[5, self.default_realm], [1, self.second_realm]]
        )
        self.assertTableState(InstallationCount, ["value"], [[6]])
        self.assertTableState(StreamCount, [], [])

    def test_15day_actives_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["15day_actives::day"]
        self.current_property = stat.property

        _15day = 15 * self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user(realm=self.second_realm)

        # To be included
        self.create_interval(user1, _15day + self.DAY, _15day)
        self.create_interval(user2, 20 * self.HOUR, 19 * self.HOUR)

        # To be excluded
        self.create_interval(user3, 20 * self.HOUR, 19 * self.HOUR)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)

        self.assertTableState(UserCount, ["value", "user"], [[1, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[2, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])

    def test_minutes_active(self) -> None:
        stat = COUNT_STATS["minutes_active::day"]
        self.current_property = stat.property

        # Outside time range, should not appear. Also testing for intervals
        # starting and ending on boundary
        user1 = self.create_user()
        self.create_interval(user1, 25 * self.HOUR, self.DAY)
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # Multiple intervals, including one outside boundary
        user2 = self.create_user()
        self.create_interval(user2, 20 * self.DAY, 19 * self.DAY)
        self.create_interval(user2, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, 20 * self.MINUTE, 19 * self.MINUTE)

        # Intervals crossing boundary
        user3 = self.create_user()
        self.create_interval(user3, 25 * self.HOUR, 22 * self.HOUR)
        self.create_interval(user3, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user4 = self.create_user()
        self.create_interval(user4, 2 * self.DAY, -2 * self.DAY)

        # Less than 60 seconds, should not appear
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, timedelta(seconds=30))
        self.create_interval(user5, timedelta(seconds=20), timedelta(seconds=10))

        # Second realm
        user6 = self.create_user(realm=self.second_realm)
        self.create_interval(user6, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
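        # Expected minute totals: user2 gets 60 + 1 = 61 (the 20-day-old interval
        # is outside the day window), user3 gets 120 + 1 = 121 (only the 2 hours
        # inside the window and the 1 minute before TIME_ZERO count), user4's
        # subsuming interval is clipped to the full 24 * 60 minutes, and user6
        # contributes 1 minute to the second realm.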
        self.assertTableState(
            UserCount, ["value", "user"], [[61, user2], [121, user3], [24 * 60, user4], [1, user6]]
        )
        self.assertTableState(
            RealmCount,
            ["value", "realm"],
            [[61 + 121 + 24 * 60, self.default_realm], [1, self.second_realm]],
        )
        self.assertTableState(InstallationCount, ["value"], [[61 + 121 + 24 * 60 + 1]])
        self.assertTableState(StreamCount, [], [])

    def test_minutes_active_realm_constraint(self) -> None:
        # For single Realm

        COUNT_STATS = get_count_stats(self.default_realm)
        stat = COUNT_STATS["minutes_active::day"]
        self.current_property = stat.property

        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user(realm=self.second_realm)

        # To be included
        self.create_interval(user1, 20 * self.HOUR, 19 * self.HOUR)
        self.create_interval(user2, 20 * self.MINUTE, 19 * self.MINUTE)

        # To be excluded
        self.create_interval(user3, 20 * self.MINUTE, 19 * self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO, self.default_realm)
        self.assertTableState(UserCount, ["value", "user"], [[60, user1], [1, user2]])
        self.assertTableState(RealmCount, ["value", "realm"], [[60 + 1, self.default_realm]])
        # No aggregation to InstallationCount with realm constraint
        self.assertTableState(InstallationCount, ["value"], [])
        self.assertTableState(StreamCount, [], [])

    def test_last_successful_fill(self) -> None:
        self.assertIsNone(COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill())

        a_time = datetime(2016, 3, 14, 19, tzinfo=timezone.utc)
        one_hour_before = datetime(2016, 3, 14, 18, tzinfo=timezone.utc)
        one_day_before = datetime(2016, 3, 13, 19, tzinfo=timezone.utc)

        fillstate = FillState.objects.create(
            property=COUNT_STATS["messages_sent:is_bot:hour"].property,
            end_time=a_time,
            state=FillState.DONE,
        )
        self.assertEqual(COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill(), a_time)

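        # A fill that is merely STARTED is not counted as successful, so
        # last_successful_fill() falls back by one frequency interval: one hour
        # for an hourly stat, one day for a daily stat (checked below).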
        fillstate.state = FillState.STARTED
        fillstate.save(update_fields=["state"])
        self.assertEqual(
            COUNT_STATS["messages_sent:is_bot:hour"].last_successful_fill(), one_hour_before
        )

        fillstate.property = COUNT_STATS["7day_actives::day"].property
        fillstate.save(update_fields=["property"])
        self.assertEqual(COUNT_STATS["7day_actives::day"].last_successful_fill(), one_day_before)


class TestDoAggregateToSummaryTable(AnalyticsTestCase):
    # do_aggregate_to_summary_table is mostly tested by the end-to-end nature
    # of the tests in TestCountStats. But we want to highlight one feature that
    # is important for keeping the analytics tables small: if there is no
    # relevant data in the table being aggregated, the aggregation table
    # doesn't get a row with value 0.
    def test_no_aggregated_zeros(self) -> None:
        stat = LoggingCountStat("test stat", UserCount, CountStat.HOUR)
        do_aggregate_to_summary_table(stat, self.TIME_ZERO)
        self.assertFalse(RealmCount.objects.exists())
        self.assertFalse(InstallationCount.objects.exists())


class TestDoIncrementLoggingStat(AnalyticsTestCase):
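    # As the tests below exercise, do_increment_logging_stat appears to upsert
    # a row in the stat's *Count table keyed by the target object, property,
    # subgroup, and the end_time rounded to the stat's frequency, adding
    # `increment` (default 1) to its value.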
    def test_table_and_id_args(self) -> None:
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = "test"
        second_realm = do_create_realm(string_id="moo", name="moo")

        stat = LoggingCountStat("test", RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["realm"], [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat("test", UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ["user"], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat("test", StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ["stream"], [[stream1], [stream2]])

    def test_frequency(self) -> None:
        times = [self.TIME_ZERO - self.MINUTE * i for i in [0, 1, 61, 24 * 60 + 1]]

        stat = LoggingCountStat("day test", RealmCount, CountStat.DAY)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)
        stat = LoggingCountStat("hour test", RealmCount, CountStat.HOUR)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)

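        # The day stat buckets the first three times (0, 1, and 61 minutes ago)
        # into the day ending at TIME_ZERO and the 24h+1min-old time into the
        # previous day; the hour stat splits them across the hours ending at
        # TIME_ZERO, TIME_LAST_HOUR, and TIME_ZERO - DAY.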
        self.assertTableState(
            RealmCount,
            ["value", "property", "end_time"],
            [
                [3, "day test", self.TIME_ZERO],
                [1, "day test", self.TIME_ZERO - self.DAY],
                [2, "hour test", self.TIME_ZERO],
                [1, "hour test", self.TIME_LAST_HOUR],
                [1, "hour test", self.TIME_ZERO - self.DAY],
            ],
        )

    def test_get_or_create(self) -> None:
        stat = LoggingCountStat("test", RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in
        # test_table_and_id_args, so this only tests a new subgroup and end_time.
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, "subgroup2", self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_LAST_HOUR)
        self.current_property = "test"
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "end_time"],
            [
                [1, "subgroup1", self.TIME_ZERO],
                [1, "subgroup2", self.TIME_ZERO],
                [1, "subgroup1", self.TIME_LAST_HOUR],
            ],
        )
        # This should trigger the get part of get_or_create
        do_increment_logging_stat(self.default_realm, stat, "subgroup1", self.TIME_ZERO)
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "end_time"],
            [
                [2, "subgroup1", self.TIME_ZERO],
                [1, "subgroup2", self.TIME_ZERO],
                [1, "subgroup1", self.TIME_LAST_HOUR],
            ],
        )

    def test_increment(self) -> None:
        stat = LoggingCountStat("test", RealmCount, CountStat.DAY)
        self.current_property = "test"
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
        self.assertTableState(RealmCount, ["value"], [[-1]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
        self.assertTableState(RealmCount, ["value"], [[2]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ["value"], [[3]])


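# The LoggingCountStat tests below rely on values being written at event time
# via do_increment_logging_stat; as test_aggregation exercises, a later
# process_count_stat then only aggregates those rows up to RealmCount and
# InstallationCount rather than recomputing them.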
class TestLoggingCountStats(AnalyticsTestCase):
    def test_aggregation(self) -> None:
        stat = LoggingCountStat("realm test", RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat("user test", UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat("stream test", StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        self.assertTableState(
            InstallationCount,
            ["property", "value"],
            [["realm test", 1], ["user test", 1], ["stream test", 1]],
        )
        self.assertTableState(
            RealmCount,
            ["property", "value"],
            [["realm test", 1], ["user test", 1], ["stream test", 1]],
        )
        self.assertTableState(UserCount, ["property", "value"], [["user test", 1]])
        self.assertTableState(StreamCount, ["property", "value"], [["stream test", 1]])

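    # active_users_log:is_bot:day is a logging-style stat, so the user
    # lifecycle actions below adjust the RealmCount rows immediately, without
    # needing a process_count_stat fill step.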
    def test_active_users_log_by_is_bot(self) -> None:
        property = "active_users_log:is_bot:day"
        user = do_create_user(
            "email", "password", self.default_realm, "full_name", acting_user=None
        )
        self.assertEqual(
            1,
            RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        do_deactivate_user(user, acting_user=None)
        self.assertEqual(
            0,
            RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        do_activate_user(user, acting_user=None)
        self.assertEqual(
            1,
            RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        do_deactivate_user(user, acting_user=None)
        self.assertEqual(
            0,
            RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        do_reactivate_user(user, acting_user=None)
        self.assertEqual(
            1,
            RealmCount.objects.filter(property=property, subgroup=False).aggregate(Sum("value"))[
                "value__sum"
            ],
        )

    def test_invites_sent(self) -> None:
        property = "invites_sent::day"

        def assertInviteCountEquals(count: int) -> None:
            self.assertEqual(
                count,
                RealmCount.objects.filter(property=property, subgroup=None).aggregate(Sum("value"))[
                    "value__sum"
                ],
            )

        user = self.create_user(email="first@domain.tld")
        stream, _ = self.create_stream_with_recipient()
        do_invite_users(user, ["user1@domain.tld", "user2@domain.tld"], [stream])
        assertInviteCountEquals(2)

        # We currently send emails when re-inviting users that haven't
        # turned into accounts, so count them towards the total
        do_invite_users(user, ["user1@domain.tld", "user2@domain.tld"], [stream])
        assertInviteCountEquals(4)

        # Test mix of good and malformed invite emails
        try:
            do_invite_users(user, ["user3@domain.tld", "malformed"], [stream])
        except InvitationError:
            pass
        assertInviteCountEquals(4)

        # Test inviting existing users
        try:
            do_invite_users(user, ["first@domain.tld", "user4@domain.tld"], [stream])
        except InvitationError:
            pass
        assertInviteCountEquals(5)

        # Revoking an invite should not give you credit
        do_revoke_user_invite(PreregistrationUser.objects.filter(realm=user.realm).first())
        assertInviteCountEquals(5)

        # Resending an invite should cost you
        do_resend_user_invite_email(PreregistrationUser.objects.first())
        assertInviteCountEquals(6)

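    # messages_read::hour counts individual messages marked as read, while
    # messages_read_interactions::hour appears to count read-marking operations
    # (one per do_mark_* / flag-update call), which is why the two totals
    # diverge below.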
    def test_messages_read_hour(self) -> None:
        read_count_property = "messages_read::hour"
        interactions_property = "messages_read_interactions::hour"

        user1 = self.create_user()
        user2 = self.create_user()
        stream, recipient = self.create_stream_with_recipient()
        self.subscribe(user1, stream.name)
        self.subscribe(user2, stream.name)

        self.send_personal_message(user1, user2)
        client = get_client("website")
        do_mark_all_as_read(user2, client)
        self.assertEqual(
            1,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            1,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )

        self.send_stream_message(user1, stream.name)
        self.send_stream_message(user1, stream.name)
        do_mark_stream_messages_as_read(user2, stream.recipient_id)
        self.assertEqual(
            3,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            2,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )

        message = self.send_stream_message(user2, stream.name)
        do_update_message_flags(user1, client, "add", "read", [message])
        self.assertEqual(
            4,
            UserCount.objects.filter(property=read_count_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )
        self.assertEqual(
            3,
            UserCount.objects.filter(property=interactions_property).aggregate(Sum("value"))[
                "value__sum"
            ],
        )


class TestDeleteStats(AnalyticsTestCase):
    def test_do_drop_all_analytics_tables(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args = {"property": "test", "end_time": self.TIME_ZERO, "value": 10}

        UserCount.objects.create(user=user, realm=user.realm, **count_args)
        StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
        RealmCount.objects.create(realm=user.realm, **count_args)
        InstallationCount.objects.create(**count_args)
        FillState.objects.create(property="test", end_time=self.TIME_ZERO, state=FillState.DONE)

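        # apps.get_app_config("analytics").models iterates every model
        # registered in the analytics app, so these loops cover all of the
        # *Count tables as well as FillState.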
analytics = apps.get_app_config("analytics")
|
2017-03-12 08:55:55 +01:00
|
|
|
for table in list(analytics.models.values()):
|
|
|
|
self.assertTrue(table.objects.exists())
|
|
|
|
|
|
|
|
do_drop_all_analytics_tables()
|
|
|
|
for table in list(analytics.models.values()):
|
|
|
|
self.assertFalse(table.objects.exists())
|
2017-04-01 03:26:35 +02:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
    def test_do_drop_single_stat(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args_to_delete = {"property": "to_delete", "end_time": self.TIME_ZERO, "value": 10}
        count_args_to_save = {"property": "to_save", "end_time": self.TIME_ZERO, "value": 10}

        for count_args in [count_args_to_delete, count_args_to_save]:
            UserCount.objects.create(user=user, realm=user.realm, **count_args)
            StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
            RealmCount.objects.create(realm=user.realm, **count_args)
            InstallationCount.objects.create(**count_args)
        FillState.objects.create(
            property="to_delete", end_time=self.TIME_ZERO, state=FillState.DONE
        )
        FillState.objects.create(property="to_save", end_time=self.TIME_ZERO, state=FillState.DONE)

        analytics = apps.get_app_config("analytics")
        for table in list(analytics.models.values()):
            self.assertTrue(table.objects.exists())

        do_drop_single_stat("to_delete")
        for table in list(analytics.models.values()):
            self.assertFalse(table.objects.filter(property="to_delete").exists())
            self.assertTrue(table.objects.filter(property="to_save").exists())


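# active_users_audit:is_bot:day derives per-user activity from RealmAuditLog
# entries (user created/activated/deactivated/reactivated) rather than from
# message traffic, which is what these tests exercise.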
class TestActiveUsersAudit(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user = self.create_user()
        self.stat = COUNT_STATS["active_users_audit:is_bot:day"]
        self.current_property = self.stat.property

    def add_event(
        self, event_type: int, days_offset: float, user: Optional[UserProfile] = None
    ) -> None:
        hours_offset = int(24 * days_offset)
        if user is None:
            user = self.user
        RealmAuditLog.objects.create(
            realm=user.realm,
            modified_user=user,
            event_type=event_type,
            event_time=self.TIME_ZERO - hours_offset * self.HOUR,
        )

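    # add_event() records a RealmAuditLog entry days_offset days before
    # TIME_ZERO (converted to whole hours); e.g.
    # add_event(RealmAuditLog.USER_CREATED, 0.5) logs a creation 12 hours
    # before TIME_ZERO, inside the day bucket ending at TIME_ZERO.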
    def test_user_deactivated_in_future(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["subgroup"], [["false"]])

    def test_user_reactivated_in_future(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_user_active_then_deactivated_same_day(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_user_deactivated_then_reactivated_same_day(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["subgroup"], [["false"]])

    # Arguably these next two tests are duplicates of the _in_future tests, but
    # they are a guard against future refactorings where they may no longer be
    # duplicates.
    def test_user_active_then_deactivated_with_day_gap(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 2)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount, ["subgroup", "end_time"], [["false", self.TIME_ZERO - self.DAY]]
        )

    def test_user_deactivated_then_reactivated_with_day_gap(self) -> None:
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 2)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["subgroup"], [["false"]])

    def test_event_types(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 4)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 3)
        self.add_event(RealmAuditLog.USER_ACTIVATED, 2)
        self.add_event(RealmAuditLog.USER_REACTIVATED, 1)
        for i in range(4):
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO - i * self.DAY)
        self.assertTableState(
            UserCount,
            ["subgroup", "end_time"],
            [["false", self.TIME_ZERO - i * self.DAY] for i in [3, 1, 0]],
        )

    # Also tests that aggregation to RealmCount and InstallationCount is
    # being done, and that we're storing the user correctly in UserCount
    def test_multiple_users_realms_and_bots(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        second_realm = do_create_realm(string_id="moo", name="moo")
        user3 = self.create_user(realm=second_realm)
        user4 = self.create_user(realm=second_realm, is_bot=True)
        for user in [user1, user2, user3, user4]:
            self.add_event(RealmAuditLog.USER_CREATED, 1, user=user)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount,
            ["subgroup", "user"],
            [["false", user1], ["false", user2], ["false", user3], ["true", user4]],
        )
        self.assertTableState(
            RealmCount,
            ["value", "subgroup", "realm"],
            [
                [2, "false", self.default_realm],
                [1, "false", second_realm],
                [1, "true", second_realm],
            ],
        )
        self.assertTableState(InstallationCount, ["value", "subgroup"], [[3, "false"], [1, "true"]])
        self.assertTableState(StreamCount, [], [])

    # Not that interesting a test if you look at the SQL query at hand, but
    # almost all other CountStats have a start_date, so this guards against a
    # refactoring that adds one in.
    # Also tests the slightly more end-to-end process_count_stat rather than
    # do_fill_count_stat_at_hour. E.g. if one changes self.stat.frequency to
    # CountStat.HOUR from CountStat.DAY, this will fail, while many of the
    # tests above will not.
    def test_update_from_two_days_ago(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 2)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(
            UserCount,
            ["subgroup", "end_time"],
            [["false", self.TIME_ZERO], ["false", self.TIME_ZERO - self.DAY]],
        )

    # A user with no relevant activity could happen e.g. for a system bot that
    # doesn't go through do_create_user. Mainly we just want to make sure that
    # situation doesn't throw an error.
    def test_empty_realm_or_user_with_no_relevant_activity(self) -> None:
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 1)
        self.create_user()  # also test a user with no RealmAuditLog entries
        do_create_realm(string_id="moo", name="moo")
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_max_audit_entry_is_unrelated(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 0.5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["subgroup"], [["false"]])

    # Simultaneous related audit entries should not be allowed, so we don't test for that.
    def test_simultaneous_unrelated_audit_entry(self) -> None:
        self.add_event(RealmAuditLog.USER_CREATED, 1)
        self.add_event(RealmAuditLog.USER_SOFT_ACTIVATED, 1)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["subgroup"], [["false"]])

    def test_simultaneous_max_audit_entries_of_different_users(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user()
        self.add_event(RealmAuditLog.USER_CREATED, 0.5, user=user1)
        self.add_event(RealmAuditLog.USER_CREATED, 0.5, user=user2)
        self.add_event(RealmAuditLog.USER_CREATED, 1, user=user3)
        self.add_event(RealmAuditLog.USER_DEACTIVATED, 0.5, user=user3)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ["user", "subgroup"], [[user1, "false"], [user2, "false"]])

    def test_end_to_end_with_actions_dot_py(self) -> None:
        user1 = do_create_user(
            "email1", "password", self.default_realm, "full_name", acting_user=None
        )
        user2 = do_create_user(
            "email2", "password", self.default_realm, "full_name", acting_user=None
        )
        user3 = do_create_user(
            "email3", "password", self.default_realm, "full_name", acting_user=None
        )
        user4 = do_create_user(
            "email4", "password", self.default_realm, "full_name", acting_user=None
        )
        do_deactivate_user(user2, acting_user=None)
        do_activate_user(user3, acting_user=None)
        do_reactivate_user(user4, acting_user=None)
        end_time = floor_to_day(timezone_now()) + self.DAY
        do_fill_count_stat_at_hour(self.stat, end_time)
        for user in [user1, user3, user4]:
            self.assertTrue(
                UserCount.objects.filter(
                    user=user,
                    property=self.current_property,
                    subgroup="false",
                    end_time=end_time,
                    value=1,
                ).exists()
            )
        self.assertFalse(UserCount.objects.filter(user=user2, end_time=end_time).exists())


class TestRealmActiveHumans(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.stat = COUNT_STATS["realm_active_humans::day"]
        self.current_property = self.stat.property

    def mark_audit_active(self, user: UserProfile, end_time: Optional[datetime] = None) -> None:
        if end_time is None:
            end_time = self.TIME_ZERO
        UserCount.objects.create(
            user=user,
            realm=user.realm,
            property="active_users_audit:is_bot:day",
            subgroup=orjson.dumps(user.is_bot).decode(),
            end_time=end_time,
            value=1,
        )

    def mark_15day_active(self, user: UserProfile, end_time: Optional[datetime] = None) -> None:
        if end_time is None:
            end_time = self.TIME_ZERO
        UserCount.objects.create(
            user=user, realm=user.realm, property="15day_actives::day", end_time=end_time, value=1
        )

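    # These helpers fabricate the UserCount rows that realm_active_humans::day
    # reads from; as test_basic_boolean_logic checks, a user is only counted on
    # days where they have both an active_users_audit:is_bot:day row and a
    # 15day_actives::day row.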
    def test_basic_boolean_logic(self) -> None:
        user = self.create_user()
        self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
        self.mark_15day_active(user, end_time=self.TIME_ZERO)
        self.mark_audit_active(user, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user, end_time=self.TIME_ZERO + self.DAY)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
        self.assertTableState(RealmCount, ["value", "end_time"], [[1, self.TIME_ZERO + self.DAY]])

    def test_bots_not_counted(self) -> None:
        bot = self.create_user(is_bot=True)
        self.mark_audit_active(bot)
        self.mark_15day_active(bot)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_multiple_users_realms_and_times(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        second_realm = do_create_realm(string_id="second", name="second")
        user3 = self.create_user(realm=second_realm)
        user4 = self.create_user(realm=second_realm)
        user5 = self.create_user(realm=second_realm)

        for user in [user1, user2, user3, user4, user5]:
            self.mark_audit_active(user)
            self.mark_15day_active(user)
        for user in [user1, user3, user4]:
            self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
            self.mark_15day_active(user, end_time=self.TIME_ZERO - self.DAY)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
        self.assertTableState(
            RealmCount,
            ["value", "realm", "end_time"],
            [
                [2, self.default_realm, self.TIME_ZERO],
                [3, second_realm, self.TIME_ZERO],
                [1, self.default_realm, self.TIME_ZERO - self.DAY],
                [2, second_realm, self.TIME_ZERO - self.DAY],
            ],
        )

        # Check that adding spurious entries doesn't make a difference
        self.mark_audit_active(user1, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user2, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user2, end_time=self.TIME_ZERO - self.DAY)
        self.create_user()
        third_realm = do_create_realm(string_id="third", name="third")
        self.create_user(realm=third_realm)

        RealmCount.objects.all().delete()
        InstallationCount.objects.all().delete()
        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i * self.DAY)
        self.assertTableState(
            RealmCount,
            ["value", "realm", "end_time"],
            [
                [2, self.default_realm, self.TIME_ZERO],
                [3, second_realm, self.TIME_ZERO],
                [1, self.default_realm, self.TIME_ZERO - self.DAY],
                [2, second_realm, self.TIME_ZERO - self.DAY],
            ],
        )

    def test_end_to_end(self) -> None:
        user1 = do_create_user(
            "email1", "password", self.default_realm, "full_name", acting_user=None
        )
        user2 = do_create_user(
            "email2", "password", self.default_realm, "full_name", acting_user=None
        )
        do_create_user("email3", "password", self.default_realm, "full_name", acting_user=None)
        time_zero = floor_to_day(timezone_now()) + self.DAY
        update_user_activity_interval(user1, time_zero)
        update_user_activity_interval(user2, time_zero)
        do_deactivate_user(user2, acting_user=None)
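        # The stats are processed in dependency order: realm_active_humans::day
        # is derived from the two UserCount properties listed before it, so it
        # is filled last.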
        for property in [
            "active_users_audit:is_bot:day",
            "15day_actives::day",
            "realm_active_humans::day",
        ]:
            FillState.objects.create(property=property, state=FillState.DONE, end_time=time_zero)
            process_count_stat(COUNT_STATS[property], time_zero + self.DAY)
        self.assertEqual(
            RealmCount.objects.filter(
                property="realm_active_humans::day", end_time=time_zero + self.DAY, value=1
            ).count(),
            1,
        )
        self.assertEqual(RealmCount.objects.filter(property="realm_active_humans::day").count(), 1)