from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional, Tuple, Type, Union

import ujson
from django.apps import apps
from django.db import models
from django.db.models import Sum
from django.test import TestCase
from django.utils.timezone import now as timezone_now
from django.utils.timezone import utc as timezone_utc

from analytics.lib.counts import COUNT_STATS, CountStat, DataCollector, \
    DependentCountStat, LoggingCountStat, do_aggregate_to_summary_table, \
    do_drop_all_analytics_tables, do_drop_single_stat, \
    do_fill_count_stat_at_hour, do_increment_logging_stat, \
    process_count_stat, sql_data_collector
from analytics.models import Anomaly, BaseCount, \
    FillState, InstallationCount, RealmCount, StreamCount, \
    UserCount, installation_epoch, last_successful_fill
from zerver.lib.actions import do_activate_user, do_create_user, \
    do_deactivate_user, do_reactivate_user, update_user_activity_interval, \
    do_invite_users, do_revoke_user_invite, do_resend_user_invite_email, \
    InvitationError
from zerver.lib.timestamp import TimezoneNotUTCException, floor_to_day
from zerver.models import Client, Huddle, Message, Realm, \
    RealmAuditLog, Recipient, Stream, UserActivityInterval, \
    UserProfile, get_client, get_user, PreregistrationUser


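# Note: the fixtures below are pinned to the fixed TIME_ZERO defined on
# AnalyticsTestCase (rather than "now"), so the hour/day buckets asserted on in
# these tests are deterministic.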
class AnalyticsTestCase(TestCase):
    MINUTE = timedelta(seconds=60)
    HOUR = MINUTE * 60
    DAY = HOUR * 24
    TIME_ZERO = datetime(1988, 3, 14).replace(tzinfo=timezone_utc)
    TIME_LAST_HOUR = TIME_ZERO - HOUR

    def setUp(self) -> None:
        self.default_realm = Realm.objects.create(
            string_id='realmtest', name='Realm Test', date_created=self.TIME_ZERO - 2*self.DAY)
        # used to generate unique names in self.create_*
        self.name_counter = 100
        # used as defaults in self.assertCountEquals
        self.current_property = None  # type: Optional[str]

    # Lightweight creation of users, streams, and messages
    def create_user(self, **kwargs: Any) -> UserProfile:
        self.name_counter += 1
        defaults = {
            'email': 'user%s@domain.tld' % (self.name_counter,),
            'date_joined': self.TIME_LAST_HOUR,
            'full_name': 'full_name',
            'short_name': 'short_name',
            'pointer': -1,
            'last_pointer_updater': 'seems unused?',
            'realm': self.default_realm,
            'api_key': '42'}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return UserProfile.objects.create(**kwargs)

    def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
        self.name_counter += 1
        defaults = {'name': 'stream name %s' % (self.name_counter,),
                    'realm': self.default_realm,
                    'date_created': self.TIME_LAST_HOUR}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        stream = Stream.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
        return stream, recipient

    def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[Huddle, Recipient]:
        self.name_counter += 1
        defaults = {'huddle_hash': 'hash%s' % (self.name_counter,)}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        huddle = Huddle.objects.create(**kwargs)
        recipient = Recipient.objects.create(type_id=huddle.id, type=Recipient.HUDDLE)
        return huddle, recipient

    def create_message(self, sender: UserProfile, recipient: Recipient, **kwargs: Any) -> Message:
        defaults = {
            'sender': sender,
            'recipient': recipient,
            'subject': 'subject',
            'content': 'hi',
            'pub_date': self.TIME_LAST_HOUR,
            'sending_client': get_client("website")}
        for key, value in defaults.items():
            kwargs[key] = kwargs.get(key, value)
        return Message.objects.create(**kwargs)

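    # Note: assertCountEquals checks the 'value' of a single matching row of
    # <table>, falling back to self.current_property and self.default_realm
    # when property/realm are not given.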
    # kwargs should only ever be a UserProfile or Stream.
    def assertCountEquals(self, table: Type[BaseCount], value: int, property: Optional[str]=None,
                          subgroup: Optional[str]=None, end_time: datetime=TIME_ZERO,
                          realm: Optional[Realm]=None, **kwargs: models.Model) -> None:
        if property is None:
            property = self.current_property
        queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs)
        if table is not InstallationCount:
            if realm is None:
                realm = self.default_realm
            queryset = queryset.filter(realm=realm)
        if subgroup is not None:
            queryset = queryset.filter(subgroup=subgroup)
        self.assertEqual(queryset.values_list('value', flat=True)[0], value)

    def assertTableState(self, table: Type[BaseCount], arg_keys: List[str],
                         arg_values: List[List[object]]) -> None:
        """Assert that the state of a *Count table is what it should be.

        Example usage:
            self.assertTableState(RealmCount, ['property', 'subgroup', 'realm'],
                                  [['p1', 4], ['p2', 10, self.alt_realm]])

        table -- A *Count table.
        arg_keys -- List of columns of <table>.
        arg_values -- List of "rows" of <table>.
            Each entry of arg_values (e.g. ['p1', 4]) represents a row of <table>.
            The i'th value of the entry corresponds to the i'th arg_key, so e.g.
            the first arg_values entry here corresponds to a row of RealmCount
            with property='p1' and subgroup=4.
            Any columns not specified (in this case, every column of RealmCount
            other than property and subgroup) are either set to default values,
            or are ignored.

        The function checks that every entry of arg_values matches exactly one
        row of <table>, and that no additional rows exist. Note that this means
        checking a table with duplicate rows is not supported.
        """
        defaults = {
            'property': self.current_property,
            'subgroup': None,
            'end_time': self.TIME_ZERO,
            'value': 1}
        for values in arg_values:
            kwargs = {}  # type: Dict[str, Any]
            for i in range(len(values)):
                kwargs[arg_keys[i]] = values[i]
            for key, value in defaults.items():
                kwargs[key] = kwargs.get(key, value)
            if table is not InstallationCount:
                if 'realm' not in kwargs:
                    if 'user' in kwargs:
                        kwargs['realm'] = kwargs['user'].realm
                    elif 'stream' in kwargs:
                        kwargs['realm'] = kwargs['stream'].realm
                    else:
                        kwargs['realm'] = self.default_realm
            self.assertEqual(table.objects.filter(**kwargs).count(), 1)
        self.assertEqual(table.objects.count(), len(arg_values))


class TestProcessCountStat(AnalyticsTestCase):
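    # Note: the %%%% in the dummy query below appears to survive two rounds of
    # %-interpolation (the one on this line and a later pass inside the SQL
    # data collector), leaving %(time_end)s as a parameter for the database
    # driver.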
    def make_dummy_count_stat(self, property: str) -> CountStat:
        query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
                   VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, property)
        return CountStat(property, sql_data_collector(RealmCount, query, None), CountStat.HOUR)

    def assertFillStateEquals(self, stat: CountStat, end_time: datetime,
                              state: int=FillState.DONE) -> None:
        fill_state = FillState.objects.filter(property=stat.property).first()
        self.assertEqual(fill_state.end_time, end_time)
        self.assertEqual(fill_state.state, state)

    def test_process_stat(self) -> None:
        # process new stat
        current_time = installation_epoch() + self.HOUR
        stat = self.make_dummy_count_stat('test stat')
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # dirty stat
        FillState.objects.filter(property=stat.property).update(state=FillState.STARTED)
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, no update
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 1)

        # clean stat, with update
        current_time = current_time + self.HOUR
        stat = self.make_dummy_count_stat('test stat')
        process_count_stat(stat, current_time)
        self.assertFillStateEquals(stat, current_time)
        self.assertEqual(InstallationCount.objects.filter(property=stat.property).count(), 2)

    def test_bad_fill_to_time(self) -> None:
        stat = self.make_dummy_count_stat('test stat')
        with self.assertRaises(ValueError):
            process_count_stat(stat, installation_epoch() + 65*self.MINUTE)
        with self.assertRaises(TimezoneNotUTCException):
            process_count_stat(stat, installation_epoch().replace(tzinfo=None))

    # This tests the LoggingCountStat branch of the code in do_delete_counts_at_hour.
    # It is important that do_delete_counts_at_hour not delete any of the collected
    # logging data!
    def test_process_logging_stat(self) -> None:
        end_time = self.TIME_ZERO

        user_stat = LoggingCountStat('user stat', UserCount, CountStat.DAY)
        stream_stat = LoggingCountStat('stream stat', StreamCount, CountStat.DAY)
        realm_stat = LoggingCountStat('realm stat', RealmCount, CountStat.DAY)
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        realm = self.default_realm
        UserCount.objects.create(
            user=user, realm=realm, property=user_stat.property, end_time=end_time, value=5)
        StreamCount.objects.create(
            stream=stream, realm=realm, property=stream_stat.property, end_time=end_time, value=5)
        RealmCount.objects.create(
            realm=realm, property=realm_stat.property, end_time=end_time, value=5)

        # Normal run of process_count_stat
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 5]])
        self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 5]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [[user_stat.property, 5],
                               [stream_stat.property, 5],
                               [realm_stat.property, 5]])
        self.assertTableState(InstallationCount, ['property', 'value'],
                              [[user_stat.property, 5],
                               [stream_stat.property, 5],
                               [realm_stat.property, 5]])

        # Change the logged data and mark FillState as dirty
        UserCount.objects.update(value=6)
        StreamCount.objects.update(value=6)
        RealmCount.objects.filter(property=realm_stat.property).update(value=6)
        FillState.objects.update(state=FillState.STARTED)

        # Check that the change propagated (and the collected data wasn't deleted)
        for stat in [user_stat, stream_stat, realm_stat]:
            process_count_stat(stat, end_time)
        self.assertTableState(UserCount, ['property', 'value'], [[user_stat.property, 6]])
        self.assertTableState(StreamCount, ['property', 'value'], [[stream_stat.property, 6]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [[user_stat.property, 6],
                               [stream_stat.property, 6],
                               [realm_stat.property, 6]])
        self.assertTableState(InstallationCount, ['property', 'value'],
                              [[user_stat.property, 6],
                               [stream_stat.property, 6],
                               [realm_stat.property, 6]])

    def test_process_dependent_stat(self) -> None:
        stat1 = self.make_dummy_count_stat('stat1')
        stat2 = self.make_dummy_count_stat('stat2')
        query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
                   VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, 'stat3')
        stat3 = DependentCountStat('stat3', sql_data_collector(RealmCount, query, None),
                                   CountStat.HOUR,
                                   dependencies=['stat1', 'stat2'])
        hour = [installation_epoch() + i*self.HOUR for i in range(5)]

        # test when one dependency has been run, and the other hasn't
        process_count_stat(stat1, hour[2])
        process_count_stat(stat3, hour[1])
        self.assertTableState(InstallationCount, ['property', 'end_time'],
                              [['stat1', hour[1]], ['stat1', hour[2]]])
        self.assertFillStateEquals(stat3, hour[0])

        # test that we don't fill past the fill_to_time argument, even if
        # dependencies have later last_successful_fill
        process_count_stat(stat2, hour[3])
        process_count_stat(stat3, hour[1])
        self.assertTableState(InstallationCount, ['property', 'end_time'],
                              [['stat1', hour[1]], ['stat1', hour[2]],
                               ['stat2', hour[1]], ['stat2', hour[2]], ['stat2', hour[3]],
                               ['stat3', hour[1]]])
        self.assertFillStateEquals(stat3, hour[1])

        # test that we don't fill past the dependency last_successful_fill times,
        # even if fill_to_time is later
        process_count_stat(stat3, hour[4])
        self.assertTableState(InstallationCount, ['property', 'end_time'],
                              [['stat1', hour[1]], ['stat1', hour[2]],
                               ['stat2', hour[1]], ['stat2', hour[2]], ['stat2', hour[3]],
                               ['stat3', hour[1]], ['stat3', hour[2]]])
        self.assertFillStateEquals(stat3, hour[2])

        # test daily dependent stat with hourly dependencies
        query = """INSERT INTO analytics_realmcount (realm_id, value, property, end_time)
                   VALUES (%s, 1, '%s', %%%%(time_end)s)""" % (self.default_realm.id, 'stat4')
        stat4 = DependentCountStat('stat4', sql_data_collector(RealmCount, query, None),
                                   CountStat.DAY,
                                   dependencies=['stat1', 'stat2'])
        hour24 = installation_epoch() + 24*self.HOUR
        hour25 = installation_epoch() + 25*self.HOUR
        process_count_stat(stat1, hour25)
        process_count_stat(stat2, hour25)
        process_count_stat(stat4, hour25)
        self.assertEqual(InstallationCount.objects.filter(property='stat4').count(), 1)
        self.assertFillStateEquals(stat4, hour24)


class TestCountStats(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        # This tests two things for each of the queries/CountStats: Handling
        # more than 1 realm, and the time bounds (time_start and time_end in
        # the queries).
        self.second_realm = Realm.objects.create(
            string_id='second-realm', name='Second Realm',
            date_created=self.TIME_ZERO-2*self.DAY)
        for minutes_ago in [0, 1, 61, 60*24+1]:
            creation_time = self.TIME_ZERO - minutes_ago*self.MINUTE
            user = self.create_user(email='user-%s@second.analytics' % (minutes_ago,),
                                    realm=self.second_realm, date_joined=creation_time)
            recipient = self.create_stream_with_recipient(
                name='stream %s' % (minutes_ago,), realm=self.second_realm,
                date_created=creation_time)[1]
            self.create_message(user, recipient, pub_date=creation_time)
        self.hourly_user = get_user('user-1@second.analytics', self.second_realm)
        self.daily_user = get_user('user-61@second.analytics', self.second_realm)

        # This realm should not show up in the *Count tables for any of the
        # messages_* CountStats
        self.no_message_realm = Realm.objects.create(
            string_id='no-message-realm', name='No Message Realm',
            date_created=self.TIME_ZERO-2*self.DAY)
        self.create_user(realm=self.no_message_realm)
        self.create_stream_with_recipient(realm=self.no_message_realm)
        # This huddle should not show up anywhere
        self.create_huddle_with_recipient()

    def test_active_users_by_is_bot(self) -> None:
        stat = COUNT_STATS['active_users:is_bot:day']
        self.current_property = stat.property

        # To be included
        self.create_user(is_bot=True)
        self.create_user(is_bot=True, date_joined=self.TIME_ZERO-25*self.HOUR)
        self.create_user(is_bot=False)

        # To be excluded
        self.create_user(is_active=False)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, 'true'], [1, 'false'],
                               [3, 'false', self.second_realm],
                               [1, 'false', self.no_message_realm]])
        self.assertTableState(InstallationCount,
                              ['value', 'subgroup'],
                              [[2, 'true'], [5, 'false']])
        self.assertTableState(UserCount, [], [])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_is_bot(self) -> None:
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.create(type_id=human1.id,
                                                    type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        self.create_message(bot, recipient_human1)
        self.create_message(bot, recipient_stream)
        self.create_message(bot, recipient_huddle)
        self.create_message(human1, recipient_human1)
        self.create_message(human2, recipient_human1)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[1, 'false', human1], [1, 'false', human2], [3, 'true', bot],
                               [1, 'false', self.hourly_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, 'false'], [3, 'true'], [1, 'false', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'], [[3, 'false'], [3, 'true']])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.current_property = stat.property

        # Nothing currently in this stat that is bot related, but so many of
        # the rest of our stats make the human/bot distinction that one can
        # imagine a later refactoring that will intentionally or
        # unintentionally change this. So make one of our users a bot.
        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        user3 = self.create_user()

        # private streams
        recipient_stream1 = self.create_stream_with_recipient(invite_only=True)[1]
        recipient_stream2 = self.create_stream_with_recipient(invite_only=True)[1]
        self.create_message(user1, recipient_stream1)
        self.create_message(user2, recipient_stream1)
        self.create_message(user2, recipient_stream2)

        # public streams
        recipient_stream3 = self.create_stream_with_recipient()[1]
        recipient_stream4 = self.create_stream_with_recipient()[1]
        self.create_message(user1, recipient_stream3)
        self.create_message(user1, recipient_stream4)
        self.create_message(user2, recipient_stream3)

        # huddles
        recipient_huddle1 = self.create_huddle_with_recipient()[1]
        recipient_huddle2 = self.create_huddle_with_recipient()[1]
        self.create_message(user1, recipient_huddle1)
        self.create_message(user2, recipient_huddle2)

        # private messages
        recipient_user1 = Recipient.objects.create(type_id=user1.id, type=Recipient.PERSONAL)
        recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)
        recipient_user3 = Recipient.objects.create(type_id=user3.id, type=Recipient.PERSONAL)
        self.create_message(user1, recipient_user2)
        self.create_message(user2, recipient_user1)
        self.create_message(user3, recipient_user3)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[1, 'private_stream', user1],
                               [2, 'private_stream', user2],
                               [2, 'public_stream', user1],
                               [1, 'public_stream', user2],
                               [1, 'private_message', user1],
                               [1, 'private_message', user2],
                               [1, 'private_message', user3],
                               [1, 'huddle_message', user1],
                               [1, 'huddle_message', user2],
                               [1, 'public_stream', self.hourly_user],
                               [1, 'public_stream', self.daily_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[3, 'private_stream'], [3, 'public_stream'], [3, 'private_message'],
                               [2, 'huddle_message'], [2, 'public_stream', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[3, 'private_stream'], [5, 'public_stream'], [3, 'private_message'],
                               [2, 'huddle_message']])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_recipients_with_same_id(self) -> None:
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.current_property = stat.property

        user = self.create_user(id=1000)
        user_recipient = Recipient.objects.create(type_id=user.id, type=Recipient.PERSONAL)
        stream_recipient = self.create_stream_with_recipient(id=1000)[1]
        huddle_recipient = self.create_huddle_with_recipient(id=1000)[1]

        self.create_message(user, user_recipient)
        self.create_message(user, stream_recipient)
        self.create_message(user, huddle_recipient)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertCountEquals(UserCount, 1, subgroup='private_message')
        self.assertCountEquals(UserCount, 1, subgroup='huddle_message')
        self.assertCountEquals(UserCount, 1, subgroup='public_stream')

    def test_messages_sent_by_client(self) -> None:
        stat = COUNT_STATS['messages_sent:client:day']
        self.current_property = stat.property

        user1 = self.create_user(is_bot=True)
        user2 = self.create_user()
        recipient_user2 = Recipient.objects.create(type_id=user2.id, type=Recipient.PERSONAL)

        recipient_stream = self.create_stream_with_recipient()[1]
        recipient_huddle = self.create_huddle_with_recipient()[1]

        client2 = Client.objects.create(name='client2')

        self.create_message(user1, recipient_user2, sending_client=client2)
        self.create_message(user1, recipient_stream)
        self.create_message(user1, recipient_huddle)
        self.create_message(user2, recipient_user2, sending_client=client2)
        self.create_message(user2, recipient_user2, sending_client=client2)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        client2_id = str(client2.id)
        website_client_id = str(get_client('website').id)  # default for self.create_message
        self.assertTableState(UserCount, ['value', 'subgroup', 'user'],
                              [[2, website_client_id, user1],
                               [1, client2_id, user1], [2, client2_id, user2],
                               [1, website_client_id, self.hourly_user],
                               [1, website_client_id, self.daily_user]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, website_client_id], [3, client2_id],
                               [2, website_client_id, self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'],
                              [[4, website_client_id], [3, client2_id]])
        self.assertTableState(StreamCount, [], [])

    def test_messages_sent_to_stream_by_is_bot(self) -> None:
        stat = COUNT_STATS['messages_in_stream:is_bot:day']
        self.current_property = stat.property

        bot = self.create_user(is_bot=True)
        human1 = self.create_user()
        human2 = self.create_user()
        recipient_human1 = Recipient.objects.create(type_id=human1.id, type=Recipient.PERSONAL)

        stream1, recipient_stream1 = self.create_stream_with_recipient()
        stream2, recipient_stream2 = self.create_stream_with_recipient()

        # To be included
        self.create_message(human1, recipient_stream1)
        self.create_message(human2, recipient_stream1)
        self.create_message(human1, recipient_stream2)
        self.create_message(bot, recipient_stream2)
        self.create_message(bot, recipient_stream2)

        # To be excluded
        self.create_message(human2, recipient_human1)
        self.create_message(bot, recipient_human1)
        recipient_huddle = self.create_huddle_with_recipient()[1]
        self.create_message(human1, recipient_huddle)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)

        self.assertTableState(StreamCount, ['value', 'subgroup', 'stream'],
                              [[2, 'false', stream1], [1, 'false', stream2], [2, 'true', stream2],
                               # "hourly" and "daily" stream, from TestCountStats.setUp
                               [1, 'false', Stream.objects.get(name='stream 1')],
                               [1, 'false', Stream.objects.get(name='stream 61')]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[3, 'false'], [2, 'true'], [2, 'false', self.second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'], [[5, 'false'], [2, 'true']])
        self.assertTableState(UserCount, [], [])

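    # Note: start_offset and end_offset are measured backwards from TIME_ZERO,
    # so e.g. create_interval(user, 2*self.HOUR, self.HOUR) records activity
    # from TIME_ZERO - 2 hours until TIME_ZERO - 1 hour.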
    def create_interval(self, user: UserProfile, start_offset: timedelta,
                        end_offset: timedelta) -> None:
        UserActivityInterval.objects.create(
            user_profile=user, start=self.TIME_ZERO-start_offset,
            end=self.TIME_ZERO-end_offset)

    def test_1day_actives(self) -> None:
        stat = COUNT_STATS['1day_actives::day']
        self.current_property = stat.property

        _1day = 1*self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _1day + self.DAY, _1day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _1day + self.DAY, _1day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 2*self.DAY, 1*self.DAY)
        self.create_interval(user3, 20*self.HOUR, 19*self.HOUR)
        self.create_interval(user3, 20*self.MINUTE, 19*self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 1.5*self.DAY, 0.5*self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 2*self.DAY, -2*self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20*self.MINUTE, 19*self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['value', 'user'],
                              [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]])
        self.assertTableState(RealmCount, ['value', 'realm'],
                              [[5, self.default_realm], [1, self.second_realm]])
        self.assertTableState(InstallationCount, ['value'], [[6]])
        self.assertTableState(StreamCount, [], [])

    def test_15day_actives(self) -> None:
        stat = COUNT_STATS['15day_actives::day']
        self.current_property = stat.property

        _15day = 15*self.DAY - UserActivityInterval.MIN_INTERVAL_LENGTH

        # Outside time range, should not appear. Also tests upper boundary.
        user1 = self.create_user()
        self.create_interval(user1, _15day + self.DAY, _15day + timedelta(seconds=1))
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # On lower boundary, should appear
        user2 = self.create_user()
        self.create_interval(user2, _15day + self.DAY, _15day)

        # Multiple intervals, including one outside boundary
        user3 = self.create_user()
        self.create_interval(user3, 20*self.DAY, 19*self.DAY)
        self.create_interval(user3, 20*self.HOUR, 19*self.HOUR)
        self.create_interval(user3, 20*self.MINUTE, 19*self.MINUTE)

        # Intervals crossing boundary
        user4 = self.create_user()
        self.create_interval(user4, 20*self.DAY, 10*self.DAY)
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user6 = self.create_user()
        self.create_interval(user6, 20*self.DAY, -2*self.DAY)

        # Second realm
        user7 = self.create_user(realm=self.second_realm)
        self.create_interval(user7, 20*self.MINUTE, 19*self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['value', 'user'],
                              [[1, user2], [1, user3], [1, user4], [1, user5], [1, user6], [1, user7]])
        self.assertTableState(RealmCount, ['value', 'realm'],
                              [[5, self.default_realm], [1, self.second_realm]])
        self.assertTableState(InstallationCount, ['value'], [[6]])
        self.assertTableState(StreamCount, [], [])

    def test_minutes_active(self) -> None:
        stat = COUNT_STATS['minutes_active::day']
        self.current_property = stat.property

        # Outside time range, should not appear. Also testing for intervals
        # starting and ending on boundary
        user1 = self.create_user()
        self.create_interval(user1, 25*self.HOUR, self.DAY)
        self.create_interval(user1, timedelta(0), -self.HOUR)

        # Multiple intervals, including one outside boundary
        user2 = self.create_user()
        self.create_interval(user2, 20*self.DAY, 19*self.DAY)
        self.create_interval(user2, 20*self.HOUR, 19*self.HOUR)
        self.create_interval(user2, 20*self.MINUTE, 19*self.MINUTE)

        # Intervals crossing boundary
        user3 = self.create_user()
        self.create_interval(user3, 25*self.HOUR, 22*self.HOUR)
        self.create_interval(user3, self.MINUTE, -self.MINUTE)

        # Interval subsuming time range
        user4 = self.create_user()
        self.create_interval(user4, 2*self.DAY, -2*self.DAY)

        # Less than 60 seconds, should not appear
        user5 = self.create_user()
        self.create_interval(user5, self.MINUTE, timedelta(seconds=30))
        self.create_interval(user5, timedelta(seconds=20), timedelta(seconds=10))

        # Second realm
        user6 = self.create_user(realm=self.second_realm)
        self.create_interval(user6, 20*self.MINUTE, 19*self.MINUTE)

        do_fill_count_stat_at_hour(stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['value', 'user'],
                              [[61, user2], [121, user3], [24*60, user4], [1, user6]])
        self.assertTableState(RealmCount, ['value', 'realm'],
                              [[61 + 121 + 24*60, self.default_realm], [1, self.second_realm]])
        self.assertTableState(InstallationCount, ['value'], [[61 + 121 + 24*60 + 1]])
        self.assertTableState(StreamCount, [], [])


class TestDoAggregateToSummaryTable(AnalyticsTestCase):
    # do_aggregate_to_summary_table is mostly tested by the end to end nature
    # of the tests in TestCountStats. But we want to highlight one feature that
    # is important for keeping the size of the analytics tables small: if there
    # is no relevant data in the table being aggregated, the aggregation table
    # doesn't get a row with value 0.
    def test_no_aggregated_zeros(self) -> None:
        stat = LoggingCountStat('test stat', UserCount, CountStat.HOUR)
        do_aggregate_to_summary_table(stat, self.TIME_ZERO)
        self.assertFalse(RealmCount.objects.exists())
        self.assertFalse(InstallationCount.objects.exists())


class TestDoIncrementLoggingStat(AnalyticsTestCase):
    def test_table_and_id_args(self) -> None:
        # For realms, streams, and users, tests that the new rows are going to
        # the appropriate *Count table, and that using a different zerver_object
        # results in a new row being created
        self.current_property = 'test'
        second_realm = Realm.objects.create(string_id='moo', name='moo')
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(second_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['realm'], [[self.default_realm], [second_realm]])

        user1 = self.create_user()
        user2 = self.create_user()
        stat = LoggingCountStat('test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(user2, stat, None, self.TIME_ZERO)
        self.assertTableState(UserCount, ['user'], [[user1], [user2]])

        stream1 = self.create_stream_with_recipient()[0]
        stream2 = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream1, stat, None, self.TIME_ZERO)
        do_increment_logging_stat(stream2, stat, None, self.TIME_ZERO)
        self.assertTableState(StreamCount, ['stream'], [[stream1], [stream2]])

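    # The four times below are 0, 1, 61, and 24*60+1 minutes before TIME_ZERO;
    # per the assertions that follow, a DAY stat buckets them into 3 + 1 rows
    # and an HOUR stat into 2 + 1 + 1 rows.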
    def test_frequency(self) -> None:
        times = [self.TIME_ZERO - self.MINUTE*i for i in [0, 1, 61, 24*60+1]]

        stat = LoggingCountStat('day test', RealmCount, CountStat.DAY)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)
        stat = LoggingCountStat('hour test', RealmCount, CountStat.HOUR)
        for time_ in times:
            do_increment_logging_stat(self.default_realm, stat, None, time_)

        self.assertTableState(RealmCount, ['value', 'property', 'end_time'],
                              [[3, 'day test', self.TIME_ZERO],
                               [1, 'day test', self.TIME_ZERO - self.DAY],
                               [2, 'hour test', self.TIME_ZERO],
                               [1, 'hour test', self.TIME_LAST_HOUR],
                               [1, 'hour test', self.TIME_ZERO - self.DAY]])

    def test_get_or_create(self) -> None:
        stat = LoggingCountStat('test', RealmCount, CountStat.HOUR)
        # All these should trigger the create part of get_or_create.
        # property is tested in test_frequency, and id_args are tested in test_id_args,
        # so this only tests a new subgroup and end_time
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup2', self.TIME_ZERO)
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_LAST_HOUR)
        self.current_property = 'test'
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[1, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])
        # This should trigger the get part of get_or_create
        do_increment_logging_stat(self.default_realm, stat, 'subgroup1', self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value', 'subgroup', 'end_time'],
                              [[2, 'subgroup1', self.TIME_ZERO], [1, 'subgroup2', self.TIME_ZERO],
                               [1, 'subgroup1', self.TIME_LAST_HOUR]])

    def test_increment(self) -> None:
        stat = LoggingCountStat('test', RealmCount, CountStat.DAY)
        self.current_property = 'test'
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=-1)
        self.assertTableState(RealmCount, ['value'], [[-1]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO, increment=3)
        self.assertTableState(RealmCount, ['value'], [[2]])
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        self.assertTableState(RealmCount, ['value'], [[3]])


class TestLoggingCountStats(AnalyticsTestCase):
    def test_aggregation(self) -> None:
        stat = LoggingCountStat('realm test', RealmCount, CountStat.DAY)
        do_increment_logging_stat(self.default_realm, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        user = self.create_user()
        stat = LoggingCountStat('user test', UserCount, CountStat.DAY)
        do_increment_logging_stat(user, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        stream = self.create_stream_with_recipient()[0]
        stat = LoggingCountStat('stream test', StreamCount, CountStat.DAY)
        do_increment_logging_stat(stream, stat, None, self.TIME_ZERO)
        process_count_stat(stat, self.TIME_ZERO)

        self.assertTableState(InstallationCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(RealmCount, ['property', 'value'],
                              [['realm test', 1], ['user test', 1], ['stream test', 1]])
        self.assertTableState(UserCount, ['property', 'value'], [['user test', 1]])
        self.assertTableState(StreamCount, ['property', 'value'], [['stream test', 1]])

    def test_active_users_log_by_is_bot(self) -> None:
        property = 'active_users_log:is_bot:day'
        user = do_create_user('email', 'password', self.default_realm, 'full_name', 'short_name')
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_deactivate_user(user)
        self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_activate_user(user)
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_deactivate_user(user)
        self.assertEqual(0, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])
        do_reactivate_user(user)
        self.assertEqual(1, RealmCount.objects.filter(property=property, subgroup=False)
                         .aggregate(Sum('value'))['value__sum'])

    def test_invites_sent(self) -> None:
        property = 'invites_sent::day'

        def assertInviteCountEquals(count: int) -> None:
            self.assertEqual(count, RealmCount.objects.filter(property=property, subgroup=None)
                             .aggregate(Sum('value'))['value__sum'])

        user = self.create_user(email='first@domain.tld')
        stream, _ = self.create_stream_with_recipient()
        do_invite_users(user, ['user1@domain.tld', 'user2@domain.tld'], [stream])
        assertInviteCountEquals(2)

        # We currently send emails when re-inviting users that haven't
        # turned into accounts, so count them towards the total
        do_invite_users(user, ['user1@domain.tld', 'user2@domain.tld'], [stream])
        assertInviteCountEquals(4)

        # Test mix of good and malformed invite emails
        try:
            do_invite_users(user, ['user3@domain.tld', 'malformed'], [stream])
        except InvitationError:
            pass
        assertInviteCountEquals(4)

        # Test inviting existing users
        try:
            do_invite_users(user, ['first@domain.tld', 'user4@domain.tld'], [stream])
        except InvitationError:
            pass
        assertInviteCountEquals(5)

        # Revoking invite should not give you credit
        do_revoke_user_invite(PreregistrationUser.objects.filter(realm=user.realm).first())
        assertInviteCountEquals(5)

        # Resending invite should cost you
        do_resend_user_invite_email(PreregistrationUser.objects.first())
        assertInviteCountEquals(6)


class TestDeleteStats(AnalyticsTestCase):
    def test_do_drop_all_analytics_tables(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args = {'property': 'test', 'end_time': self.TIME_ZERO, 'value': 10}

        UserCount.objects.create(user=user, realm=user.realm, **count_args)
        StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
        RealmCount.objects.create(realm=user.realm, **count_args)
        InstallationCount.objects.create(**count_args)
        FillState.objects.create(property='test', end_time=self.TIME_ZERO, state=FillState.DONE)
        Anomaly.objects.create(info='test anomaly')

        analytics = apps.get_app_config('analytics')
        for table in list(analytics.models.values()):
            self.assertTrue(table.objects.exists())

        do_drop_all_analytics_tables()
        for table in list(analytics.models.values()):
            self.assertFalse(table.objects.exists())

    def test_do_drop_single_stat(self) -> None:
        user = self.create_user()
        stream = self.create_stream_with_recipient()[0]
        count_args_to_delete = {'property': 'to_delete', 'end_time': self.TIME_ZERO, 'value': 10}
        count_args_to_save = {'property': 'to_save', 'end_time': self.TIME_ZERO, 'value': 10}

        for count_args in [count_args_to_delete, count_args_to_save]:
            UserCount.objects.create(user=user, realm=user.realm, **count_args)
            StreamCount.objects.create(stream=stream, realm=stream.realm, **count_args)
            RealmCount.objects.create(realm=user.realm, **count_args)
            InstallationCount.objects.create(**count_args)
        FillState.objects.create(property='to_delete', end_time=self.TIME_ZERO, state=FillState.DONE)
        FillState.objects.create(property='to_save', end_time=self.TIME_ZERO, state=FillState.DONE)
        Anomaly.objects.create(info='test anomaly')

        analytics = apps.get_app_config('analytics')
        for table in list(analytics.models.values()):
            self.assertTrue(table.objects.exists())

        do_drop_single_stat('to_delete')
        for table in list(analytics.models.values()):
            if table._meta.db_table == 'analytics_anomaly':
                self.assertTrue(table.objects.exists())
            else:
                self.assertFalse(table.objects.filter(property='to_delete').exists())
                self.assertTrue(table.objects.filter(property='to_save').exists())


class TestActiveUsersAudit(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user = self.create_user()
        self.stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.current_property = self.stat.property

    def add_event(self, event_type: str, days_offset: float,
                  user: Optional[UserProfile]=None) -> None:
        hours_offset = int(24*days_offset)
        if user is None:
            user = self.user
        RealmAuditLog.objects.create(
            realm=user.realm, modified_user=user, event_type=event_type,
            event_time=self.TIME_ZERO - hours_offset*self.HOUR)

    def test_user_deactivated_in_future(self) -> None:
        self.add_event('user_created', 1)
        self.add_event('user_deactivated', 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup'], [['false']])

    def test_user_reactivated_in_future(self) -> None:
        self.add_event('user_deactivated', 1)
        self.add_event('user_reactivated', 0)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_user_active_then_deactivated_same_day(self) -> None:
        self.add_event('user_created', 1)
        self.add_event('user_deactivated', .5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_user_unactive_then_activated_same_day(self) -> None:
        self.add_event('user_deactivated', 1)
        self.add_event('user_reactivated', .5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup'], [['false']])

    # Arguably these next two tests are duplicates of the _in_future tests, but are
    # a guard against future refactorings where they may no longer be duplicates
    def test_user_active_then_deactivated_with_day_gap(self) -> None:
        self.add_event('user_created', 2)
        self.add_event('user_deactivated', 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup', 'end_time'],
                              [['false', self.TIME_ZERO - self.DAY]])

    def test_user_deactivated_then_reactivated_with_day_gap(self) -> None:
        self.add_event('user_deactivated', 2)
        self.add_event('user_reactivated', 1)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup'], [['false']])

    def test_event_types(self) -> None:
        self.add_event('user_created', 4)
        self.add_event('user_deactivated', 3)
        self.add_event('user_activated', 2)
        self.add_event('user_reactivated', 1)
        for i in range(4):
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO - i*self.DAY)
        self.assertTableState(UserCount, ['subgroup', 'end_time'],
                              [['false', self.TIME_ZERO - i*self.DAY] for i in [3, 1, 0]])

    # Also tests that aggregation to RealmCount and InstallationCount is
    # being done, and that we're storing the user correctly in UserCount
    def test_multiple_users_realms_and_bots(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        second_realm = Realm.objects.create(string_id='moo', name='moo')
        user3 = self.create_user(realm=second_realm)
        user4 = self.create_user(realm=second_realm, is_bot=True)
        for user in [user1, user2, user3, user4]:
            self.add_event('user_created', 1, user=user)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup', 'user'],
                              [['false', user1], ['false', user2], ['false', user3], ['true', user4]])
        self.assertTableState(RealmCount, ['value', 'subgroup', 'realm'],
                              [[2, 'false', self.default_realm], [1, 'false', second_realm],
                               [1, 'true', second_realm]])
        self.assertTableState(InstallationCount, ['value', 'subgroup'], [[3, 'false'], [1, 'true']])
        self.assertTableState(StreamCount, [], [])

    # Not that interesting a test if you look at the SQL query at hand, but
    # almost all other CountStats have a start_date, so guarding against a
    # refactoring that adds that in.
    # Also tests the slightly more end-to-end process_count_stat rather than
    # do_fill_count_stat_at_hour. E.g. if one changes self.stat.frequency to
    # CountStat.HOUR from CountStat.DAY, this will fail, while many of the
    # tests above will not.
    def test_update_from_two_days_ago(self) -> None:
        self.add_event('user_created', 2)
        process_count_stat(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup', 'end_time'],
                              [['false', self.TIME_ZERO], ['false', self.TIME_ZERO-self.DAY]])

    # User with no relevant activity could happen e.g. for a system bot that
    # doesn't go through do_create_user. Mainly just want to make sure that
    # that situation doesn't throw an error.
    def test_empty_realm_or_user_with_no_relevant_activity(self) -> None:
        self.add_event('unrelated', 1)
        self.create_user()  # also test a user with no RealmAuditLog entries
        Realm.objects.create(string_id='moo', name='moo')
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, [], [])

    def test_max_audit_entry_is_unrelated(self) -> None:
        self.add_event('user_created', 1)
        self.add_event('unrelated', .5)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup'], [['false']])

    # Simultaneous related audit entries should not be allowed, and so not testing for that.
    def test_simultaneous_unrelated_audit_entry(self) -> None:
        self.add_event('user_created', 1)
        self.add_event('unrelated', 1)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['subgroup'], [['false']])

    def test_simultaneous_max_audit_entries_of_different_users(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        user3 = self.create_user()
        self.add_event('user_created', .5, user=user1)
        self.add_event('user_created', .5, user=user2)
        self.add_event('user_created', 1, user=user3)
        self.add_event('user_deactivated', .5, user=user3)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(UserCount, ['user', 'subgroup'],
                              [[user1, 'false'], [user2, 'false']])

    def test_end_to_end_with_actions_dot_py(self) -> None:
        user1 = do_create_user('email1', 'password', self.default_realm, 'full_name', 'short_name')
        user2 = do_create_user('email2', 'password', self.default_realm, 'full_name', 'short_name')
        user3 = do_create_user('email3', 'password', self.default_realm, 'full_name', 'short_name')
        user4 = do_create_user('email4', 'password', self.default_realm, 'full_name', 'short_name')
        do_deactivate_user(user2)
        do_activate_user(user3)
        do_reactivate_user(user4)
        end_time = floor_to_day(timezone_now()) + self.DAY
        do_fill_count_stat_at_hour(self.stat, end_time)
        for user in [user1, user3, user4]:
            self.assertTrue(UserCount.objects.filter(
                user=user, property=self.current_property, subgroup='false',
                end_time=end_time, value=1).exists())
        self.assertFalse(UserCount.objects.filter(user=user2).exists())


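# Note: these tests seed the 'active_users_audit:is_bot:day' and
# '15day_actives::day' UserCount rows directly (via the helpers below) rather
# than going through actions.py, since realm_active_humans::day appears to be
# derived from those two stats.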
class TestRealmActiveHumans(AnalyticsTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.stat = COUNT_STATS['realm_active_humans::day']
        self.current_property = self.stat.property

    def mark_audit_active(self, user: UserProfile, end_time: Optional[datetime]=None) -> None:
        if end_time is None:
            end_time = self.TIME_ZERO
        UserCount.objects.create(
            user=user, realm=user.realm, property='active_users_audit:is_bot:day',
            subgroup=ujson.dumps(user.is_bot), end_time=end_time, value=1)

    def mark_15day_active(self, user: UserProfile, end_time: Optional[datetime]=None) -> None:
        if end_time is None:
            end_time = self.TIME_ZERO
        UserCount.objects.create(
            user=user, realm=user.realm, property='15day_actives::day',
            end_time=end_time, value=1)

    def test_basic_boolean_logic(self) -> None:
        user = self.create_user()
        self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
        self.mark_15day_active(user, end_time=self.TIME_ZERO)
        self.mark_audit_active(user, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user, end_time=self.TIME_ZERO + self.DAY)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
        self.assertTableState(RealmCount, ['value', 'end_time'], [[1, self.TIME_ZERO + self.DAY]])

    def test_bots_not_counted(self) -> None:
        bot = self.create_user(is_bot=True)
        self.mark_audit_active(bot)
        self.mark_15day_active(bot)
        do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO)
        self.assertTableState(RealmCount, [], [])

    def test_multiple_users_realms_and_times(self) -> None:
        user1 = self.create_user()
        user2 = self.create_user()
        second_realm = Realm.objects.create(string_id='second', name='second')
        user3 = self.create_user(realm=second_realm)
        user4 = self.create_user(realm=second_realm)
        user5 = self.create_user(realm=second_realm)

        for user in [user1, user2, user3, user4, user5]:
            self.mark_audit_active(user)
            self.mark_15day_active(user)
        for user in [user1, user3, user4]:
            self.mark_audit_active(user, end_time=self.TIME_ZERO - self.DAY)
            self.mark_15day_active(user, end_time=self.TIME_ZERO - self.DAY)

        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
        self.assertTableState(RealmCount, ['value', 'realm', 'end_time'],
                              [[2, self.default_realm, self.TIME_ZERO],
                               [3, second_realm, self.TIME_ZERO],
                               [1, self.default_realm, self.TIME_ZERO - self.DAY],
                               [2, second_realm, self.TIME_ZERO - self.DAY]])

        # Check that adding spurious entries doesn't make a difference
        self.mark_audit_active(user1, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user2, end_time=self.TIME_ZERO + self.DAY)
        self.mark_15day_active(user2, end_time=self.TIME_ZERO - self.DAY)
        self.create_user()
        third_realm = Realm.objects.create(string_id='third', name='third')
        self.create_user(realm=third_realm)

        RealmCount.objects.all().delete()
        for i in [-1, 0, 1]:
            do_fill_count_stat_at_hour(self.stat, self.TIME_ZERO + i*self.DAY)
        self.assertTableState(RealmCount, ['value', 'realm', 'end_time'],
                              [[2, self.default_realm, self.TIME_ZERO],
                               [3, second_realm, self.TIME_ZERO],
                               [1, self.default_realm, self.TIME_ZERO - self.DAY],
                               [2, second_realm, self.TIME_ZERO - self.DAY]])

    def test_end_to_end(self) -> None:
        user1 = do_create_user('email1', 'password', self.default_realm, 'full_name', 'short_name')
        user2 = do_create_user('email2', 'password', self.default_realm, 'full_name', 'short_name')
        do_create_user('email3', 'password', self.default_realm, 'full_name', 'short_name')
        time_zero = floor_to_day(timezone_now()) + self.DAY
        update_user_activity_interval(user1, time_zero)
        update_user_activity_interval(user2, time_zero)
        do_deactivate_user(user2)
        for property in ['active_users_audit:is_bot:day', '15day_actives::day',
                         'realm_active_humans::day']:
            FillState.objects.create(property=property, state=FillState.DONE, end_time=time_zero)
            process_count_stat(COUNT_STATS[property], time_zero+self.DAY)
        self.assertEqual(RealmCount.objects.filter(
            property='realm_active_humans::day', end_time=time_zero+self.DAY, value=1).count(), 1)
        self.assertEqual(RealmCount.objects.filter(property='realm_active_humans::day').count(), 1)