from datetime import datetime, timedelta, timezone
from typing import List, Optional
from unittest import mock

import ujson
from django.http import HttpResponse
from django.utils.timezone import now as timezone_now

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import FillState, RealmCount, UserCount, last_successful_fill
from analytics.views import rewrite_client_arrays, sort_by_totals, sort_client_labels
from corporate.models import get_customer_by_realm
from zerver.lib.actions import do_create_multiuse_invite_link, do_send_realm_reactivation_email
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import reset_emails_in_zulip_realm
from zerver.lib.timestamp import ceiling_to_day, ceiling_to_hour, datetime_to_timestamp
from zerver.models import Client, MultiuseInvite, PreregistrationUser, get_realm
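# The tests below exercise the analytics views: the /stats pages, the
# /json/analytics/chart_data endpoints, the /activity/support tool, and the
# pure helpers used to assemble chart data.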


class TestStatsEndpoint(ZulipTestCase):
    def test_stats(self) -> None:
        self.user = self.example_user('hamlet')
        self.login_user(self.user)
        result = self.client_get('/stats')
        self.assertEqual(result.status_code, 200)
        # Check that we get something back
        self.assert_in_response("Zulip analytics for", result)

    def test_guest_user_cant_access_stats(self) -> None:
        self.user = self.example_user('polonius')
        self.login_user(self.user)
        result = self.client_get('/stats')
        self.assert_json_error(result, "Not allowed for guest users", 400)

        result = self.client_get('/json/analytics/chart_data')
        self.assert_json_error(result, "Not allowed for guest users", 400)

    def test_stats_for_realm(self) -> None:
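        # Per-realm stats are restricted to server staff; ordinary users are
        # redirected rather than shown the page.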
        user = self.example_user('hamlet')
        self.login_user(user)

        result = self.client_get('/stats/realm/zulip/')
        self.assertEqual(result.status_code, 302)

        user = self.example_user('hamlet')
        user.is_staff = True
        user.save(update_fields=['is_staff'])

        result = self.client_get('/stats/realm/not_existing_realm/')
        self.assertEqual(result.status_code, 302)

        result = self.client_get('/stats/realm/zulip/')
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)

    def test_stats_for_installation(self) -> None:
        user = self.example_user('hamlet')
        self.login_user(user)

        result = self.client_get('/stats/installation')
        self.assertEqual(result.status_code, 302)

        user = self.example_user('hamlet')
        user.is_staff = True
        user.save(update_fields=['is_staff'])

        result = self.client_get('/stats/installation')
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("Zulip analytics for", result)


class TestGetChartData(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.realm = get_realm('zulip')
        self.user = self.example_user('hamlet')
        self.login_user(self.user)
        self.end_times_hour = [ceiling_to_hour(self.realm.date_created) + timedelta(hours=i)
                               for i in range(4)]
        self.end_times_day = [ceiling_to_day(self.realm.date_created) + timedelta(days=i)
                              for i in range(4)]

    def data(self, i: int) -> List[int]:
        return [0, 0, i, 0]

    def insert_data(self, stat: CountStat, realm_subgroups: List[Optional[str]],
                    user_subgroups: List[str]) -> None:
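        # Create one RealmCount row per realm subgroup (value 100 + i) and one
        # UserCount row per user subgroup (value 200 + i) in a single time
        # bucket, then mark the stat as filled through the last end time.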
        if stat.frequency == CountStat.HOUR:
            insert_time = self.end_times_hour[2]
            fill_time = self.end_times_hour[-1]
        if stat.frequency == CountStat.DAY:
            insert_time = self.end_times_day[2]
            fill_time = self.end_times_day[-1]

        RealmCount.objects.bulk_create([
            RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                       value=100+i, realm=self.realm)
            for i, subgroup in enumerate(realm_subgroups)])
        UserCount.objects.bulk_create([
            UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                      value=200+i, realm=self.realm, user=self.user)
            for i, subgroup in enumerate(user_subgroups)])
        FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)

    def test_number_of_humans(self) -> None:
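        # The number_of_humans chart is assembled from three stats, one per
        # series: '_1day', '_15day', and 'all_time'.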
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)},
            'display_order': None,
            'result': 'success',
        })

    def test_messages_sent_over_time(self) -> None:
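        # 'everyone' is built from RealmCount rows; 'user' only reflects the
        # logged-in user's UserCount rows, so its bot series stays zero here.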
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        self.insert_data(stat, ['true', 'false'], ['false'])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
            'frequency': CountStat.HOUR,
            'everyone': {'bot': self.data(100), 'human': self.data(101)},
            'user': {'bot': self.data(0), 'human': self.data(200)},
            'display_order': None,
            'result': 'success',
        })

    def test_messages_sent_by_message_type(self) -> None:
        stat = COUNT_STATS['messages_sent:message_type:day']
        self.insert_data(stat, ['public_stream', 'private_message'],
                         ['public_stream', 'private_stream'])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_message_type'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'Public streams': self.data(100), 'Private streams': self.data(0),
                         'Private messages': self.data(101), 'Group private messages': self.data(0)},
            'user': {'Public streams': self.data(200), 'Private streams': self.data(201),
                     'Private messages': self.data(0), 'Group private messages': self.data(0)},
            'display_order': ['Private messages', 'Public streams', 'Private streams', 'Group private messages'],
            'result': 'success',
        })

    def test_messages_sent_by_client(self) -> None:
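        # display_order for client charts is produced by sort_client_labels,
        # which merges the realm-wide and per-user orderings from sort_by_totals.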
        stat = COUNT_STATS['messages_sent:client:day']
        client1 = Client.objects.create(name='client 1')
        client2 = Client.objects.create(name='client 2')
        client3 = Client.objects.create(name='client 3')
        client4 = Client.objects.create(name='client 4')
        self.insert_data(stat, [client4.id, client3.id, client2.id],
                         [client3.id, client1.id])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_client'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_day],
            'frequency': CountStat.DAY,
            'everyone': {'client 4': self.data(100), 'client 3': self.data(101),
                         'client 2': self.data(102)},
            'user': {'client 3': self.data(200), 'client 1': self.data(201)},
            'display_order': ['client 1', 'client 2', 'client 3', 'client 4'],
            'result': 'success',
        })

    def test_messages_read_over_time(self) -> None:
        stat = COUNT_STATS['messages_read::hour']
        self.insert_data(stat, [None], [])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_read_over_time'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data, {
            'msg': '',
            'end_times': [datetime_to_timestamp(dt) for dt in self.end_times_hour],
            'frequency': CountStat.HOUR,
            'everyone': {'read': self.data(100)},
            'user': {'read': self.data(0)},
            'display_order': None,
            'result': 'success',
        })

    def test_include_empty_subgroups(self) -> None:
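        # A completed FillState with no Count rows should still yield every
        # expected subgroup in the response, zero-filled.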
        FillState.objects.create(
            property='realm_active_humans::day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {"_1day": [0], "_15day": [0], "all_time": [0]})
        self.assertFalse('user' in data)

        FillState.objects.create(
            property='messages_sent:is_bot:hour', end_time=self.end_times_hour[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {'human': [0], 'bot': [0]})
        self.assertEqual(data['user'], {'human': [0], 'bot': [0]})

        FillState.objects.create(
            property='messages_sent:message_type:day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_message_type'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {
            'Public streams': [0], 'Private streams': [0],
            'Private messages': [0], 'Group private messages': [0]})
        self.assertEqual(data['user'], {
            'Public streams': [0], 'Private streams': [0],
            'Private messages': [0], 'Group private messages': [0]})

        FillState.objects.create(
            property='messages_sent:client:day', end_time=self.end_times_day[0],
            state=FillState.DONE)
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_by_client'})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['everyone'], {})
        self.assertEqual(data['user'], {})

    def test_start_and_end(self) -> None:
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        end_time_timestamps = [datetime_to_timestamp(dt) for dt in self.end_times_day]

        # valid start and end
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'start': end_time_timestamps[1],
                                  'end': end_time_timestamps[2]})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['end_times'], end_time_timestamps[1:3])
        self.assertEqual(data['everyone'], {'_1day': [0, 100], '_15day': [0, 100], 'all_time': [0, 100]})

        # start later than end
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'start': end_time_timestamps[2],
                                  'end': end_time_timestamps[1]})
        self.assert_json_error_contains(result, 'Start time is later than')

    def test_min_length(self) -> None:
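        # min_length pads the series backwards in time when fewer buckets than
        # requested have been filled.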
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['1day_actives::day']
        self.insert_data(stat, [None], [])
        stat = COUNT_STATS['active_users_audit:is_bot:day']
        self.insert_data(stat, ['false'], [])
        # test min_length is too short to change anything
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'min_length': 2})
        self.assert_json_success(result)
        data = result.json()
        self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in self.end_times_day])
        self.assertEqual(data['everyone'], {'_1day': self.data(100), '_15day': self.data(100), 'all_time': self.data(100)})
        # test min_length larger than filled data
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'number_of_humans',
                                  'min_length': 5})
        self.assert_json_success(result)
        data = result.json()
        end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
        self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
        self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)})

    def test_non_existent_chart(self) -> None:
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'does_not_exist'})
        self.assert_json_error_contains(result, 'Unknown chart name')

    def test_analytics_not_running(self) -> None:
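        # chart_data reports an error when the analytics pipeline looks stale:
        # no FillState at all, or a FillState that lags too far behind the
        # realm's creation date.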
        realm = get_realm("zulip")

        self.assertEqual(FillState.objects.count(), 0)

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with mock.patch('logging.warning'):
            result = self.client_get('/json/analytics/chart_data',
                                     {'chart_name': 'messages_sent_over_time'})
        self.assert_json_error_contains(result, 'No analytics data available')

        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with mock.patch('logging.warning'):
            result = self.client_get('/json/analytics/chart_data',
                                     {'chart_name': 'messages_sent_over_time'})
        self.assert_json_error_contains(result, 'No analytics data available')

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)

        realm.date_created = timezone_now() - timedelta(hours=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)

        end_time = timezone_now() - timedelta(days=5)
        fill_state = FillState.objects.create(property='messages_sent:is_bot:hour', end_time=end_time,
                                              state=FillState.DONE)

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        with mock.patch('logging.warning'):
            result = self.client_get('/json/analytics/chart_data',
                                     {'chart_name': 'messages_sent_over_time'})
        self.assert_json_error_contains(result, 'No analytics data available')

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)

        end_time = timezone_now() - timedelta(days=2)
        fill_state.end_time = end_time
        fill_state.save(update_fields=["end_time"])

        realm.date_created = timezone_now() - timedelta(days=3)
        realm.save(update_fields=["date_created"])
        result = self.client_get('/json/analytics/chart_data',
                                 {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)

        realm.date_created = timezone_now() - timedelta(days=1, hours=2)
        realm.save(update_fields=["date_created"])
        with mock.patch('logging.warning'):
            result = self.client_get('/json/analytics/chart_data',
                                     {'chart_name': 'messages_sent_over_time'})
        self.assert_json_error_contains(result, 'No analytics data available')

        realm.date_created = timezone_now() - timedelta(days=1, minutes=10)
        realm.save(update_fields=["date_created"])
        result = self.client_get('/json/analytics/chart_data', {'chart_name': 'messages_sent_over_time'})
        self.assert_json_success(result)

    def test_get_chart_data_for_realm(self) -> None:
        user = self.example_user('hamlet')
        self.login_user(user)

        result = self.client_get('/json/analytics/chart_data/realm/zulip',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, "Must be an server administrator", 400)

        user = self.example_user('hamlet')
        user.is_staff = True
        user.save(update_fields=['is_staff'])
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])

        result = self.client_get('/json/analytics/chart_data/realm/not_existing_realm',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, 'Invalid organization', 400)

        result = self.client_get('/json/analytics/chart_data/realm/zulip',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)

    def test_get_chart_data_for_installation(self) -> None:
        user = self.example_user('hamlet')
        self.login_user(user)

        result = self.client_get('/json/analytics/chart_data/installation',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_error(result, "Must be an server administrator", 400)

        user = self.example_user('hamlet')
        user.is_staff = True
        user.save(update_fields=['is_staff'])
        stat = COUNT_STATS['realm_active_humans::day']
        self.insert_data(stat, [None], [])

        result = self.client_get('/json/analytics/chart_data/installation',
                                 {'chart_name': 'number_of_humans'})
        self.assert_json_success(result)


class TestSupportEndpoint(ZulipTestCase):
    def test_search(self) -> None:
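        # Exercise the /activity/support search box end to end; each nested
        # check_* helper asserts on fragments of the rendered HTML for one kind
        # of search result.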
        reset_emails_in_zulip_realm()

        def check_hamlet_user_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>',
                                             '<b>Email</b>: hamlet@zulip.com', '<b>Is active</b>: True<br>',
                                             '<b>Admins</b>: desdemona@zulip.com, iago@zulip.com\n',
                                             'class="copy-button" data-copytext="desdemona@zulip.com, iago@zulip.com"',
                                             ], result)

        def check_zulip_realm_query_result(result: HttpResponse) -> None:
            zulip_realm = get_realm("zulip")
            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{zulip_realm.id}"',
                                             'Zulip Dev</h3>',
                                             '<option value="1" selected>Self Hosted</option>',
                                             '<option value="2" >Limited</option>',
                                             'input type="number" name="discount" value="None"',
                                             '<option value="active" selected>Active</option>',
                                             '<option value="deactivated" >Deactivated</option>',
                                             'scrub-realm-button">',
                                             'data-string-id="zulip"'], result)

        def check_lear_realm_query_result(result: HttpResponse) -> None:
            lear_realm = get_realm("lear")
            self.assert_in_success_response([f'<input type="hidden" name="realm_id" value="{lear_realm.id}"',
                                             'Lear & Co.</h3>',
                                             '<option value="1" selected>Self Hosted</option>',
                                             '<option value="2" >Limited</option>',
                                             'input type="number" name="discount" value="None"',
                                             '<option value="active" selected>Active</option>',
                                             '<option value="deactivated" >Deactivated</option>',
                                             'scrub-realm-button">',
                                             'data-string-id="lear"'], result)

        def check_preregistration_user_query_result(result: HttpResponse, email: str, invite: bool=False) -> None:
            self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
                                             f'<b>Email</b>: {email}',
                                             ], result)
            if invite:
                self.assert_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(['<b>Expires in</b>: 1\xa0week, 3',
                                                 '<b>Status</b>: Link has never been clicked'], result)
                self.assert_in_success_response([], result)
            else:
                self.assert_not_in_success_response(['<span class="label">invite</span>'], result)
                self.assert_in_success_response(['<b>Expires in</b>: 1\xa0day',
                                                 '<b>Status</b>: Link has never been clicked'], result)

        def check_realm_creation_query_result(result: HttpResponse, email: str) -> None:
            self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
                                             '<span class="label">realm creation</span>\n',
                                             '<b>Link</b>: http://testserver/accounts/do_confirm/',
                                             '<b>Expires in</b>: 1\xa0day<br>\n',
                                             ], result)

        def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(['<span class="label">multiuse invite</span>\n',
                                             '<b>Link</b>: http://zulip.testserver/join/',
                                             '<b>Expires in</b>: 1\xa0week, 3',
                                             ], result)

        def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
            self.assert_in_success_response(['<span class="label">realm reactivation</span>\n',
                                             '<b>Link</b>: http://zulip.testserver/reactivate/',
                                             '<b>Expires in</b>: 1\xa0day',
                                             ], result)

        self.login('cordelia')

        result = self.client_get("/activity/support")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login('iago')

        result = self.client_get("/activity/support")
        self.assert_in_success_response(['<input type="text" name="q" class="input-xxlarge search-query"'], result)

        result = self.client_get("/activity/support", {"q": "hamlet@zulip.com"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear"})
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "http://lear.testserver"})
        check_lear_realm_query_result(result)

        with self.settings(REALM_HOSTS={'zulip': 'localhost'}):
            result = self.client_get("/activity/support", {"q": "http://localhost"})
            check_zulip_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "hamlet@zulip.com, lear"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

        result = self.client_get("/activity/support", {"q": "lear, Hamlet <hamlet@zulip.com>"})
        check_hamlet_user_query_result(result)
        check_zulip_realm_query_result(result)
        check_lear_realm_query_result(result)

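        # Confirmation-style objects (invites, realm creation links, multiuse
        # links, realm reactivation links) should also show up in search results.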
        self.client_post('/accounts/home/', {'email': self.nonreg_email("test")})
        self.login('iago')
        result = self.client_get("/activity/support", {"q": self.nonreg_email("test")})
        check_preregistration_user_query_result(result, self.nonreg_email("test"))
        check_zulip_realm_query_result(result)

        stream_ids = [self.get_stream_id("Denmark")]
        invitee_emails = [self.nonreg_email("test1")]
        self.client_post("/json/invites", {"invitee_emails": invitee_emails,
                                           "stream_ids": ujson.dumps(stream_ids),
                                           "invite_as": PreregistrationUser.INVITE_AS['MEMBER']})
        result = self.client_get("/activity/support", {"q": self.nonreg_email("test1")})
        check_preregistration_user_query_result(result, self.nonreg_email("test1"), invite=True)
        check_zulip_realm_query_result(result)

        email = self.nonreg_email('alice')
        self.client_post('/new/', {'email': email})
        result = self.client_get("/activity/support", {"q": email})
        check_realm_creation_query_result(result, email)

        do_create_multiuse_invite_link(self.example_user("hamlet"), invited_as=1)
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_multiuse_invite_link_query_result(result)
        check_zulip_realm_query_result(result)
        MultiuseInvite.objects.all().delete()

        do_send_realm_reactivation_email(get_realm("zulip"))
        result = self.client_get("/activity/support", {"q": "zulip"})
        check_realm_reactivation_link_query_result(result)
        check_zulip_realm_query_result(result)

    def test_change_plan_type(self) -> None:
        cordelia = self.example_user('cordelia')
        self.login_user(cordelia)

        result = self.client_post("/activity/support", {"realm_id": f"{cordelia.realm_id}", "plan_type": "2"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        with mock.patch("analytics.views.do_change_plan_type") as m:
            result = self.client_post("/activity/support", {"realm_id": f"{iago.realm_id}", "plan_type": "2"})
            m.assert_called_once_with(get_realm("zulip"), 2)
            self.assert_in_success_response(["Plan type of Zulip Dev changed from self hosted to limited"], result)

    def test_attach_discount(self) -> None:
        cordelia = self.example_user('cordelia')
        lear_realm = get_realm('lear')
        self.login_user(cordelia)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login('iago')

        with mock.patch("analytics.views.attach_discount_to_realm") as m:
            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
            m.assert_called_once_with(get_realm("lear"), 25)
            self.assert_in_success_response(["Discount of Lear & Co. changed to 25 from None"], result)

    def test_change_sponsorship_status(self) -> None:
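        # Toggling sponsorship_pending from /activity/support creates (or
        # updates) the realm's Customer record.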
        lear_realm = get_realm("lear")
        self.assertIsNone(get_customer_by_realm(lear_realm))

        cordelia = self.example_user('cordelia')
        self.login_user(cordelia)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
                                                        "sponsorship_pending": "true"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        iago = self.example_user("iago")
        self.login_user(iago)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
                                                        "sponsorship_pending": "true"})
        self.assert_in_success_response(["Lear & Co. marked as pending sponsorship."], result)
        customer = get_customer_by_realm(lear_realm)
        assert(customer is not None)
        self.assertTrue(customer.sponsorship_pending)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}",
                                                        "sponsorship_pending": "false"})
        self.assert_in_success_response(["Lear & Co. is no longer pending sponsorship."], result)
        customer = get_customer_by_realm(lear_realm)
        assert(customer is not None)
        self.assertFalse(customer.sponsorship_pending)

    def test_activate_or_deactivate_realm(self) -> None:
        cordelia = self.example_user('cordelia')
        lear_realm = get_realm('lear')
        self.login_user(cordelia)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login('iago')

        with mock.patch("analytics.views.do_deactivate_realm") as m:
            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "deactivated"})
            m.assert_called_once_with(lear_realm, self.example_user("iago"))
            self.assert_in_success_response(["Lear & Co. deactivated"], result)

        with mock.patch("analytics.views.do_send_realm_reactivation_email") as m:
            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "status": "active"})
            m.assert_called_once_with(lear_realm)
            self.assert_in_success_response(["Realm reactivation email sent to admins of Lear"], result)

    def test_scrub_realm(self) -> None:
        cordelia = self.example_user('cordelia')
        lear_realm = get_realm('lear')
        self.login_user(cordelia)

        result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "discount": "25"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

        self.login('iago')

        with mock.patch("analytics.views.do_scrub_realm") as m:
            result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}", "scrub_realm": "scrub_realm"})
            m.assert_called_once_with(lear_realm)
            self.assert_in_success_response(["Lear & Co. scrubbed"], result)

        with mock.patch("analytics.views.do_scrub_realm") as m:
            with self.assertRaises(AssertionError):
                result = self.client_post("/activity/support", {"realm_id": f"{lear_realm.id}"})
            m.assert_not_called()


class TestGetChartDataHelpers(ZulipTestCase):
    # last_successful_fill is in analytics/models.py, but get_chart_data is
    # the only function that uses it at the moment
    def test_last_successful_fill(self) -> None:
        self.assertIsNone(last_successful_fill('non-existent'))
        a_time = datetime(2016, 3, 14, 19, tzinfo=timezone.utc)
        one_hour_before = datetime(2016, 3, 14, 18, tzinfo=timezone.utc)
        fillstate = FillState.objects.create(property='property', end_time=a_time,
                                             state=FillState.DONE)
        self.assertEqual(last_successful_fill('property'), a_time)
        fillstate.state = FillState.STARTED
        fillstate.save()
        self.assertEqual(last_successful_fill('property'), one_hour_before)

    def test_sort_by_totals(self) -> None:
        empty: List[int] = []
        value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty}
        self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd'])

    def test_sort_client_labels(self) -> None:
        data = {'everyone': {'a': [16], 'c': [15], 'b': [14], 'e': [13], 'd': [12], 'h': [11]},
                'user': {'a': [6], 'b': [5], 'd': [4], 'e': [3], 'f': [2], 'g': [1]}}
        self.assertEqual(sort_client_labels(data), ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'])


class TestTimeRange(ZulipTestCase):
    def test_time_range(self) -> None:
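        # time_range produces the bucket boundaries covering [start, end],
        # padded backwards in time until at least min_length entries exist.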
        HOUR = timedelta(hours=1)
        DAY = timedelta(days=1)

        a_time = datetime(2016, 3, 14, 22, 59, tzinfo=timezone.utc)
        floor_hour = datetime(2016, 3, 14, 22, tzinfo=timezone.utc)
        floor_day = datetime(2016, 3, 14, tzinfo=timezone.utc)

        # test start == end
        self.assertEqual(time_range(a_time, a_time, CountStat.HOUR, None), [])
        self.assertEqual(time_range(a_time, a_time, CountStat.DAY, None), [])
        # test start == end == boundary, and min_length == 0
        self.assertEqual(time_range(floor_hour, floor_hour, CountStat.HOUR, 0), [floor_hour])
        self.assertEqual(time_range(floor_day, floor_day, CountStat.DAY, 0), [floor_day])
        # test start and end on different boundaries
        self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, None),
                         [floor_hour, floor_hour+HOUR])
        self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, None),
                         [floor_day, floor_day+DAY])
        # test min_length
        self.assertEqual(time_range(floor_hour, floor_hour+HOUR, CountStat.HOUR, 4),
                         [floor_hour-2*HOUR, floor_hour-HOUR, floor_hour, floor_hour+HOUR])
        self.assertEqual(time_range(floor_day, floor_day+DAY, CountStat.DAY, 4),
                         [floor_day-2*DAY, floor_day-DAY, floor_day, floor_day+DAY])


class TestMapArrays(ZulipTestCase):
    def test_map_arrays(self) -> None:
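        # rewrite_client_arrays folds legacy client names into their current
        # labels, summing the series that map to the same label.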
        a = {'desktop app 1.0': [1, 2, 3],
             'desktop app 2.0': [10, 12, 13],
             'desktop app 3.0': [21, 22, 23],
             'website': [1, 2, 3],
             'ZulipiOS': [1, 2, 3],
             'ZulipElectron': [2, 5, 7],
             'ZulipMobile': [1, 5, 7],
             'ZulipPython': [1, 2, 3],
             'API: Python': [1, 2, 3],
             'SomethingRandom': [4, 5, 6],
             'ZulipGitHubWebhook': [7, 7, 9],
             'ZulipAndroid': [64, 63, 65]}
        result = rewrite_client_arrays(a)
        self.assertEqual(result,
                         {'Old desktop app': [32, 36, 39],
                          'Old iOS app': [1, 2, 3],
                          'Desktop app': [2, 5, 7],
                          'Mobile app': [1, 5, 7],
                          'Website': [1, 2, 3],
                          'Python API': [2, 4, 6],
                          'SomethingRandom': [4, 5, 6],
                          'GitHub webhook': [7, 7, 9],
                          'Old Android app': [64, 63, 65]})