from __future__ import absolute_import, division

from django.conf import settings
from django.core import urlresolvers
from django.db import connection
from django.db.models import Sum
from django.db.models.query import QuerySet
from django.http import HttpResponseNotFound, HttpRequest, HttpResponse
from django.template import RequestContext, loader
from django.utils import timezone
from django.utils.translation import ugettext as _
from jinja2 import Markup as mark_safe

from analytics.lib.counts import CountStat, process_count_stat, COUNT_STATS
from analytics.lib.time_utils import time_range
from analytics.models import BaseCount, InstallationCount, RealmCount, \
    UserCount, StreamCount, last_successful_fill

from zerver.decorator import has_request_variables, REQ, zulip_internal, \
    zulip_login_required, to_non_negative_int, to_utc_datetime
from zerver.lib.request import JsonableError
from zerver.lib.response import json_success
from zerver.lib.timestamp import ceiling_to_hour, ceiling_to_day, timestamp_to_datetime
from zerver.models import Realm, UserProfile, UserActivity, \
    UserActivityInterval, Client
from zproject.jinja2 import render_to_response

from collections import defaultdict
from datetime import datetime, timedelta
import itertools
import json
import logging
import pytz
import re
import time

from six.moves import filter, map, range, zip
from typing import Any, Dict, List, Tuple, Optional, Callable, Type, \
    Union, Text, Set

@zulip_login_required
def stats(request):
    # type: (HttpRequest) -> HttpResponse
    return render_to_response('analytics/stats.html',
                              context=dict(realm_name=request.user.realm.name))

@has_request_variables
def get_chart_data(request, user_profile, chart_name=REQ(),
                   min_length=REQ(converter=to_non_negative_int, default=None),
                   start=REQ(converter=to_utc_datetime, default=None),
                   end=REQ(converter=to_utc_datetime, default=None)):
    # type: (HttpRequest, UserProfile, Text, Optional[int], Optional[datetime], Optional[datetime]) -> HttpResponse
    if chart_name == 'number_of_humans':
        stat = COUNT_STATS['active_users:is_bot:day']
        tables = [RealmCount]
        subgroups = ['false', 'true']
        labels = ['human', 'bot']
        labels_sort_function = None
        include_empty_subgroups = [True]
    elif chart_name == 'messages_sent_over_time':
        stat = COUNT_STATS['messages_sent:is_bot:hour']
        tables = [RealmCount, UserCount]
        subgroups = ['false', 'true']
        labels = ['human', 'bot']
        labels_sort_function = None
        include_empty_subgroups = [True, False]
    elif chart_name == 'messages_sent_by_message_type':
        stat = COUNT_STATS['messages_sent:message_type:day']
        tables = [RealmCount, UserCount]
        subgroups = ['public_stream', 'private_stream', 'private_message']
        labels = ['Public Streams', 'Private Streams', 'PMs & Group PMs']
        labels_sort_function = lambda data: sort_by_totals(data['realm'])
        include_empty_subgroups = [True, True]
    elif chart_name == 'messages_sent_by_client':
        stat = COUNT_STATS['messages_sent:client:day']
        tables = [RealmCount, UserCount]
        subgroups = [str(x) for x in Client.objects.values_list('id', flat=True).order_by('id')]
        # These labels are further rewritten by client_label_map.
        labels = list(Client.objects.values_list('name', flat=True).order_by('id'))
        labels_sort_function = sort_client_labels
        include_empty_subgroups = [False, False]
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # A malformed time range here is most likely someone using our API
    # endpoint directly; the /stats page does not pass a start or end in
    # its requests.
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    realm = user_profile.realm
    if start is None:
        start = realm.date_created
    if end is None:
        end = last_successful_fill(stat.property)
    if end is None or start > end:
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time, %s (creation date of realm) is later than the computed "
                        "end time, %s (last successful analytics update). Is the "
                        "update_analytics_counts cron job running?" % (realm.string_id, start, end))
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    end_times = time_range(start, end, stat.frequency, min_length)
    data = {'end_times': end_times, 'frequency': stat.frequency, 'interval': stat.interval}
    for table, include_empty_subgroups_ in zip(tables, include_empty_subgroups):
        if table == RealmCount:
            data['realm'] = get_time_series_by_subgroup(
                stat, RealmCount, realm.id, end_times, subgroups, labels, include_empty_subgroups_)
        if table == UserCount:
            data['user'] = get_time_series_by_subgroup(
                stat, UserCount, user_profile.id, end_times, subgroups, labels, include_empty_subgroups_)
    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
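
# For reference, a sketch of the data dict handed to json_success above
# (field values invented for illustration):
#
#     {
#         'end_times': [...],      # one bucket per entry from time_range()
#         'frequency': 'day',      # stat.frequency
#         'interval': 'gauge',     # stat.interval
#         'realm': {'human': [...], 'bot': [...]},  # label -> per-bucket counts
#         'user': {...},           # only for charts that query UserCount
#         'display_order': None,   # or a label ordering from labels_sort_function
#     }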

def table_filtered_to_id(table, key_id):
    # type: (Type[BaseCount], int) -> QuerySet
    if table == RealmCount:
        return RealmCount.objects.filter(realm_id=key_id)
    elif table == UserCount:
        return UserCount.objects.filter(user_id=key_id)
    elif table == StreamCount:
        return StreamCount.objects.filter(stream_id=key_id)
    elif table == InstallationCount:
        return InstallationCount.objects.all()
    else:
        raise ValueError("Unknown table: %s" % (table,))

def sort_by_totals(value_arrays):
    # type: (Dict[str, List[int]]) -> List[str]
    totals = []
    for label, values in value_arrays.items():
        totals.append((label, sum(values)))
    totals.sort(key=lambda label_total: label_total[1], reverse=True)
    return [label for label, total in totals]
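
# Illustrative example (hypothetical data): labels are ordered by their
# summed series, largest first:
#
#     sort_by_totals({'human': [1, 2, 3], 'bot': [10, 0, 0]})
#     # => ['bot', 'human'], since 10 > 1 + 2 + 3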

# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data):
    # type: (Dict[str, Dict[str, List[int]]]) -> List[str]
    realm_order = sort_by_totals(data['realm'])
    user_order = sort_by_totals(data['user'])
    label_sort_values = {}  # type: Dict[str, float]
    for i, label in enumerate(realm_order):
        label_sort_values[label] = i
    for i, label in enumerate(user_order):
        label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
    return [label for label, sort_value in sorted(label_sort_values.items(),
                                                  key=lambda x: x[1])]
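
# Worked example (hypothetical data): if the realm-wide order is
# ['website', 'API'] and this user's order is ['API', 'website'], the realm
# pass assigns {'website': 0, 'API': 1}, and the user pass then lowers 'API'
# to min(0 - 0.1, 1) = -0.1, so the result is ['API', 'website']. The 0.1
# offset lets a user's own top clients edge ahead of realm-ranked clients
# that would otherwise tie.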

def client_label_map(name):
    # type: (str) -> str
    if name == "website":
        return "Website"
    if name.startswith("desktop app"):
        return "Old desktop app"
    if name == "ZulipAndroid":
        return "Android app"
    if name == "ZulipiOS":
        return "Old iOS app"
    if name == "ZulipMobile":
        return "New iOS app"
    if name in ["ZulipPython", "API: Python"]:
        return "Python API"
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip"):-len("Webhook")] + " webhook"
    # Clients in the dev environment's autogenerated data start with _, so
    # that they are easy to manually drop without affecting other data.
    if settings.DEVELOPMENT and name.startswith("_"):
        return name[1:]
    return name

def rewrite_client_arrays(value_arrays):
    # type: (Dict[str, List[int]]) -> Dict[str, List[int]]
    mapped_arrays = {}  # type: Dict[str, List[int]]
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            # Two raw labels collapsed to the same display label; sum their
            # per-bucket counts elementwise.
            for i in range(0, len(array)):
                mapped_arrays[mapped_label][i] += value_arrays[label][i]
        else:
            mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
    return mapped_arrays
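
# Example of the collapsing behavior (client names chosen for illustration):
#
#     rewrite_client_arrays({'ZulipPython': [1, 0],
#                            'API: Python': [0, 2],
#                            'website': [5, 5]})
#     # => {'Python API': [1, 2], 'Website': [5, 5]}
#
# Both raw names normalize to 'Python API' via client_label_map, so their
# per-bucket counts are summed elementwise.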

def get_time_series_by_subgroup(stat, table, key_id, end_times, subgroups, labels, include_empty_subgroups):
    # type: (CountStat, Type[BaseCount], Optional[int], List[datetime], List[str], Optional[List[str]], bool) -> Dict[str, List[int]]
    if labels is None:
        labels = subgroups
    if len(subgroups) != len(labels):
        raise ValueError("subgroups and labels have lengths %s and %s, which are different." %
                         (len(subgroups), len(labels)))
    queryset = table_filtered_to_id(table, key_id).filter(property=stat.property) \
                                                  .values_list('subgroup', 'end_time', 'value')
    value_dicts = defaultdict(lambda: defaultdict(int))  # type: Dict[Optional[str], Dict[datetime, int]]
    for subgroup, end_time, value in queryset:
        value_dicts[subgroup][end_time] = value
    value_arrays = {}
    for subgroup, label in zip(subgroups, labels):
        if (subgroup in value_dicts) or include_empty_subgroups:
            value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]

    if stat == COUNT_STATS['messages_sent:client:day']:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give them
        # better names.
        return rewrite_client_arrays(value_arrays)
    return value_arrays
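
# Sketch of the transformation (hypothetical rows): with subgroups
# ['false', 'true'], labels ['human', 'bot'], and database rows
#     ('false', t1, 12), ('false', t2, 15), ('true', t1, 3)
# the result is {'human': [12, 15], 'bot': [3, 0]} -- each list aligned
# with end_times, with missing buckets defaulting to 0 via the nested
# defaultdict.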

eastern_tz = pytz.timezone('US/Eastern')

def make_table(title, cols, rows, has_row_class=False):
    # type: (str, List[str], List[Any], bool) -> str

    if not has_row_class:
        def fix_row(row):
            # type: (Any) -> Dict[str, Any]
            return dict(cells=row, row_class=None)
        rows = list(map(fix_row, rows))

    data = dict(title=title, cols=cols, rows=rows)

    content = loader.render_to_string(
        'analytics/ad_hoc_query.html',
        dict(data=data)
    )

    return content

def dictfetchall(cursor):
    # type: (connection.cursor) -> List[Dict[str, Any]]
    """Return all rows from a cursor as a list of dicts."""
    desc = cursor.description
    return [
        dict(list(zip([col[0] for col in desc], row)))
        for row in cursor.fetchall()
    ]
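
# Example: for a query selecting (string_id, cnt), dictfetchall yields
# [{'string_id': 'zulip', 'cnt': 42}, ...] instead of positional tuples
# (values invented for illustration).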

def get_realm_day_counts():
    # type: () -> Dict[str, Dict[str, str]]
    query = '''
        select
            r.string_id,
            (now()::date - pub_date::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            pub_date > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
    '''
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    counts = defaultdict(dict)  # type: Dict[str, Dict[int, int]]
    for row in rows:
        counts[row['string_id']][row['age']] = row['cnt']

    result = {}
    for string_id in counts:
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        min_cnt = min(raw_cnts)
        max_cnt = max(raw_cnts)

        def format_count(cnt):
            # type: (int) -> str
            if cnt == min_cnt:
                good_bad = 'bad'
            elif cnt == max_cnt:
                good_bad = 'good'
            else:
                good_bad = 'neutral'

            return '<td class="number %s">%s</td>' % (good_bad, cnt)

        cnts = ''.join(map(format_count, raw_cnts))
        result[string_id] = dict(cnts=cnts)

    return result
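
# The result maps each realm's string_id to a blob of pre-rendered <td>
# cells, one per day of the trailing week, e.g. (hypothetical):
#
#     {'zulip': {'cnts': '<td class="number good">120</td>...'}}
#
# where the 'good'/'bad'/'neutral' classes mark that realm's best, worst,
# and in-between days.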

def realm_summary_table(realm_minutes):
    # type: (Dict[str, float]) -> str
    query = '''
        SELECT
            realm.string_id,
            coalesce(user_counts.active_user_count, 0) active_user_count,
            coalesce(at_risk_counts.at_risk_count, 0) at_risk_count,
            (
                SELECT
                    count(*)
                FROM zerver_userprofile up
                WHERE up.realm_id = realm.id
                AND is_active
                AND not is_bot
            ) user_profile_count,
            (
                SELECT
                    count(*)
                FROM zerver_userprofile up
                WHERE up.realm_id = realm.id
                AND is_active
                AND is_bot
            ) bot_count
        FROM zerver_realm realm
        LEFT OUTER JOIN
            (
                SELECT
                    up.realm_id realm_id,
                    count(distinct(ua.user_profile_id)) active_user_count
                FROM zerver_useractivity ua
                JOIN zerver_userprofile up
                    ON up.id = ua.user_profile_id
                WHERE
                    query in (
                        '/json/send_message',
                        'send_message_backend',
                        '/api/v1/send_message',
                        '/json/update_pointer',
                        '/json/users/me/pointer'
                    )
                AND
                    last_visit > now() - interval '1 day'
                AND
                    not is_bot
                GROUP BY realm_id
            ) user_counts
            ON user_counts.realm_id = realm.id
        LEFT OUTER JOIN
            (
                SELECT
                    realm_id,
                    count(*) at_risk_count
                FROM (
                    SELECT
                        realm.id as realm_id,
                        up.email
                    FROM zerver_useractivity ua
                    JOIN zerver_userprofile up
                        ON up.id = ua.user_profile_id
                    JOIN zerver_realm realm
                        ON realm.id = up.realm_id
                    WHERE up.is_active
                    AND (not up.is_bot)
                    AND
                        ua.query in (
                            '/json/send_message',
                            'send_message_backend',
                            '/api/v1/send_message',
                            '/json/update_pointer',
                            '/json/users/me/pointer'
                        )
                    GROUP by realm.id, up.email
                    HAVING max(last_visit) between
                        now() - interval '7 day' and
                        now() - interval '1 day'
                ) as at_risk_users
                GROUP BY realm_id
            ) at_risk_counts
            ON at_risk_counts.realm_id = realm.id
        WHERE EXISTS (
            SELECT *
            FROM zerver_useractivity ua
            JOIN zerver_userprofile up
                ON up.id = ua.user_profile_id
            WHERE
                query in (
                    '/json/send_message',
                    '/api/v1/send_message',
                    'send_message_backend',
                    '/json/update_pointer',
                    '/json/users/me/pointer'
                )
            AND
                up.realm_id = realm.id
            AND
                last_visit > now() - interval '2 week'
        )
        ORDER BY active_user_count DESC, string_id ASC
    '''

    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row['history'] = counts[row['string_id']]['cnts']
        except Exception:
            row['history'] = ''

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row['string_id']
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row['hours'] = str(int(hours))
        try:
            row['hours_per_user'] = '%.1f' % (hours / row['active_user_count'],)
        except Exception:
            pass

    # formatting
    for row in rows:
        row['string_id'] = realm_activity_link(row['string_id'])

    # Count active sites
    def meets_goal(row):
        # type: (Dict[str, int]) -> bool
        return row['active_user_count'] >= 5

    num_active_sites = len(list(filter(meets_goal, rows)))

    # create totals
    total_active_user_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_at_risk_count = 0
    for row in rows:
        total_active_user_count += int(row['active_user_count'])
        total_user_profile_count += int(row['user_profile_count'])
        total_bot_count += int(row['bot_count'])
        total_at_risk_count += int(row['at_risk_count'])

    rows.append(dict(
        string_id='Total',
        active_user_count=total_active_user_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        at_risk_count=total_at_risk_count,
    ))

    content = loader.render_to_string(
        'analytics/realm_summary_table.html',
        dict(rows=rows, num_active_sites=num_active_sites)
    )
    return content

def user_activity_intervals():
    # type: () -> Tuple[mark_safe, Dict[str, float]]
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

    all_intervals = UserActivityInterval.objects.filter(
        end__gte=day_start,
        start__lte=day_end
    ).select_related(
        'user_profile',
        'user_profile__realm'
    ).only(
        'start',
        'end',
        'user_profile__email',
        'user_profile__realm__string_id'
    ).order_by(
        'user_profile__realm__string_id',
        'user_profile__email'
    )

    by_string_id = lambda row: row.user_profile.realm.string_id
    by_email = lambda row: row.user_profile.email

    realm_minutes = {}

    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += '<hr>%s\n' % (string_id,)
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
            for interval in intervals:
                # Clip each interval to the 24-hour window before summing.
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += " %-*s%s\n" % (37, email, duration)

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += "\nTotal Duration: %s\n" % (total_duration,)
    output += "\nTotal Duration in minutes: %s\n" % (total_duration.total_seconds() / 60.,)
    output += "Total Duration amortized to a month: %s" % (total_duration.total_seconds() * 30. / 60.,)
    content = mark_safe('<pre>' + output + '</pre>')
    return content, realm_minutes
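
# Clipping example (times invented): with day_start at 00:00 and day_end at
# 24:00, an interval running from 23:00 the previous day to 01:00 today
# contributes min(day_end, end) - max(day_start, start) = one hour, i.e.
# only the portion inside the 24-hour window.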

def sent_messages_report(realm):
    # type: (str) -> str
    title = 'Recently sent messages for ' + realm

    cols = [
        'Date',
        'Humans',
        'Bots'
    ]

    query = '''
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                pub_date::date pub_date,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                (not up.is_bot)
            and
                pub_date > now() - interval '2 week'
            group by
                pub_date::date
            order by
                pub_date::date
        ) humans on
            series.day = humans.pub_date
        left join (
            select
                pub_date::date pub_date,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                up.is_bot
            and
                pub_date > now() - interval '2 week'
            group by
                pub_date::date
            order by
                pub_date::date
        ) bots on
            series.day = bots.pub_date
    '''
    cursor = connection.cursor()
    cursor.execute(query, [realm, realm])
    rows = cursor.fetchall()
    cursor.close()

    return make_table(title, cols, rows)

def ad_hoc_queries():
    # type: () -> List[Dict[str, str]]
    def get_page(query, cols, title):
        # type: (str, List[str], str) -> Dict[str, str]
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        rows = list(map(list, rows))
        cursor.close()

        def fix_rows(i, fixup_func):
            # type: (int, Union[Callable[[Realm], mark_safe], Callable[[datetime], str]]) -> None
            for row in rows:
                row[i] = fixup_func(row[i])

        for i, col in enumerate(cols):
            if col == 'Realm':
                fix_rows(i, realm_activity_link)
            elif col in ['Last time', 'Last visit']:
                fix_rows(i, format_date_for_activity_reports)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title
        )

    pages = []

    ###

    for mobile_type in ['Android', 'ZulipiOS']:
        title = '%s usage' % (mobile_type,)

        query = '''
            select
                realm.string_id,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like '%s'
            group by string_id, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by string_id, up.id, client.name
        ''' % (mobile_type,)

        cols = [
            'Realm',
            'User id',
            'Name',
            'Hits',
            'Last time'
        ]

        pages.append(get_page(query, cols, title))

    ###

    title = 'Desktop users'

    query = '''
        select
            realm.string_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
    '''

    cols = [
        'Realm',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by realm'

    query = '''
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
    '''

    cols = [
        'Realm',
        'Client',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by client'

    query = '''
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
    '''

    cols = [
        'Client',
        'Realm',
        'Hits',
        'Last time'
    ]

    pages.append(get_page(query, cols, title))

    return pages

@zulip_internal
@has_request_variables
def get_activity(request):
    # type: (HttpRequest) -> HttpResponse
    duration_content, realm_minutes = user_activity_intervals()  # type: Tuple[mark_safe, Dict[str, float]]
    counts_content = realm_summary_table(realm_minutes)  # type: str
    data = [
        ('Counts', counts_content),
        ('Durations', duration_content),
    ]
    for page in ad_hoc_queries():
        data.append((page['title'], page['content']))

    title = 'Activity'

    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title=title, is_home=True),
        request=request
    )

def get_user_activity_records_for_realm(realm, is_bot):
    # type: (str, bool) -> QuerySet
    fields = [
        'user_profile__full_name',
        'user_profile__email',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    records = UserActivity.objects.filter(
        user_profile__realm__string_id=realm,
        user_profile__is_active=True,
        user_profile__is_bot=is_bot
    )
    records = records.order_by("user_profile__email", "-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records

def get_user_activity_records_for_email(email):
    # type: (str) -> List[QuerySet]
    fields = [
        'user_profile__full_name',
        'query',
        'client__name',
        'count',
        'last_visit'
    ]

    records = UserActivity.objects.filter(
        user_profile__email=email
    )
    records = records.order_by("-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records

def raw_user_activity_table(records):
    # type: (List[QuerySet]) -> str
    cols = [
        'query',
        'client',
        'count',
        'last_visit'
    ]

    def row(record):
        # type: (QuerySet) -> List[Any]
        return [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit)
        ]

    rows = list(map(row, records))
    title = 'Raw Data'
    return make_table(title, cols, rows)

def get_user_activity_summary(records):
    # type: (List[QuerySet]) -> Dict[str, Dict[str, Any]]
    #: The `Any` above should really be `Union[int, datetime]`, but the full
    #: `Union[Dict[str, Dict[str, int]], Dict[str, Dict[str, datetime]]]`
    #: type would have to be carried through all the inner functions, so we
    #: settle for `Any` here.
    summary = {}  # type: Dict[str, Dict[str, Any]]

    def update(action, record):
        # type: (str, QuerySet) -> None
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit
            )
        else:
            summary[action]['count'] += record.count
            summary[action]['last_visit'] = max(
                summary[action]['last_visit'],
                record.last_visit
            )

    if records:
        summary['name'] = records[0].user_profile.full_name

    for record in records:
        client = record.client.name
        query = record.query

        update('use', record)

        if client == 'API':
            m = re.match('/api/.*/external/(.*)', query)
            if m:
                client = m.group(1)
                update(client, record)

        if client.startswith('desktop'):
            update('desktop', record)
        if client == 'website':
            update('website', record)
        if ('send_message' in query) or re.search('/api/.*/external/.*', query):
            update('send', record)
        if query in ['/json/update_pointer', '/json/users/me/pointer', '/api/v1/update_pointer']:
            update('pointer', record)
        update(client, record)

    return summary
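
# Shape of the returned summary (values invented for illustration):
#
#     {'name': 'Full Name',
#      'use': {'count': 500, 'last_visit': datetime(...)},
#      'send': {'count': 42, 'last_visit': datetime(...)},
#      'website': {'count': 300, 'last_visit': datetime(...)}}
#
# Counts accumulate across records for each action/client key, and
# last_visit keeps the most recent timestamp.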

def format_date_for_activity_reports(date):
    # type: (Optional[datetime]) -> str
    if date:
        return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M')
    else:
        return ''

def user_activity_link(email):
    # type: (str) -> mark_safe
    url_name = 'analytics.views.get_user_activity'
    url = urlresolvers.reverse(url_name, kwargs=dict(email=email))
    email_link = '<a href="%s">%s</a>' % (url, email)
    return mark_safe(email_link)

def realm_activity_link(realm_str):
    # type: (str) -> mark_safe
    url_name = 'analytics.views.get_realm_activity'
    url = urlresolvers.reverse(url_name, kwargs=dict(realm_str=realm_str))
    realm_link = '<a href="%s">%s</a>' % (url, realm_str)
    return mark_safe(realm_link)

def realm_client_table(user_summaries):
    # type: (Dict[str, Dict[str, Dict[str, Any]]]) -> str
    exclude_keys = [
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ]

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = user_activity_link(email)
        name = user_summary['name']
        for k, v in user_summary.items():
            if k in exclude_keys:
                continue
            client = k
            count = v['count']
            last_visit = v['last_visit']
            row = [
                format_date_for_activity_reports(last_visit),
                client,
                name,
                email_link,
                count,
            ]
            rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'Last visit',
        'Client',
        'Name',
        'Email',
        'Count',
    ]

    title = 'Clients'

    return make_table(title, cols, rows)

def user_activity_summary_table(user_summary):
    # type: (Dict[str, Dict[str, Any]]) -> str
    rows = []
    for k, v in user_summary.items():
        if k == 'name':
            continue
        client = k
        count = v['count']
        last_visit = v['last_visit']
        row = [
            format_date_for_activity_reports(last_visit),
            client,
            count,
        ]
        rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'last_visit',
        'client',
        'count',
    ]

    title = 'User Activity'
    return make_table(title, cols, rows)

def realm_user_summary_table(all_records, admin_emails):
    # type: (List[QuerySet], Set[Text]) -> Tuple[Dict[str, Dict[str, Any]], str]
    user_records = {}

    def by_email(record):
        # type: (QuerySet) -> str
        return record.user_profile.email

    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary, k):
        # type: (Dict[str, Dict[str, datetime]], str) -> Optional[datetime]
        if k in user_summary:
            return user_summary[k]['last_visit']
        else:
            return None

    def get_count(user_summary, k):
        # type: (Dict[str, Dict[str, str]], str) -> str
        if k in user_summary:
            return user_summary[k]['count']
        else:
            return ''

    def is_recent(val):
        # type: (Optional[datetime]) -> bool
        age = datetime.now(val.tzinfo) - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email)
        sent_count = get_count(user_summary, 'send')
        cells = [user_summary['name'], email_link, sent_count]
        row_class = ''
        for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']:
            visit = get_last_visit(user_summary, field)
            if field == 'use':
                if visit and is_recent(visit):
                    row_class += ' recently_active'
                if email in admin_emails:
                    row_class += ' admin'
            val = format_date_for_activity_reports(visit)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row):
        # type: (Dict[str, Any]) -> str
        return row['cells'][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        'Name',
        'Email',
        'Total sent',
        'Heard from',
        'Message sent',
        'Pointer motion',
        'Desktop',
        'ZulipiOS',
        'Android',
    ]

    title = 'Summary'

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content

@zulip_internal
def get_realm_activity(request, realm_str):
    # type: (HttpRequest, str) -> HttpResponse
    data = []  # type: List[Tuple[str, str]]
    all_user_records = {}  # type: Dict[str, Any]

    try:
        admins = Realm.objects.get(string_id=realm_str).get_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound("Realm %s does not exist" % (realm_str,))

    admin_emails = {admin.email for admin in admins}

    for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
        all_records = list(get_user_activity_records_for_realm(realm_str, is_bot))

        user_records, content = realm_user_summary_table(all_records, admin_emails)
        all_user_records.update(user_records)

        data += [(page_title, content)]

    page_title = 'Clients'
    content = realm_client_table(all_user_records)
    data += [(page_title, content)]

    page_title = 'History'
    content = sent_messages_report(realm_str)
    data += [(page_title, content)]

    realm_link = 'https://stats1.zulip.net:444/render/?from=-7days'
    realm_link += '&target=stats.gauges.staging.users.active.%s.0_16hr' % (realm_str,)

    title = realm_str
    return render_to_response(
        'analytics/activity.html',
        dict(data=data, realm_link=realm_link, title=title),
        request=request
    )

@zulip_internal
def get_user_activity(request, email):
    # type: (HttpRequest, str) -> HttpResponse
    records = get_user_activity_records_for_email(email)

    data = []  # type: List[Tuple[str, str]]
    user_summary = get_user_activity_summary(records)
    content = user_activity_summary_table(user_summary)

    data += [('Summary', content)]

    content = raw_user_activity_table(records)
    data += [('Info', content)]

    title = email
    return render_to_response(
        'analytics/activity.html',
        dict(data=data, title=title),
        request=request
    )