import itertools
import time
from collections import defaultdict
from contextlib import suppress
from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union

from django.conf import settings
from django.db import connection
from django.http import HttpRequest, HttpResponse
from django.shortcuts import render
from django.template import loader
from django.utils.timezone import now as timezone_now
from markupsafe import Markup
from psycopg2.sql import SQL, Composable, Literal

from analytics.lib.counts import COUNT_STATS
from analytics.views.activity_common import (
    dictfetchall,
    format_date_for_activity_reports,
    make_table,
    realm_activity_link,
    realm_stats_link,
    realm_support_link,
    realm_url_link,
    remote_installation_stats_link,
)
from analytics.views.support import get_plan_name
from zerver.decorator import require_server_admin
from zerver.lib.request import has_request_variables
from zerver.lib.timestamp import timestamp_to_datetime
from zerver.models import Realm, UserActivityInterval, get_org_type_display_name

if settings.BILLING_ENABLED:
    from corporate.lib.stripe import (
        estimate_annual_recurring_revenue_by_realm,
        get_realms_to_default_discount_dict,
    )


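# For each realm, builds a row of HTML <td> cells with per-day message
# counts over the last eight days (today first), keyed by realm
# string_id.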
def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]:
    # Uses index: zerver_message_date_sent_3b5b05d8
    query = SQL(
        """
        select
            r.string_id,
            (now()::date - date_sent::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            date_sent > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
    """
    )
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
    for row in rows:
        counts[row["string_id"]][row["age"]] = row["cnt"]

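    # Style rules: across the previous seven days, the lowest count is
    # "bad", the highest "good", and the rest "neutral"; today's partial
    # count is always "neutral".  min_cnt and max_cnt are read from the
    # enclosing loop below.  For example, format_count(5, "good")
    # renders '<td class="number good">5</td>'.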
    def format_count(cnt: int, style: Optional[str] = None) -> Markup:
        if style is not None:
            good_bad = style
        elif cnt == min_cnt:
            good_bad = "bad"
        elif cnt == max_cnt:
            good_bad = "good"
        else:
            good_bad = "neutral"

        return Markup('<td class="number {good_bad}">{cnt}</td>').format(good_bad=good_bad, cnt=cnt)

    result = {}
    for string_id in counts:
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        min_cnt = min(raw_cnts[1:])
        max_cnt = max(raw_cnts[1:])

        cnts = format_count(raw_cnts[0], "neutral") + Markup().join(map(format_count, raw_cnts[1:]))
        result[string_id] = dict(cnts=cnts)

    return result


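# Builds the HTML summary-of-realms table for the activity page: one
# row per realm plus a leading "Total" row.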
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
    now = timezone_now()

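    # One row per realm, joined with the latest filled RealmCount values
    # for weekly actives, daily actives, human users, and bots.  Realms
    # with no recent human activity are listed only if plan_type is 3
    # (Realm.PLAN_TYPE_STANDARD).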
    query = SQL(
        """
        SELECT
            realm.string_id,
            realm.date_created,
            realm.plan_type,
            realm.org_type,
            coalesce(wau_table.value, 0) wau_count,
            coalesce(dau_table.value, 0) dau_count,
            coalesce(user_count_table.value, 0) user_profile_count,
            coalesce(bot_count_table.value, 0) bot_count
        FROM
            zerver_realm as realm
        LEFT OUTER JOIN (
            SELECT
                value _14day_active_humans,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'realm_active_humans::day'
                AND end_time = %(realm_active_humans_end_time)s
        ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = '7day_actives::day'
                AND end_time = %(seven_day_actives_end_time)s
        ) as wau_table ON realm.id = wau_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = '1day_actives::day'
                AND end_time = %(one_day_actives_end_time)s
        ) as dau_table ON realm.id = dau_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'active_users_audit:is_bot:day'
                AND subgroup = 'false'
                AND end_time = %(active_users_audit_end_time)s
        ) as user_count_table ON realm.id = user_count_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'active_users_audit:is_bot:day'
                AND subgroup = 'true'
                AND end_time = %(active_users_audit_end_time)s
        ) as bot_count_table ON realm.id = bot_count_table.realm_id
        WHERE
            _14day_active_humans IS NOT NULL
            or realm.plan_type = 3
        ORDER BY
            dau_count DESC,
            string_id ASC
    """
    )

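    # The end_time parameters pin each subquery to the hour its
    # CountStat last filled successfully, so a partially filled hour is
    # never reported.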
    cursor = connection.cursor()
    cursor.execute(
        query,
        {
            "realm_active_humans_end_time": COUNT_STATS[
                "realm_active_humans::day"
            ].last_successful_fill(),
            "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
            "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
            "active_users_audit_end_time": COUNT_STATS[
                "active_users_audit:is_bot:day"
            ].last_successful_fill(),
        },
    )
    rows = dictfetchall(cursor)
    cursor.close()

    for row in rows:
        row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
        row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
        row["is_new"] = row["age_days"] < 12 * 7

    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row["history"] = counts[row["string_id"]]["cnts"]
        except Exception:
            row["history"] = ""

    # estimate annual subscription revenue
    total_arr = 0
    if settings.BILLING_ENABLED:
        estimated_arrs = estimate_annual_recurring_revenue_by_realm()
        realms_to_default_discount = get_realms_to_default_discount_dict()

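        # effective_rate is the percentage of list price the realm pays
        # after any default discount; it is left blank where the notion
        # does not apply.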
        for row in rows:
            row["plan_type_string"] = get_plan_name(row["plan_type"])

            string_id = row["string_id"]

            if string_id in estimated_arrs:
                row["arr"] = estimated_arrs[string_id]

            if row["plan_type"] in [Realm.PLAN_TYPE_STANDARD, Realm.PLAN_TYPE_PLUS]:
                row["effective_rate"] = 100 - int(realms_to_default_discount.get(string_id, 0))
            elif row["plan_type"] == Realm.PLAN_TYPE_STANDARD_FREE:
                row["effective_rate"] = 0
            elif (
                row["plan_type"] == Realm.PLAN_TYPE_LIMITED
                and string_id in realms_to_default_discount
            ):
                row["effective_rate"] = 100 - int(realms_to_default_discount[string_id])
            else:
                row["effective_rate"] = ""

        total_arr += sum(estimated_arrs.values())

    for row in rows:
        row["org_type_string"] = get_org_type_display_name(row["org_type"])

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row["string_id"]
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row["hours"] = str(int(hours))
        with suppress(Exception):
            row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])

    # formatting
    for row in rows:
        row["realm_url"] = realm_url_link(row["string_id"])
        row["stats_link"] = realm_stats_link(row["string_id"])
        row["support_link"] = realm_support_link(row["string_id"])
        row["string_id"] = realm_activity_link(row["string_id"])

    # Count active sites
    num_active_sites = sum(row["dau_count"] >= 5 for row in rows)

    # create totals
    total_dau_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_wau_count = 0
    for row in rows:
        total_dau_count += int(row["dau_count"])
        total_user_profile_count += int(row["user_profile_count"])
        total_bot_count += int(row["bot_count"])
        total_wau_count += int(row["wau_count"])

    total_row = dict(
        string_id="Total",
        plan_type_string="",
        org_type_string="",
        effective_rate="",
        arr=total_arr,
        realm_url="",
        stats_link="",
        support_link="",
        date_created_day="",
        dau_count=total_dau_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        wau_count=total_wau_count,
    )

    rows.insert(0, total_row)

    content = loader.render_to_string(
        "analytics/realm_summary_table.html",
        dict(
            rows=rows,
            num_active_sites=num_active_sites,
            utctime=now.strftime("%Y-%m-%d %H:%M %Z"),
            billing_enabled=settings.BILLING_ENABLED,
        ),
    )
    return content


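# Reports how long each user was connected during the last 24 hours.
# Returns the rendered <pre> block and a map from realm string_id to
# total online minutes, which realm_summary_table() consumes.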
def user_activity_intervals() -> Tuple[Markup, Dict[str, float]]:
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = Markup()
    output += "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

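    # Fetch every interval overlapping the window, ordered so that the
    # itertools.groupby calls below see each realm (and each user within
    # it) as one contiguous run.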
    all_intervals = (
        UserActivityInterval.objects.filter(
            end__gte=day_start,
            start__lte=day_end,
        )
        .select_related(
            "user_profile",
            "user_profile__realm",
        )
        .only(
            "start",
            "end",
            "user_profile__delivery_email",
            "user_profile__realm__string_id",
        )
        .order_by(
            "user_profile__realm__string_id",
            "user_profile__delivery_email",
        )
    )

    by_string_id = lambda row: row.user_profile.realm.string_id
    by_email = lambda row: row.user_profile.delivery_email

    realm_minutes = {}

    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += Markup("<hr>") + f"{string_id}\n"
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
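            # Clamp each interval to the 24-hour window so time outside
            # it is not counted.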
            for interval in intervals:
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += f"  {email:<37}{duration}\n"

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += f"\nTotal duration: {total_duration}\n"
    output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
    output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = Markup("<pre>{}</pre>").format(output)
    return content, realm_minutes


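# The remaining activity-page tables, each rendered from a hand-written
# SQL query: mobile and desktop client usage, integration traffic by
# realm and by client, and remote Zulip servers.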
def ad_hoc_queries() -> List[Dict[str, str]]:
    def get_page(
        query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
    ) -> Dict[str, str]:
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        rows = list(map(list, rows))
        cursor.close()

        def fix_rows(
            i: int, fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str]]
        ) -> None:
            for row in rows:
                row[i] = fixup_func(row[i])

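        # Link-ify and date-format the well-known columns, and build a
        # "Total" row summing the column indices in totals_columns.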
        total_row = []
        for i, col in enumerate(cols):
            if col == "Realm":
                fix_rows(i, realm_activity_link)
            elif col in ["Last time", "Last visit"]:
                fix_rows(i, format_date_for_activity_reports)
            elif col == "Hostname":
                for row in rows:
                    row[i] = remote_installation_stats_link(row[0], row[i])
            if len(totals_columns) > 0:
                if i == 0:
                    total_row.append("Total")
                elif i in totals_columns:
                    total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
                else:
                    total_row.append("")
        if len(totals_columns) > 0:
            rows.insert(0, total_row)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title,
        )

    pages = []

    ###

    for mobile_type in ["Android", "ZulipiOS"]:
        title = f"{mobile_type} usage"

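        # Literal() quotes the client name as a SQL string literal, so
        # the .format() interpolation below is injection-safe.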
        query: Composable = SQL(
            """
            select
                realm.string_id,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like {mobile_type}
            group by string_id, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by string_id, up.id, client.name
        """
        ).format(
            mobile_type=Literal(mobile_type),
        )

        cols = [
            "Realm",
            "User id",
            "Name",
            "Hits",
            "Last time",
        ]

        pages.append(get_page(query, cols, title))

    ###

    title = "Desktop users"

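    # The doubled %% below is defensive escaping for the driver's
    # %-placeholder syntax; as a LIKE pattern it still matches any
    # client name starting with 'desktop'.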
    query = SQL(
        """
        select
            realm.string_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by realm"

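    # Incoming webhook requests have paths like
    # /api/v1/external/<integration>, so for those rows the integration
    # name (the fifth path component) stands in for the client name.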
    query = SQL(
        """
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = "Integrations by client"

    query = SQL(
        """
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
    """
    )

    cols = [
        "Client",
        "Realm",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    title = "Remote Zulip servers"

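    # icount: the most recent human-user count each remote server has
    # reported; remote_push_devices: how many distinct users there have
    # registered for mobile push notifications.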
    query = SQL(
        """
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
        ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
    """
    )

    cols = [
        "ID",
        "Hostname",
        "Contact email",
        "Analytics users",
        "Mobile users",
        "Last update time",
    ]

    pages.append(get_page(query, cols, title, totals_columns=[3, 4]))

    return pages


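# The server-admin activity page: the realm summary, per-user
# durations, and the ad hoc tables above, assembled into one response.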
@require_server_admin
@has_request_variables
def get_installation_activity(request: HttpRequest) -> HttpResponse:
    duration_content, realm_minutes = user_activity_intervals()
    counts_content: str = realm_summary_table(realm_minutes)
    data = [
        ("Counts", counts_content),
        ("Durations", duration_content),
        *((page["title"], page["content"]) for page in ad_hoc_queries()),
    ]

    title = "Activity"

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title=title, is_home=True),
    )