import itertools
import logging
import re
import time
import urllib
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from decimal import Decimal
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, Type, Union

import pytz
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.db import connection
from django.db.models.query import QuerySet
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
from django.shortcuts import render
from django.template import loader
from django.urls import reverse
from django.utils.timesince import timesince
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from jinja2 import Markup as mark_safe
from psycopg2.sql import SQL, Composable, Literal

from analytics.lib.counts import COUNT_STATS, CountStat
from analytics.lib.time_utils import time_range
from analytics.models import (
    BaseCount,
    InstallationCount,
    RealmCount,
    StreamCount,
    UserCount,
    installation_epoch,
    last_successful_fill,
)
from confirmation.models import Confirmation, _properties, confirmation_url
from confirmation.settings import STATUS_ACTIVE
from zerver.decorator import (
    require_non_guest_user,
    require_server_admin,
    require_server_admin_api,
    to_utc_datetime,
    zulip_login_required,
)
from zerver.lib.actions import (
    do_change_plan_type,
    do_deactivate_realm,
    do_scrub_realm,
    do_send_realm_reactivation_email,
)
from zerver.lib.exceptions import JsonableError
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.subdomains import get_subdomain_from_hostname
from zerver.lib.timestamp import convert_to_UTC, timestamp_to_datetime
from zerver.lib.validator import to_non_negative_int
from zerver.views.invite import get_invitee_emails_set

if settings.BILLING_ENABLED:
    from corporate.lib.stripe import attach_discount_to_realm, get_discount_for_realm

from zerver.models import (
    Client,
    MultiuseInvite,
    PreregistrationUser,
    Realm,
    UserActivity,
    UserActivityInterval,
    UserProfile,
    get_realm,
)

if settings.ZILENCER_ENABLED:
    from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
else:
    from unittest.mock import Mock

    RemoteInstallationCount = Mock()  # type: ignore[misc] # https://github.com/JukkaL/mypy/issues/1188
    RemoteZulipServer = Mock()  # type: ignore[misc] # https://github.com/JukkaL/mypy/issues/1188
    RemoteRealmCount = Mock()  # type: ignore[misc] # https://github.com/JukkaL/mypy/issues/1188

MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)

def is_analytics_ready(realm: Realm) -> bool:
    return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION

def render_stats(request: HttpRequest, data_url_suffix: str, target_name: str,
                 for_installation: bool=False, remote: bool=False,
                 analytics_ready: bool=True) -> HttpResponse:
    page_params = dict(
        data_url_suffix=data_url_suffix,
        for_installation=for_installation,
        remote=remote,
        debug_mode=False,
    )
    return render(request,
                  'analytics/stats.html',
                  context=dict(target_name=target_name,
                               page_params=page_params,
                               analytics_ready=analytics_ready))

@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    realm = request.user.realm
    if request.user.is_guest:
        # TODO: Make @zulip_login_required pass the UserProfile so we
        # can use @require_member_or_admin
        raise JsonableError(_("Not allowed for guest users"))
    return render_stats(request, '', realm.name or realm.string_id,
                        analytics_ready=is_analytics_ready(realm))

@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound(f"Realm {realm_str} does not exist")

    return render_stats(request, f'/realm/{realm_str}', realm.name or realm.string_id,
                        analytics_ready=is_analytics_ready(realm))

@require_server_admin
@has_request_variables
def stats_for_remote_realm(request: HttpRequest, remote_server_id: str,
                           remote_realm_id: str) -> HttpResponse:
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(request, f'/remote/{server.id}/realm/{remote_realm_id}',
                        f"Realm {remote_realm_id} on server {server.hostname}")

@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(request: HttpRequest, user_profile: UserProfile,
                             realm_str: str, **kwargs: Any) -> HttpResponse:
    try:
        realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return get_chart_data(request=request, user_profile=user_profile, realm=realm, **kwargs)

@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
        request: HttpRequest, user_profile: UserProfile, remote_server_id: str,
        remote_realm_id: str, **kwargs: Any) -> HttpResponse:
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(request=request, user_profile=user_profile, server=server,
                          remote=True, remote_realm_id=int(remote_realm_id), **kwargs)

@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    return render_stats(request, '/installation', 'Installation', True)

@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: str) -> HttpResponse:
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(request, f'/remote/{server.id}/installation',
                        f'remote Installation {server.hostname}', True, True)

@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(request: HttpRequest, user_profile: UserProfile,
                                    chart_name: str=REQ(), **kwargs: Any) -> HttpResponse:
    return get_chart_data(request=request, user_profile=user_profile, for_installation=True, **kwargs)

@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
        request: HttpRequest,
        user_profile: UserProfile,
        remote_server_id: str,
        chart_name: str=REQ(),
        **kwargs: Any) -> HttpResponse:
    server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(request=request, user_profile=user_profile, for_installation=True,
                          remote=True, server=server, **kwargs)

@require_non_guest_user
@has_request_variables
def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: str=REQ(),
                   min_length: Optional[int]=REQ(converter=to_non_negative_int, default=None),
                   start: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   end: Optional[datetime]=REQ(converter=to_utc_datetime, default=None),
                   realm: Optional[Realm]=None, for_installation: bool=False,
                   remote: bool=False, remote_realm_id: Optional[int]=None,
                   server: Optional[RemoteZulipServer]=None) -> HttpResponse:
    if for_installation:
        if remote:
            aggregate_table = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    if chart_name == 'number_of_humans':
        stats = [
            COUNT_STATS['1day_actives::day'],
            COUNT_STATS['realm_active_humans::day'],
            COUNT_STATS['active_users_audit:is_bot:day']]
        tables = [aggregate_table]
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: '_1day'},
            stats[1]: {None: '_15day'},
            stats[2]: {'false': 'all_time'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_over_time':
        stats = [COUNT_STATS['messages_sent:is_bot:hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'false': 'human', 'true': 'bot'}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_message_type':
        stats = [COUNT_STATS['messages_sent:message_type:day']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {'public_stream': _('Public streams'),
                                        'private_stream': _('Private streams'),
                                        'private_message': _('Private messages'),
                                        'huddle_message': _('Group private messages')}}
        labels_sort_function = lambda data: sort_by_totals(data['everyone'])
        include_empty_subgroups = True
    elif chart_name == 'messages_sent_by_client':
        stats = [COUNT_STATS['messages_sent:client:day']]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {stats[0]:
                             {str(id): name for id, name in Client.objects.values_list('id', 'name')}}
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == 'messages_read_over_time':
        stats = [COUNT_STATS['messages_read::hour']]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {None: 'read'}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: %s") % (chart_name,))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(_("Start time is later than end time. Start: %(start)s, End: %(end)s") %
                            {'start': start, 'end': end})

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        if not aggregate_table.objects.filter(server=server).exists():
            raise JsonableError(_("No analytics data available. Please contact your server administrator."))
        if start is None:
            start = aggregate_table.objects.filter(server=server).first().end_time
        if end is None:
            end = aggregate_table.objects.filter(server=server).last().end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(last_successful_fill(stat.property) or
                      datetime.min.replace(tzinfo=timezone.utc) for stat in stats)

    if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
        logging.warning("User from realm %s attempted to access /stats, but the computed "
                        "start time: %s (creation of realm or installation) is later than the computed "
                        "end time: %s (last successful analytics update). Is the "
                        "analytics cron job running?", realm.string_id, start, end)
        raise JsonableError(_("No analytics data available. Please contact your server administrator."))

    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {'end_times': end_times, 'frequency': stats[0].frequency}

    aggregation_level = {
        InstallationCount: 'everyone',
        RealmCount: 'everyone',
        RemoteInstallationCount: 'everyone',
        RemoteRealmCount: 'everyone',
        UserCount: 'user',
    }
    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        RemoteInstallationCount: server.id if server is not None else None,
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        RemoteRealmCount: remote_realm_id,
        UserCount: user_profile.id,
    }
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(get_time_series_by_subgroup(
                stat, table, id_value[table], end_times, subgroup_to_label[stat], include_empty_subgroups))

    if labels_sort_function is not None:
        data['display_order'] = labels_sort_function(data)
    else:
        data['display_order'] = None
    return json_success(data=data)
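
# A rough sketch of the payload shape produced above (values hypothetical;
# the keys follow from subgroup_to_label and aggregation_level). A
# 'messages_sent_over_time' response for a realm looks roughly like:
#
#     {
#         "end_times": [...],      # one entry per point from time_range
#         "frequency": "hour",     # stats[0].frequency
#         "everyone": {"human": [12, 30], "bot": [2, 0]},
#         "user": {"human": [3, 1], "bot": [0, 0]},
#         "display_order": None,
#     }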

def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    totals = [(sum(values), label) for label, values in value_arrays.items()]
    totals.sort(reverse=True)
    return [label for total, label in totals]
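
# A minimal sketch of the behavior (hypothetical values):
#
#     sort_by_totals({'human': [1, 2, 3], 'bot': [10, 0, 0]})
#     # => ['bot', 'human']   (10 > 6, so 'bot' sorts first)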

# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    realm_order = sort_by_totals(data['everyone'])
    user_order = sort_by_totals(data['user'])
    label_sort_values: Dict[str, float] = {}
    for i, label in enumerate(realm_order):
        label_sort_values[label] = i
    for i, label in enumerate(user_order):
        label_sort_values[label] = min(i-.1, label_sort_values.get(label, i))
    return [label for label, sort_value in sorted(label_sort_values.items(),
                                                  key=lambda x: x[1])]
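
# Worked example (hypothetical data): if the realm-wide order is
# ['website', 'mobile'] and the user's own order is ['mobile'], then
# 'mobile' gets sort value min(0 - .1, 1) = -0.1 while 'website' keeps 0,
# so the user's top client ranks ahead of the realm-wide leader.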

def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
    if table == RealmCount:
        return RealmCount.objects.filter(realm_id=key_id)
    elif table == UserCount:
        return UserCount.objects.filter(user_id=key_id)
    elif table == StreamCount:
        return StreamCount.objects.filter(stream_id=key_id)
    elif table == InstallationCount:
        return InstallationCount.objects.all()
    elif table == RemoteInstallationCount:
        return RemoteInstallationCount.objects.filter(server_id=key_id)
    elif table == RemoteRealmCount:
        return RemoteRealmCount.objects.filter(realm_id=key_id)
    else:
        raise AssertionError(f"Unknown table: {table}")
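
# For instance, table_filtered_to_id(RealmCount, realm.id) yields the
# RealmCount rows for one realm, while the key_id passed for
# InstallationCount is ignored (hence the -1 placeholder in get_chart_data).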

def client_label_map(name: str) -> str:
    if name == "website":
        return "Website"
    if name.startswith("desktop app"):
        return "Old desktop app"
    if name == "ZulipElectron":
        return "Desktop app"
    if name == "ZulipAndroid":
        return "Old Android app"
    if name == "ZulipiOS":
        return "Old iOS app"
    if name == "ZulipMobile":
        return "Mobile app"
    if name in ["ZulipPython", "API: Python"]:
        return "Python API"
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip"):-len("Webhook")] + " webhook"
    return name
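
# Examples of the mapping (derived from the rules above):
#     client_label_map("ZulipGitHubWebhook")  # => "GitHub webhook"
#     client_label_map("ZulipAndroid")        # => "Old Android app"
#     client_label_map("unknown-client")      # => "unknown-client" (unchanged)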

def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    mapped_arrays: Dict[str, List[int]] = {}
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            for i in range(0, len(array)):
                mapped_arrays[mapped_label][i] += value_arrays[label][i]
        else:
            mapped_arrays[mapped_label] = [value_arrays[label][i] for i in range(0, len(array))]
    return mapped_arrays
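
# Sketch of the collapsing behavior (hypothetical counts): two labels that
# map to the same display name are summed element-wise:
#
#     rewrite_client_arrays({'ZulipPython': [1, 2], 'API: Python': [3, 4]})
#     # => {'Python API': [4, 6]}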

def get_time_series_by_subgroup(stat: CountStat,
                                table: Type[BaseCount],
                                key_id: int,
                                end_times: List[datetime],
                                subgroup_to_label: Dict[Optional[str], str],
                                include_empty_subgroups: bool) -> Dict[str, List[int]]:
    queryset = table_filtered_to_id(table, key_id).filter(property=stat.property) \
                                                  .values_list('subgroup', 'end_time', 'value')
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in queryset:
        value_dicts[subgroup][end_time] = value
    value_arrays = {}
    for subgroup, label in subgroup_to_label.items():
        if (subgroup in value_dicts) or include_empty_subgroups:
            value_arrays[label] = [value_dicts[subgroup][end_time] for end_time in end_times]

    if stat == COUNT_STATS['messages_sent:client:day']:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays


eastern_tz = pytz.timezone('US/Eastern')

def make_table(title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False) -> str:
    if not has_row_class:
        def fix_row(row: Any) -> Dict[str, Any]:
            return dict(cells=row, row_class=None)
        rows = list(map(fix_row, rows))

    data = dict(title=title, cols=cols, rows=rows)

    content = loader.render_to_string(
        'analytics/ad_hoc_query.html',
        dict(data=data),
    )

    return content
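
# A usage sketch (hypothetical arguments): callers pass plain row tuples and
# get back HTML rendered via the ad_hoc_query template, e.g.
#     make_table('Realms', ['string_id', 'dau_count'], [('zulip', 42)])
# Each row is first wrapped as dict(cells=row, row_class=None).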

def dictfetchall(cursor: connection.cursor) -> List[Dict[str, Any]]:
    "Returns all rows from a cursor as a list of dicts"
    desc = cursor.description
    return [
        dict(list(zip([col[0] for col in desc], row)))
        for row in cursor.fetchall()
    ]
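
# For example, a cursor whose description names the columns
# ('string_id', 'cnt') and which yields the row ('zulip', 7) produces
# [{'string_id': 'zulip', 'cnt': 7}].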

def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
    query = SQL('''
        select
            r.string_id,
            (now()::date - date_sent::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            date_sent > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
    ''')
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
    for row in rows:
        counts[row['string_id']][row['age']] = row['cnt']

    result = {}
    for string_id in counts:
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        min_cnt = min(raw_cnts[1:])
        max_cnt = max(raw_cnts[1:])

        def format_count(cnt: int, style: Optional[str]=None) -> str:
            if style is not None:
                good_bad = style
            elif cnt == min_cnt:
                good_bad = 'bad'
            elif cnt == max_cnt:
                good_bad = 'good'
            else:
                good_bad = 'neutral'

            return f'<td class="number {good_bad}">{cnt}</td>'

        cnts = (format_count(raw_cnts[0], 'neutral')
                + ''.join(map(format_count, raw_cnts[1:])))
        result[string_id] = dict(cnts=cnts)

    return result
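
# Sketch of the result shape (hypothetical realm and counts): one entry per
# realm, whose 'cnts' value is a run of <td> cells colored by format_count:
#
#     {'zulip': {'cnts': '<td class="number neutral">5</td>...'}}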

def get_plan_name(plan_type: int) -> str:
    return ['', 'self hosted', 'limited', 'standard', 'open source'][plan_type]
|
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
|
2017-12-01 06:28:56 +01:00
|
|
|
now = timezone_now()
|
|
|
|
|
2020-06-09 10:46:28 +02:00
|
|
|
query = SQL('''
|
2013-11-06 13:25:55 +01:00
|
|
|
SELECT
|
2017-01-08 19:42:32 +01:00
|
|
|
realm.string_id,
|
2017-11-29 07:50:25 +01:00
|
|
|
realm.date_created,
|
2018-08-15 19:59:09 +02:00
|
|
|
realm.plan_type,
|
2017-11-27 21:03:15 +01:00
|
|
|
coalesce(user_counts.dau_count, 0) dau_count,
|
|
|
|
coalesce(wau_counts.wau_count, 0) wau_count,
|
2013-11-06 13:25:55 +01:00
|
|
|
(
|
|
|
|
SELECT
|
|
|
|
count(*)
|
|
|
|
FROM zerver_userprofile up
|
|
|
|
WHERE up.realm_id = realm.id
|
|
|
|
AND is_active
|
|
|
|
AND not is_bot
|
|
|
|
) user_profile_count,
|
|
|
|
(
|
|
|
|
SELECT
|
|
|
|
count(*)
|
|
|
|
FROM zerver_userprofile up
|
|
|
|
WHERE up.realm_id = realm.id
|
|
|
|
AND is_active
|
|
|
|
AND is_bot
|
|
|
|
) bot_count
|
|
|
|
FROM zerver_realm realm
|
|
|
|
LEFT OUTER JOIN
|
|
|
|
(
|
|
|
|
SELECT
|
|
|
|
up.realm_id realm_id,
|
2017-11-27 21:03:15 +01:00
|
|
|
count(distinct(ua.user_profile_id)) dau_count
|
2013-11-06 13:25:55 +01:00
|
|
|
FROM zerver_useractivity ua
|
|
|
|
JOIN zerver_userprofile up
|
|
|
|
ON up.id = ua.user_profile_id
|
|
|
|
WHERE
|
2017-11-30 01:38:17 +01:00
|
|
|
up.is_active
|
|
|
|
AND (not up.is_bot)
|
|
|
|
AND
|
2013-11-06 13:25:55 +01:00
|
|
|
query in (
|
|
|
|
'/json/send_message',
|
|
|
|
'send_message_backend',
|
2014-01-21 17:10:09 +01:00
|
|
|
'/api/v1/send_message',
|
2016-04-02 20:24:19 +02:00
|
|
|
'/json/update_pointer',
|
2017-11-05 03:25:25 +01:00
|
|
|
'/json/users/me/pointer',
|
|
|
|
'update_pointer_backend'
|
2013-11-06 13:25:55 +01:00
|
|
|
)
|
|
|
|
AND
|
|
|
|
last_visit > now() - interval '1 day'
|
|
|
|
GROUP BY realm_id
|
|
|
|
) user_counts
|
|
|
|
ON user_counts.realm_id = realm.id
|
2013-11-14 19:50:32 +01:00
|
|
|
LEFT OUTER JOIN
|
|
|
|
(
|
|
|
|
SELECT
|
|
|
|
realm_id,
|
2017-11-27 21:03:15 +01:00
|
|
|
count(*) wau_count
|
2013-11-14 19:50:32 +01:00
|
|
|
FROM (
|
|
|
|
SELECT
|
|
|
|
realm.id as realm_id,
|
2019-07-21 03:58:14 +02:00
|
|
|
up.delivery_email
|
2013-11-14 19:50:32 +01:00
|
|
|
FROM zerver_useractivity ua
|
|
|
|
JOIN zerver_userprofile up
|
|
|
|
ON up.id = ua.user_profile_id
|
|
|
|
JOIN zerver_realm realm
|
|
|
|
ON realm.id = up.realm_id
|
|
|
|
WHERE up.is_active
|
|
|
|
AND (not up.is_bot)
|
|
|
|
AND
|
|
|
|
ua.query in (
|
|
|
|
'/json/send_message',
|
|
|
|
'send_message_backend',
|
2016-04-02 20:24:19 +02:00
|
|
|
'/api/v1/send_message',
|
|
|
|
'/json/update_pointer',
|
2017-11-05 03:25:25 +01:00
|
|
|
'/json/users/me/pointer',
|
|
|
|
'update_pointer_backend'
|
2013-11-14 19:50:32 +01:00
|
|
|
)
|
2019-07-21 03:58:14 +02:00
|
|
|
GROUP by realm.id, up.delivery_email
|
2017-11-27 21:03:15 +01:00
|
|
|
HAVING max(last_visit) > now() - interval '7 day'
|
|
|
|
) as wau_users
|
2013-11-14 19:50:32 +01:00
|
|
|
GROUP BY realm_id
|
2017-11-27 21:03:15 +01:00
|
|
|
) wau_counts
|
|
|
|
ON wau_counts.realm_id = realm.id
|
2015-09-20 08:42:28 +02:00
|
|
|
WHERE EXISTS (
|
2013-11-06 13:25:55 +01:00
|
|
|
SELECT *
|
|
|
|
FROM zerver_useractivity ua
|
|
|
|
JOIN zerver_userprofile up
|
|
|
|
ON up.id = ua.user_profile_id
|
|
|
|
WHERE
|
2017-11-30 01:38:17 +01:00
|
|
|
up.realm_id = realm.id
|
|
|
|
AND up.is_active
|
|
|
|
AND (not up.is_bot)
|
|
|
|
AND
|
2013-11-06 13:25:55 +01:00
|
|
|
query in (
|
|
|
|
'/json/send_message',
|
2014-01-21 17:10:09 +01:00
|
|
|
'/api/v1/send_message',
|
2013-11-06 13:25:55 +01:00
|
|
|
'send_message_backend',
|
2016-04-02 20:24:19 +02:00
|
|
|
'/json/update_pointer',
|
2017-11-05 03:25:25 +01:00
|
|
|
'/json/users/me/pointer',
|
|
|
|
'update_pointer_backend'
|
2013-11-06 13:25:55 +01:00
|
|
|
)
|
|
|
|
AND
|
|
|
|
last_visit > now() - interval '2 week'
|
|
|
|
)
|
2017-11-27 21:03:15 +01:00
|
|
|
ORDER BY dau_count DESC, string_id ASC
|
2020-06-09 10:46:28 +02:00
|
|
|
''')
|
2013-11-06 13:25:55 +01:00
|
|
|
|
|
|
|
cursor = connection.cursor()
|
|
|
|
cursor.execute(query)
|
|
|
|
rows = dictfetchall(cursor)
|
|
|
|
cursor.close()
|
|
|
|
|
2018-02-11 08:59:50 +01:00
|
|
|
# Fetch all the realm administrator users
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
realm_admins: Dict[str, List[str]] = defaultdict(list)
|
2018-02-11 08:59:50 +01:00
|
|
|
for up in UserProfile.objects.select_related("realm").filter(
|
2019-10-05 02:35:07 +02:00
|
|
|
role=UserProfile.ROLE_REALM_ADMINISTRATOR,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
is_active=True,
|
2018-02-11 08:59:50 +01:00
|
|
|
):
|
2019-07-21 03:58:14 +02:00
|
|
|
realm_admins[up.realm.string_id].append(up.delivery_email)
|
2018-02-11 08:59:50 +01:00
|
|
|
|
2017-11-29 07:50:25 +01:00
|
|
|
for row in rows:
|
|
|
|
row['date_created_day'] = row['date_created'].strftime('%Y-%m-%d')
|
2019-03-08 11:17:37 +01:00
|
|
|
row['plan_type_string'] = get_plan_name(row['plan_type'])
|
2017-12-01 06:28:56 +01:00
|
|
|
row['age_days'] = int((now - row['date_created']).total_seconds()
|
2017-11-29 07:50:25 +01:00
|
|
|
/ 86400)
|
|
|
|
row['is_new'] = row['age_days'] < 12 * 7
|
2018-02-11 08:59:50 +01:00
|
|
|
row['realm_admin_email'] = ', '.join(realm_admins[row['string_id']])
|
2017-11-29 07:50:25 +01:00
|
|
|
|
2013-12-18 21:13:47 +01:00
|
|
|
    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row['history'] = counts[row['string_id']]['cnts']
        except Exception:
            row['history'] = ''

    # estimate annual subscription revenue
    total_amount = 0
    if settings.BILLING_ENABLED:
        from corporate.lib.stripe import estimate_annual_recurring_revenue_by_realm
        estimated_arrs = estimate_annual_recurring_revenue_by_realm()
        for row in rows:
            if row['string_id'] in estimated_arrs:
                row['amount'] = estimated_arrs[row['string_id']]
        total_amount += sum(estimated_arrs.values())

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row['string_id']
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row['hours'] = str(int(hours))
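        # 'dau_count' may be zero or missing; any failure here simply
        # leaves 'hours_per_user' unset for that row.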
        try:
            row['hours_per_user'] = '{:.1f}'.format(hours / row['dau_count'])
        except Exception:
            pass

    # formatting
    for row in rows:
        row['stats_link'] = realm_stats_link(row['string_id'])
        row['string_id'] = realm_activity_link(row['string_id'])

    # Count active sites
    def meets_goal(row: Dict[str, int]) -> bool:
        return row['dau_count'] >= 5

    num_active_sites = len(list(filter(meets_goal, rows)))

    # create totals
    total_dau_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_wau_count = 0
    for row in rows:
        total_dau_count += int(row['dau_count'])
        total_user_profile_count += int(row['user_profile_count'])
        total_bot_count += int(row['bot_count'])
        total_wau_count += int(row['wau_count'])

    total_row = dict(
        string_id='Total',
        plan_type_string="",
        amount=total_amount,
        stats_link='',
        date_created_day='',
        realm_admin_email='',
        dau_count=total_dau_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        wau_count=total_wau_count,
    )

    rows.insert(0, total_row)

    content = loader.render_to_string(
        'analytics/realm_summary_table.html',
        dict(rows=rows, num_active_sites=num_active_sites,
             now=now.strftime('%Y-%m-%dT%H:%M:%SZ')),
    )
    return content

def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

    all_intervals = UserActivityInterval.objects.filter(
        end__gte=day_start,
        start__lte=day_end,
    ).select_related(
        'user_profile',
        'user_profile__realm',
    ).only(
        'start',
        'end',
        'user_profile__delivery_email',
        'user_profile__realm__string_id',
    ).order_by(
        'user_profile__realm__string_id',
        'user_profile__delivery_email',
    )

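    # itertools.groupby below only merges consecutive rows, so the order_by
    # above must sort by exactly the keys used for grouping; each interval
    # is also clipped to the [day_start, day_end] window before summing.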
    by_string_id = lambda row: row.user_profile.realm.string_id
    by_email = lambda row: row.user_profile.delivery_email

    realm_minutes = {}

    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += f'<hr>{string_id}\n'
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
            for interval in intervals:
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += " %-*s%s\n" % (37, email, duration)

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += f"\nTotal Duration: {total_duration}\n"
    output += f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}\n"
    output += f"Total Duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = mark_safe('<pre>' + output + '</pre>')
    return content, realm_minutes

def sent_messages_report(realm: str) -> str:
    title = 'Recently sent messages for ' + realm

    cols = [
        'Date',
        'Humans',
        'Bots',
    ]

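    # generate_series emits one row per day for the last two weeks, so the
    # left joins preserve days with no traffic (their cnt comes back NULL).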
    query = SQL('''
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                (not up.is_bot)
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) humans on
            series.day = humans.date_sent
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                up.is_bot
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) bots on
            series.day = bots.date_sent
    ''')

    cursor = connection.cursor()
    cursor.execute(query, [realm, realm])
    rows = cursor.fetchall()
    cursor.close()

    return make_table(title, cols, rows)

def ad_hoc_queries() -> List[Dict[str, str]]:
    def get_page(query: Composable, cols: Sequence[str], title: str,
                 totals_columns: Sequence[int] = []) -> Dict[str, str]:
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        rows = list(map(list, rows))
        cursor.close()

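        # Rows are converted to lists above so fix_rows can mutate cells in
        # place; totals_columns names the 0-based indices to sum into a
        # synthesized "Total" first row (column 0 holds the label).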
        def fix_rows(i: int,
                     fixup_func: Union[Callable[[Realm], mark_safe], Callable[[datetime], str]]) -> None:
            for row in rows:
                row[i] = fixup_func(row[i])

        total_row = []
        for i, col in enumerate(cols):
            if col == 'Realm':
                fix_rows(i, realm_activity_link)
            elif col in ['Last time', 'Last visit']:
                fix_rows(i, format_date_for_activity_reports)
            elif col == 'Hostname':
                for row in rows:
                    row[i] = remote_installation_stats_link(row[0], row[i])
            if len(totals_columns) > 0:
                if i == 0:
                    total_row.append("Total")
                elif i in totals_columns:
                    total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
                else:
                    total_row.append('')

        if len(totals_columns) > 0:
            rows.insert(0, total_row)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title,
        )

    pages = []

    ###

    for mobile_type in ['Android', 'ZulipiOS']:
        title = f'{mobile_type} usage'

        query = SQL('''
            select
                realm.string_id,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like {mobile_type}
            group by string_id, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by string_id, up.id, client.name
        ''').format(
            mobile_type=Literal(mobile_type),
        )

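        # Literal() has psycopg2 quote the client-name value itself, rather
        # than interpolating it into the SQL string by hand.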
        cols = [
            'Realm',
            'User id',
            'Name',
            'Hits',
            'Last time',
        ]

        pages.append(get_page(query, cols, title))

    ###

    title = 'Desktop users'

    query = SQL('''
        select
            realm.string_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
    ''')

    cols = [
        'Realm',
        'Client',
        'Hits',
        'Last time',
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by realm'

    query = SQL('''
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
    ''')

    cols = [
        'Realm',
        'Client',
        'Hits',
        'Last time',
    ]

    pages.append(get_page(query, cols, title))

    ###

    title = 'Integrations by client'

    query = SQL('''
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
    ''')

    cols = [
        'Client',
        'Realm',
        'Hits',
        'Last time',
    ]

    pages.append(get_page(query, cols, title))

    title = 'Remote Zulip servers'

    query = SQL('''
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
        ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
    ''')

    cols = [
        'ID',
        'Hostname',
        'Contact email',
        'Analytics users',
        'Mobile users',
        'Last update time',
    ]

    pages.append(get_page(query, cols, title,
                          totals_columns=[3, 4]))

    return pages

@require_server_admin
@has_request_variables
def get_activity(request: HttpRequest) -> HttpResponse:
    duration_content, realm_minutes = user_activity_intervals()
    counts_content: str = realm_summary_table(realm_minutes)
    data = [
        ('Counts', counts_content),
        ('Durations', duration_content),
    ]
    for page in ad_hoc_queries():
        data.append((page['title'], page['content']))

    title = 'Activity'

    return render(
        request,
        'analytics/activity.html',
        context=dict(data=data, title=title, is_home=True),
    )

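# Only confirmations sent within the last 30 days are surfaced; a link is
# considered expired once `validity_in_days` has elapsed since `date_sent`.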
def get_confirmations(types: List[int], object_ids: List[int],
                      hostname: Optional[str] = None) -> List[Dict[str, Any]]:
    lowest_datetime = timezone_now() - timedelta(days=30)
    confirmations = Confirmation.objects.filter(type__in=types, object_id__in=object_ids,
                                                date_sent__gte=lowest_datetime)
    confirmation_dicts = []
    for confirmation in confirmations:
        realm = confirmation.realm
        content_object = confirmation.content_object

        if realm is not None:
            realm_host = realm.host
        elif isinstance(content_object, Realm):
            realm_host = content_object.host
        else:
            realm_host = hostname

        type = confirmation.type
        days_to_activate = _properties[type].validity_in_days
        expiry_date = confirmation.date_sent + timedelta(days=days_to_activate)

        if hasattr(content_object, "status"):
            if content_object.status == STATUS_ACTIVE:
                link_status = "Link has been clicked"
            else:
                link_status = "Link has never been clicked"
        else:
            link_status = ""

        if timezone_now() < expiry_date:
            expires_in = timesince(confirmation.date_sent, expiry_date)
        else:
            expires_in = "Expired"

        url = confirmation_url(confirmation.confirmation_key, realm_host, type)
        confirmation_dicts.append({"object": confirmation.content_object,
                                   "url": url, "type": type, "link_status": link_status,
                                   "expires_in": expires_in})
    return confirmation_dicts

@require_server_admin
def support(request: HttpRequest) -> HttpResponse:
    context: Dict[str, Any] = {}
    if settings.BILLING_ENABLED and request.method == "POST":
        realm_id = request.POST.get("realm_id", None)
        realm = Realm.objects.get(id=realm_id)

        new_plan_type = request.POST.get("plan_type", None)
        if new_plan_type is not None:
            new_plan_type = int(new_plan_type)
            current_plan_type = realm.plan_type
            do_change_plan_type(realm, new_plan_type)
            msg = f"Plan type of {realm.name} changed from {get_plan_name(current_plan_type)} to {get_plan_name(new_plan_type)} "
            context["message"] = msg

        new_discount = request.POST.get("discount", None)
        if new_discount is not None:
            new_discount = Decimal(new_discount)
            current_discount = get_discount_for_realm(realm)
            attach_discount_to_realm(realm, new_discount)
            msg = f"Discount of {realm.name} changed to {new_discount} from {current_discount} "
            context["message"] = msg

        status = request.POST.get("status", None)
        if status is not None:
            if status == "active":
                do_send_realm_reactivation_email(realm)
                context["message"] = f"Realm reactivation email sent to admins of {realm.name}."
            elif status == "deactivated":
                do_deactivate_realm(realm, request.user)
                context["message"] = f"{realm.name} deactivated."

        scrub_realm = request.POST.get("scrub_realm", None)
        if scrub_realm is not None:
            if scrub_realm == "scrub_realm":
                do_scrub_realm(realm)
                context["message"] = f"{realm.name} scrubbed."

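    # The q= parameter accepts a mix of email addresses, realm string_ids,
    # and URLs; URLs are resolved to a realm via their subdomain.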
    query = request.GET.get("q", None)
    if query:
        key_words = get_invitee_emails_set(query)

        context["users"] = UserProfile.objects.filter(delivery_email__in=key_words)
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                try:
                    realms.add(get_realm(subdomain))
                except Realm.DoesNotExist:
                    pass
            except ValidationError:
                pass

        context["realms"] = realms

        confirmations: List[Dict[str, Any]] = []

        preregistration_users = PreregistrationUser.objects.filter(email__in=key_words)
        confirmations += get_confirmations([Confirmation.USER_REGISTRATION, Confirmation.INVITATION,
                                            Confirmation.REALM_CREATION], preregistration_users,
                                           hostname=request.get_host())

        multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invites)

        confirmations += get_confirmations([Confirmation.REALM_REACTIVATION], [realm.id for realm in realms])

        context["confirmations"] = confirmations

    def realm_admin_emails(realm: Realm) -> str:
        return ", ".join(realm.get_human_admin_users().order_by('delivery_email').values_list(
            "delivery_email", flat=True))

context["realm_admin_emails"] = realm_admin_emails
|
|
|
|
context["get_discount_for_realm"] = get_discount_for_realm
|
|
|
|
context["realm_icon_url"] = realm_icon_url
|
2019-09-18 15:04:36 +02:00
|
|
|
context["Confirmation"] = Confirmation
|
2019-03-08 13:02:10 +01:00
|
|
|
return render(request, 'analytics/support.html', context=context)
|
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
    fields = [
        'user_profile__full_name',
        'user_profile__delivery_email',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    records = UserActivity.objects.filter(
        user_profile__realm__string_id=realm,
        user_profile__is_active=True,
        user_profile__is_bot=is_bot,
    )
    records = records.order_by("user_profile__delivery_email", "-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records

def get_user_activity_records_for_email(email: str) -> List[QuerySet]:
    fields = [
        'user_profile__full_name',
        'query',
        'client__name',
        'count',
        'last_visit',
    ]

    records = UserActivity.objects.filter(
        user_profile__delivery_email=email,
    )
    records = records.order_by("-last_visit")
    records = records.select_related('user_profile', 'client').only(*fields)
    return records

def raw_user_activity_table(records: List[QuerySet]) -> str:
    cols = [
        'query',
        'client',
        'count',
        'last_visit',
    ]

    def row(record: QuerySet) -> List[Any]:
        return [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit),
        ]

    rows = list(map(row, records))
    title = 'Raw Data'
    return make_table(title, cols, rows)

def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, Any]]:
    #: `Any` used above should be `Union[int, datetime]`.
    #: However, the current version of `Union` does not work inside other functions.
    #: We could use something like:
    #: `Union[Dict[str, Dict[str, int]], Dict[str, Dict[str, datetime]]]`
    #: but that would require this long `Union` to be carried through all the inner functions.
    summary: Dict[str, Dict[str, Any]] = {}
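    # Illustrative shape of the result (hypothetical values):
    #   {'name': 'Full Name',
    #    'use': {'count': 12, 'last_visit': <datetime>},
    #    'send': {'count': 3, 'last_visit': <datetime>}}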
    def update(action: str, record: QuerySet) -> None:
        if action not in summary:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit,
            )
        else:
            summary[action]['count'] += record.count
            summary[action]['last_visit'] = max(
                summary[action]['last_visit'],
                record.last_visit,
            )

if records:
|
|
|
|
summary['name'] = records[0].user_profile.full_name
|
|
|
|
|
|
|
|
for record in records:
|
|
|
|
client = record.client.name
|
|
|
|
query = record.query
|
|
|
|
|
|
|
|
update('use', record)
|
|
|
|
|
2013-11-18 18:58:39 +01:00
|
|
|
if client == 'API':
|
|
|
|
m = re.match('/api/.*/external/(.*)', query)
|
|
|
|
if m:
|
|
|
|
client = m.group(1)
|
|
|
|
update(client, record)
|
|
|
|
|
2013-11-06 13:25:55 +01:00
|
|
|
if client.startswith('desktop'):
|
|
|
|
update('desktop', record)
|
|
|
|
if client == 'website':
|
|
|
|
update('website', record)
|
2013-11-25 21:27:57 +01:00
|
|
|
if ('send_message' in query) or re.search('/api/.*/external/.*', query):
|
2013-11-06 13:25:55 +01:00
|
|
|
update('send', record)
|
2017-11-05 03:25:25 +01:00
|
|
|
if query in ['/json/update_pointer', '/json/users/me/pointer', '/api/v1/update_pointer',
|
|
|
|
'update_pointer_backend']:
|
2013-11-06 13:25:55 +01:00
|
|
|
update('pointer', record)
|
|
|
|
update(client, record)
|
|
|
|
|
|
|
|
return summary

def format_date_for_activity_reports(date: Optional[datetime]) -> str:
    if date:
        return date.astimezone(eastern_tz).strftime('%Y-%m-%d %H:%M')
    else:
        return ''
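
# Assuming eastern_tz is the module-level US/Eastern timezone defined earlier
# in this file, format_date_for_activity_reports(timezone_now()) returns a
# string such as '2020-04-22 13:05' (hypothetical output).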

def user_activity_link(email: str) -> mark_safe:
    url_name = 'analytics.views.get_user_activity'
    url = reverse(url_name, kwargs=dict(email=email))
    email_link = f'<a href="{url}">{email}</a>'
    return mark_safe(email_link)
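
# For example, user_activity_link('iago@zulip.com') (a hypothetical address)
# returns markup along the lines of '<a href="...">iago@zulip.com</a>', where
# the href is whatever URL reverse() resolves for
# analytics.views.get_user_activity.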

def realm_activity_link(realm_str: str) -> mark_safe:
    url_name = 'analytics.views.get_realm_activity'
    url = reverse(url_name, kwargs=dict(realm_str=realm_str))
    realm_link = f'<a href="{url}">{realm_str}</a>'
    return mark_safe(realm_link)

def realm_stats_link(realm_str: str) -> mark_safe:
    url_name = 'analytics.views.stats_for_realm'
    url = reverse(url_name, kwargs=dict(realm_str=realm_str))
    stats_link = f'<a href="{url}"><i class="fa fa-pie-chart"></i>{realm_str}</a>'
    return mark_safe(stats_link)

def remote_installation_stats_link(server_id: int, hostname: str) -> mark_safe:
    url_name = 'analytics.views.stats_for_remote_installation'
    url = reverse(url_name, kwargs=dict(remote_server_id=server_id))
    stats_link = f'<a href="{url}"><i class="fa fa-pie-chart"></i>{hostname}</a>'
    return mark_safe(stats_link)
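
# The four *_link helpers above return ready-to-embed HTML fragments; the
# 'fa fa-pie-chart' <i> tags assume Font Awesome is loaded by the activity
# templates.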

def realm_client_table(user_summaries: Dict[str, Dict[str, Any]]) -> str:
    # Summary-level buckets and metadata that should not appear as
    # per-client rows.
    exclude_keys = [
        'internal',
        'name',
        'use',
        'send',
        'pointer',
        'website',
        'desktop',
    ]

    rows = []
    for email, user_summary in user_summaries.items():
        email_link = user_activity_link(email)
        name = user_summary['name']
        for k, v in user_summary.items():
            if k in exclude_keys:
                continue
            client = k
            count = v['count']
            last_visit = v['last_visit']
            row = [
                format_date_for_activity_reports(last_visit),
                client,
                name,
                email_link,
                count,
            ]
            rows.append(row)

    # Dates are formatted '%Y-%m-%d %H:%M', so lexicographic order is
    # chronological order; newest activity sorts first.
    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'Last visit',
        'Client',
        'Name',
        'Email',
        'Count',
    ]

    title = 'Clients'

    return make_table(title, cols, rows)
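
# A hypothetical row produced above, sorted to the top by its date string:
#
#     ['2020-04-22 13:05', 'ZulipElectron', 'Ada Lovelace', '<a ...>', 12]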

def user_activity_summary_table(user_summary: Dict[str, Any]) -> str:
    rows = []
    for k, v in user_summary.items():
        if k == 'name':
            continue
        client = k
        count = v['count']
        last_visit = v['last_visit']
        row = [
            format_date_for_activity_reports(last_visit),
            client,
            count,
        ]
        rows.append(row)

    rows = sorted(rows, key=lambda r: r[0], reverse=True)

    cols = [
        'last_visit',
        'client',
        'count',
    ]

    title = 'User Activity'
    return make_table(title, cols, rows)
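
# A minimal sketch of the resulting table data (hypothetical values):
#
#     last_visit          client        count
#     2020-04-22 13:05    website       42
#     2020-04-21 09:30    ZulipAndroid   7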

def realm_user_summary_table(all_records: List[QuerySet],
                             admin_emails: Set[str]) -> Tuple[Dict[str, Dict[str, Any]], str]:
    user_records: Dict[str, Dict[str, Any]] = {}

    def by_email(record: QuerySet) -> str:
        return record.user_profile.delivery_email

    # groupby assumes all_records is already sorted/grouped by email.
    for email, records in itertools.groupby(all_records, by_email):
        user_records[email] = get_user_activity_summary(list(records))

    def get_last_visit(user_summary: Dict[str, Any], k: str) -> Optional[datetime]:
        if k in user_summary:
            return user_summary[k]['last_visit']
        else:
            return None

    def get_count(user_summary: Dict[str, Any], k: str) -> Any:
        # Returns the stored count (an int) or '' when the client is absent.
        if k in user_summary:
            return user_summary[k]['count']
        else:
            return ''

    def is_recent(val: datetime) -> bool:
        # "Recent" means heard from within the last five minutes.
        age = timezone_now() - val
        return age.total_seconds() < 5 * 60

    rows = []
    for email, user_summary in user_records.items():
        email_link = user_activity_link(email)
        sent_count = get_count(user_summary, 'send')
        cells = [user_summary['name'], email_link, sent_count]
        row_class = ''
        for field in ['use', 'send', 'pointer', 'desktop', 'ZulipiOS', 'Android']:
            visit = get_last_visit(user_summary, field)
            if field == 'use':
                # row_class accumulates CSS hooks for the activity template.
                if visit and is_recent(visit):
                    row_class += ' recently_active'
                if email in admin_emails:
                    row_class += ' admin'
            val = format_date_for_activity_reports(visit)
            cells.append(val)
        row = dict(cells=cells, row_class=row_class)
        rows.append(row)

    def by_used_time(row: Dict[str, Any]) -> str:
        # cells[3] is the formatted 'Heard from' date (the 'use' field).
        return row['cells'][3]

    rows = sorted(rows, key=by_used_time, reverse=True)

    cols = [
        'Name',
        'Email',
        'Total sent',
        'Heard from',
        'Message sent',
        'Pointer motion',
        'Desktop',
        'ZulipiOS',
        'Android',
    ]

    title = 'Summary'

    content = make_table(title, cols, rows, has_row_class=True)
    return user_records, content
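
# Hypothetical usage sketch: given a realm's activity records grouped by
# user, this yields the per-user summaries plus the rendered HTML table:
#
#     user_records, content = realm_user_summary_table(
#         all_records, admin_emails={'admin@example.com'},
#     )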

@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
    data: List[Tuple[str, str]] = []
    all_user_records: Dict[str, Any] = {}

    try:
        admins = Realm.objects.get(string_id=realm_str).get_human_admin_users()
    except Realm.DoesNotExist:
        return HttpResponseNotFound(f"Realm {realm_str} does not exist")

    admin_emails = {admin.delivery_email for admin in admins}

    for is_bot, page_title in [(False, 'Humans'), (True, 'Bots')]:
        all_records = list(get_user_activity_records_for_realm(realm_str, is_bot))

        user_records, content = realm_user_summary_table(all_records, admin_emails)
        all_user_records.update(user_records)

        data += [(page_title, content)]

    page_title = 'Clients'
    content = realm_client_table(all_user_records)
    data += [(page_title, content)]

    page_title = 'History'
    content = sent_messages_report(realm_str)
    data += [(page_title, content)]

    title = realm_str
    return render(
        request,
        'analytics/activity.html',
        context=dict(data=data, realm_link=None, title=title),
    )
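
# The realm activity page is assembled as (tab title, HTML) pairs: Humans and
# Bots summaries, a combined Clients table, and a sent-messages History.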

@require_server_admin
def get_user_activity(request: HttpRequest, email: str) -> HttpResponse:
    records = get_user_activity_records_for_email(email)

    data: List[Tuple[str, str]] = []
    user_summary = get_user_activity_summary(records)
    content = user_activity_summary_table(user_summary)

    data += [('Summary', content)]

    content = raw_user_activity_table(records)
    data += [('Info', content)]

    title = email
    return render(
        request,
        'analytics/activity.html',
        context=dict(data=data, title=title),
    )