2017-11-16 00:55:49 +01:00
|
|
|
import itertools
|
|
|
|
import logging
|
|
|
|
import re
|
|
|
|
import time
|
2019-03-08 13:02:10 +01:00
|
|
|
import urllib
|
2017-11-16 00:55:49 +01:00
|
|
|
from collections import defaultdict
|
2020-06-05 06:55:20 +02:00
|
|
|
from datetime import datetime, timedelta, timezone
|
2019-03-08 13:02:10 +01:00
|
|
|
from decimal import Decimal
|
2020-06-13 05:24:42 +02:00
|
|
|
from typing import Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, Type, Union
|
2020-11-17 19:18:22 +01:00
|
|
|
from urllib.parse import urlencode
|
2017-11-16 00:55:49 +01:00
|
|
|
|
|
|
|
import pytz
|
2017-02-09 02:55:18 +01:00
|
|
|
from django.conf import settings
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.core.validators import URLValidator
|
2013-11-06 13:25:55 +01:00
|
|
|
from django.db import connection
|
2016-06-05 20:51:43 +02:00
|
|
|
from django.db.models.query import QuerySet
|
2020-11-17 19:18:22 +01:00
|
|
|
from django.http import HttpRequest, HttpResponse, HttpResponseNotFound, HttpResponseRedirect
|
2017-11-16 00:55:49 +01:00
|
|
|
from django.shortcuts import render
|
2019-02-02 23:53:21 +01:00
|
|
|
from django.template import loader
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.urls import reverse
|
2020-09-24 13:04:54 +02:00
|
|
|
from django.utils import translation
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.utils.timesince import timesince
|
2020-06-05 06:55:20 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2016-12-20 02:30:08 +01:00
|
|
|
from django.utils.translation import ugettext as _
|
2016-04-21 08:48:33 +02:00
|
|
|
from jinja2 import Markup as mark_safe
|
2020-06-11 00:54:34 +02:00
|
|
|
from psycopg2.sql import SQL, Composable, Literal
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2019-02-02 23:53:21 +01:00
|
|
|
from analytics.lib.counts import COUNT_STATS, CountStat
|
2017-01-07 01:46:18 +01:00
|
|
|
from analytics.lib.time_utils import time_range
|
2020-06-11 00:54:34 +02:00
|
|
|
from analytics.models import (
|
|
|
|
BaseCount,
|
|
|
|
InstallationCount,
|
|
|
|
RealmCount,
|
|
|
|
StreamCount,
|
|
|
|
UserCount,
|
|
|
|
installation_epoch,
|
|
|
|
)
|
|
|
|
from confirmation.models import Confirmation, _properties, confirmation_url
|
|
|
|
from confirmation.settings import STATUS_ACTIVE
|
|
|
|
from zerver.decorator import (
|
|
|
|
require_non_guest_user,
|
|
|
|
require_server_admin,
|
|
|
|
require_server_admin_api,
|
|
|
|
to_utc_datetime,
|
|
|
|
zulip_login_required,
|
|
|
|
)
|
2020-11-17 19:18:22 +01:00
|
|
|
from zerver.forms import check_subdomain_available
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.actions import (
|
|
|
|
do_change_plan_type,
|
2020-11-17 19:18:22 +01:00
|
|
|
do_change_realm_subdomain,
|
2020-06-11 00:54:34 +02:00
|
|
|
do_deactivate_realm,
|
|
|
|
do_scrub_realm,
|
|
|
|
do_send_realm_reactivation_email,
|
|
|
|
)
|
2017-10-28 00:07:31 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2020-10-02 14:29:00 +02:00
|
|
|
from zerver.lib.i18n import get_and_set_request_language, get_language_translation_data
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.realm_icon import realm_icon_url
|
2017-11-16 00:55:49 +01:00
|
|
|
from zerver.lib.request import REQ, has_request_variables
|
2020-07-27 20:21:41 +02:00
|
|
|
from zerver.lib.response import json_error, json_success
|
2019-03-08 13:02:10 +01:00
|
|
|
from zerver.lib.subdomains import get_subdomain_from_hostname
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.timestamp import convert_to_UTC, timestamp_to_datetime
|
2020-05-07 13:19:54 +02:00
|
|
|
from zerver.lib.validator import to_non_negative_int
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Client,
|
|
|
|
MultiuseInvite,
|
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
UserActivity,
|
|
|
|
UserActivityInterval,
|
|
|
|
UserProfile,
|
|
|
|
get_realm,
|
|
|
|
)
|
2020-06-19 00:32:55 +02:00
|
|
|
from zerver.views.invite import get_invitee_emails_set
|
|
|
|
|
|
|
|
if settings.BILLING_ENABLED:
|
2020-06-09 12:24:32 +02:00
|
|
|
from corporate.lib.stripe import (
|
2020-07-17 12:56:06 +02:00
|
|
|
approve_sponsorship,
|
2020-06-09 12:24:32 +02:00
|
|
|
attach_discount_to_realm,
|
2020-08-13 13:20:18 +02:00
|
|
|
downgrade_at_the_end_of_billing_cycle,
|
|
|
|
downgrade_now_without_creating_additional_invoices,
|
2020-07-03 20:21:13 +02:00
|
|
|
get_current_plan_by_realm,
|
2020-06-09 12:24:32 +02:00
|
|
|
get_customer_by_realm,
|
|
|
|
get_discount_for_realm,
|
2020-07-03 20:21:13 +02:00
|
|
|
get_latest_seat_count,
|
|
|
|
make_end_of_cycle_updates_if_needed,
|
2020-08-18 13:48:11 +02:00
|
|
|
update_billing_method_of_current_plan,
|
2020-06-09 12:24:32 +02:00
|
|
|
update_sponsorship_status,
|
2020-08-13 13:20:18 +02:00
|
|
|
void_all_open_invoices,
|
2020-06-09 12:24:32 +02:00
|
|
|
)
|
2016-12-20 02:26:14 +01:00
|
|
|
|
2019-02-03 02:18:57 +01:00
|
|
|
if settings.ZILENCER_ENABLED:
|
2020-06-11 00:54:34 +02:00
|
|
|
from zilencer.models import RemoteInstallationCount, RemoteRealmCount, RemoteZulipServer
|
2019-02-03 02:18:57 +01:00
|
|
|
|
2020-03-31 12:01:48 +02:00
|
|
|
# Grace period after realm/installation creation during which the analytics
# cron job is expected to have produced a full data set; see
# is_analytics_ready() and the start/end sanity check in get_chart_data().
MAX_TIME_FOR_FULL_ANALYTICS_GENERATION = timedelta(days=1, minutes=30)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-03-31 12:17:25 +02:00
|
|
|
def is_analytics_ready(realm: Realm) -> bool:
    """Return whether enough time has passed since the realm was created
    for the analytics cron job to have generated a full set of data."""
    return (timezone_now() - realm.date_created) > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def render_stats(
    request: HttpRequest,
    data_url_suffix: str,
    target_name: str,
    for_installation: bool = False,
    remote: bool = False,
    analytics_ready: bool = True,
) -> HttpResponse:
    """Render the shared analytics charts page (analytics/stats.html).

    Args:
        request: Incoming HTTP request; request.user must be authenticated.
        data_url_suffix: Suffix the frontend appends when fetching chart
            data (e.g. "/realm/<realm_str>"); "" means the user's own realm.
        target_name: Human-readable name displayed in the page heading.
        for_installation: Show installation-wide (aggregate) data.
        remote: Data comes from a remote Zulip server (zilencer).
        analytics_ready: If False, the template shows a "data still being
            generated" state instead of empty charts.

    Returns:
        The rendered stats page.  (Previously annotated as HttpRequest,
        which was incorrect: render() returns an HttpResponse.)
    """
    page_params = dict(
        data_url_suffix=data_url_suffix,
        for_installation=for_installation,
        remote=remote,
        debug_mode=False,
    )

    # Resolve the language for this request (URL path wins over the user's
    # default) and ship the matching translation catalog to the frontend.
    request_language = get_and_set_request_language(
        request,
        request.user.default_language,
        translation.get_language_from_path(request.path_info),
    )

    page_params["translation_data"] = get_language_translation_data(request_language)

    return render(
        request,
        "analytics/stats.html",
        context=dict(
            target_name=target_name, page_params=page_params, analytics_ready=analytics_ready
        ),
    )
|
|
|
|
|
2018-04-15 18:43:48 +02:00
|
|
|
|
|
|
|
@zulip_login_required
def stats(request: HttpRequest) -> HttpResponse:
    """Serve the /stats page for the requesting user's own realm."""
    user = request.user
    realm = user.realm
    # TODO: Make @zulip_login_required pass the UserProfile so we
    # can use @require_member_or_admin
    if user.is_guest:
        raise JsonableError(_("Not allowed for guest users"))
    display_name = realm.name or realm.string_id
    return render_stats(
        request,
        "",
        display_name,
        analytics_ready=is_analytics_ready(realm),
    )
|
|
|
|
|
2018-04-15 18:43:48 +02:00
|
|
|
|
|
|
|
@require_server_admin
@has_request_variables
def stats_for_realm(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Server-admin view of the stats page for an arbitrary realm."""
    try:
        target_realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound(f"Realm {realm_str} does not exist")

    display_name = target_realm.name or target_realm.string_id
    return render_stats(
        request,
        f"/realm/{realm_str}",
        display_name,
        analytics_ready=is_analytics_ready(target_realm),
    )
|
|
|
|
|
2018-04-15 18:43:48 +02:00
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
@require_server_admin
@has_request_variables
def stats_for_remote_realm(
    request: HttpRequest, remote_server_id: int, remote_realm_id: int
) -> HttpResponse:
    """Server-admin view of stats for one realm hosted on a remote server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    data_url_suffix = f"/remote/{remote_server.id}/realm/{remote_realm_id}"
    target_name = f"Realm {remote_realm_id} on server {remote_server.hostname}"
    return render_stats(request, data_url_suffix, target_name)
|
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
|
2018-04-15 18:43:48 +02:00
|
|
|
@require_server_admin_api
@has_request_variables
def get_chart_data_for_realm(
    request: HttpRequest, user_profile: UserProfile, realm_str: str, **kwargs: Any
) -> HttpResponse:
    """Chart-data API endpoint for an arbitrary realm, looked up by subdomain."""
    try:
        target_realm = get_realm(realm_str)
    except Realm.DoesNotExist:
        raise JsonableError(_("Invalid organization"))

    return get_chart_data(
        realm=target_realm, request=request, user_profile=user_profile, **kwargs
    )
|
2016-12-20 02:30:08 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_realm(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    remote_realm_id: int,
    **kwargs: Any,
) -> HttpResponse:
    """Chart-data API endpoint for a single realm on a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        remote=True,
        remote_realm_id=int(remote_realm_id),
        server=remote_server,
        **kwargs,
    )
|
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
|
2018-05-18 02:16:29 +02:00
|
|
|
@require_server_admin
def stats_for_installation(request: HttpRequest) -> HttpResponse:
    """Server-admin view of installation-wide stats."""
    return render_stats(request, "/installation", "installation", for_installation=True)
|
2018-05-18 02:16:29 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
@require_server_admin
def stats_for_remote_installation(request: HttpRequest, remote_server_id: int) -> HttpResponse:
    """Server-admin view of installation-wide stats for a remote Zulip server."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return render_stats(
        request,
        f"/remote/{remote_server.id}/installation",
        f"remote installation {remote_server.hostname}",
        for_installation=True,
        remote=True,
    )
|
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
|
2018-05-18 02:16:29 +02:00
|
|
|
@require_server_admin_api
@has_request_variables
def get_chart_data_for_installation(
    request: HttpRequest, user_profile: UserProfile, chart_name: str = REQ(), **kwargs: Any
) -> HttpResponse:
    """Chart-data API endpoint aggregating over the whole local installation."""
    return get_chart_data(
        for_installation=True, request=request, user_profile=user_profile, **kwargs
    )
|
|
|
|
|
2018-05-18 02:16:29 +02:00
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
@require_server_admin_api
@has_request_variables
def get_chart_data_for_remote_installation(
    request: HttpRequest,
    user_profile: UserProfile,
    remote_server_id: int,
    chart_name: str = REQ(),
    **kwargs: Any,
) -> HttpResponse:
    """Chart-data API endpoint aggregating over a remote installation."""
    assert settings.ZILENCER_ENABLED
    remote_server = RemoteZulipServer.objects.get(id=remote_server_id)
    return get_chart_data(
        request=request,
        user_profile=user_profile,
        server=remote_server,
        remote=True,
        for_installation=True,
        **kwargs,
    )
|
|
|
|
|
2019-02-02 20:57:20 +01:00
|
|
|
|
2018-10-31 21:09:33 +01:00
|
|
|
@require_non_guest_user
@has_request_variables
def get_chart_data(
    request: HttpRequest,
    user_profile: UserProfile,
    chart_name: str = REQ(),
    min_length: Optional[int] = REQ(converter=to_non_negative_int, default=None),
    start: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    end: Optional[datetime] = REQ(converter=to_utc_datetime, default=None),
    realm: Optional[Realm] = None,
    for_installation: bool = False,
    remote: bool = False,
    remote_realm_id: Optional[int] = None,
    server: Optional["RemoteZulipServer"] = None,
) -> HttpResponse:
    """Return JSON time-series data for the named chart.

    The response maps aggregation levels ("everyone", "user") to
    {label: [counts]} series aligned with "end_times", plus "frequency"
    and "display_order".  The keyword-only flags (realm/for_installation/
    remote/remote_realm_id/server) are supplied by the wrapper views above,
    not by the HTTP request.

    Raises JsonableError for unknown chart names, inverted start/end
    ranges, or missing analytics data.
    """
    # Select which table supplies the aggregate ("everyone") series, based
    # on local vs remote and realm vs whole-installation scope.
    if for_installation:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteInstallationCount
            assert server is not None
        else:
            aggregate_table = InstallationCount
    else:
        if remote:
            assert settings.ZILENCER_ENABLED
            aggregate_table = RemoteRealmCount
            assert server is not None
            assert remote_realm_id is not None
        else:
            aggregate_table = RealmCount

    # Each chart is defined by: the CountStats to query, which tables to
    # read, how subgroups map to frontend labels, how labels are ordered,
    # and whether subgroups with no data should still be emitted.
    if chart_name == "number_of_humans":
        stats = [
            COUNT_STATS["1day_actives::day"],
            COUNT_STATS["realm_active_humans::day"],
            COUNT_STATS["active_users_audit:is_bot:day"],
        ]
        tables = [aggregate_table]
        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
            stats[0]: {None: "_1day"},
            stats[1]: {None: "_15day"},
            stats[2]: {"false": "all_time"},
        }
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_over_time":
        stats = [COUNT_STATS["messages_sent:is_bot:hour"]]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {"false": "human", "true": "bot"}}
        labels_sort_function = None
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_message_type":
        stats = [COUNT_STATS["messages_sent:message_type:day"]]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {
            stats[0]: {
                "public_stream": _("Public streams"),
                "private_stream": _("Private streams"),
                "private_message": _("Private messages"),
                "huddle_message": _("Group private messages"),
            }
        }
        labels_sort_function = lambda data: sort_by_totals(data["everyone"])
        include_empty_subgroups = True
    elif chart_name == "messages_sent_by_client":
        stats = [COUNT_STATS["messages_sent:client:day"]]
        tables = [aggregate_table, UserCount]
        # Note that the labels are further re-written by client_label_map
        subgroup_to_label = {
            stats[0]: {str(id): name for id, name in Client.objects.values_list("id", "name")}
        }
        labels_sort_function = sort_client_labels
        include_empty_subgroups = False
    elif chart_name == "messages_read_over_time":
        stats = [COUNT_STATS["messages_read::hour"]]
        tables = [aggregate_table, UserCount]
        subgroup_to_label = {stats[0]: {None: "read"}}
        labels_sort_function = None
        include_empty_subgroups = True
    else:
        raise JsonableError(_("Unknown chart name: {}").format(chart_name))

    # Most likely someone using our API endpoint. The /stats page does not
    # pass a start or end in its requests.
    if start is not None:
        start = convert_to_UTC(start)
    if end is not None:
        end = convert_to_UTC(end)
    if start is not None and end is not None and start > end:
        raise JsonableError(
            _("Start time is later than end time. Start: {start}, End: {end}").format(
                start=start,
                end=end,
            )
        )

    if realm is None:
        # Note that this value is invalid for Remote tables; be
        # careful not to access it in those code paths.
        realm = user_profile.realm

    if remote:
        # For remote servers, we don't have fillstate data, and thus
        # should simply use the first and last data points for the
        # table.
        assert server is not None
        if not aggregate_table.objects.filter(server=server).exists():
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )
        if start is None:
            start = aggregate_table.objects.filter(server=server).first().end_time
        if end is None:
            end = aggregate_table.objects.filter(server=server).last().end_time
    else:
        # Otherwise, we can use tables on the current server to
        # determine a nice range, and some additional validation.
        if start is None:
            if for_installation:
                start = installation_epoch()
            else:
                start = realm.date_created
        if end is None:
            end = max(
                stat.last_successful_fill() or datetime.min.replace(tzinfo=timezone.utc)
                for stat in stats
            )

        # start > end here means analytics have never been filled for a
        # realm older than the generation grace period — most likely the
        # analytics cron job is broken.
        if start > end and (timezone_now() - start > MAX_TIME_FOR_FULL_ANALYTICS_GENERATION):
            logging.warning(
                "User from realm %s attempted to access /stats, but the computed "
                "start time: %s (creation of realm or installation) is later than the computed "
                "end time: %s (last successful analytics update). Is the "
                "analytics cron job running?",
                realm.string_id,
                start,
                end,
            )
            raise JsonableError(
                _("No analytics data available. Please contact your server administrator.")
            )

    # All stats for a chart must share a frequency so they align on one
    # end_times axis.
    assert len({stat.frequency for stat in stats}) == 1
    end_times = time_range(start, end, stats[0].frequency, min_length)
    data: Dict[str, Any] = {
        "end_times": [int(end_time.timestamp()) for end_time in end_times],
        "frequency": stats[0].frequency,
    }

    # Map each Count table to the key the frontend expects its series under.
    aggregation_level = {
        InstallationCount: "everyone",
        RealmCount: "everyone",
        UserCount: "user",
    }
    if settings.ZILENCER_ENABLED:
        aggregation_level[RemoteInstallationCount] = "everyone"
        aggregation_level[RemoteRealmCount] = "everyone"

    # -1 is a placeholder value, since there is no relevant filtering on InstallationCount
    id_value = {
        InstallationCount: -1,
        RealmCount: realm.id,
        UserCount: user_profile.id,
    }
    if settings.ZILENCER_ENABLED:
        if server is not None:
            id_value[RemoteInstallationCount] = server.id
        # TODO: RemoteRealmCount logic doesn't correctly handle
        # filtering by server_id as well.
        if remote_realm_id is not None:
            id_value[RemoteRealmCount] = remote_realm_id

    # Collect the per-subgroup time series for every (table, stat) pair.
    for table in tables:
        data[aggregation_level[table]] = {}
        for stat in stats:
            data[aggregation_level[table]].update(
                get_time_series_by_subgroup(
                    stat,
                    table,
                    id_value[table],
                    end_times,
                    subgroup_to_label[stat],
                    include_empty_subgroups,
                )
            )

    if labels_sort_function is not None:
        data["display_order"] = labels_sort_function(data)
    else:
        data["display_order"] = None
    return json_success(data=data)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
    """Return the labels ordered by descending sum of their value series."""
    ranked = sorted(
        ((sum(series), label) for label, series in value_arrays.items()),
        reverse=True,
    )
    return [label for _, label in ranked]
|
2017-02-10 00:39:42 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-02-11 09:48:38 +01:00
|
|
|
# For any given user, we want to show a fixed set of clients in the chart,
# regardless of the time aggregation or whether we're looking at realm or
# user data. This fixed set ideally includes the clients most important in
# understanding the realm's traffic and the user's traffic. This function
# tries to rank the clients so that taking the first N elements of the
# sorted list has a reasonable chance of doing so.
def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
    realm_order = sort_by_totals(data["everyone"])
    user_order = sort_by_totals(data["user"])
    # Rank each label by its realm-wide position, then let the user's own
    # ranking win ties (the -0.1 offset places a user's i-th client just
    # ahead of the realm's i-th client).
    rank: Dict[str, float] = {label: i for i, label in enumerate(realm_order)}
    for i, label in enumerate(user_order):
        rank[label] = min(i - 0.1, rank.get(label, i))
    return sorted(rank, key=lambda label: rank[label])
|
|
|
|
|
2017-02-10 00:39:42 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def table_filtered_to_id(table: Type[BaseCount], key_id: int) -> QuerySet:
    """Return the rows of the given *Count table for one aggregation key.

    The meaning of key_id depends on the table: realm id for RealmCount /
    RemoteRealmCount, user id for UserCount, stream id for StreamCount,
    remote server id for RemoteInstallationCount.  InstallationCount has no
    key column, so key_id is ignored for it.

    Raises AssertionError for unrecognized tables (including the Remote
    tables when ZILENCER is disabled).
    """
    if table == RealmCount:
        return RealmCount.objects.filter(realm_id=key_id)
    elif table == UserCount:
        return UserCount.objects.filter(user_id=key_id)
    elif table == StreamCount:
        return StreamCount.objects.filter(stream_id=key_id)
    elif table == InstallationCount:
        return InstallationCount.objects.all()
    elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
        return RemoteInstallationCount.objects.filter(server_id=key_id)
    elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
        return RemoteRealmCount.objects.filter(realm_id=key_id)
    else:
        raise AssertionError(f"Unknown table: {table}")
|
2017-02-11 20:37:08 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def client_label_map(name: str) -> str:
    """Translate a raw Client name into the label shown in the chart legend."""
    exact_labels = {
        "website": "Website",
        "ZulipElectron": "Desktop app",
        "ZulipAndroid": "Old Android app",
        "ZulipiOS": "Old iOS app",
        "ZulipMobile": "Mobile app",
        "ZulipPython": "Python API",
        "API: Python": "Python API",
    }
    if name in exact_labels:
        return exact_labels[name]
    if name.startswith("desktop app"):
        return "Old desktop app"
    # Integration clients like "ZulipGitHubWebhook" become "GitHub webhook".
    if name.startswith("Zulip") and name.endswith("Webhook"):
        return name[len("Zulip") : -len("Webhook")] + " webhook"
    return name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
    """Collapse per-client time series whose labels map (via
    client_label_map) to the same display name, summing them element-wise.

    All arrays are expected to have the same length (one entry per time
    bucket); arrays merged under one label are added index by index.
    """
    mapped_arrays: Dict[str, List[int]] = {}
    for label, array in value_arrays.items():
        mapped_label = client_label_map(label)
        if mapped_label in mapped_arrays:
            # Another raw client already mapped here; accumulate in place.
            existing = mapped_arrays[mapped_label]
            for i, value in enumerate(array):
                existing[i] += value
        else:
            # Copy, so later accumulation never mutates the caller's input.
            mapped_arrays[mapped_label] = list(array)
    return mapped_arrays
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_time_series_by_subgroup(
    stat: CountStat,
    table: Type[BaseCount],
    key_id: int,
    end_times: List[datetime],
    subgroup_to_label: Dict[Optional[str], str],
    include_empty_subgroups: bool,
) -> Dict[str, List[int]]:
    """Build, for each labeled subgroup of ``stat``, the list of values at
    the given ``end_times`` (0 where no row exists).

    Subgroups with no data at all are omitted unless
    ``include_empty_subgroups`` is True.
    """
    rows = (
        table_filtered_to_id(table, key_id)
        .filter(property=stat.property)
        .values_list("subgroup", "end_time", "value")
    )

    # subgroup -> end_time -> value, defaulting missing buckets to 0.
    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
    for subgroup, end_time, value in rows:
        value_dicts[subgroup][end_time] = value

    value_arrays = {
        label: [value_dicts[subgroup][end_time] for end_time in end_times]
        for subgroup, label in subgroup_to_label.items()
        if subgroup in value_dicts or include_empty_subgroups
    }

    if stat == COUNT_STATS["messages_sent:client:day"]:
        # HACK: We rewrite these arrays to collapse the Client objects
        # with similar names into a single sum, and generally give
        # them better names
        return rewrite_client_arrays(value_arrays)
    return value_arrays
|
2017-01-14 23:52:27 +01:00
|
|
|
|
2016-12-20 02:30:08 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Timezone object used for rendering timestamps in the activity views;
# NOTE(review): presumably a historical choice of the maintainers' locale
# rather than anything semantic — confirm before relying on it.
eastern_tz = pytz.timezone("US/Eastern")
|
2016-04-21 08:48:33 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def make_table(
    title: str, cols: Sequence[str], rows: Sequence[Any], has_row_class: bool = False
) -> str:
    """Render ``rows``/``cols`` through the ad-hoc-query HTML template.

    Unless ``has_row_class`` is True (meaning the caller already supplies
    row dicts carrying a ``row_class``), each row is wrapped in a neutral
    dict with ``row_class=None``.
    """
    if not has_row_class:
        rows = [dict(cells=row, row_class=None) for row in rows]

    return loader.render_to_string(
        "analytics/ad_hoc_query.html",
        dict(data=dict(title=title, cols=cols, rows=rows)),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def dictfetchall(cursor: connection.cursor) -> List[Dict[str, Any]]:
    """Return all remaining rows from ``cursor`` as dicts keyed by the
    column names taken from the cursor's description."""
    column_names = [column[0] for column in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2013-12-18 21:13:47 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def get_realm_day_counts() -> Dict[str, Dict[str, str]]:
    """Return, per realm string_id, a dict with a ``cnts`` entry: a run of
    HTML ``<td>`` cells with the count of human-sent messages for each of
    the last 8 days, color-classed good/bad/neutral relative to that
    realm's own min/max over the trailing 7 days.
    """
    # Messages from humans over the last 8 days, grouped by realm and by
    # message age in whole days (0 = today), excluding mirrored/monitoring
    # clients.
    query = SQL(
        """
        select
            r.string_id,
            (now()::date - date_sent::date) age,
            count(*) cnt
        from zerver_message m
        join zerver_userprofile up on up.id = m.sender_id
        join zerver_realm r on r.id = up.realm_id
        join zerver_client c on c.id = m.sending_client_id
        where
            (not up.is_bot)
        and
            date_sent > now()::date - interval '8 day'
        and
            c.name not in ('zephyr_mirror', 'ZulipMonitoring')
        group by
            r.string_id,
            age
        order by
            r.string_id,
            age
        """
    )
    cursor = connection.cursor()
    cursor.execute(query)
    rows = dictfetchall(cursor)
    cursor.close()

    # string_id -> age (days) -> message count.
    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
    for row in rows:
        counts[row["string_id"]][row["age"]] = row["cnt"]

    result: Dict[str, Dict[str, str]] = {}
    for string_id in counts:
        # One slot per day, oldest..newest filled from age 0..7; missing
        # days become 0.
        raw_cnts = [counts[string_id].get(age, 0) for age in range(8)]
        # Today (index 0) is partial, so min/max are computed over the
        # 7 complete days only.
        min_cnt = min(raw_cnts[1:])
        max_cnt = max(raw_cnts[1:])

        def format_count(cnt: int, style: Optional[str] = None) -> str:
            # Closure over min_cnt/max_cnt for this realm; an explicit
            # ``style`` overrides the good/bad classification.
            if style is not None:
                good_bad = style
            elif cnt == min_cnt:
                good_bad = "bad"
            elif cnt == max_cnt:
                good_bad = "good"
            else:
                good_bad = "neutral"

            return f'<td class="number {good_bad}">{cnt}</td>'

        # Today's partial count is always rendered neutral.
        cnts = format_count(raw_cnts[0], "neutral") + "".join(map(format_count, raw_cnts[1:]))
        result[string_id] = dict(cnts=cnts)
    return result
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-03-08 11:17:37 +01:00
|
|
|
def get_plan_name(plan_type: int) -> str:
    """Map a realm ``plan_type`` integer to its display name."""
    plan_names = ["", "self hosted", "limited", "standard", "open source"]
    return plan_names[plan_type]
|
2019-03-08 11:17:37 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
    """Render the installation-wide realm summary table as HTML.

    ``realm_minutes`` maps realm string_id -> total user-minutes online
    (as produced by user_activity_intervals); it is used to add per-realm
    hours columns.  Rows come from a single SQL query joining the latest
    filled analytics counts (DAU, WAU, human/bot user counts) per realm.
    """
    now = timezone_now()

    # One row per realm with its most recently filled count values; each
    # subquery is pinned to the corresponding stat's last_successful_fill
    # end_time (passed as parameters below).
    query = SQL(
        """
        SELECT
            realm.string_id,
            realm.date_created,
            realm.plan_type,
            coalesce(wau_table.value, 0) wau_count,
            coalesce(dau_table.value, 0) dau_count,
            coalesce(user_count_table.value, 0) user_profile_count,
            coalesce(bot_count_table.value, 0) bot_count
        FROM
            zerver_realm as realm
        LEFT OUTER JOIN (
            SELECT
                value _14day_active_humans,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'realm_active_humans::day'
                AND end_time = %(realm_active_humans_end_time)s
        ) as _14day_active_humans_table ON realm.id = _14day_active_humans_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = '7day_actives::day'
                AND end_time = %(seven_day_actives_end_time)s
        ) as wau_table ON realm.id = wau_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = '1day_actives::day'
                AND end_time = %(one_day_actives_end_time)s
        ) as dau_table ON realm.id = dau_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'active_users_audit:is_bot:day'
                AND subgroup = 'false'
                AND end_time = %(active_users_audit_end_time)s
        ) as user_count_table ON realm.id = user_count_table.realm_id
        LEFT OUTER JOIN (
            SELECT
                value,
                realm_id
            from
                analytics_realmcount
            WHERE
                property = 'active_users_audit:is_bot:day'
                AND subgroup = 'true'
                AND end_time = %(active_users_audit_end_time)s
        ) as bot_count_table ON realm.id = bot_count_table.realm_id
        WHERE
            _14day_active_humans IS NOT NULL
            or realm.plan_type = 3
        ORDER BY
            dau_count DESC,
            string_id ASC
        """
    )

    cursor = connection.cursor()
    cursor.execute(
        query,
        {
            "realm_active_humans_end_time": COUNT_STATS[
                "realm_active_humans::day"
            ].last_successful_fill(),
            "seven_day_actives_end_time": COUNT_STATS["7day_actives::day"].last_successful_fill(),
            "one_day_actives_end_time": COUNT_STATS["1day_actives::day"].last_successful_fill(),
            "active_users_audit_end_time": COUNT_STATS[
                "active_users_audit:is_bot:day"
            ].last_successful_fill(),
        },
    )
    rows = dictfetchall(cursor)
    cursor.close()

    # Fetch all the realm administrator users
    realm_admins: Dict[str, List[str]] = defaultdict(list)
    for up in UserProfile.objects.select_related("realm").filter(
        role=UserProfile.ROLE_REALM_ADMINISTRATOR,
        is_active=True,
    ):
        realm_admins[up.realm.string_id].append(up.delivery_email)

    # Derive display fields for each realm row.
    for row in rows:
        row["date_created_day"] = row["date_created"].strftime("%Y-%m-%d")
        row["plan_type_string"] = get_plan_name(row["plan_type"])
        row["age_days"] = int((now - row["date_created"]).total_seconds() / 86400)
        # "New" means created within the last 12 weeks.
        row["is_new"] = row["age_days"] < 12 * 7
        row["realm_admin_email"] = ", ".join(realm_admins[row["string_id"]])

    # get messages sent per day
    counts = get_realm_day_counts()
    for row in rows:
        try:
            row["history"] = counts[row["string_id"]]["cnts"]
        except Exception:
            # Realms with no recent human messages have no entry in counts.
            row["history"] = ""

    # estimate annual subscription revenue
    total_amount = 0
    if settings.BILLING_ENABLED:
        from corporate.lib.stripe import estimate_annual_recurring_revenue_by_realm

        estimated_arrs = estimate_annual_recurring_revenue_by_realm()
        for row in rows:
            if row["string_id"] in estimated_arrs:
                row["amount"] = estimated_arrs[row["string_id"]]
        total_amount += sum(estimated_arrs.values())

    # augment data with realm_minutes
    total_hours = 0.0
    for row in rows:
        string_id = row["string_id"]
        minutes = realm_minutes.get(string_id, 0.0)
        hours = minutes / 60.0
        total_hours += hours
        row["hours"] = str(int(hours))
        try:
            row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
        except Exception:
            # dau_count of 0 -> division by zero; leave the column unset.
            pass

    # formatting
    for row in rows:
        row["stats_link"] = realm_stats_link(row["string_id"])
        # NOTE: string_id is replaced by its activity link, so it must be
        # the last consumer of the raw value in this function.
        row["string_id"] = realm_activity_link(row["string_id"])

    # Count active sites
    def meets_goal(row: Dict[str, int]) -> bool:
        # A site is "active" if it had at least 5 daily active users.
        return row["dau_count"] >= 5

    num_active_sites = len(list(filter(meets_goal, rows)))

    # create totals
    total_dau_count = 0
    total_user_profile_count = 0
    total_bot_count = 0
    total_wau_count = 0
    for row in rows:
        total_dau_count += int(row["dau_count"])
        total_user_profile_count += int(row["user_profile_count"])
        total_bot_count += int(row["bot_count"])
        total_wau_count += int(row["wau_count"])

    # Synthetic "Total" row displayed at the top of the table.
    total_row = dict(
        string_id="Total",
        plan_type_string="",
        amount=total_amount,
        stats_link="",
        date_created_day="",
        realm_admin_email="",
        dau_count=total_dau_count,
        user_profile_count=total_user_profile_count,
        bot_count=total_bot_count,
        hours=int(total_hours),
        wau_count=total_wau_count,
    )

    rows.insert(0, total_row)

    content = loader.render_to_string(
        "analytics/realm_summary_table.html",
        dict(rows=rows, num_active_sites=num_active_sites, utctime=now.strftime("%Y-%m-%d %H:%MZ")),
    )
    return content
|
2013-11-06 13:25:55 +01:00
|
|
|
|
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
    """Build a per-user online-duration report for the trailing 24 hours.

    Returns a pair: an HTML ``<pre>`` block listing each user's total
    online duration grouped by realm, and a dict mapping realm string_id
    to that realm's total minutes online (consumed by
    realm_summary_table).
    """
    day_end = timestamp_to_datetime(time.time())
    day_start = day_end - timedelta(hours=24)

    output = "Per-user online duration for the last 24 hours:\n"
    total_duration = timedelta(0)

    # All activity intervals overlapping the window, ordered by
    # (realm, email) so that itertools.groupby below sees each group
    # contiguously.
    all_intervals = (
        UserActivityInterval.objects.filter(
            end__gte=day_start,
            start__lte=day_end,
        )
        .select_related(
            "user_profile",
            "user_profile__realm",
        )
        .only(
            "start",
            "end",
            "user_profile__delivery_email",
            "user_profile__realm__string_id",
        )
        .order_by(
            "user_profile__realm__string_id",
            "user_profile__delivery_email",
        )
    )

    # Grouping key functions matching the order_by above.
    by_string_id = lambda row: row.user_profile.realm.string_id
    by_email = lambda row: row.user_profile.delivery_email

    realm_minutes: Dict[str, float] = {}

    for string_id, realm_intervals in itertools.groupby(all_intervals, by_string_id):
        realm_duration = timedelta(0)
        output += f"<hr>{string_id}\n"
        for email, intervals in itertools.groupby(realm_intervals, by_email):
            duration = timedelta(0)
            for interval in intervals:
                # Clamp each interval to the 24-hour window before
                # counting its length.
                start = max(day_start, interval.start)
                end = min(day_end, interval.end)
                duration += end - start

            total_duration += duration
            realm_duration += duration
            output += f"  {email:<37}{duration}\n"

        realm_minutes[string_id] = realm_duration.total_seconds() / 60

    output += f"\nTotal duration: {total_duration}\n"
    output += f"\nTotal duration in minutes: {total_duration.total_seconds() / 60.}\n"
    output += f"Total duration amortized to a month: {total_duration.total_seconds() * 30. / 60.}"
    content = mark_safe("<pre>" + output + "</pre>")
    return content, realm_minutes
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def sent_messages_report(realm: str) -> str:
    """Return an HTML table of messages sent per day in `realm` over the
    last two weeks, split into human-sent and bot-sent counts.

    `realm` is the realm's string_id.  Days with no traffic still appear
    (the generate_series scaffold left-joins both count subqueries), with
    NULL counts for the missing side.
    """
    title = "Recently sent messages for " + realm

    cols = [
        "Date",
        "Humans",
        "Bots",
    ]

    query = SQL(
        """
        select
            series.day::date,
            humans.cnt,
            bots.cnt
        from (
            select generate_series(
                (now()::date - interval '2 week'),
                now()::date,
                interval '1 day'
            ) as day
        ) as series
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                (not up.is_bot)
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) humans on
            series.day = humans.date_sent
        left join (
            select
                date_sent::date date_sent,
                count(*) cnt
            from zerver_message m
            join zerver_userprofile up on up.id = m.sender_id
            join zerver_realm r on r.id = up.realm_id
            where
                r.string_id = %s
            and
                up.is_bot
            and
                date_sent > now() - interval '2 week'
            group by
                date_sent::date
            order by
                date_sent::date
        ) bots on
            series.day = bots.date_sent
    """
    )
    # Use the cursor as a context manager so it is closed even if
    # execute()/fetchall() raises (the previous explicit close() leaked
    # the cursor on error).  `realm` is bound twice: once for the humans
    # subquery and once for the bots subquery.
    with connection.cursor() as cursor:
        cursor.execute(query, [realm, realm])
        rows = cursor.fetchall()

    return make_table(title, cols, rows)
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def ad_hoc_queries() -> List[Dict[str, str]]:
    """Run a fixed set of ad-hoc usage reports (raw SQL against the
    activity tables) and return one rendered page per report, each a
    dict with "title" and "content" (an HTML table).
    """

    def get_page(
        query: Composable, cols: Sequence[str], title: str, totals_columns: Sequence[int] = []
    ) -> Dict[str, str]:
        # Execute `query` and render the result as an HTML table titled
        # `title` with column headers `cols`.  If `totals_columns` names
        # any column indices, a "Total" summary row is prepended that
        # sums those columns.  NOTE: the mutable default `[]` is only
        # ever read here, never mutated, so it is safe.
        cursor = connection.cursor()
        cursor.execute(query)
        rows = cursor.fetchall()
        # fetchall() returns tuples; convert to lists so fix_rows() can
        # rewrite individual cells in place.
        rows = list(map(list, rows))
        cursor.close()

        def fix_rows(
            i: int, fixup_func: Union[Callable[[Realm], mark_safe], Callable[[datetime], str]]
        ) -> None:
            # Rewrite column `i` of every row through `fixup_func`
            # (e.g. to turn a realm name into a link).
            for row in rows:
                row[i] = fixup_func(row[i])

        total_row = []
        for i, col in enumerate(cols):
            # Columns are identified by their header text, so these
            # strings must match the `cols` lists used by the callers
            # below.
            if col == "Realm":
                fix_rows(i, realm_activity_link)
            elif col in ["Last time", "Last visit"]:
                fix_rows(i, format_date_for_activity_reports)
            elif col == "Hostname":
                # remote_installation_stats_link needs the server id,
                # which the queries below place in column 0.
                for row in rows:
                    row[i] = remote_installation_stats_link(row[0], row[i])
            if len(totals_columns) > 0:
                if i == 0:
                    total_row.append("Total")
                elif i in totals_columns:
                    # Sum the column, skipping NULLs from outer joins.
                    total_row.append(str(sum(row[i] for row in rows if row[i] is not None)))
                else:
                    total_row.append("")
        if len(totals_columns) > 0:
            rows.insert(0, total_row)

        content = make_table(title, cols, rows)

        return dict(
            content=content,
            title=title,
        )

    pages = []

    ###

    # Report: per-user mobile app usage over the last two weeks, one
    # page per legacy mobile client name.
    for mobile_type in ["Android", "ZulipiOS"]:
        title = f"{mobile_type} usage"

        query = SQL(
            """
            select
                realm.string_id,
                up.id user_id,
                client.name,
                sum(count) as hits,
                max(last_visit) as last_time
            from zerver_useractivity ua
            join zerver_client client on client.id = ua.client_id
            join zerver_userprofile up on up.id = ua.user_profile_id
            join zerver_realm realm on realm.id = up.realm_id
            where
                client.name like {mobile_type}
            group by string_id, up.id, client.name
            having max(last_visit) > now() - interval '2 week'
            order by string_id, up.id, client.name
        """
        ).format(
            # Literal-quote the client name into the SQL (psycopg2.sql
            # composition, not string interpolation).
            mobile_type=Literal(mobile_type),
        )

        cols = [
            "Realm",
            "User id",
            "Name",
            "Hits",
            "Last time",
        ]

        pages.append(get_page(query, cols, title))

    ###

    # Report: desktop client usage per realm over the last two weeks.
    # %% is a literal % in the LIKE pattern (escaped for the DB driver).
    title = "Desktop users"

    query = SQL(
        """
        select
            realm.string_id,
            client.name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            client.name like 'desktop%%'
        group by string_id, client.name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client.name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    # Report: integration traffic grouped by realm.  Webhook requests
    # are identified by '%external%' in the query path (the integration
    # name is path segment 5); other senders are counted by client name,
    # excluding the mobile apps and test clients.
    title = "Integrations by realm"

    query = SQL(
        """
        select
            realm.string_id,
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by string_id, client_name
        having max(last_visit) > now() - interval '2 week'
        order by string_id, client_name
    """
    )

    cols = [
        "Realm",
        "Client",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    ###

    # Report: same integration traffic as above, but grouped and sorted
    # by client first so cross-realm usage of one integration is visible.
    title = "Integrations by client"

    query = SQL(
        """
        select
            case
                when query like '%%external%%' then split_part(query, '/', 5)
                else client.name
            end client_name,
            realm.string_id,
            sum(count) as hits,
            max(last_visit) as last_time
        from zerver_useractivity ua
        join zerver_client client on client.id = ua.client_id
        join zerver_userprofile up on up.id = ua.user_profile_id
        join zerver_realm realm on realm.id = up.realm_id
        where
            (query in ('send_message_backend', '/api/v1/send_message')
            and client.name not in ('Android', 'ZulipiOS')
            and client.name not like 'test: Zulip%%'
            )
        or
            query like '%%external%%'
        group by client_name, string_id
        having max(last_visit) > now() - interval '2 week'
        order by client_name, string_id
    """
    )

    cols = [
        "Client",
        "Realm",
        "Hits",
        "Last time",
    ]

    pages.append(get_page(query, cols, title))

    # Report: remote (self-hosted) Zulip servers that phone home, with
    # their latest analytics user count and mobile-push user count.
    title = "Remote Zulip servers"

    query = SQL(
        """
        with icount as (
            select
                server_id,
                max(value) as max_value,
                max(end_time) as max_end_time
            from zilencer_remoteinstallationcount
            where
                property='active_users:is_bot:day'
                and subgroup='false'
            group by server_id
            ),
        remote_push_devices as (
            select server_id, count(distinct(user_id)) as push_user_count from zilencer_remotepushdevicetoken
            group by server_id
        )
        select
            rserver.id,
            rserver.hostname,
            rserver.contact_email,
            max_value,
            push_user_count,
            max_end_time
        from zilencer_remotezulipserver rserver
        left join icount on icount.server_id = rserver.id
        left join remote_push_devices on remote_push_devices.server_id = rserver.id
        order by max_value DESC NULLS LAST, push_user_count DESC NULLS LAST
    """
    )

    cols = [
        "ID",
        "Hostname",
        "Contact email",
        "Analytics users",
        "Mobile users",
        "Last update time",
    ]

    # Columns 3 and 4 ("Analytics users", "Mobile users") get a summed
    # "Total" row via get_page's totals_columns support.
    pages.append(get_page(query, cols, title, totals_columns=[3, 4]))

    return pages
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-04-06 12:59:18 +02:00
|
|
|
@require_server_admin
@has_request_variables
def get_activity(request: HttpRequest) -> HttpResponse:
    """Render the server-admin activity dashboard: usage-duration and
    realm-summary tables first, followed by each ad-hoc report page.
    """
    duration_content, realm_minutes = user_activity_intervals()
    counts_content: str = realm_summary_table(realm_minutes)

    # Tab order matters: counts and durations lead, ad-hoc reports follow.
    data = [("Counts", counts_content), ("Durations", duration_content)]
    data.extend((page["title"], page["content"]) for page in ad_hoc_queries())

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title="Activity", is_home=True),
    )
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_confirmations(
    types: List[int], object_ids: List[int], hostname: Optional[str] = None
) -> List[Dict[str, Any]]:
    """Build display dicts for Confirmation rows of the given types and
    object ids sent within the last 30 days: the target object, its
    confirmation URL, whether the link was clicked, and time to expiry.
    """
    cutoff = timezone_now() - timedelta(days=30)
    recent_confirmations = Confirmation.objects.filter(
        type__in=types, object_id__in=object_ids, date_sent__gte=cutoff
    )

    results = []
    for conf in recent_confirmations:
        conf_type = conf.type
        validity_days = _properties[conf_type].validity_in_days
        expiry_date = conf.date_sent + timedelta(days=validity_days)

        # Only objects that track a status can tell us whether the
        # confirmation link was ever followed.
        target = conf.content_object
        if not hasattr(target, "status"):
            link_status = ""
        elif target.status == STATUS_ACTIVE:
            link_status = "Link has been clicked"
        else:
            link_status = "Link has never been clicked"

        expires_in = (
            timesince(conf.date_sent, expiry_date)
            if timezone_now() < expiry_date
            else "Expired"
        )

        results.append(
            {
                "object": conf.content_object,
                "url": confirmation_url(conf.confirmation_key, conf.realm, conf_type),
                "type": conf_type,
                "link_status": link_status,
                "expires_in": expires_in,
            }
        )
    return results
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-03-08 13:02:10 +01:00
|
|
|
@require_server_admin
def support(request: HttpRequest) -> HttpResponse:
    """Server-admin support page: on POST (billing enabled only), apply
    exactly one change to a realm (plan type, discount, subdomain,
    activation status, billing method, sponsorship, downgrade, scrub);
    on GET with ?q=..., search for matching users/realms and show their
    billing state and pending confirmation links.
    """
    context: Dict[str, Any] = {}

    # A one-shot flash message may have been stashed in the session by a
    # previous POST that redirected (see the subdomain-change branch).
    if "success_message" in request.session:
        context["success_message"] = request.session["success_message"]
        del request.session["success_message"]

    if settings.BILLING_ENABLED and request.method == "POST":
        # We check that request.POST only has two keys in it: The
        # realm_id and a field to change.
        keys = set(request.POST.keys())
        if "csrfmiddlewaretoken" in keys:
            keys.remove("csrfmiddlewaretoken")
        if len(keys) != 2:
            return json_error(_("Invalid parameters"))

        realm_id = request.POST.get("realm_id")
        realm = Realm.objects.get(id=realm_id)

        # Exactly one of the following mutually-exclusive fields is
        # expected (enforced by the two-key check above).
        if request.POST.get("plan_type", None) is not None:
            new_plan_type = int(request.POST.get("plan_type"))
            current_plan_type = realm.plan_type
            do_change_plan_type(realm, new_plan_type)
            msg = f"Plan type of {realm.string_id} changed from {get_plan_name(current_plan_type)} to {get_plan_name(new_plan_type)} "
            context["success_message"] = msg
        elif request.POST.get("discount", None) is not None:
            new_discount = Decimal(request.POST.get("discount"))
            current_discount = get_discount_for_realm(realm) or 0
            attach_discount_to_realm(realm, new_discount)
            context[
                "success_message"
            ] = f"Discount of {realm.string_id} changed to {new_discount}% from {current_discount}%."
        elif request.POST.get("new_subdomain", None) is not None:
            new_subdomain = request.POST.get("new_subdomain")
            old_subdomain = realm.string_id
            try:
                check_subdomain_available(new_subdomain)
            except ValidationError as error:
                context["error_message"] = error.message
            else:
                do_change_realm_subdomain(realm, new_subdomain)
                # Store the message in the session and redirect, since the
                # old ?q= URL no longer points at this realm.
                request.session[
                    "success_message"
                ] = f"Subdomain changed from {old_subdomain} to {new_subdomain}"
                return HttpResponseRedirect(
                    reverse("support") + "?" + urlencode({"q": new_subdomain})
                )
        elif request.POST.get("status", None) is not None:
            status = request.POST.get("status")
            if status == "active":
                # Reactivation is done by emailing the realm's admins a
                # confirmation link, not by flipping state directly.
                do_send_realm_reactivation_email(realm)
                context[
                    "success_message"
                ] = f"Realm reactivation email sent to admins of {realm.string_id}."
            elif status == "deactivated":
                do_deactivate_realm(realm, request.user)
                context["success_message"] = f"{realm.string_id} deactivated."
        elif request.POST.get("billing_method", None) is not None:
            billing_method = request.POST.get("billing_method")
            if billing_method == "send_invoice":
                update_billing_method_of_current_plan(realm, charge_automatically=False)
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to pay by invoice."
            elif billing_method == "charge_automatically":
                update_billing_method_of_current_plan(realm, charge_automatically=True)
                context[
                    "success_message"
                ] = f"Billing method of {realm.string_id} updated to charge automatically."
        elif request.POST.get("sponsorship_pending", None) is not None:
            sponsorship_pending = request.POST.get("sponsorship_pending")
            if sponsorship_pending == "true":
                update_sponsorship_status(realm, True)
                context["success_message"] = f"{realm.string_id} marked as pending sponsorship."
            elif sponsorship_pending == "false":
                update_sponsorship_status(realm, False)
                context["success_message"] = f"{realm.string_id} is no longer pending sponsorship."
        elif request.POST.get("approve_sponsorship") is not None:
            if request.POST.get("approve_sponsorship") == "approve_sponsorship":
                approve_sponsorship(realm)
                context["success_message"] = f"Sponsorship approved for {realm.string_id}"
        elif request.POST.get("downgrade_method", None) is not None:
            downgrade_method = request.POST.get("downgrade_method")
            if downgrade_method == "downgrade_at_billing_cycle_end":
                downgrade_at_the_end_of_billing_cycle(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} marked for downgrade at the end of billing cycle"
            elif downgrade_method == "downgrade_now_without_additional_licenses":
                downgrade_now_without_creating_additional_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded without creating additional invoices"
            elif downgrade_method == "downgrade_now_void_open_invoices":
                downgrade_now_without_creating_additional_invoices(realm)
                voided_invoices_count = void_all_open_invoices(realm)
                context[
                    "success_message"
                ] = f"{realm.string_id} downgraded and voided {voided_invoices_count} open invoices"
        elif request.POST.get("scrub_realm", None) is not None:
            if request.POST.get("scrub_realm") == "scrub_realm":
                do_scrub_realm(realm, acting_user=request.user)
                context["success_message"] = f"{realm.string_id} scrubbed."

    query = request.GET.get("q", None)
    if query:
        # The search box accepts a comma/space-separated mix of emails,
        # realm string_ids, realm URLs, and full names.
        key_words = get_invitee_emails_set(query)

        users = set(UserProfile.objects.filter(delivery_email__in=key_words))
        realms = set(Realm.objects.filter(string_id__in=key_words))

        for key_word in key_words:
            try:
                # If the term is a valid URL, resolve its hostname to a
                # realm subdomain; otherwise treat it as a full name.
                URLValidator()(key_word)
                parse_result = urllib.parse.urlparse(key_word)
                hostname = parse_result.hostname
                assert hostname is not None
                if parse_result.port:
                    hostname = f"{hostname}:{parse_result.port}"
                subdomain = get_subdomain_from_hostname(hostname)
                try:
                    realms.add(get_realm(subdomain))
                except Realm.DoesNotExist:
                    pass
            except ValidationError:
                users.update(UserProfile.objects.filter(full_name__iexact=key_word))

        for realm in realms:
            # Attach billing data directly onto the realm objects for
            # the template to read.
            realm.customer = get_customer_by_realm(realm)

            current_plan = get_current_plan_by_realm(realm)
            if current_plan is not None:
                # May roll the plan over to a new billing cycle as a side
                # effect; display whichever plan is now current.
                new_plan, last_ledger_entry = make_end_of_cycle_updates_if_needed(
                    current_plan, timezone_now()
                )
                if last_ledger_entry is not None:
                    if new_plan is not None:
                        realm.current_plan = new_plan
                    else:
                        realm.current_plan = current_plan
                    realm.current_plan.licenses = last_ledger_entry.licenses
                    realm.current_plan.licenses_used = get_latest_seat_count(realm)

        # full_names can have , in them
        users.update(UserProfile.objects.filter(full_name__iexact=query))

        context["users"] = users
        context["realms"] = realms

        confirmations: List[Dict[str, Any]] = []

        # Collect any outstanding confirmation links relevant to the
        # matched emails and realms (signups, invites, reactivations).
        preregistration_users = PreregistrationUser.objects.filter(email__in=key_words)
        confirmations += get_confirmations(
            [Confirmation.USER_REGISTRATION, Confirmation.INVITATION, Confirmation.REALM_CREATION],
            preregistration_users,
            hostname=request.get_host(),
        )

        multiuse_invites = MultiuseInvite.objects.filter(realm__in=realms)
        confirmations += get_confirmations([Confirmation.MULTIUSE_INVITE], multiuse_invites)

        confirmations += get_confirmations(
            [Confirmation.REALM_REACTIVATION], [realm.id for realm in realms]
        )

        context["confirmations"] = confirmations

    def realm_admin_emails(realm: Realm) -> str:
        # Comma-joined admin emails for display in the realm card.
        return ", ".join(
            realm.get_human_admin_users()
            .order_by("delivery_email")
            .values_list("delivery_email", flat=True)
        )

    # Helpers the template calls directly.
    context["realm_admin_emails"] = realm_admin_emails
    context["get_discount_for_realm"] = get_discount_for_realm
    context["realm_icon_url"] = realm_icon_url
    context["Confirmation"] = Confirmation
    return render(request, "analytics/support.html", context=context)
2019-03-08 13:02:10 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
    """Fetch per-(user, client, query) activity rows for one realm.

    Only active users are included; *is_bot* selects either humans or
    bots.  Rows come back ordered by delivery email and then most recent
    visit, with the related user/client rows pre-fetched and the query
    narrowed to just the columns the report needs.
    """
    wanted_fields = (
        "user_profile__full_name",
        "user_profile__delivery_email",
        "query",
        "client__name",
        "count",
        "last_visit",
    )
    return (
        UserActivity.objects.filter(
            user_profile__realm__string_id=realm,
            user_profile__is_active=True,
            user_profile__is_bot=is_bot,
        )
        .order_by("user_profile__delivery_email", "-last_visit")
        .select_related("user_profile", "client")
        .only(*wanted_fields)
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def get_user_activity_records_for_email(email: str) -> List[QuerySet]:
    """Fetch every activity row for one user (by delivery email), newest first."""
    wanted_fields = (
        "user_profile__full_name",
        "query",
        "client__name",
        "count",
        "last_visit",
    )
    return (
        UserActivity.objects.filter(
            user_profile__delivery_email=email,
        )
        .order_by("-last_visit")
        .select_related("user_profile", "client")
        .only(*wanted_fields)
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def raw_user_activity_table(records: List[QuerySet]) -> str:
    """Render the unaggregated UserActivity rows as an HTML table."""
    cols = [
        "query",
        "client",
        "count",
        "last_visit",
    ]

    rows = [
        [
            record.query,
            record.client.name,
            record.count,
            format_date_for_activity_reports(record.last_visit),
        ]
        for record in records
    ]
    return make_table("Raw data", cols, rows)
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, Any]]:
    """Aggregate one user's activity rows into per-"action" buckets.

    Each bucket holds a running ``count`` and the latest ``last_visit``.
    The value type is really Union[int, datetime] per key, but spelling
    that Union out would have to be threaded through every inner helper,
    so we settle for ``Any``.  The special key "name" maps to the user's
    full name rather than a bucket.
    """
    summary: Dict[str, Dict[str, Any]] = {}

    def tally(action: str, record: QuerySet) -> None:
        # First sighting creates the bucket; later ones accumulate.
        bucket = summary.get(action)
        if bucket is None:
            summary[action] = dict(
                count=record.count,
                last_visit=record.last_visit,
            )
        else:
            bucket["count"] += record.count
            bucket["last_visit"] = max(bucket["last_visit"], record.last_visit)

    if records:
        summary["name"] = records[0].user_profile.full_name

    for record in records:
        client = record.client.name
        query = record.query

        tally("use", record)

        if client == "API":
            # Webhook traffic reports client "API"; recover the actual
            # integration name from the request path.
            m = re.match("/api/.*/external/(.*)", query)
            if m:
                client = m.group(1)
                tally(client, record)

        if client.startswith("desktop"):
            tally("desktop", record)
        if client == "website":
            tally("website", record)
        if ("send_message" in query) or re.search("/api/.*/external/.*", query):
            tally("send", record)
        if query in [
            "/json/update_pointer",
            "/json/users/me/pointer",
            "/api/v1/update_pointer",
            "update_pointer_backend",
        ]:
            tally("pointer", record)
        tally(client, record)

    return summary
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def format_date_for_activity_reports(date: Optional[datetime]) -> str:
    """Render *date* in the report timezone as "YYYY-MM-DD HH:MM", or "" if missing."""
    if not date:
        return ""
    localized = date.astimezone(eastern_tz)
    return localized.strftime("%Y-%m-%d %H:%M")
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def user_activity_link(email: str) -> mark_safe:
    """Return an HTML anchor linking to the per-user activity page."""
    target = reverse(get_user_activity, kwargs=dict(email=email))
    return mark_safe(f'<a href="{target}">{email}</a>')
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def realm_activity_link(realm_str: str) -> mark_safe:
    """Return an HTML anchor linking to the realm's activity page."""
    target = reverse(get_realm_activity, kwargs=dict(realm_str=realm_str))
    return mark_safe(f'<a href="{target}">{realm_str}</a>')
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-04-16 12:36:02 +02:00
|
|
|
def realm_stats_link(realm_str: str) -> mark_safe:
    """Return an HTML anchor (with pie-chart icon) to the realm's stats page."""
    target = reverse(stats_for_realm, kwargs=dict(realm_str=realm_str))
    return mark_safe(f'<a href="{target}"><i class="fa fa-pie-chart"></i>{realm_str}</a>')
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-02-03 00:27:16 +01:00
|
|
|
def remote_installation_stats_link(server_id: int, hostname: str) -> mark_safe:
    """Return an HTML anchor (with pie-chart icon) to a remote server's stats page."""
    target = reverse(stats_for_remote_installation, kwargs=dict(remote_server_id=server_id))
    return mark_safe(f'<a href="{target}"><i class="fa fa-pie-chart"></i>{hostname}</a>')
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def realm_client_table(user_summaries: Dict[str, Dict[str, Dict[str, Any]]]) -> str:
    """Render one table of per-client usage across all users in a realm."""
    # Aggregate/meta keys produced by get_user_activity_summary that are
    # not real client names.
    skipped = {
        "internal",
        "name",
        "use",
        "send",
        "pointer",
        "website",
        "desktop",
    }

    rows = []
    for email, summary in user_summaries.items():
        link = user_activity_link(email)
        full_name = summary["name"]
        for client, stats in summary.items():
            if client in skipped:
                continue
            rows.append(
                [
                    format_date_for_activity_reports(stats["last_visit"]),
                    client,
                    full_name,
                    link,
                    stats["count"],
                ]
            )

    # Newest activity first (the formatted date string sorts lexically).
    rows.sort(key=lambda row: row[0], reverse=True)

    cols = [
        "Last visit",
        "Client",
        "Name",
        "Email",
        "Count",
    ]

    return make_table("Clients", cols, rows)
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 06:54:00 +01:00
|
|
|
def user_activity_summary_table(user_summary: Dict[str, Dict[str, Any]]) -> str:
    """Render one user's activity summary (one row per client/action) as a table."""
    rows = [
        [
            format_date_for_activity_reports(stats["last_visit"]),
            client,
            stats["count"],
        ]
        for client, stats in user_summary.items()
        # "name" maps to the user's full name, not a client bucket.
        if client != "name"
    ]
    # Newest activity first (the formatted date string sorts lexically).
    rows.sort(key=lambda row: row[0], reverse=True)

    cols = [
        "last_visit",
        "client",
        "count",
    ]

    return make_table("User activity", cols, rows)
|
2013-11-06 13:25:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def realm_user_summary_table(
    all_records: List[QuerySet], admin_emails: Set[str]
) -> Tuple[Dict[str, Dict[str, Any]], str]:
    """Build the per-user "Summary" table for a realm.

    Returns (user_records, html): the per-user activity summaries keyed
    by delivery email, plus the rendered table.  *all_records* must
    already be sorted by delivery email for groupby to group correctly.
    """
    user_records = {
        email: get_user_activity_summary(list(group))
        for email, group in itertools.groupby(
            all_records, lambda record: record.user_profile.delivery_email
        )
    }

    def last_visit_for(summary: Dict[str, Dict[str, datetime]], key: str) -> Optional[datetime]:
        entry = summary.get(key)
        return None if entry is None else entry["last_visit"]

    def count_for(summary: Dict[str, Dict[str, str]], key: str) -> str:
        entry = summary.get(key)
        return "" if entry is None else entry["count"]

    def seen_recently(when: Optional[datetime]) -> bool:
        # "Recent" means within the last five minutes.
        return (timezone_now() - when).total_seconds() < 5 * 60

    rows = []
    for email, summary in user_records.items():
        cells = [summary["name"], user_activity_link(email), count_for(summary, "send")]
        row_class = ""
        for field in ["use", "send", "pointer", "desktop", "ZulipiOS", "Android"]:
            visit = last_visit_for(summary, field)
            if field == "use":
                # These CSS classes drive row highlighting in the template.
                if visit and seen_recently(visit):
                    row_class += " recently_active"
                if email in admin_emails:
                    row_class += " admin"
            cells.append(format_date_for_activity_reports(visit))
        rows.append(dict(cells=cells, row_class=row_class))

    # cells[3] is the formatted "use" timestamp; sort newest first.
    rows.sort(key=lambda row: row["cells"][3], reverse=True)

    cols = [
        "Name",
        "Email",
        "Total sent",
        "Heard from",
        "Message sent",
        "Pointer motion",
        "Desktop",
        "ZulipiOS",
        "Android",
    ]

    content = make_table("Summary", cols, rows, has_row_class=True)
    return user_records, content
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-04-06 12:59:18 +02:00
|
|
|
@require_server_admin
def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse:
    """Server-admin page showing activity tables for a single realm."""
    data: List[Tuple[str, str]] = []
    all_user_records: Dict[str, Any] = {}

    try:
        realm = Realm.objects.get(string_id=realm_str)
    except Realm.DoesNotExist:
        return HttpResponseNotFound(f"Realm {realm_str} does not exist")
    admin_emails = {admin.delivery_email for admin in realm.get_human_admin_users()}

    # One per-user summary section for humans, one for bots.
    for is_bot, page_title in [(False, "Humans"), (True, "Bots")]:
        records = list(get_user_activity_records_for_realm(realm_str, is_bot))

        user_records, table_html = realm_user_summary_table(records, admin_emails)
        all_user_records.update(user_records)

        data.append((page_title, table_html))

    data.append(("Clients", realm_client_table(all_user_records)))
    data.append(("History", sent_messages_report(realm_str)))

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, realm_link=None, title=realm_str),
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-04-06 12:59:18 +02:00
|
|
|
@require_server_admin
def get_user_activity(request: HttpRequest, email: str) -> HttpResponse:
    """Server-admin page showing one user's activity, looked up by delivery email."""
    records = get_user_activity_records_for_email(email)

    summary = get_user_activity_summary(records)
    data: List[Tuple[str, str]] = [
        ("Summary", user_activity_summary_table(summary)),
        ("Info", raw_user_activity_table(records)),
    ]

    return render(
        request,
        "analytics/activity.html",
        context=dict(data=data, title=email),
    )
|