2020-02-06 16:23:35 +01:00
|
|
|
import datetime
|
2020-02-06 21:00:21 +01:00
|
|
|
import itertools
|
2020-02-06 15:54:57 +01:00
|
|
|
import time
|
2020-06-11 00:54:34 +02:00
|
|
|
from collections import defaultdict
|
2021-08-19 10:45:20 +02:00
|
|
|
from typing import Any, Dict, Mapping, Sequence, Set
|
2020-02-06 16:23:35 +01:00
|
|
|
|
|
|
|
from django.utils.timezone import now as timezone_now
|
|
|
|
|
2020-02-06 16:42:28 +01:00
|
|
|
from zerver.lib.timestamp import datetime_to_timestamp
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import PushDeviceToken, Realm, UserPresence, UserProfile, query_for_ids
|
|
|
|
|
2020-02-06 16:23:35 +01:00
|
|
|
|
2022-09-16 18:12:20 +02:00
|
|
|
def get_presence_dicts_for_rows(
    all_rows: Sequence[Mapping[str, Any]], mobile_user_ids: Set[int], slim_presence: bool
) -> Dict[str, Dict[str, Any]]:
    """Group raw UserPresence rows by user and format each user's rows.

    Returns a dict mapping a per-user key (stringified user_profile_id
    in the slim format, user email in the legacy format) to that user's
    formatted presence info.
    """
    # Note that datetime values have sub-second granularity, which is
    # mostly important for avoiding test flakes, but it's also technically
    # more precise for real users.
    # We could technically do this sort with the database, but doing it
    # here prevents us from having to assume the caller is playing nice.
    # Sorting by user_profile_id first is also required for the
    # itertools.groupby call below to produce one group per user.
    all_rows = sorted(
        all_rows,
        key=lambda row: (row["user_profile_id"], row["timestamp"]),
    )

    # Use `def` rather than assigning lambdas (PEP 8 / flake8 E731);
    # this also lets us annotate the key extractors.
    if slim_presence:

        def get_user_key(row: Mapping[str, Any]) -> str:
            # Stringify user_id here, since it's gonna be turned
            # into a string anyway by JSON, and it keeps mypy happy.
            return str(row["user_profile_id"])

        get_user_presence_info = get_modern_user_presence_info
    else:

        def get_user_key(row: Mapping[str, Any]) -> str:
            return row["user_profile__email"]

        get_user_presence_info = get_legacy_user_presence_info

    user_statuses: Dict[str, Dict[str, Any]] = {}

    for user_key, presence_rows in itertools.groupby(all_rows, get_user_key):
        user_statuses[user_key] = get_user_presence_info(
            list(presence_rows),
            mobile_user_ids=mobile_user_ids,
        )

    return user_statuses
|
2020-02-06 16:42:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-09-16 18:12:20 +02:00
|
|
|
def get_modern_user_presence_info(
    presence_rows: Sequence[Mapping[str, Any]], mobile_user_ids: Set[int]
) -> Dict[str, Any]:
    """Build the "slim" presence payload from one user's rows.

    `presence_rows` is expected to be sorted oldest-to-newest; we report
    the most recent ACTIVE and IDLE timestamps.  `mobile_user_ids` is
    unused here, but keeps the signature parallel to the legacy formatter.
    """
    if not presence_rows:
        return {}

    # Be stingy about bandwidth, and don't even include
    # keys for entities that have None values. JS
    # code should just do a falsy check here.
    result: Dict[str, Any] = {}
    for key, wanted_status in (
        ("active_timestamp", UserPresence.ACTIVE),
        ("idle_timestamp", UserPresence.IDLE),
    ):
        # Walk newest-first so the first match is the most recent row.
        for row in reversed(presence_rows):
            if row["status"] == wanted_status:
                result[key] = datetime_to_timestamp(row["timestamp"])
                break
    return result
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-09-16 18:12:20 +02:00
|
|
|
def get_legacy_user_presence_info(
    presence_rows: Sequence[Mapping[str, Any]], mobile_user_ids: Set[int]
) -> Dict[str, Any]:
    """Build the legacy per-client presence payload from one user's rows.

    The format of data here is for legacy users of our API,
    including old versions of the mobile app.  `presence_rows` must be
    sorted oldest-to-newest and non-empty.
    """

    def format_row(row: Mapping[str, Any]) -> Dict[str, Any]:
        # "pushable" requires both the user's offline-push setting and a
        # registered push device for that user.
        return dict(
            client=row["client__name"],
            status=UserPresence.status_to_string(row["status"]),
            timestamp=datetime_to_timestamp(row["timestamp"]),
            pushable=(
                row["user_profile__enable_offline_push_notifications"]
                and row["user_profile_id"] in mobile_user_ids
            ),
        )

    info_rows = [format_row(row) for row in presence_rows]
    most_recent_info = info_rows[-1]

    result: Dict[str, Any] = {}

    # The word "aggregated" here is possibly misleading.
    # It's really just the most recent client's info.
    result["aggregated"] = dict(
        client=most_recent_info["client"],
        status=most_recent_info["status"],
        timestamp=most_recent_info["timestamp"],
    )

    # Build a dictionary of client -> info. There should
    # only be one row per client, but to be on the safe side,
    # we always overwrite with rows that are later in our list.
    for info in info_rows:
        result[info["client"]] = info

    return result
|
2020-02-06 16:42:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_presence_for_user(
    user_profile_id: int, slim_presence: bool = False
) -> Dict[str, Dict[str, Any]]:
    """Fetch and format all presence rows for a single user."""
    fields = [
        "client__name",
        "status",
        "timestamp",
        "user_profile__email",
        "user_profile_id",
        "user_profile__enable_offline_push_notifications",
    ]
    presence_rows = list(
        UserPresence.objects.filter(user_profile_id=user_profile_id).values(*fields)
    )

    mobile_user_ids: Set[int] = set()
    if PushDeviceToken.objects.filter(user_id=user_profile_id).exists():  # nocoverage
        # TODO: Add a test, though this is low priority, since we don't use mobile_user_ids yet.
        mobile_user_ids.add(user_profile_id)

    return get_presence_dicts_for_rows(presence_rows, mobile_user_ids, slim_presence)
|
2020-02-06 16:35:22 +01:00
|
|
|
|
|
|
|
|
2022-09-16 18:12:20 +02:00
|
|
|
def get_presence_dict_by_realm(
    realm_id: int, slim_presence: bool = False
) -> Dict[str, Dict[str, Any]]:
    """Fetch and format presence data for all active humans in a realm.

    Only presence rows from the last two weeks are considered; users with
    no recent presence simply don't appear in the result.
    """
    two_weeks_ago = timezone_now() - datetime.timedelta(weeks=2)
    query = UserPresence.objects.filter(
        realm_id=realm_id,
        timestamp__gte=two_weeks_ago,
        # Only active, human users; bots don't have presence.
        user_profile__is_active=True,
        user_profile__is_bot=False,
    ).values(
        "client__name",
        "status",
        "timestamp",
        "user_profile__email",
        "user_profile_id",
        "user_profile__enable_offline_push_notifications",
    )

    presence_rows = list(query)

    user_profile_ids = [presence_row["user_profile_id"] for presence_row in presence_rows]
    if not user_profile_ids:
        # This conditional is necessary because query_for_ids
        # throws an exception if passed an empty list.
        #
        # It's not clear this condition is actually possible,
        # though, because it shouldn't be possible to end up with
        # a realm with 0 active users.
        return {}

    # Built only after the early return above, since it's not needed
    # for empty realms.  One user_id per user with any push device.
    mobile_query = PushDeviceToken.objects.distinct("user_id").values_list(
        "user_id",
        flat=True,
    )
    mobile_query_ids = query_for_ids(
        query=mobile_query,
        user_ids=user_profile_ids,
        field="user_id",
    )
    mobile_user_ids = set(mobile_query_ids)

    return get_presence_dicts_for_rows(presence_rows, mobile_user_ids, slim_presence)
|
2020-02-06 15:54:57 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_presences_for_realm(
    realm: Realm, slim_presence: bool
) -> Dict[str, Dict[str, Dict[str, Any]]]:
    """Return presence data for the realm, or an empty mapping when the
    realm has presence disabled."""
    if not realm.presence_disabled:
        return get_presence_dict_by_realm(realm.id, slim_presence)

    # Return an empty dict if presence is disabled in this realm.
    return defaultdict(dict)
|
2020-02-06 15:54:57 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_presence_response(
    requesting_user_profile: UserProfile, slim_presence: bool
) -> Dict[str, Any]:
    """Assemble the presence API payload for the requester's realm."""
    realm = requesting_user_profile.realm
    # Capture the server clock before fetching the realm's presence data,
    # matching the original ordering of these two operations.
    server_timestamp = time.time()
    presences = get_presences_for_realm(realm, slim_presence)
    return {
        "presences": presences,
        "server_timestamp": server_timestamp,
    }
|