2019-03-08 02:48:54 +01:00
|
|
|
# Documented in https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation
|
2017-12-13 01:45:57 +01:00
|
|
|
import logging
|
2020-06-11 00:54:34 +02:00
|
|
|
from collections import defaultdict
|
2024-07-12 02:30:25 +02:00
|
|
|
from collections.abc import Iterable, Sequence
|
|
|
|
from typing import Any, TypedDict
|
2020-06-11 00:54:34 +02:00
|
|
|
|
|
|
|
from django.conf import settings
|
2017-07-30 19:48:49 +02:00
|
|
|
from django.db import transaction
|
2024-03-26 17:36:35 +01:00
|
|
|
from django.db.models import Exists, F, Max, OuterRef, QuerySet
|
|
|
|
from django.db.models.functions import Greatest
|
2017-07-13 16:39:01 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-04-21 19:55:45 +02:00
|
|
|
from sentry_sdk import capture_exception
|
2017-07-30 19:48:49 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.logging_util import log_to_file
|
2022-04-15 04:51:41 +02:00
|
|
|
from zerver.lib.queue import queue_json_publish
|
2024-03-26 17:36:35 +01:00
|
|
|
from zerver.lib.user_message import bulk_insert_all_ums
|
2021-07-25 16:31:12 +02:00
|
|
|
from zerver.lib.utils import assert_is_not_none
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmAuditLog,
|
|
|
|
Recipient,
|
|
|
|
Subscription,
|
|
|
|
UserActivity,
|
|
|
|
UserMessage,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2024-09-03 15:33:25 +02:00
|
|
|
from zerver.models.realm_audit_logs import AuditLogEventType
|
2023-12-15 20:21:59 +01:00
|
|
|
from zerver.models.scheduled_jobs import NotificationTriggers
|
2017-07-30 19:48:49 +02:00
|
|
|
|
2017-12-13 01:45:57 +01:00
|
|
|
# Dedicated logger for soft-deactivation activity; also mirrored to its own
# log file (SOFT_DEACTIVATION_LOG_PATH) so these potentially slow batch jobs
# can be audited independently of the main server log.
logger = logging.getLogger("zulip.soft_deactivation")
log_to_file(logger, settings.SOFT_DEACTIVATION_LOG_PATH)

# Maximum number of UserMessage rows inserted per bulk_insert_all_ums call
# when backfilling messages for a reactivated user; keeps individual
# transactions/statements bounded in size.
BULK_CREATE_BATCH_SIZE = 10000
|
2017-08-16 05:09:06 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-20 21:18:01 +02:00
|
|
|
class MissingMessageDict(TypedDict):
    """Minimal projection of a Message row used during backfill.

    Mirrors the ``.values("id", "recipient__type_id")`` query in
    add_missing_messages: the message's id and the stream id
    (``recipient__type_id``) it was sent to.
    """

    id: int
    recipient__type_id: int
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def filter_by_subscription_history(
    user_profile: UserProfile,
    all_stream_messages: defaultdict[int, list[MissingMessageDict]],
    all_stream_subscription_logs: defaultdict[int, list[RealmAuditLog]],
) -> list[int]:
    """Return the sorted message IDs for which UserMessage rows should be
    created, by replaying each stream's subscription audit-log history
    against the candidate messages for that stream.

    Args:
        user_profile: the soft-deactivated user being backfilled (not read
            directly here; kept for context/consistency with the caller).
        all_stream_messages: stream_id -> candidate messages, assumed to be
            in ascending id order (the caller orders the query by "id").
        all_stream_subscription_logs: stream_id -> subscription audit-log
            entries, assumed ordered by (event_last_message_id, id).
    """
    message_ids: set[int] = set()

    for stream_id, stream_messages_raw in all_stream_messages.items():
        stream_subscription_logs = all_stream_subscription_logs[stream_id]
        # Make a copy of the original list of messages, which we will
        # mutate in the loop below.
        stream_messages = list(stream_messages_raw)

        for log_entry in stream_subscription_logs:
            # For each stream, we iterate through all of the changes
            # to the user's subscription to that stream, ordered by
            # event_last_message_id, to determine whether the user was
            # subscribed to the target stream at that time.
            #
            # For each message, we're looking for the first event for
            # the user's subscription to the target stream after the
            # message was sent.
            # * If it's an unsubscribe, we know the user was subscribed
            #   when the message was sent, and create a UserMessage
            # * If it's a subscribe, we know the user was not, and we
            #   skip the message by mutating the stream_messages list
            #   to skip that message.

            if len(stream_messages) == 0:
                # Because stream_messages gets mutated below, this
                # check belongs in this inner loop, not the outer loop.
                break

            event_last_message_id = assert_is_not_none(log_entry.event_last_message_id)

            if log_entry.event_type == AuditLogEventType.SUBSCRIPTION_DEACTIVATED:
                # If the event shows the user was unsubscribed after
                # event_last_message_id, we know they must have been
                # subscribed immediately before the event.
                for stream_message in stream_messages:
                    if stream_message["id"] <= event_last_message_id:
                        message_ids.add(stream_message["id"])
                    else:
                        # Messages are in ascending id order, so the rest
                        # of the list postdates this event too.
                        break
            elif log_entry.event_type in (
                AuditLogEventType.SUBSCRIPTION_ACTIVATED,
                AuditLogEventType.SUBSCRIPTION_CREATED,
            ):
                # Drop (skip) every message sent at or before this
                # subscribe event; the user was not subscribed then.
                initial_msg_count = len(stream_messages)
                for i, stream_message in enumerate(stream_messages):
                    if stream_message["id"] > event_last_message_id:
                        stream_messages = stream_messages[i:]
                        break
                final_msg_count = len(stream_messages)
                if (
                    initial_msg_count == final_msg_count
                    and stream_messages[-1]["id"] <= event_last_message_id
                ):
                    # The loop above never broke, i.e. every remaining
                    # message predates the subscribe event: skip them all.
                    stream_messages = []
            else:
                raise AssertionError(f"{log_entry.event_type} is not a subscription event.")

        # We do this check for last event since if the last subscription
        # event was a subscription_deactivated then we don't want to create
        # UserMessage rows for any of the remaining messages.
        if len(stream_messages) > 0 and stream_subscription_logs[-1].event_type in (
            AuditLogEventType.SUBSCRIPTION_ACTIVATED,
            AuditLogEventType.SUBSCRIPTION_CREATED,
        ):
            message_ids.update(stream_message["id"] for stream_message in stream_messages)

    return sorted(message_ids)
|
2017-07-13 16:39:01 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def add_missing_messages(user_profile: UserProfile) -> None:
    """This function takes a soft-deactivated user, and computes and adds
    to the database any UserMessage rows that were not created while
    the user was soft-deactivated. The end result is that from the
    perspective of the message database, it should be impossible to
    tell that the user was soft-deactivated at all.

    At a high level, the algorithm is as follows:

    * Find all the streams that the user was at any time a subscriber
      of when or after they were soft-deactivated (`recipient_ids`
      below).

    * Find all the messages sent to those streams since the user was
      soft-deactivated. This will be a superset of the target
      UserMessages we need to create in two ways: (1) some UserMessage
      rows will have already been created in do_send_messages because
      the user had a nonzero set of flags (the fact that we do so in
      do_send_messages simplifies things considerably, since it means
      we don't need to inspect message content to look for things like
      mentions here), and (2) the user might not have been subscribed
      to all of the streams in recipient_ids for the entire time
      window.

    * Correct the list from the previous state by excluding those with
      existing UserMessage rows.

    * Correct the list from the previous state by excluding those
      where the user wasn't subscribed at the time, using the
      RealmAuditLog data to determine exactly when the user was
      subscribed/unsubscribed.

    * Create the UserMessage rows.

    For further documentation, see:

      https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation

    """
    assert user_profile.last_active_message_id is not None
    all_stream_subs = list(
        Subscription.objects.filter(
            user_profile=user_profile, recipient__type=Recipient.STREAM
        ).values("recipient_id", "recipient__type_id")
    )

    # For stream messages we need to check messages against data from
    # RealmAuditLog for visibility to user. So we fetch the subscription logs.
    stream_ids = [sub["recipient__type_id"] for sub in all_stream_subs]

    # We have a partial index on RealmAuditLog for these rows -- if
    # this set changes, the partial index must be updated as well, to
    # keep this query performant
    events = [
        AuditLogEventType.SUBSCRIPTION_CREATED,
        AuditLogEventType.SUBSCRIPTION_DEACTIVATED,
        AuditLogEventType.SUBSCRIPTION_ACTIVATED,
    ]

    # Important: We order first by event_last_message_id, which is the
    # official ordering, and then tiebreak by RealmAuditLog event ID.
    # That second tiebreak is important in case a user is subscribed
    # and then unsubscribed without any messages being sent in the
    # meantime. Without that tiebreak, we could end up incorrectly
    # processing the ordering of those two subscription changes. Note
    # that this means we cannot backfill events unless there are no
    # pre-existing events for this stream/user pair!
    subscription_logs = list(
        RealmAuditLog.objects.filter(
            modified_user=user_profile, modified_stream_id__in=stream_ids, event_type__in=events
        )
        .order_by("event_last_message_id", "id")
        .only("id", "event_type", "modified_stream_id", "event_last_message_id")
    )

    # Group the ordered audit-log entries by stream id; per-stream order
    # is preserved since the source list is already sorted.
    all_stream_subscription_logs: defaultdict[int, list[RealmAuditLog]] = defaultdict(list)
    for log in subscription_logs:
        all_stream_subscription_logs[assert_is_not_none(log.modified_stream_id)].append(log)

    recipient_ids = []
    for sub in all_stream_subs:
        stream_subscription_logs = all_stream_subscription_logs[sub["recipient__type_id"]]
        if stream_subscription_logs[-1].event_type == AuditLogEventType.SUBSCRIPTION_DEACTIVATED:
            assert stream_subscription_logs[-1].event_last_message_id is not None
            if (
                stream_subscription_logs[-1].event_last_message_id
                <= user_profile.last_active_message_id
            ):
                # We are going to short circuit this iteration as its no use
                # iterating since user unsubscribed before soft-deactivation
                continue
        recipient_ids.append(sub["recipient_id"])

    # Candidate messages: everything sent to those streams after the user's
    # last_active_message_id that does not already have a UserMessage row.
    new_stream_msgs = (
        Message.objects.alias(
            has_user_message=Exists(
                UserMessage.objects.filter(
                    user_profile_id=user_profile,
                    message_id=OuterRef("id"),
                )
            )
        )
        .filter(
            # Uses index: zerver_message_realm_recipient_id
            has_user_message=False,
            realm_id=user_profile.realm_id,
            recipient_id__in=recipient_ids,
            id__gt=user_profile.last_active_message_id,
        )
        .order_by("id")
        .values("id", "recipient__type_id")
    )

    stream_messages: defaultdict[int, list[MissingMessageDict]] = defaultdict(list)
    for msg in new_stream_msgs:
        stream_messages[msg["recipient__type_id"]].append(
            MissingMessageDict(id=msg["id"], recipient__type_id=msg["recipient__type_id"])
        )

    # Calling this function to filter out stream messages based upon
    # subscription logs and then store all UserMessage objects for bulk insert
    # This function does not perform any SQL related task and gets all the data
    # required for its operation in its params.
    message_ids_to_insert = filter_by_subscription_history(
        user_profile, stream_messages, all_stream_subscription_logs
    )

    # Doing a bulk create for all the UserMessage objects stored for creation.
    while len(message_ids_to_insert) > 0:
        message_ids, message_ids_to_insert = (
            message_ids_to_insert[0:BULK_CREATE_BATCH_SIZE],
            message_ids_to_insert[BULK_CREATE_BATCH_SIZE:],
        )
        bulk_insert_all_ums(user_ids=[user_profile.id], message_ids=message_ids, flags=0)
        # Advance last_active_message_id after each batch so that a crash
        # mid-backfill does not cause duplicate inserts on retry; Greatest
        # guards against concurrent updates moving it forward already.
        UserProfile.objects.filter(id=user_profile.id).update(
            last_active_message_id=Greatest(F("last_active_message_id"), message_ids[-1])
        )
|
2017-07-30 19:48:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_soft_deactivate_user(user_profile: UserProfile) -> None:
    """Mark a single user as long-term idle (soft-deactivated).

    Records the newest message the user has a UserMessage row for as
    last_active_message_id, so that a later reactivation knows where to
    start backfilling from.
    """
    newest_user_message = (
        UserMessage.objects.filter(user_profile=user_profile).order_by("-message_id").first()
    )
    if newest_user_message is not None:
        user_profile.last_active_message_id = newest_user_message.message_id
    else:  # nocoverage
        # In the unlikely event that a user somehow has never received
        # a message, we just use the overall max message ID.
        last_message = Message.objects.last()
        assert last_message is not None
        user_profile.last_active_message_id = last_message.id
    user_profile.long_term_idle = True
    user_profile.save(update_fields=["long_term_idle", "last_active_message_id"])
    logger.info("Soft deactivated user %s", user_profile.id)
|
2017-07-30 19:48:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 20:07:19 +02:00
|
|
|
def do_soft_deactivate_users(
    users: Sequence[UserProfile] | QuerySet[UserProfile],
) -> list[UserProfile]:
    """Soft-deactivate the given users in batches of 100.

    Each batch runs inside a transaction, with one USER_SOFT_DEACTIVATED
    RealmAuditLog row bulk-created per user. Returns the users processed.
    """
    BATCH_SIZE = 100
    deactivated_users: list[UserProfile] = []
    while True:
        # Peel off the next batch; slicing works for both sequences and
        # (sliceable) QuerySets.
        current_batch, users = users[0:BATCH_SIZE], users[BATCH_SIZE:]
        if len(current_batch) == 0:
            break
        with transaction.atomic():
            audit_log_rows = []
            for user in current_batch:
                do_soft_deactivate_user(user)
                event_time = timezone_now()
                audit_log_rows.append(
                    RealmAuditLog(
                        realm=user.realm,
                        modified_user=user,
                        event_type=AuditLogEventType.USER_SOFT_DEACTIVATED,
                        event_time=event_time,
                    )
                )
                deactivated_users.append(user)
            RealmAuditLog.objects.bulk_create(audit_log_rows)

        logger.info(
            "Soft-deactivated batch of %s users; %s remain to process", len(current_batch), len(users)
        )

    return deactivated_users
|
2017-07-16 09:41:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def do_auto_soft_deactivate_users(inactive_for_days: int, realm: Realm | None) -> list[UserProfile]:
    """Soft-deactivate users inactive for more than `inactive_for_days`
    days, optionally restricted to `realm`.

    If AUTO_CATCH_UP_SOFT_DEACTIVATED_USERS is enabled, also backfill
    UserMessage rows for all currently soft-deactivated users. Returns
    only the newly deactivated users.
    """
    deactivation_filter: dict[str, Realm] = {}
    if realm is not None:
        deactivation_filter = {"user_profile__realm": realm}
    candidates = get_users_for_soft_deactivation(inactive_for_days, deactivation_filter)
    newly_deactivated = do_soft_deactivate_users(candidates)

    if not settings.AUTO_CATCH_UP_SOFT_DEACTIVATED_USERS:
        logger.info("Not catching up users since AUTO_CATCH_UP_SOFT_DEACTIVATED_USERS is off")
        return newly_deactivated

    catch_up_filter: dict[str, Realm] = {}
    if realm is not None:
        catch_up_filter = {"realm": realm}
    do_catch_up_soft_deactivated_users(get_soft_deactivated_users_for_catch_up(catch_up_filter))
    return newly_deactivated
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:23 +02:00
|
|
|
def reactivate_user_if_soft_deactivated(user_profile: UserProfile) -> UserProfile | None:
    """Fully reactivate a soft-deactivated user.

    Backfills the missing UserMessage rows, clears long_term_idle, and
    records a USER_SOFT_ACTIVATED audit-log entry. Returns the user if a
    reactivation happened, or None if the user was not soft-deactivated.
    """
    if not user_profile.long_term_idle:
        return None

    add_missing_messages(user_profile)
    user_profile.long_term_idle = False
    user_profile.save(update_fields=["long_term_idle"])
    RealmAuditLog.objects.create(
        realm=user_profile.realm,
        modified_user=user_profile,
        event_type=AuditLogEventType.USER_SOFT_ACTIVATED,
        event_time=timezone_now(),
    )
    logger.info("Soft reactivated user %s", user_profile.id)
    return user_profile
|
2017-07-31 05:25:36 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_users_for_soft_deactivation(
    inactive_for_days: int, filter_kwargs: Any
) -> list[UserProfile]:
    """Return active human users matching `filter_kwargs` who are not yet
    long-term idle and whose most recent UserActivity entry is more than
    `inactive_for_days` days old."""
    activity_rows = list(
        UserActivity.objects.filter(
            user_profile__is_active=True,
            user_profile__is_bot=False,
            user_profile__long_term_idle=False,
            **filter_kwargs,
        )
        .values("user_profile_id")
        .annotate(last_visit=Max("last_visit"))
    )
    now = timezone_now()
    inactive_user_ids = []
    for row in activity_rows:
        if (now - row["last_visit"]).days > inactive_for_days:
            inactive_user_ids.append(row["user_profile_id"])
    return list(UserProfile.objects.filter(id__in=inactive_user_ids))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def do_soft_activate_users(users: list[UserProfile]) -> list[UserProfile]:
    """Reactivate every soft-deactivated user in `users`, returning the
    subset that actually required reactivation."""
    reactivated: list[UserProfile] = []
    for user_profile in users:
        result = reactivate_user_if_soft_deactivated(user_profile)
        if result is not None:
            reactivated.append(result)
    return reactivated
|
2019-03-12 03:59:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def do_catch_up_soft_deactivated_users(users: Iterable[UserProfile]) -> list[UserProfile]:
    """Backfill UserMessage rows for each long-term-idle user in `users`
    without clearing their long_term_idle flag.

    Failures are reported to Sentry and logged; returns the users that
    were successfully caught up.
    """
    caught_up: list[UserProfile] = []
    failed: list[UserProfile] = []
    for user_profile in users:
        if not user_profile.long_term_idle:
            continue
        try:
            add_missing_messages(user_profile)
        except Exception:  # nocoverage
            capture_exception()  # nocoverage
            failed.append(user_profile)  # nocoverage
        else:
            caught_up.append(user_profile)
    logger.info("Caught up %d soft-deactivated users", len(caught_up))
    if failed:
        logger.error("Failed to catch up %d soft-deactivated users", len(failed))  # nocoverage
    return caught_up
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-23 19:59:20 +02:00
|
|
|
def get_soft_deactivated_users_for_catch_up(filter_kwargs: Any) -> QuerySet[UserProfile]:
    """Return a queryset of active, human, long-term-idle users matching
    `filter_kwargs` — the candidates for a message-backfill catch-up."""
    return UserProfile.objects.filter(
        long_term_idle=True,
        is_active=True,
        is_bot=False,
        **filter_kwargs,
    )
|
2022-04-15 04:51:41 +02:00
|
|
|
|
|
|
|
|
2022-05-26 17:26:43 +02:00
|
|
|
def queue_soft_reactivation(user_profile_id: int) -> None:
    """Schedule an asynchronous soft reactivation of the given user via
    the deferred_work queue."""
    queue_json_publish(
        "deferred_work",
        {
            "type": "soft_reactivate",
            "user_profile_id": user_profile_id,
        },
    )
|
|
|
|
|
|
|
|
|
2022-04-15 04:51:41 +02:00
|
|
|
def soft_reactivate_if_personal_notification(
    user_profile: UserProfile,
    unique_triggers: set[str],
    mentioned_user_group_members_count: int | None,
) -> None:
    """When we're about to send an email/push notification to a
    long_term_idle user, it's very likely that the user will try to
    return to Zulip. As a result, it makes sense to optimistically
    soft-reactivate that user, to give them a good return experience.

    It's important that we do nothing for stream wildcard or large
    group mentions (size > 'settings.MAX_GROUP_SIZE_FOR_MENTION_REACTIVATION'),
    because soft-reactivating an entire realm or a large group would be
    very expensive. The caller is responsible for passing a
    mentioned_user_group_members_count that is None for messages that
    contain both a personal mention and a group mention.
    """
    if not user_profile.long_term_idle:
        return

    # A direct message always warrants reactivation.
    should_reactivate = NotificationTriggers.DIRECT_MESSAGE in unique_triggers

    if NotificationTriggers.MENTION in unique_triggers:
        if mentioned_user_group_members_count is None:
            # Personal mention (see docstring about the None convention).
            should_reactivate = True
        elif (
            mentioned_user_group_members_count
            <= settings.MAX_GROUP_SIZE_FOR_MENTION_REACTIVATION
        ):
            # Small-group mention: cheap enough to reactivate.
            should_reactivate = True

    if not should_reactivate:
        # Topic wildcard mentions target a bounded audience, so they also
        # qualify (unlike stream wildcard mentions, which do not).
        should_reactivate = any(
            trigger in unique_triggers
            for trigger in (
                NotificationTriggers.TOPIC_WILDCARD_MENTION,
                NotificationTriggers.TOPIC_WILDCARD_MENTION_IN_FOLLOWED_TOPIC,
            )
        )

    if should_reactivate:
        queue_soft_reactivation(user_profile.id)
|