import itertools
from collections import defaultdict
from collections.abc import Collection
from collections.abc import Set as AbstractSet
from dataclasses import dataclass
from operator import itemgetter
from typing import Any

from django.db.models import Q, QuerySet

from zerver.models import AlertWord, Realm, Recipient, Stream, Subscription, UserProfile, UserTopic


@dataclass
class SubInfo:
    user: UserProfile
    sub: Subscription
    stream: Stream


@dataclass
class SubscriberPeerInfo:
    subscribed_ids: dict[int, set[int]]
    private_peer_dict: dict[int, set[int]]


def get_active_subscriptions_for_stream_id(
    stream_id: int, *, include_deactivated_users: bool
) -> QuerySet[Subscription]:
    query = Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id=stream_id,
        active=True,
    )
    if not include_deactivated_users:
        # Note that non-active users may still have "active" subscriptions, because we
        # want to be able to easily reactivate them with their old subscriptions. This
        # is why the query here has to look at the is_user_active flag.
        query = query.filter(is_user_active=True)

    return query
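
# Illustrative sketch (hypothetical caller, not part of the upstream API): the
# include_deactivated_users flag is what separates "rows still linked to the stream"
# from "users who should actually receive things". For example:
#
#     active_subs = get_active_subscriptions_for_stream_id(
#         stream.id, include_deactivated_users=False
#     )
#     recipient_user_ids = set(active_subs.values_list("user_profile_id", flat=True))
#
# Passing include_deactivated_users=True additionally keeps the subscriptions of
# deactivated users, which stay "active" so they can be restored on reactivation.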


def get_active_subscriptions_for_stream_ids(stream_ids: set[int]) -> QuerySet[Subscription]:
    return Subscription.objects.filter(
        recipient__type=Recipient.STREAM,
        recipient__type_id__in=stream_ids,
        active=True,
        is_user_active=True,
    )


def get_subscribed_stream_ids_for_user(
    user_profile: UserProfile,
) -> QuerySet[Subscription, int]:
    return Subscription.objects.filter(
        user_profile_id=user_profile,
        recipient__type=Recipient.STREAM,
        active=True,
    ).values_list("recipient__type_id", flat=True)
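
# Illustrative sketch (hypothetical caller): because this returns a flat values_list of
# stream ids rather than full Subscription rows, it composes cheaply into further
# queries, e.g. building a filter on Stream without materializing fat objects:
#
#     subscribed_stream_ids = get_subscribed_stream_ids_for_user(user_profile)
#     recipient_check = Q(id__in=set(subscribed_stream_ids))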


def get_subscribed_stream_recipient_ids_for_user(
    user_profile: UserProfile,
) -> QuerySet[Subscription, int]:
    return Subscription.objects.filter(
        user_profile_id=user_profile,
        recipient__type=Recipient.STREAM,
        active=True,
    ).values_list("recipient_id", flat=True)


def get_stream_subscriptions_for_user(user_profile: UserProfile) -> QuerySet[Subscription]:
    return Subscription.objects.filter(
        user_profile=user_profile,
        recipient__type=Recipient.STREAM,
    )


def get_used_colors_for_user_ids(user_ids: list[int]) -> dict[int, set[str]]:
    """Fetch which stream colors have already been used for each user in
    user_ids. Uses an optimized query designed to support picking
    colors when bulk-adding users to streams, which requires
    inspecting all Subscription objects for the users, which can often
    end up being all Subscription objects in the realm.
    """
    query = (
        Subscription.objects.filter(
            user_profile_id__in=user_ids,
            recipient__type=Recipient.STREAM,
        )
        .values("user_profile_id", "color")
        .distinct()
    )

    result: dict[int, set[str]] = defaultdict(set)

    for row in query:
        assert row["color"] is not None
        result[row["user_profile_id"]].add(row["color"])

    return result
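
# Illustrative sketch (hypothetical data): if user 7 already has subscriptions colored
# "#76ce90" and "#a6dcbf" and user 9 has none, then
#
#     get_used_colors_for_user_ids([7, 9])
#     # => {7: {"#76ce90", "#a6dcbf"}}
#
# User 9 simply has no key; since the returned mapping is a defaultdict(set), callers
# can still index it safely when picking an unused color.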


def get_bulk_stream_subscriber_info(
    users: list[UserProfile],
    streams: list[Stream],
) -> dict[int, list[SubInfo]]:
    stream_ids = {stream.id for stream in streams}

    subs = Subscription.objects.filter(
        user_profile__in=users,
        recipient__type=Recipient.STREAM,
        recipient__type_id__in=stream_ids,
        active=True,
    ).only("user_profile_id", "recipient_id")

    stream_map = {stream.recipient_id: stream for stream in streams}
    user_map = {user.id: user for user in users}

    result: dict[int, list[SubInfo]] = {user.id: [] for user in users}

    for sub in subs:
        user_id = sub.user_profile_id
        user = user_map[user_id]
        recipient_id = sub.recipient_id
        stream = stream_map[recipient_id]
        sub_info = SubInfo(
            user=user,
            sub=sub,
            stream=stream,
        )

        result[user_id].append(sub_info)

    return result
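
# Illustrative sketch (hamlet/othello/design/devops are placeholders, not objects
# defined here): every requested user gets an entry, even with no matching
# subscriptions, so callers can iterate without key checks:
#
#     infos = get_bulk_stream_subscriber_info([hamlet, othello], [design, devops])
#     for sub_info in infos[hamlet.id]:
#         print(sub_info.stream.name, sub_info.user.id)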


def num_subscribers_for_stream_id(stream_id: int) -> int:
    return get_active_subscriptions_for_stream_id(
        stream_id, include_deactivated_users=False
    ).count()


def get_user_ids_for_streams(stream_ids: set[int]) -> dict[int, set[int]]:
    all_subs = (
        get_active_subscriptions_for_stream_ids(stream_ids)
        .values(
            "recipient__type_id",
            "user_profile_id",
        )
        .order_by(
            "recipient__type_id",
        )
    )

    get_stream_id = itemgetter("recipient__type_id")

    result: dict[int, set[int]] = defaultdict(set)
    for stream_id, rows in itertools.groupby(all_subs, get_stream_id):
        user_ids = {row["user_profile_id"] for row in rows}
        result[stream_id] = user_ids

    return result
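
# Note on the pattern above: itertools.groupby only merges *adjacent* rows sharing a
# key, so the .order_by("recipient__type_id") in the query is what guarantees each
# stream's rows land in a single group. A minimal sketch of the same idea with plain
# data (no ORM):
#
#     rows = sorted(
#         [{"stream": 1, "user": 10}, {"stream": 2, "user": 11}, {"stream": 1, "user": 12}],
#         key=itemgetter("stream"),
#     )
#     grouped = {
#         stream_id: {row["user"] for row in group}
#         for stream_id, group in itertools.groupby(rows, itemgetter("stream"))
#     }
#     # => {1: {10, 12}, 2: {11}}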


def get_users_for_streams(stream_ids: set[int]) -> dict[int, set[UserProfile]]:
    all_subs = (
        get_active_subscriptions_for_stream_ids(stream_ids)
        .select_related("user_profile", "recipient")
        .order_by("recipient__type_id")
    )

    result: dict[int, set[UserProfile]] = defaultdict(set)
    for stream_id, rows in itertools.groupby(all_subs, key=lambda obj: obj.recipient.type_id):
        users = {row.user_profile for row in rows}
        result[stream_id] = users

    return result


def bulk_get_subscriber_peer_info(
    realm: Realm,
    streams: Collection[Stream] | QuerySet[Stream],
) -> SubscriberPeerInfo:
    """
    Glossary:

        subscribed_ids:
            This shows the users who are actually subscribed to the
            stream, which we generally send to the person subscribing
            to the stream.

        private_peer_dict:
            These are the folks that need to know about a new subscriber.
            It's usually a superset of the subscribers.

            Note that we only compute this for PRIVATE streams. We
            let other code handle peers for public streams, since the
            peers for all public streams are actually the same group
            of users, and downstream code can use that property of
            public streams to avoid extra work.
    """
    subscribed_ids = {}
    private_peer_dict = {}

    private_stream_ids = {stream.id for stream in streams if stream.invite_only}
    public_stream_ids = {stream.id for stream in streams if not stream.invite_only}

    stream_user_ids = get_user_ids_for_streams(private_stream_ids | public_stream_ids)

    if private_stream_ids:
        realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}

        for stream_id in private_stream_ids:
            # Realm admins can see all private stream subscribers.
            subscribed_user_ids = stream_user_ids.get(stream_id, set())
            subscribed_ids[stream_id] = subscribed_user_ids
            private_peer_dict[stream_id] = subscribed_user_ids | realm_admin_ids

    for stream_id in public_stream_ids:
        subscribed_user_ids = stream_user_ids.get(stream_id, set())
        subscribed_ids[stream_id] = subscribed_user_ids

    return SubscriberPeerInfo(
        subscribed_ids=subscribed_ids,
        private_peer_dict=private_peer_dict,
    )
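
# Illustrative sketch (hypothetical ids): for a private stream 5 whose only subscriber
# is user 30, realm admins {1, 2}, and a public stream 6 subscribed by {30, 40}:
#
#     info = bulk_get_subscriber_peer_info(realm, [private_stream, public_stream])
#     info.subscribed_ids     # => {5: {30}, 6: {30, 40}}
#     info.private_peer_dict  # => {5: {1, 2, 30}}  (public streams are handled elsewhere)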


def handle_stream_notifications_compatibility(
    user_profile: UserProfile | None,
    stream_dict: dict[str, Any],
    notification_settings_null: bool,
) -> None:
    # Old versions of the mobile apps don't support `None` as a
    # value for the stream-level notifications properties, so we
    # have to handle the normally frontend-side defaults for these
    # settings here for those older clients.
    #
    # Note that this situation results in these older mobile apps
    # having a subtle bug where changes to the user-level stream
    # notification defaults will not properly propagate to the
    # mobile app "stream notification settings" UI until the app
    # re-registers. This is an acceptable level of
    # backwards-compatibility problem in our view.
    assert not notification_settings_null

    for notification_type in [
        "desktop_notifications",
        "audible_notifications",
        "push_notifications",
        "email_notifications",
    ]:
        # Values of true/false are supported by older clients.
        if stream_dict[notification_type] is not None:
            continue
        target_attr = "enable_stream_" + notification_type
        stream_dict[notification_type] = (
            False if user_profile is None else getattr(user_profile, target_attr)
        )
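
# Illustrative sketch (hypothetical values): the function mutates stream_dict in place,
# replacing each None with the user's account-level default (or False with no user),
# while explicit True/False values are left untouched:
#
#     stream_dict = {
#         "desktop_notifications": None,
#         "audible_notifications": True,
#         "push_notifications": None,
#         "email_notifications": None,
#     }
#     handle_stream_notifications_compatibility(user_profile, stream_dict, False)
#     # stream_dict["desktop_notifications"] is now
#     # user_profile.enable_stream_desktop_notifications, etc.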


def subscriber_ids_with_stream_history_access(stream: Stream) -> set[int]:
    """Returns the set of active user IDs who can access any message
    history on this stream (regardless of whether they have a
    UserMessage) based on the stream's configuration.

    1. if !history_public_to_subscribers:
          History is not available to anyone
    2. if history_public_to_subscribers:
          All subscribers can access the history including guests

    The results of this function need to be kept consistent with
    what can_access_stream_history would dictate.
    """

    if not stream.is_history_public_to_subscribers():
        return set()

    return set(
        get_active_subscriptions_for_stream_id(
            stream.id, include_deactivated_users=False
        ).values_list("user_profile_id", flat=True)
    )
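
# Illustrative sketch: when history is not public to subscribers this is simply an
# empty set, so a hypothetical caller gating access to old messages might do:
#
#     if user_profile.id in subscriber_ids_with_stream_history_access(stream):
#         ...  # may read messages sent before they subscribed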


def get_subscriptions_for_send_message(
    *,
    realm_id: int,
    stream_id: int,
    topic_name: str,
    possible_stream_wildcard_mention: bool,
    topic_participant_user_ids: AbstractSet[int],
    possibly_mentioned_user_ids: AbstractSet[int],
) -> QuerySet[Subscription]:
    """This function optimizes an important use case for large
    streams. Open realms often have many long_term_idle users, which
    can result in 10,000s of long_term_idle recipients in default
    streams. do_send_messages has an optimization to avoid doing work
    for long_term_idle users unless message flags or notifications
    should be generated.

    However, it's expensive even to fetch and process them all in
    Python. This function returns all recipients of a stream
    message that could possibly require action in the send-message
    codepath.

    Basically, it returns all subscribers, excluding all long-term
    idle users who it can prove will not receive a UserMessage row or
    notification for the message (i.e. no alert words, mentions, or
    email/push notifications are configured) and thus are not needed
    for processing the message send.

    Critically, this function is called before the Markdown
    processor. As a result, it returns all subscribers who have ANY
    configured alert words, even if their alert words aren't present
    in the message. Similarly, it returns all subscribers who match
    the "possible mention" parameters.

    Downstream logic, which runs after the Markdown processor has
    parsed the message, will do the precise determination.
    """

    query = get_active_subscriptions_for_stream_id(
        stream_id,
        include_deactivated_users=False,
    )

    if possible_stream_wildcard_mention:
        return query

    query = query.filter(
        Q(user_profile__long_term_idle=False)
        | Q(push_notifications=True)
        | (Q(push_notifications=None) & Q(user_profile__enable_stream_push_notifications=True))
        | Q(email_notifications=True)
        | (Q(email_notifications=None) & Q(user_profile__enable_stream_email_notifications=True))
        | Q(user_profile_id__in=possibly_mentioned_user_ids)
        | Q(user_profile_id__in=topic_participant_user_ids)
        | Q(
            user_profile_id__in=AlertWord.objects.filter(realm_id=realm_id).values_list(
                "user_profile_id"
            )
        )
        | Q(
            user_profile_id__in=UserTopic.objects.filter(
                stream_id=stream_id,
                topic_name__iexact=topic_name,
                visibility_policy=UserTopic.VisibilityPolicy.FOLLOWED,
            ).values_list("user_profile_id")
        )
    )
    return query
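
# Illustrative sketch (hypothetical, simplified call site): the long_term_idle
# filtering only applies when a stream wildcard mention is impossible, since an
# @-everyone style mention can require action for every subscriber:
#
#     subscriptions = get_subscriptions_for_send_message(
#         realm_id=realm.id,
#         stream_id=stream.id,
#         topic_name=topic_name,
#         possible_stream_wildcard_mention=message_has_possible_wildcard_mention,
#         topic_participant_user_ids=set(),
#         possibly_mentioned_user_ids=possible_mention_ids,
#     )
#
# All names above (realm, stream, topic_name, message_has_possible_wildcard_mention,
# possible_mention_ids) are placeholders for whatever the send-message codepath has
# already computed.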
|