2023-03-27 16:32:30 +02:00
|
|
|
import itertools
|
2020-04-25 19:18:13 +02:00
|
|
|
import re
|
2019-06-29 04:41:13 +02:00
|
|
|
import unicodedata
|
2020-01-13 22:11:19 +01:00
|
|
|
from collections import defaultdict
|
2023-03-27 16:32:30 +02:00
|
|
|
from email.headerregistry import Address
|
|
|
|
from operator import itemgetter
|
|
|
|
from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, Set, Tuple, TypedDict
|
2019-06-29 04:41:13 +02:00
|
|
|
|
2022-02-16 13:40:43 +01:00
|
|
|
import dateutil.parser as date_parser
|
2020-01-13 22:11:19 +01:00
|
|
|
from django.conf import settings
|
2020-06-21 02:36:20 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2023-03-27 16:32:30 +02:00
|
|
|
from django.db.models import Q, QuerySet
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2022-07-08 23:06:28 +02:00
|
|
|
from django_otp.middleware import is_verified
|
2023-11-08 12:44:21 +01:00
|
|
|
from typing_extensions import NotRequired
|
2020-06-11 00:54:34 +02:00
|
|
|
from zulip_bots.custom_exceptions import ConfigValidationError
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2023-03-27 16:32:30 +02:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_field, get_avatar_for_inaccessible_user
|
2023-11-08 11:13:25 +01:00
|
|
|
from zerver.lib.cache import cache_with_key, get_cross_realm_dicts_key
|
2022-01-07 21:47:11 +01:00
|
|
|
from zerver.lib.exceptions import (
|
|
|
|
JsonableError,
|
2022-11-17 09:30:48 +01:00
|
|
|
OrganizationAdministratorRequiredError,
|
|
|
|
OrganizationOwnerRequiredError,
|
2022-01-07 21:47:11 +01:00
|
|
|
)
|
2023-03-27 16:32:30 +02:00
|
|
|
from zerver.lib.timestamp import timestamp_to_datetime
|
2020-10-27 01:41:00 +01:00
|
|
|
from zerver.lib.timezone import canonicalize_timezone
|
2023-11-08 11:13:25 +01:00
|
|
|
from zerver.lib.types import ProfileDataElementUpdateDict, ProfileDataElementValue, RawUserDict
|
2023-03-27 16:32:30 +02:00
|
|
|
from zerver.lib.user_groups import is_user_in_group
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
CustomProfileField,
|
|
|
|
CustomProfileFieldValue,
|
2023-03-27 16:32:30 +02:00
|
|
|
Message,
|
2020-06-11 00:54:34 +02:00
|
|
|
Realm,
|
2023-03-27 16:32:30 +02:00
|
|
|
Recipient,
|
2020-06-11 00:54:34 +02:00
|
|
|
Service,
|
2023-03-27 16:32:30 +02:00
|
|
|
Subscription,
|
2023-08-30 23:40:24 +02:00
|
|
|
UserMessage,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserProfile,
|
2023-12-15 01:16:00 +01:00
|
|
|
)
|
2023-12-15 01:55:59 +01:00
|
|
|
from zerver.models.groups import SystemGroups
|
2024-03-11 20:02:05 +01:00
|
|
|
from zerver.models.realms import get_fake_email_domain, require_unique_names
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import (
|
2023-10-11 09:34:26 +02:00
|
|
|
active_non_guest_user_ids,
|
|
|
|
active_user_ids,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_realm_user_dicts,
|
2022-02-21 09:33:08 +01:00
|
|
|
get_user,
|
2023-11-27 05:39:27 +01:00
|
|
|
get_user_by_id_in_realm_including_cross_realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_user_profile_by_id_in_realm,
|
2022-09-16 14:27:32 +02:00
|
|
|
is_cross_realm_bot_email,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2024-03-11 20:02:05 +01:00
|
|
|
def check_full_name(
    full_name_raw: str, *, user_profile: Optional[UserProfile], realm: Optional[Realm]
) -> str:
    """Validate a user's full name and return the stripped form.

    Raises JsonableError if the name is too long or too short, contains
    control/reserved characters, ends with an ambiguous ``|<digits>``
    suffix (which would confuse sloppy Markdown-mention parsers), or —
    when the realm requires unique names — collides with another user's
    name after NFKC normalization and case-folding.
    """
    full_name = full_name_raw.strip()
    if len(full_name) > UserProfile.MAX_NAME_LENGTH:
        raise JsonableError(_("Name too long!"))
    if len(full_name) < UserProfile.MIN_NAME_LENGTH:
        raise JsonableError(_("Name too short!"))
    for character in full_name:
        # Unicode category "C*" covers control/format/unassigned characters.
        if unicodedata.category(character)[0] == "C" or character in UserProfile.NAME_INVALID_CHARS:
            raise JsonableError(_("Invalid characters in name!"))
    # Names ending with e.g. `|15` could be ambiguous for
    # sloppily-written parsers of our Markdown syntax for mentioning
    # users with ambiguous names, and likely have no real use, so we
    # ban them.
    if re.search(r"\|\d+$", full_name_raw):
        raise JsonableError(_("Invalid format!"))

    if require_unique_names(realm):
        normalized_user_full_name = unicodedata.normalize("NFKC", full_name).casefold()
        users_query = UserProfile.objects.filter(realm=realm)
        # We want to exclude the user's own current full name while
        # checking for uniqueness.
        if user_profile is not None:
            existing_names = users_query.exclude(id=user_profile.id).values_list(
                "full_name", flat=True
            )
        else:
            existing_names = users_query.values_list("full_name", flat=True)

        # Build a set (not a list) for O(1) membership testing; also
        # avoid shadowing the outer `full_name` in the comprehension.
        normalized_existing_names = {
            unicodedata.normalize("NFKC", existing_name).casefold()
            for existing_name in existing_names
        }

        if normalized_user_full_name in normalized_existing_names:
            raise JsonableError(_("Unique names required in this organization."))

    return full_name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-25 02:01:34 +02:00
|
|
|
# NOTE: We don't try to absolutely prevent 2 bots from having the same
|
|
|
|
# name (e.g. you can get there by reactivating a deactivated bot after
|
|
|
|
# making a new bot with the same name). This is just a check designed
|
|
|
|
# to make it unlikely to happen by accident.
|
2023-03-29 19:01:19 +02:00
|
|
|
def check_bot_name_available(realm_id: int, full_name: str, *, is_activation: bool) -> None:
    """Raise JsonableError if an active user in the realm already uses this name.

    This is a best-effort uniqueness check, not a database constraint;
    duplicates can still arise (e.g. by reactivating a deactivated bot
    after creating a new bot with the same name).
    """
    name_taken = UserProfile.objects.filter(
        realm_id=realm_id,
        full_name=full_name.strip(),
        is_active=True,
    ).exists()
    if not name_taken:
        return

    if is_activation:
        raise JsonableError(
            f'There is already an active bot named "{full_name}" in this organization. To reactivate this bot, you must rename or deactivate the other one first.'
        )
    raise JsonableError(_("Name is already in use!"))
|
bots: Prevent bots from having duplicate full names.
Bots are not allowed to use the same name as
other users in the realm (either bot or human).
This is kind of a big commit, but I wanted to
combine the post/patch (aka add/edit) checks
into one commit, since it's a change in policy
that affects both codepaths.
A lot of the noise is in tests. We had good
coverage on the previous code, including some places
like event testing where we were expediently
not bothering to use different names for
different bots in some longer tests. And then
of course I test some new scenarios that are relevant
with the new policy.
There are two new functions:
check_bot_name_available:
very simple Django query
check_change_bot_full_name:
this diverges from the 3-line
check_change_full_name, where the latter
is still used for the "humans" use case
And then we just call those in appropriate places.
Note that there is still a loophole here
where you can get two bots with the same
name if you reactivate a bot named Fred
that was inactive when the second bot named
Fred was created. Also, we don't attempt
to fix historical data. So this commit
shouldn't be considered any kind of lockdown,
it's just meant to help people from
inadvertently creating two bots of the same
name where they don't intend to. For more
context, we are continuing to allow two
human users in the same realm to have the
same full name, and our code should generally
be tolerant of that possibility. (A good
example is our new mention syntax, which disambiguates
same-named people using ids.)
It's also worth noting that our web app client
doesn't try to scrub full_name from its payload in
situations where the user has actually only modified other
fields in the "Edit bot" UI. Starting here
we just handle this on the server, since it's
easy to fix there, and even if we fixed it in the web
app, there's no guarantee that other clients won't be
just as brute force. It wasn't exactly broken before,
but we'd needlessly write rows to audit tables.
Fixes #10509
2018-09-27 19:25:18 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def check_short_name(short_name_raw: str) -> str:
    """Strip surrounding whitespace from a short name, rejecting empty results."""
    short_name = short_name_raw.strip()
    if short_name:
        return short_name
    raise JsonableError(_("Bad name or username"))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-06 11:56:48 +02:00
|
|
|
def check_valid_bot_config(
    bot_type: int, service_name: str, config_data: Mapping[str, str]
) -> None:
    """Validate config_data for a bot service, raising JsonableError on failure.

    Incoming webhook bots must supply every option the named integration
    declares, and each value must pass that option's validator.  Embedded
    bots delegate to the bot handler's validate_config, if it has one.
    """
    if bot_type == UserProfile.INCOMING_WEBHOOK_BOT:
        from zerver.lib.integrations import WEBHOOK_INTEGRATIONS

        matching_integration = next(
            (i for i in WEBHOOK_INTEGRATIONS if i.name == service_name), None
        )
        # Map each config key to its validator function.
        config_options = (
            {option[1]: option[2] for option in matching_integration.config_options}
            if matching_integration is not None
            else None
        )
        if not config_options:
            raise JsonableError(
                _("Invalid integration '{integration_name}'.").format(integration_name=service_name)
            )

        missing_keys = set(config_options.keys()) - set(config_data.keys())
        if missing_keys:
            raise JsonableError(
                _("Missing configuration parameters: {keys}").format(
                    keys=missing_keys,
                )
            )

        for key, validator in config_options.items():
            value = config_data[key]
            error = validator(key, value)
            if error is not None:
                raise JsonableError(
                    _("Invalid {key} value {value} ({error})").format(
                        key=key, value=value, error=error
                    )
                )

    elif bot_type == UserProfile.EMBEDDED_BOT:
        try:
            from zerver.lib.bot_lib import get_bot_handler

            bot_handler = get_bot_handler(service_name)
            if hasattr(bot_handler, "validate_config"):
                bot_handler.validate_config(config_data)
        except ConfigValidationError:
            # The exception provides a specific error message, but that
            # message is not tagged translatable, because it is
            # triggered in the external zulip_bots package.
            # TODO: Think of some clever way to provide a more specific
            # error message.
            raise JsonableError(_("Invalid configuration data!"))
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-01 17:25:15 +02:00
|
|
|
# Adds an outgoing webhook or embedded bot service.
|
2021-02-12 08:19:30 +01:00
|
|
|
def add_service(
    name: str,
    user_profile: UserProfile,
    base_url: str,
    interface: int,
    token: str,
) -> None:
    """Create the Service row backing an outgoing webhook or embedded bot."""
    Service.objects.create(
        name=name,
        user_profile=user_profile,
        base_url=base_url,
        interface=interface,
        token=token,
    )
|
|
|
|
|
2018-06-01 17:25:15 +02:00
|
|
|
|
2018-01-29 16:10:54 +01:00
|
|
|
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
    """Raise unless user_profile may create a bot of bot_type under realm policy."""
    # Realm administrators can always add bots.
    if user_profile.is_realm_admin:
        return

    policy = user_profile.realm.bot_creation_policy
    if policy == Realm.BOT_CREATION_EVERYONE:
        return
    if policy == Realm.BOT_CREATION_ADMINS_ONLY:
        raise OrganizationAdministratorRequiredError
    if policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS and bot_type == UserProfile.DEFAULT_BOT:
        raise OrganizationAdministratorRequiredError
|
2018-01-29 16:10:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-24 16:24:24 +01:00
|
|
|
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
    """Raise JsonableError unless bot_type is one the user is allowed to create."""
    if bot_type in user_profile.allowed_bot_types:
        return
    raise JsonableError(_("Invalid bot type"))
|
2017-07-03 18:35:12 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-01 17:47:50 +02:00
|
|
|
def check_valid_interface_type(interface_type: Optional[int]) -> None:
    """Raise JsonableError unless interface_type is a supported Service interface."""
    if interface_type in Service.ALLOWED_INTERFACE_TYPES:
        return
    raise JsonableError(_("Invalid interface type"))
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-30 21:43:19 +02:00
|
|
|
def is_administrator_role(role: int) -> bool:
    """Return True for roles with administrative rights (realm admin or owner)."""
    return role == UserProfile.ROLE_REALM_ADMINISTRATOR or role == UserProfile.ROLE_REALM_OWNER
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-19 12:10:10 +02:00
|
|
|
def bulk_get_cross_realm_bots() -> Dict[str, UserProfile]:
    """Fetch all cross-realm system bots in one query, keyed by lowercased email.

    The bots live in the realm named by settings.SYSTEM_BOT_REALM and are
    identified by settings.CROSS_REALM_BOT_EMAILS, matched case-insensitively.
    """
    emails = list(settings.CROSS_REALM_BOT_EMAILS)

    # This should be just
    #
    # UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
    # realm=realm)
    #
    # But chaining __in and __iexact doesn't work with Django's
    # ORM, so we have the following hack to construct the relevant where clause
    # NOTE(review): QuerySet.extra is deprecated in recent Django releases;
    # consider migrating to Func/annotate — confirm before changing.
    where_clause = (
        "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
    )
    users = UserProfile.objects.filter(realm__string_id=settings.SYSTEM_BOT_REALM).extra(
        where=[where_clause], params=(emails,)
    )

    # Key by lowercased email so callers can do case-insensitive lookups.
    return {user.email.lower(): user for user in users}
|
2017-11-16 02:28:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-13 05:24:42 +02:00
|
|
|
def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
    """Fetch the UserProfiles for user_ids within realm.

    Raises JsonableError naming the first requested ID that does not
    exist in the realm.
    """
    # TODO: Consider adding a flag to control whether deactivated
    # users should be included.

    rows = list(
        UserProfile.objects.filter(id__in=user_ids, realm=realm).select_related("realm")
    )

    fetched_ids = {row.id for row in rows}

    # Report the first missing ID in the caller's requested order.
    missing_id = next((uid for uid in user_ids if uid not in fetched_ids), None)
    if missing_id is not None:
        raise JsonableError(_("Invalid user ID: {user_id}").format(user_id=missing_id))

    return rows
|
2018-05-28 20:42:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-28 20:42:31 +02:00
|
|
|
def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
    """Fetch a bot in user_profile's realm that user_profile may administer.

    Raises JsonableError when the ID does not name a bot in the realm or
    the acting user lacks permission; bots carrying can_create_users
    additionally require an organization owner.
    """
    try:
        target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        target = None

    if target is None or not target.is_bot:
        raise JsonableError(_("No such bot"))

    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))

    if target.can_create_users and not user_profile.is_realm_owner:
        # Organizations owners are required to administer a bot with
        # the can_create_users permission. User creation via the API
        # is a permission not available even to organization owners by
        # default, because it can be abused to send spam. Requiring an
        # owner is intended to ensure organizational responsibility
        # for use of this permission.
        raise OrganizationOwnerRequiredError

    return target
|
2018-06-04 07:04:19 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-02-21 07:28:42 +01:00
|
|
|
def access_user_common(
    target: UserProfile,
    user_profile: UserProfile,
    allow_deactivated: bool,
    allow_bots: bool,
    for_admin: bool,
) -> UserProfile:
    """Shared permission checks for the access_user_by_* helpers.

    Returns target when access is permitted; raises JsonableError
    otherwise.
    """
    if not allow_bots and target.is_bot:
        raise JsonableError(_("No such user"))
    if not allow_deactivated and not target.is_active:
        raise JsonableError(_("User is deactivated"))

    if for_admin:
        # Administrative access requires can_admin_user on the target.
        if not user_profile.can_admin_user(target):
            raise JsonableError(_("Insufficient permission"))
        return target

    # Administrative access is not required just to read a user,
    # but we need to check the can_access_all_users_group setting.
    if not check_can_access_user(target, user_profile):
        raise JsonableError(_("Insufficient permission"))
    return target
|
|
|
|
|
|
|
|
|
2021-01-28 18:04:43 +01:00
|
|
|
def access_user_by_id(
    user_profile: UserProfile,
    target_user_id: int,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Look up another user by ID for API code.

    Verifies the target is in the acting user's realm and, depending on
    the flags, checks administrative privileges and whether deactivated
    users or bots are acceptable targets.
    """
    realm = user_profile.realm
    try:
        target = get_user_profile_by_id_in_realm(target_user_id, realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    return access_user_common(target, user_profile, allow_deactivated, allow_bots, for_admin)
|
|
|
|
|
|
|
|
|
2023-11-27 05:39:27 +01:00
|
|
|
def access_user_by_id_including_cross_realm(
    user_profile: UserProfile,
    target_user_id: int,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Variant of access_user_by_id that also permits cross-realm bots."""
    realm = user_profile.realm
    try:
        target = get_user_by_id_in_realm_including_cross_realm(target_user_id, realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    return access_user_common(target, user_profile, allow_deactivated, allow_bots, for_admin)
|
|
|
|
|
|
|
|
|
2022-02-21 09:33:08 +01:00
|
|
|
def access_user_by_email(
    user_profile: UserProfile,
    email: str,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Look up another user by email, applying the shared access checks."""
    realm = user_profile.realm
    try:
        target = get_user(email, realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    return access_user_common(target, user_profile, allow_deactivated, allow_bots, for_admin)
|
2018-06-19 10:55:56 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-31 23:15:51 +02:00
|
|
|
class Account(TypedDict):
    """Summary of one user account sharing an email address, as returned
    by get_accounts_for_email."""

    # Human-readable name of the realm (organization).
    realm_name: str
    # Database ID of the realm.
    realm_id: int
    # The user's full name in that realm.
    full_name: str
    # Avatar URL for the account, if one could be computed.
    avatar: Optional[str]
|
|
|
|
|
|
|
|
|
2023-07-31 23:15:51 +02:00
|
|
|
def get_accounts_for_email(email: str) -> List[Account]:
    """Return account summaries for every active human user whose
    delivery email matches (case-insensitively), in non-deactivated
    realms, ordered oldest account first."""
    profiles = (
        UserProfile.objects.select_related("realm")
        .filter(
            delivery_email__iexact=email.strip(),
            is_active=True,
            realm__deactivated=False,
            is_bot=False,
        )
        .order_by("date_joined")
    )
    return [
        {
            "realm_name": profile.realm.name,
            "realm_id": profile.realm.id,
            "full_name": profile.full_name,
            "avatar": avatar_url(profile),
        }
        for profile in profiles
    ]
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-01 10:53:40 +02:00
|
|
|
|
|
|
|
def get_api_key(user_profile: UserProfile) -> str:
    """Return the user's API key."""
    return user_profile.api_key
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-01 10:53:40 +02:00
|
|
|
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
    """Return all of the user's API keys; currently always a single key."""
    # Users can only have one API key for now.
    return [user_profile.api_key]
|
2018-09-04 20:23:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def validate_user_custom_profile_field(
    realm_id: int, field: CustomProfileField, value: ProfileDataElementValue
) -> ProfileDataElementValue:
    """Validate value against the given custom profile field's type.

    Returns the validated value; raises on invalid input, and
    AssertionError for an unrecognized field type.
    """
    field_type = field.field_type
    var_name = f"{field.name}"

    simple_validators = CustomProfileField.FIELD_VALIDATORS
    if field_type in simple_validators:
        return simple_validators[field_type](var_name, value)

    if field_type == CustomProfileField.SELECT:
        choice_field_validator = CustomProfileField.SELECT_FIELD_VALIDATORS[field_type]
        field_data = field.field_data
        # Assertion so that mypy knows field_data is not None here.
        assert field_data is not None
        return choice_field_validator(var_name, field_data, value)

    if field_type == CustomProfileField.USER:
        user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
        return user_field_validator(realm_id, value, False)

    raise AssertionError("Invalid field type")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def validate_user_custom_profile_data(
    realm_id: int, profile_data: List[ProfileDataElementUpdateDict]
) -> None:
    """Validate every custom profile field update in profile_data.

    Raises JsonableError if a field ID is unknown or a value fails its
    field-type validation.
    """
    for entry in profile_data:
        field_id = entry["id"]
        try:
            field = CustomProfileField.objects.get(id=field_id)
        except CustomProfileField.DoesNotExist:
            raise JsonableError(_("Field id {id} not found.").format(id=field_id))

        try:
            validate_user_custom_profile_field(realm_id, field, entry["value"])
        except ValidationError as error:
            raise JsonableError(error.message)
|
2020-01-13 18:47:30 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-11 08:17:57 +01:00
|
|
|
def can_access_delivery_email(
    user_profile: UserProfile,
    target_user_id: int,
    email_address_visibility: int,
) -> bool:
    """Return whether user_profile may see the target user's delivery
    email, given the target's email_address_visibility setting."""
    # Users can always see their own delivery email.
    if target_user_id == user_profile.id:
        return True

    # Bots always have email_address_visibility as EMAIL_ADDRESS_VISIBILITY_EVERYONE.
    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        return True
    elif email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS:
        return user_profile.is_realm_admin
    elif email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS:
        return user_profile.is_realm_admin or user_profile.is_moderator
    elif email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_MEMBERS:
        return not user_profile.is_guest

    return False
|
|
|
|
|
|
|
|
|
2023-11-08 12:44:21 +01:00
|
|
|
class APIUserDict(TypedDict):
    """Dictionary representation of a user in the format delivered to API
    clients; constructed by format_user_row and
    get_data_for_inaccessible_user.

    NotRequired fields are present only in some contexts (see the
    per-field comments below).
    """

    email: str
    user_id: int
    avatar_version: int
    is_admin: bool
    is_owner: bool
    is_guest: bool
    # Removed for spectators (acting_user is None) in format_user_row.
    is_billing_admin: NotRequired[bool]
    role: int
    is_bot: bool
    full_name: str
    # Removed for spectators in format_user_row.
    timezone: NotRequired[str]
    is_active: bool
    date_joined: str
    # Omitted when the client passed user_avatar_url_field_optional and
    # the user is long_term_idle; clients then fall back to
    # `GET /avatar/{user_id}`.
    avatar_url: NotRequired[Optional[str]]
    # None unless the acting user may access this user's real email.
    delivery_email: Optional[str]
    # Set only for bots.
    bot_type: NotRequired[Optional[int]]
    # Set only for bots; can be None with legacy data.
    bot_owner_id: NotRequired[Optional[int]]
    # Set only for non-bot users when custom profile data was requested.
    profile_data: NotRequired[Optional[Dict[str, Any]]]
    # Set only for cross-realm bots.
    is_system_bot: NotRequired[bool]
    max_message_id: NotRequired[int]
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def format_user_row(
    realm_id: int,
    acting_user: Optional[UserProfile],
    row: RawUserDict,
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    custom_profile_field_data: Optional[Dict[str, Any]] = None,
) -> APIUserDict:
    """Formats a user row returned by a database fetch using
    .values(*realm_user_dict_fields) into a dictionary representation
    of that user for API delivery to clients. The acting_user
    argument is used for permissions checks; acting_user=None means
    the data is being prepared for a spectator, and several fields
    are stripped or coarsened below.
    """

    # Derive role booleans once; reused both in the result and in the
    # bot-specific branch at the bottom.
    is_admin = is_administrator_role(row["role"])
    is_owner = row["role"] == UserProfile.ROLE_REALM_OWNER
    is_guest = row["role"] == UserProfile.ROLE_GUEST
    is_bot = row["is_bot"]

    # Only expose the real (delivery) email if the acting user is
    # permitted to see it under the target's visibility setting.
    delivery_email = None
    if acting_user is not None and can_access_delivery_email(
        acting_user, row["id"], row["email_address_visibility"]
    ):
        delivery_email = row["delivery_email"]

    result = APIUserDict(
        email=row["email"],
        user_id=row["id"],
        avatar_version=row["avatar_version"],
        is_admin=is_admin,
        is_owner=is_owner,
        is_guest=is_guest,
        is_billing_admin=row["is_billing_admin"],
        role=row["role"],
        is_bot=is_bot,
        full_name=row["full_name"],
        timezone=canonicalize_timezone(row["timezone"]),
        is_active=row["is_active"],
        date_joined=row["date_joined"].isoformat(),
        delivery_email=delivery_email,
    )

    if acting_user is None:
        # Remove data about other users which are not useful to spectators
        # or can reveal personal information about a user.
        # Only send day level precision date_joined data to spectators.
        del result["is_billing_admin"]
        del result["timezone"]
        assert isinstance(result["date_joined"], str)
        result["date_joined"] = str(date_parser.parse(result["date_joined"]).date())

    # Zulip clients that support using `GET /avatar/{user_id}` as a
    # fallback if we didn't send an avatar URL in the user object pass
    # user_avatar_url_field_optional in client_capabilities.
    #
    # This is a major network performance optimization for
    # organizations with 10,000s of users where we would otherwise
    # send avatar URLs in the payload (either because most users have
    # uploaded avatars or because EMAIL_ADDRESS_VISIBILITY_ADMINS
    # prevents the older client_gravatar optimization from helping).
    # The performance impact is large largely because the hashes in
    # avatar URLs structurally cannot compress well.
    #
    # The user_avatar_url_field_optional gives the server sole
    # discretion in deciding for which users we want to send the
    # avatar URL (Which saves clients an RTT at the cost of some
    # bandwidth). At present, the server looks at `long_term_idle` to
    # decide which users to include avatars for, piggy-backing on a
    # different optimization for organizations with 10,000s of users.
    include_avatar_url = not user_avatar_url_field_optional or not row["long_term_idle"]
    if include_avatar_url:
        result["avatar_url"] = get_avatar_field(
            user_id=row["id"],
            realm_id=realm_id,
            email=row["delivery_email"],
            avatar_source=row["avatar_source"],
            avatar_version=row["avatar_version"],
            medium=False,
            client_gravatar=client_gravatar,
        )

    if is_bot:
        result["bot_type"] = row["bot_type"]
        if is_cross_realm_bot_email(row["email"]):
            result["is_system_bot"] = True

        # Note that bot_owner_id can be None with legacy data.
        result["bot_owner_id"] = row["bot_owner_id"]
    elif custom_profile_field_data is not None:
        result["profile_data"] = custom_profile_field_data
    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-10-17 15:25:07 +02:00
|
|
|
def user_access_restricted_in_realm(target_user: UserProfile) -> bool:
    """Return whether other users' access to target_user may be limited,
    based on the realm's can_access_all_users_group setting. Bots are
    never restricted.
    """
    if target_user.is_bot:
        return False

    access_group_name = target_user.realm.can_access_all_users_group.named_user_group.name
    # Access is unrestricted exactly when everyone may access all users.
    return access_group_name != SystemGroups.EVERYONE
|
|
|
|
|
|
|
|
|
2023-03-27 16:32:30 +02:00
|
|
|
def check_user_can_access_all_users(acting_user: Optional[UserProfile]) -> bool:
    """Return whether acting_user is allowed to see every user in the realm.

    Spectators (acting_user is None) are allowed, since they already
    have very limited access to user data; non-guests are always
    allowed; guests are allowed only when they belong to the realm's
    can_access_all_users_group.
    """
    if acting_user is None:
        # We allow spectators to access all users since they
        # have very limited access to the user already.
        return True

    if not acting_user.is_guest:
        return True

    return is_user_in_group(acting_user.realm.can_access_all_users_group, acting_user)
|
|
|
|
|
|
|
|
|
2023-10-17 15:25:07 +02:00
|
|
|
def check_can_access_user(
    target_user: UserProfile, user_profile: Optional[UserProfile] = None
) -> bool:
    """Return whether user_profile is allowed to access target_user.

    Checks are ordered from cheapest to most expensive: realm-level
    settings first, then shared subscriptions, and only then the
    Message table for 1:1 direct message history.
    """
    if not user_access_restricted_in_realm(target_user):
        return True

    if check_user_can_access_all_users(user_profile):
        return True

    # check_user_can_access_all_users returns True for None
    # (spectators), so a restricted acting user is non-None here.
    assert user_profile is not None

    if target_user.id == user_profile.id:
        return True

    # These include Subscription objects for streams as well as group DMs.
    subscribed_recipient_ids = Subscription.objects.filter(
        user_profile=user_profile,
        active=True,
        recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
    ).values_list("recipient_id", flat=True)

    # Accessible if the target actively shares any of those
    # streams/group DMs with the acting user.
    if Subscription.objects.filter(
        recipient_id__in=subscribed_recipient_ids,
        user_profile=target_user,
        active=True,
        is_user_active=True,
    ).exists():
        return True

    assert user_profile.recipient_id is not None
    assert target_user.recipient_id is not None

    # Querying the "Message" table is expensive so we do this last.
    direct_message_query = Message.objects.filter(
        recipient__type=Recipient.PERSONAL, realm=target_user.realm
    )
    # Accessible if a 1:1 DM was ever exchanged in either direction.
    if direct_message_query.filter(
        Q(sender_id=target_user.id, recipient_id=user_profile.recipient_id)
        | Q(recipient_id=target_user.recipient_id, sender_id=user_profile.id)
    ).exists():
        return True

    return False
|
|
|
|
|
|
|
|
|
2023-12-10 01:34:23 +01:00
|
|
|
def get_inaccessible_user_ids(
    target_user_ids: List[int], acting_user: Optional[UserProfile]
) -> Set[int]:
    """Return the subset of target_user_ids that acting_user cannot access.

    This is the bulk counterpart of check_can_access_user: candidates
    are winnowed in stages (bots, shared subscriptions, 1:1 DM
    history), with the expensive Message query done last and only for
    users not already proven accessible.
    """
    if check_user_can_access_all_users(acting_user):
        return set()

    # check_user_can_access_all_users returns True for None
    # (spectators), so acting_user is non-None here.
    assert acting_user is not None

    # All users can access all the bots, so we just exclude them.
    target_human_user_ids = UserProfile.objects.filter(
        id__in=target_user_ids, is_bot=False
    ).values_list("id", flat=True)

    if not target_human_user_ids:
        return set()

    # Recipients (streams and group DMs) the acting user is subscribed to.
    subscribed_recipient_ids = Subscription.objects.filter(
        user_profile=acting_user,
        active=True,
        recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
    ).values_list("recipient_id", flat=True)

    # Target users sharing at least one of those recipients are accessible.
    common_subscription_user_ids = (
        Subscription.objects.filter(
            recipient_id__in=subscribed_recipient_ids,
            user_profile_id__in=target_human_user_ids,
            active=True,
            is_user_active=True,
        )
        .distinct("user_profile_id")
        .values_list("user_profile_id", flat=True)
    )

    possible_inaccessible_user_ids = set(target_human_user_ids) - set(common_subscription_user_ids)
    if not possible_inaccessible_user_ids:
        return set()

    target_user_recipient_ids = UserProfile.objects.filter(
        id__in=possible_inaccessible_user_ids
    ).values_list("recipient_id", flat=True)

    # Remaining candidates are accessible only via 1:1 DM history, in
    # either direction; this Message query is the expensive step.
    direct_message_query = Message.objects.filter(
        recipient__type=Recipient.PERSONAL, realm=acting_user.realm
    )
    direct_messages_users = direct_message_query.filter(
        Q(sender_id__in=possible_inaccessible_user_ids, recipient_id=acting_user.recipient_id)
        | Q(recipient_id__in=target_user_recipient_ids, sender_id=acting_user.id)
    ).values_list("sender_id", "recipient__type_id")

    # For each DM row, record the participant who is not the acting user.
    user_ids_involved_in_dms = set()
    for sender_id, recipient_user_id in direct_messages_users:
        if sender_id == acting_user.id:
            user_ids_involved_in_dms.add(recipient_user_id)
        else:
            user_ids_involved_in_dms.add(sender_id)

    inaccessible_user_ids = possible_inaccessible_user_ids - user_ids_involved_in_dms

    return inaccessible_user_ids
|
|
|
|
|
|
|
|
|
2023-10-11 09:34:26 +02:00
|
|
|
def get_user_ids_who_can_access_user(target_user: UserProfile) -> List[int]:
    """Return the IDs of all active users who may access target_user.

    We assume that the caller only needs active users here, since this
    function is used to get users to send events and to send presence
    updates.
    """
    realm = target_user.realm
    if not user_access_restricted_in_realm(target_user):
        return active_user_ids(realm.id)

    subscriber_ids_by_user = get_subscribers_of_target_user_subscriptions([target_user])
    dm_partner_ids_by_user = get_users_involved_in_dms_with_target_users([target_user], realm)

    # The target themselves, every active non-guest, plus guests who
    # share a stream/group DM or a 1:1 DM thread with the target.
    ids_with_access = (
        {target_user.id}
        | set(active_non_guest_user_ids(realm.id))
        | subscriber_ids_by_user[target_user.id]
        | dm_partner_ids_by_user[target_user.id]
    )
    return list(ids_with_access)
|
|
|
|
|
|
|
|
|
2023-03-27 16:32:30 +02:00
|
|
|
def get_subscribers_of_target_user_subscriptions(
    target_users: List[UserProfile], include_deactivated_users_for_huddles: bool = False
) -> Dict[int, Set[int]]:
    """Map each target user's ID to the set of user IDs subscribed to any
    stream or group DM that the target user is subscribed to.

    If include_deactivated_users_for_huddles is True, deactivated
    users are kept for group DM recipients (but never for streams).
    """
    target_user_ids = [user.id for user in target_users]
    # All (user, recipient) pairs for the target users' active
    # stream/group-DM subscriptions; ordered so groupby below works.
    target_user_subscriptions = (
        Subscription.objects.filter(
            user_profile__in=target_user_ids,
            active=True,
            recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
        )
        .order_by("user_profile_id")
        .values("user_profile_id", "recipient_id")
    )

    target_users_subbed_recipient_ids = set()
    target_user_subscriptions_dict: Dict[int, Set[int]] = defaultdict(set)

    # Group the rows per target user, also accumulating the union of
    # all recipient IDs for the second query below.
    for user_profile_id, sub_rows in itertools.groupby(
        target_user_subscriptions, itemgetter("user_profile_id")
    ):
        recipient_ids = {row["recipient_id"] for row in sub_rows}
        target_user_subscriptions_dict[user_profile_id] = recipient_ids
        target_users_subbed_recipient_ids |= recipient_ids

    subs_in_target_user_subscriptions_query = Subscription.objects.filter(
        recipient_id__in=list(target_users_subbed_recipient_ids),
        active=True,
    )

    if include_deactivated_users_for_huddles:
        # Streams still require active users, but group DMs keep
        # deactivated participants.
        subs_in_target_user_subscriptions_query = subs_in_target_user_subscriptions_query.filter(
            Q(recipient__type=Recipient.STREAM, is_user_active=True)
            | Q(recipient__type=Recipient.DIRECT_MESSAGE_GROUP)
        )
    else:
        subs_in_target_user_subscriptions_query = subs_in_target_user_subscriptions_query.filter(
            recipient__type__in=[Recipient.STREAM, Recipient.DIRECT_MESSAGE_GROUP],
            is_user_active=True,
        )

    # Ordered by recipient_id so groupby below works.
    subs_in_target_user_subscriptions = subs_in_target_user_subscriptions_query.order_by(
        "recipient_id"
    ).values("user_profile_id", "recipient_id")

    subscribers_dict_by_recipient_ids: Dict[int, Set[int]] = defaultdict(set)
    for recipient_id, sub_rows in itertools.groupby(
        subs_in_target_user_subscriptions, itemgetter("recipient_id")
    ):
        user_ids = {row["user_profile_id"] for row in sub_rows}
        subscribers_dict_by_recipient_ids[recipient_id] = user_ids

    # Fan the per-recipient subscriber sets back out to each target user.
    users_subbed_to_target_user_subscriptions_dict: Dict[int, Set[int]] = defaultdict(set)
    for user_id in target_user_ids:
        target_user_subbed_recipients = target_user_subscriptions_dict[user_id]
        for recipient_id in target_user_subbed_recipients:
            users_subbed_to_target_user_subscriptions_dict[user_id] |= (
                subscribers_dict_by_recipient_ids[recipient_id]
            )

    return users_subbed_to_target_user_subscriptions_dict
|
|
|
|
|
|
|
|
|
|
|
|
def get_users_involved_in_dms_with_target_users(
    target_users: List[UserProfile], realm: Realm, include_deactivated_users: bool = False
) -> Dict[int, Set[int]]:
    """Map each target user's ID to the set of user IDs they have 1:1
    direct message history with, in either direction.

    Deactivated DM partners are excluded unless
    include_deactivated_users is True.
    """
    target_user_ids = [user.id for user in target_users]

    # Distinct (sender, recipient) pairs where a target user sent a 1:1 DM;
    # ordered by sender_id so groupby below works.
    direct_messages_recipient_users = (
        Message.objects.filter(
            sender_id__in=target_user_ids, realm=realm, recipient__type=Recipient.PERSONAL
        )
        .order_by("sender_id")
        .distinct("sender_id", "recipient__type_id")
        .values("sender_id", "recipient__type_id")
    )

    # Used to filter out deactivated recipients below.
    direct_messages_recipient_users_set = {
        obj["recipient__type_id"] for obj in direct_messages_recipient_users
    }
    active_direct_messages_recipient_user_ids = UserProfile.objects.filter(
        id__in=list(direct_messages_recipient_users_set), is_active=True
    ).values_list("id", flat=True)

    direct_message_participants_dict: Dict[int, Set[int]] = defaultdict(set)
    for sender_id, message_rows in itertools.groupby(
        direct_messages_recipient_users, itemgetter("sender_id")
    ):
        recipient_user_ids = {row["recipient__type_id"] for row in message_rows}
        if not include_deactivated_users:
            recipient_user_ids &= set(active_direct_messages_recipient_user_ids)

        direct_message_participants_dict[sender_id] = recipient_user_ids

    # Now the reverse direction: users who sent a 1:1 DM to a target user.
    personal_recipient_ids_for_target_users = [user.recipient_id for user in target_users]
    direct_message_senders_query = Message.objects.filter(
        realm=realm,
        recipient_id__in=personal_recipient_ids_for_target_users,
        recipient__type=Recipient.PERSONAL,
    )

    if not include_deactivated_users:
        direct_message_senders_query = direct_message_senders_query.filter(sender__is_active=True)

    # Ordered by recipient__type_id (the target user's ID) so groupby
    # below works.
    direct_messages_senders = (
        direct_message_senders_query.order_by("recipient__type_id")
        .distinct("sender_id", "recipient__type_id")
        .values("sender_id", "recipient__type_id")
    )

    for recipient_user_id, message_rows in itertools.groupby(
        direct_messages_senders, itemgetter("recipient__type_id")
    ):
        sender_ids = {row["sender_id"] for row in message_rows}
        direct_message_participants_dict[recipient_user_id] |= sender_ids

    return direct_message_participants_dict
|
|
|
|
|
|
|
|
|
2023-11-08 11:13:25 +01:00
|
|
|
def user_profile_to_user_row(user_profile: UserProfile) -> RawUserDict:
    """Build the RawUserDict representation of a UserProfile object,
    mirroring the shape of rows fetched with
    .values(*realm_user_dict_fields).
    """
    # Each RawUserDict key matches the UserProfile attribute of the
    # same name, so the fields can be copied over mechanically. The
    # order matches the original explicit keyword construction.
    field_names = (
        "id",
        "full_name",
        "email",
        "avatar_source",
        "avatar_version",
        "is_active",
        "role",
        "is_billing_admin",
        "is_bot",
        "timezone",
        "date_joined",
        "bot_owner_id",
        "delivery_email",
        "bot_type",
        "long_term_idle",
        "email_address_visibility",
    )
    return RawUserDict(**{name: getattr(user_profile, name) for name in field_names})
|
2020-01-14 18:19:35 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
cache: Use a single cache entry for cross-realm bots.
The cross-realm bots rarely change, and there are only
a few of them, so we just query them all at once and
put them in the cache.
Also, we put the dictionaries in the cache, instead of
the user objects, since there is nothing time-sensitive
about the dictionaries, and they are small. This saves
us a little time computing the avatar url and things
like that, not to mention marshalling costs.
This commit also fixes a theoretical bug where we would
have stale cache entries if somebody somehow modified
the cross-realm bots without bumping KEY_PREFIX.
Internally we no longer pre-fetch the realm objects for
the bots, but we don't get overly precise about picking
individual fields from UserProfile, since we rarely hit
the database and since we don't store raw ORM objects
in the cache.
The test diffs make it look like we are hitting the
cache an extra time, but the tests weren't counting
bulk fetches. Now we only use a single key for all
bots rather a key per bot.
2023-07-19 14:06:56 +02:00
|
|
|
@cache_with_key(get_cross_realm_dicts_key)
def get_cross_realm_dicts() -> List[APIUserDict]:
    """Return API dictionaries for all cross-realm bots, sorted by full
    name. The whole list is cached under a single key (see the
    cache_with_key decorator), since these bots rarely change.
    """
    user_dict = bulk_get_cross_realm_bots()
    users = sorted(user_dict.values(), key=lambda user: user.full_name)
    result = []
    for user in users:
        user_row = user_profile_to_user_row(user)
        # Because we want to avoid clients being exposed to the
        # implementation detail that these bots are self-owned, we
        # just set bot_owner_id=None.
        user_row["bot_owner_id"] = None

        result.append(
            format_user_row(
                user.realm_id,
                acting_user=user,
                row=user_row,
                client_gravatar=False,
                user_avatar_url_field_optional=False,
                custom_profile_field_data=None,
            )
        )

    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-03-27 16:32:30 +02:00
|
|
|
def get_data_for_inaccessible_user(realm: Realm, user_id: int) -> APIUserDict:
    """Return a placeholder APIUserDict for a user the requester cannot
    access: a fake email, a placeholder name and avatar, and no
    personal data.
    """
    fake_email = Address(
        username=f"user{user_id}", domain=get_fake_email_domain(realm.host)
    ).addr_spec

    # We just set date_joined field to UNIX epoch.
    user_date_joined = timestamp_to_datetime(0)

    user_dict = APIUserDict(
        email=fake_email,
        user_id=user_id,
        avatar_version=1,
        is_admin=False,
        is_owner=False,
        is_guest=False,
        is_billing_admin=False,
        role=UserProfile.ROLE_MEMBER,
        is_bot=False,
        full_name=str(UserProfile.INACCESSIBLE_USER_NAME),
        timezone="",
        is_active=True,
        date_joined=user_date_joined.isoformat(),
        delivery_email=None,
        avatar_url=get_avatar_for_inaccessible_user(),
        profile_data={},
    )
    return user_dict
|
|
|
|
|
|
|
|
|
2023-10-11 09:34:26 +02:00
|
|
|
def get_accessible_user_ids(
    realm: Realm, user_profile: UserProfile, include_deactivated_users: bool = False
) -> List[int]:
    """Return the IDs of the human users user_profile can access: the
    user themselves, subscribers of their streams/group DMs, and their
    1:1 DM partners.

    This does not include bots, because either the caller wants only
    human users or it handles bots separately.
    """
    target_id = user_profile.id

    subscriber_ids_by_user = get_subscribers_of_target_user_subscriptions(
        [user_profile], include_deactivated_users_for_huddles=include_deactivated_users
    )
    dm_partner_ids_by_user = get_users_involved_in_dms_with_target_users(
        [user_profile], realm, include_deactivated_users=include_deactivated_users
    )

    accessible_ids: Set[int] = {target_id}
    accessible_ids |= subscriber_ids_by_user[target_id]
    accessible_ids |= dm_partner_ids_by_user[target_id]

    return list(accessible_ids)
|
|
|
|
|
|
|
|
|
|
|
|
def get_user_dicts_in_realm(
    realm: Realm, user_profile: Optional[UserProfile]
) -> Tuple[List[RawUserDict], List[APIUserDict]]:
    """Fetch all user rows in the realm, split into (accessible raw rows,
    placeholder dicts for users that user_profile cannot access).

    The second list is empty when user_profile can access all users
    (including the spectator case, user_profile=None).
    """
    group_allowed_to_access_all_users = realm.can_access_all_users_group
    assert group_allowed_to_access_all_users is not None

    all_user_dicts = get_realm_user_dicts(realm.id)
    if check_user_can_access_all_users(user_profile):
        return (all_user_dicts, [])

    # check_user_can_access_all_users returns True for None
    # (spectators), so user_profile is non-None here.
    assert user_profile is not None
    accessible_user_ids = get_accessible_user_ids(
        realm, user_profile, include_deactivated_users=True
    )

    accessible_user_dicts: List[RawUserDict] = []
    inaccessible_user_dicts: List[APIUserDict] = []
    for user_dict in all_user_dicts:
        # Bots are always accessible to everyone.
        if user_dict["id"] in accessible_user_ids or user_dict["is_bot"]:
            accessible_user_dicts.append(user_dict)
        else:
            inaccessible_user_dicts.append(get_data_for_inaccessible_user(realm, user_dict["id"]))

    return (accessible_user_dicts, inaccessible_user_dicts)
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_custom_profile_field_values(
    custom_profile_field_values: Iterable[CustomProfileFieldValue],
) -> Dict[int, Dict[str, Any]]:
    """Group custom profile field values by user.

    Returns a mapping from user_profile_id to a dict keyed by the
    stringified field ID; each entry always carries "value" and, for
    renderable field types, "rendered_value" as well.
    """
    profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
    for profile_field_value in custom_profile_field_values:
        entry: Dict[str, Any] = {"value": profile_field_value.value}
        if profile_field_value.field.is_renderable():
            entry["rendered_value"] = profile_field_value.rendered_value
        user_fields = profiles_by_user_id[profile_field_value.user_profile_id]
        user_fields[str(profile_field_value.field_id)] = entry
    return profiles_by_user_id
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-11-08 08:21:24 +01:00
|
|
|
def get_users_for_api(
    realm: Realm,
    acting_user: Optional[UserProfile],
    *,
    target_user: Optional[UserProfile] = None,
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    include_custom_profile_fields: bool = True,
    user_list_incomplete: bool = False,
) -> Dict[int, APIUserDict]:
    """Fetches data about the target user(s) appropriate for sending to
    acting_user via the standard format for the Zulip API. If
    target_user is None, we fetch all users in the realm.

    Returns a dict mapping user ID to the user's APIUserDict. When
    user_list_incomplete is True, users inaccessible to acting_user
    are omitted entirely instead of being replaced by placeholder
    dicts.
    """
    profiles_by_user_id = None
    custom_profile_field_data = None
    # target_user is an optional parameter which is passed when user data of a specific user
    # is required. It is 'None' otherwise.
    accessible_user_dicts: List[RawUserDict] = []
    inaccessible_user_dicts: List[APIUserDict] = []
    if target_user is not None:
        accessible_user_dicts = [user_profile_to_user_row(target_user)]
    else:
        accessible_user_dicts, inaccessible_user_dicts = get_user_dicts_in_realm(realm, acting_user)

    if include_custom_profile_fields:
        base_query = CustomProfileFieldValue.objects.select_related("field")
        # TODO: Consider optimizing this query away with caching.
        if target_user is not None:
            custom_profile_field_values = base_query.filter(user_profile=target_user)
        else:
            custom_profile_field_values = base_query.filter(field__realm_id=realm.id)
        profiles_by_user_id = get_custom_profile_field_values(custom_profile_field_values)

    result = {}
    for row in accessible_user_dicts:
        if profiles_by_user_id is not None:
            custom_profile_field_data = profiles_by_user_id.get(row["id"], {})
        # The client_gravatar optimization only applies to users whose
        # email address is visible to everyone.
        client_gravatar_for_user = (
            client_gravatar
            and row["email_address_visibility"] == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE
        )
        result[row["id"]] = format_user_row(
            realm.id,
            acting_user=acting_user,
            row=row,
            client_gravatar=client_gravatar_for_user,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            custom_profile_field_data=custom_profile_field_data,
        )

    if not user_list_incomplete:
        for inaccessible_user_row in inaccessible_user_dicts:
            # We already have the required data for inaccessible users
            # in row object, so we can just add it to result directly.
            user_id = inaccessible_user_row["user_id"]
            result[user_id] = inaccessible_user_row

    return result
|
2022-04-14 23:43:58 +02:00
|
|
|
|
|
|
|
|
2022-06-23 22:50:57 +02:00
|
|
|
def get_active_bots_owned_by_user(user_profile: UserProfile) -> QuerySet[UserProfile]:
    """Return a queryset of the active bot accounts owned by user_profile."""
    return UserProfile.objects.filter(
        is_bot=True,
        is_active=True,
        bot_owner=user_profile,
    )
|
2022-07-08 23:06:28 +02:00
|
|
|
|
|
|
|
|
2022-07-23 00:02:23 +02:00
|
|
|
def is_2fa_verified(user: UserProfile) -> bool:
    """Safely check whether `user` has completed two-factor verification.

    Calling django_otp's `is_verified` directly on `request.user` is
    generally unsafe: an `AnonymousUser` lacks the `otp_device`
    attribute, and the check is meaningless when 2FA is disabled.  This
    wrapper centralizes those assumptions so callers can rely on a
    single safe entry point.
    """
    # Callers are required to have already checked that
    # settings.TWO_FACTOR_AUTHENTICATION_ENABLED is True; enforce that
    # contract explicitly here.
    assert settings.TWO_FACTOR_AUTHENTICATION_ENABLED
    return is_verified(user)
|
2022-05-25 13:13:31 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_users_with_access_to_real_email(user_profile: UserProfile) -> List[int]:
    """Return the IDs of active users permitted to see user_profile's
    real (delivery) email address, per the realm's access rules and the
    user's email_address_visibility setting."""
    if not user_access_restricted_in_realm(user_profile):
        candidate_users = user_profile.realm.get_active_users()
    else:
        # get_user_ids_who_can_access_user returns bare user IDs rather
        # than UserProfile objects, so we issue one extra query here for
        # the full rows.  Complete UserProfile objects are only needed
        # in a couple of cases, so it is not worth fetching them
        # everywhere; paying for the extra query where required is fine.
        accessible_user_ids = get_user_ids_who_can_access_user(user_profile)
        candidate_users = UserProfile.objects.filter(
            id__in=accessible_user_ids, is_active=True
        )

    user_ids_with_access = []
    for candidate in candidate_users:
        if can_access_delivery_email(
            candidate,
            user_profile.id,
            user_profile.email_address_visibility,
        ):
            user_ids_with_access.append(candidate.id)
    return user_ids_with_access
|
2023-08-30 23:40:24 +02:00
|
|
|
|
|
|
|
|
|
|
|
def max_message_id_for_user(user_profile: Optional[UserProfile]) -> int:
    """Return the highest message ID in user_profile's message history,
    or -1 when the user is None or has no messages at all."""
    if user_profile is None:
        return -1
    # Fetch only the single newest UserMessage row, and only its
    # message_id column, to keep the query cheap.
    newest = (
        UserMessage.objects.filter(user_profile=user_profile)
        .only("message_id")
        .order_by("-message_id")
        .first()
    )
    return newest.message_id if newest else -1
|