2020-04-25 19:18:13 +02:00
|
|
|
import re
|
2019-06-29 04:41:13 +02:00
|
|
|
import unicodedata
|
2020-01-13 22:11:19 +01:00
|
|
|
from collections import defaultdict
|
2022-10-06 11:56:48 +02:00
|
|
|
from typing import Any, Dict, Iterable, List, Mapping, Optional, Sequence, TypedDict
|
2019-06-29 04:41:13 +02:00
|
|
|
|
2022-02-16 13:40:43 +01:00
|
|
|
import dateutil.parser as date_parser
|
2020-01-13 22:11:19 +01:00
|
|
|
from django.conf import settings
|
2020-06-21 02:36:20 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
2023-03-04 01:52:14 +01:00
|
|
|
from django.db.models import QuerySet
|
2020-01-14 18:19:35 +01:00
|
|
|
from django.forms.models import model_to_dict
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2022-07-08 23:06:28 +02:00
|
|
|
from django_otp.middleware import is_verified
|
2020-06-11 00:54:34 +02:00
|
|
|
from zulip_bots.custom_exceptions import ConfigValidationError
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2020-01-13 22:11:19 +01:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_field
|
2023-08-03 02:09:35 +02:00
|
|
|
from zerver.lib.cache import cache_with_key, get_cross_realm_dicts_key, realm_user_dict_fields
|
2022-01-07 21:47:11 +01:00
|
|
|
from zerver.lib.exceptions import (
|
|
|
|
JsonableError,
|
2022-11-17 09:30:48 +01:00
|
|
|
OrganizationAdministratorRequiredError,
|
|
|
|
OrganizationOwnerRequiredError,
|
2022-01-07 21:47:11 +01:00
|
|
|
)
|
2020-10-27 01:41:00 +01:00
|
|
|
from zerver.lib.timezone import canonicalize_timezone
|
2022-07-08 17:17:46 +02:00
|
|
|
from zerver.lib.types import ProfileDataElementUpdateDict, ProfileDataElementValue
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
CustomProfileField,
|
|
|
|
CustomProfileFieldValue,
|
|
|
|
Realm,
|
|
|
|
Service,
|
2023-08-30 23:40:24 +02:00
|
|
|
UserMessage,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserProfile,
|
|
|
|
get_realm_user_dicts,
|
2022-02-21 09:33:08 +01:00
|
|
|
get_user,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_user_profile_by_id_in_realm,
|
2022-09-16 14:27:32 +02:00
|
|
|
is_cross_realm_bot_email,
|
2020-06-11 00:54:34 +02:00
|
|
|
)
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def check_full_name(full_name_raw: str) -> str:
    """Validate and normalize a user's full name.

    Returns the stripped name; raises JsonableError if the name is too
    long/short, contains control or banned characters, or ends with a
    ``|<digits>`` suffix.
    """
    full_name = full_name_raw.strip()
    if len(full_name) > UserProfile.MAX_NAME_LENGTH:
        raise JsonableError(_("Name too long!"))
    if len(full_name) < UserProfile.MIN_NAME_LENGTH:
        raise JsonableError(_("Name too short!"))

    # Reject Unicode control characters (category "C*") and the realm's
    # explicitly banned characters.
    if any(
        unicodedata.category(ch).startswith("C") or ch in UserProfile.NAME_INVALID_CHARS
        for ch in full_name
    ):
        raise JsonableError(_("Invalid characters in name!"))

    # Names ending with e.g. `|15` could be ambiguous for
    # sloppily-written parsers of our Markdown syntax for mentioning
    # users with ambiguous names, and likely have no real use, so we
    # ban them.
    if re.search(r"\|\d+$", full_name_raw):
        raise JsonableError(_("Invalid format!"))
    return full_name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-25 02:01:34 +02:00
|
|
|
# NOTE: We don't try to absolutely prevent 2 bots from having the same
# name (e.g. you can get there by reactivating a deactivated bot after
# making a new bot with the same name). This is just a check designed
# to make it unlikely to happen by accident.
def check_bot_name_available(realm_id: int, full_name: str) -> None:
    """Raise JsonableError if any active user in the realm already has this name."""
    duplicates = UserProfile.objects.filter(
        realm_id=realm_id,
        full_name=full_name.strip(),
        is_active=True,
    )
    if duplicates.exists():
        raise JsonableError(_("Name is already in use!"))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def check_short_name(short_name_raw: str) -> str:
    """Return the stripped short name, rejecting names that are empty after stripping."""
    short_name = short_name_raw.strip()
    if not short_name:
        raise JsonableError(_("Bad name or username"))
    return short_name
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-10-06 11:56:48 +02:00
|
|
|
def check_valid_bot_config(
    bot_type: int, service_name: str, config_data: Mapping[str, str]
) -> None:
    """Validate the configuration data supplied when creating a bot.

    For incoming webhook bots: checks that service_name names a known
    webhook integration, that every configuration key the integration
    declares is present in config_data, and that each value passes the
    integration's per-key validator.

    For embedded bots: delegates to the bot handler's own
    validate_config hook, if it defines one.

    Raises JsonableError on any validation failure; other bot types are
    accepted without checks.
    """
    if bot_type == UserProfile.INCOMING_WEBHOOK_BOT:
        from zerver.lib.integrations import WEBHOOK_INTEGRATIONS

        config_options = None
        for integration in WEBHOOK_INTEGRATIONS:
            if integration.name == service_name:
                # key: validator
                config_options = {c[1]: c[2] for c in integration.config_options}
                break
        # Compare against the None sentinel rather than truthiness: an
        # integration that declares zero config options produces an empty
        # dict here, and must not be misreported as an unknown integration.
        if config_options is None:
            raise JsonableError(
                _("Invalid integration '{integration_name}'.").format(integration_name=service_name)
            )

        missing_keys = set(config_options.keys()) - set(config_data.keys())
        if missing_keys:
            raise JsonableError(
                _("Missing configuration parameters: {keys}").format(
                    keys=missing_keys,
                )
            )

        # Run each declared key's validator; validators return an error
        # string on failure, None on success.
        for key, validator in config_options.items():
            value = config_data[key]
            error = validator(key, value)
            if error is not None:
                raise JsonableError(
                    _("Invalid {key} value {value} ({error})").format(
                        key=key, value=value, error=error
                    )
                )

    elif bot_type == UserProfile.EMBEDDED_BOT:
        try:
            from zerver.lib.bot_lib import get_bot_handler

            bot_handler = get_bot_handler(service_name)
            if hasattr(bot_handler, "validate_config"):
                bot_handler.validate_config(config_data)
        except ConfigValidationError:
            # The exception provides a specific error message, but that
            # message is not tagged translatable, because it is
            # triggered in the external zulip_bots package.
            # TODO: Think of some clever way to provide a more specific
            # error message.
            raise JsonableError(_("Invalid configuration data!"))
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-01 17:25:15 +02:00
|
|
|
def add_service(
    name: str,
    user_profile: UserProfile,
    base_url: str,
    interface: int,
    token: str,
) -> None:
    """Create the Service row backing an outgoing webhook or embedded bot."""
    Service.objects.create(
        name=name,
        user_profile=user_profile,
        base_url=base_url,
        interface=interface,
        token=token,
    )
|
|
|
|
|
2018-06-01 17:25:15 +02:00
|
|
|
|
2018-01-29 16:10:54 +01:00
|
|
|
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
    """Enforce the realm's bot_creation_policy for the acting user.

    Raises OrganizationAdministratorRequiredError when the policy forbids
    this user from creating a bot of the given type; returns silently
    otherwise.
    """
    # Realm administrators can always add bots.
    if user_profile.is_realm_admin:
        return

    policy = user_profile.realm.bot_creation_policy
    if policy == Realm.BOT_CREATION_EVERYONE:
        return
    if policy == Realm.BOT_CREATION_ADMINS_ONLY:
        raise OrganizationAdministratorRequiredError
    # BOT_CREATION_LIMIT_GENERIC_BOTS: only generic (default) bots are
    # restricted to administrators.
    if policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS and bot_type == UserProfile.DEFAULT_BOT:
        raise OrganizationAdministratorRequiredError
|
2018-01-29 16:10:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-24 16:24:24 +01:00
|
|
|
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
    """Raise JsonableError unless this user is permitted to create bot_type."""
    permitted_types = user_profile.allowed_bot_types
    if bot_type not in permitted_types:
        raise JsonableError(_("Invalid bot type"))
|
2017-07-03 18:35:12 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-01 17:47:50 +02:00
|
|
|
def check_valid_interface_type(interface_type: Optional[int]) -> None:
    """Raise JsonableError unless interface_type is a supported Service interface."""
    valid_types = Service.ALLOWED_INTERFACE_TYPES
    if interface_type not in valid_types:
        raise JsonableError(_("Invalid interface type"))
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-30 21:43:19 +02:00
|
|
|
def is_administrator_role(role: int) -> bool:
    """Return True for roles with administrative rights (realm admin or owner)."""
    return role in (UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-19 12:10:10 +02:00
|
|
|
def bulk_get_cross_realm_bots() -> Dict[str, UserProfile]:
    """Fetch every cross-realm system bot (in the system bot realm) with a
    single query, returned as a dict mapping lowercased email -> UserProfile.
    """
    emails = list(settings.CROSS_REALM_BOT_EMAILS)

    # This should be just
    #
    # UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
    # realm=realm)
    #
    # But chaining __in and __iexact doesn't work with Django's
    # ORM, so we have the following hack to construct the relevant where clause
    #
    # NOTE(review): this raw SQL assumes a PostgreSQL backend (::text cast
    # and unnest()) — confirm no other database backends hit this path.
    where_clause = (
        "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
    )
    users = UserProfile.objects.filter(realm__string_id=settings.SYSTEM_BOT_REALM).extra(
        where=[where_clause], params=(emails,)
    )

    # Key by lowercased email so lookups are case-insensitive, matching
    # the case-insensitive SQL comparison above.
    return {user.email.lower(): user for user in users}
|
2017-11-16 02:28:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-13 05:24:42 +02:00
|
|
|
def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
    """Resolve user_ids to UserProfile rows within the given realm.

    Raises JsonableError naming the first requested ID (in input order)
    that has no matching user in the realm.
    """
    # TODO: Consider adding a flag to control whether deactivated
    # users should be included.

    rows = UserProfile.objects.filter(id__in=user_ids, realm=realm).select_related("realm")
    user_profiles = list(rows)

    fetched_ids = {profile.id for profile in user_profiles}
    for requested_id in user_ids:
        if requested_id not in fetched_ids:
            raise JsonableError(_("Invalid user ID: {user_id}").format(user_id=requested_id))

    return user_profiles
|
2018-05-28 20:42:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-28 20:42:31 +02:00
|
|
|
def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
    """Look up a bot by ID in the acting user's realm, enforcing that the
    acting user is allowed to administer it.

    Raises JsonableError if the ID doesn't resolve to a bot in the realm
    or the acting user lacks permission, and OrganizationOwnerRequiredError
    for non-owners touching a bot with the can_create_users permission.
    """
    try:
        target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        target = None

    # Use the same error for "no such user" and "user is not a bot" so we
    # don't leak whether the ID belongs to a human account.
    if target is None or not target.is_bot:
        raise JsonableError(_("No such bot"))

    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))

    if target.can_create_users and not user_profile.is_realm_owner:
        # Organizations owners are required to administer a bot with
        # the can_create_users permission. User creation via the API
        # is a permission not available even to organization owners by
        # default, because it can be abused to send spam. Requiring an
        # owner is intended to ensure organizational responsibility
        # for use of this permission.
        raise OrganizationOwnerRequiredError

    return target
|
2018-06-04 07:04:19 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-02-21 07:28:42 +01:00
|
|
|
def access_user_common(
    target: UserProfile,
    user_profile: UserProfile,
    allow_deactivated: bool,
    allow_bots: bool,
    for_admin: bool,
) -> UserProfile:
    """Shared permission checks for accessing target on behalf of user_profile.

    Raises JsonableError when target is a bot (unless allow_bots),
    deactivated (unless allow_deactivated), or when administrative access
    is requested (for_admin) but user_profile may not administer target.
    """
    if target.is_bot and not allow_bots:
        raise JsonableError(_("No such user"))
    if not target.is_active and not allow_deactivated:
        raise JsonableError(_("User is deactivated"))
    # Administrative access is not required just to read a user, so the
    # can_admin_user check only applies when for_admin is requested.
    if for_admin and not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))
    return target
|
|
|
|
|
|
|
|
|
2021-01-28 18:04:43 +01:00
|
|
|
def access_user_by_id(
    user_profile: UserProfile,
    target_user_id: int,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Master function for accessing another user by ID in API code.

    Verifies the target ID belongs to the acting user's realm, then
    applies the shared access_user_common checks (deactivated/bot/admin
    permissions) controlled by the keyword flags.
    """
    try:
        target_user = get_user_profile_by_id_in_realm(target_user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    return access_user_common(target_user, user_profile, allow_deactivated, allow_bots, for_admin)
|
|
|
|
|
|
|
|
|
|
|
|
def access_user_by_email(
    user_profile: UserProfile,
    email: str,
    *,
    allow_deactivated: bool = False,
    allow_bots: bool = False,
    for_admin: bool,
) -> UserProfile:
    """Variant of access_user_by_id that resolves the target by email
    within the acting user's realm, then applies the same shared
    access_user_common permission checks.
    """
    try:
        target_user = get_user(email, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    return access_user_common(target_user, user_profile, allow_deactivated, allow_bots, for_admin)
|
2018-06-19 10:55:56 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-07-31 23:15:51 +02:00
|
|
|
class Account(TypedDict):
    """Summary of one active account for the multi-realm "your accounts"
    listing produced by get_accounts_for_email.
    """

    # Display name and ID of the realm the account lives in.
    realm_name: str
    realm_id: int
    # The user's full name within that realm.
    full_name: str
    # Avatar URL as computed by avatar_url(); presumably None when no
    # avatar URL is available for the profile — confirm against avatar_url.
    avatar: Optional[str]
|
|
|
|
|
|
|
|
|
2023-07-31 23:15:51 +02:00
|
|
|
def get_accounts_for_email(email: str) -> List[Account]:
    """Return one Account summary per active, non-bot user matching email
    (case-insensitively) in any non-deactivated realm, ordered by when
    the account was created.
    """
    profiles = (
        UserProfile.objects.select_related("realm")
        .filter(
            delivery_email__iexact=email.strip(),
            is_active=True,
            realm__deactivated=False,
            is_bot=False,
        )
        .order_by("date_joined")
    )

    accounts: List[Account] = []
    for profile in profiles:
        accounts.append(
            dict(
                realm_name=profile.realm.name,
                realm_id=profile.realm.id,
                full_name=profile.full_name,
                avatar=avatar_url(profile),
            )
        )
    return accounts
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-01 10:53:40 +02:00
|
|
|
|
|
|
|
def get_api_key(user_profile: UserProfile) -> str:
    """Return the user's current API key."""
    return user_profile.api_key
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-01 10:53:40 +02:00
|
|
|
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
    """Return all of the user's API keys as a list.

    Kept list-valued so the API shape won't change if multiple keys are
    ever supported.
    """
    # Users can only have one API key for now
    return [user_profile.api_key]
|
2018-09-04 20:23:44 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def validate_user_custom_profile_field(
    realm_id: int, field: CustomProfileField, value: ProfileDataElementValue
) -> ProfileDataElementValue:
    """Validate (and possibly normalize) one custom profile field value.

    Dispatches on field.field_type to the matching validator and returns
    the validated value; validators raise ValidationError on bad input.
    Raises AssertionError for an unrecognized field type.
    """
    validators = CustomProfileField.FIELD_VALIDATORS
    field_type = field.field_type
    # field.name is already a str; the original f-string wrapper
    # (f"{field.name}") was a no-op, so use the attribute directly.
    var_name = field.name
    if field_type in validators:
        validator = validators[field_type]
        return validator(var_name, value)
    elif field_type == CustomProfileField.SELECT:
        choice_field_validator = CustomProfileField.SELECT_FIELD_VALIDATORS[field_type]
        field_data = field.field_data
        # Put an assertion so that mypy doesn't complain.
        assert field_data is not None
        return choice_field_validator(var_name, field_data, value)
    elif field_type == CustomProfileField.USER:
        user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
        return user_field_validator(realm_id, value, False)
    else:
        raise AssertionError("Invalid field type")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def validate_user_custom_profile_data(
    realm_id: int, profile_data: List[ProfileDataElementUpdateDict]
) -> None:
    """Validate each custom profile field value in profile_data according
    to its field's type, raising JsonableError on the first failure.
    """
    for item in profile_data:
        field_id = item["id"]
        try:
            # NOTE(review): this lookup is not filtered by realm_id, so a
            # field ID from another realm would resolve here; presumably
            # callers guarantee realm-local IDs — confirm upstream checks.
            field = CustomProfileField.objects.get(id=field_id)
        except CustomProfileField.DoesNotExist:
            raise JsonableError(_("Field id {id} not found.").format(id=field_id))

        try:
            validate_user_custom_profile_field(realm_id, field, item["value"])
        except ValidationError as error:
            # Re-raise as a client-facing JsonableError with the
            # validator's message.
            raise JsonableError(error.message)
|
2020-01-13 18:47:30 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-12-11 08:17:57 +01:00
|
|
|
def can_access_delivery_email(
    user_profile: UserProfile,
    target_user_id: int,
    email_address_visibility: int,
) -> bool:
    """Return whether user_profile may see the delivery (real) email
    address of the target user, given the target's
    email_address_visibility setting.
    """
    # Users can always see their own delivery email.
    if target_user_id == user_profile.id:
        return True

    # Bots always have email_address_visibility as EMAIL_ADDRESS_VISIBILITY_EVERYONE.
    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        return True

    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS:
        return user_profile.is_realm_admin

    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS:
        return user_profile.is_realm_admin or user_profile.is_moderator

    if email_address_visibility == UserProfile.EMAIL_ADDRESS_VISIBILITY_MEMBERS:
        # All non-guest users qualify.
        return not user_profile.is_guest

    # Any other setting (e.g. visible to nobody) denies access.
    return False
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def format_user_row(
    realm_id: int,
    acting_user: Optional[UserProfile],
    row: Dict[str, Any],
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    custom_profile_field_data: Optional[Dict[str, Any]] = None,
) -> Dict[str, Any]:
    """Formats a user row returned by a database fetch using
    .values(*realm_user_dict_fields) into a dictionary representation
    of that user for API delivery to clients. The acting_user
    argument is used for permissions checks (and is None for
    spectators, who receive a reduced payload).
    """

    # Derive the boolean role flags clients expect from the numeric role.
    is_admin = is_administrator_role(row["role"])
    is_owner = row["role"] == UserProfile.ROLE_REALM_OWNER
    is_guest = row["role"] == UserProfile.ROLE_GUEST
    is_bot = row["is_bot"]
    result = dict(
        email=row["email"],
        user_id=row["id"],
        avatar_version=row["avatar_version"],
        is_admin=is_admin,
        is_owner=is_owner,
        is_guest=is_guest,
        is_billing_admin=row["is_billing_admin"],
        role=row["role"],
        is_bot=is_bot,
        full_name=row["full_name"],
        timezone=canonicalize_timezone(row["timezone"]),
        is_active=row["is_active"],
        date_joined=row["date_joined"].isoformat(),
    )

    if acting_user is None:
        # Remove data about other users which are not useful to spectators
        # or can reveal personal information about a user.
        # Only send day level precision date_joined data to spectators.
        del result["is_billing_admin"]
        del result["timezone"]
        result["date_joined"] = str(date_parser.parse(result["date_joined"]).date())

    # Zulip clients that support using `GET /avatar/{user_id}` as a
    # fallback if we didn't send an avatar URL in the user object pass
    # user_avatar_url_field_optional in client_capabilities.
    #
    # This is a major network performance optimization for
    # organizations with 10,000s of users where we would otherwise
    # send avatar URLs in the payload (either because most users have
    # uploaded avatars or because EMAIL_ADDRESS_VISIBILITY_ADMINS
    # prevents the older client_gravatar optimization from helping).
    # The performance impact is large largely because the hashes in
    # avatar URLs structurally cannot compress well.
    #
    # The user_avatar_url_field_optional gives the server sole
    # discretion in deciding for which users we want to send the
    # avatar URL (Which saves clients an RTT at the cost of some
    # bandwidth). At present, the server looks at `long_term_idle` to
    # decide which users to include avatars for, piggy-backing on a
    # different optimization for organizations with 10,000s of users.
    include_avatar_url = not user_avatar_url_field_optional or not row["long_term_idle"]
    if include_avatar_url:
        result["avatar_url"] = get_avatar_field(
            user_id=row["id"],
            realm_id=realm_id,
            email=row["delivery_email"],
            avatar_source=row["avatar_source"],
            avatar_version=row["avatar_version"],
            medium=False,
            client_gravatar=client_gravatar,
        )

    # delivery_email is always present in the payload but is None unless
    # the acting user passes the visibility-policy check (spectators
    # never see it).
    if acting_user is not None and can_access_delivery_email(
        acting_user, row["id"], row["email_address_visibility"]
    ):
        result["delivery_email"] = row["delivery_email"]
    else:
        result["delivery_email"] = None

    if is_bot:
        result["bot_type"] = row["bot_type"]
        if is_cross_realm_bot_email(row["email"]):
            result["is_system_bot"] = True

        # Note that bot_owner_id can be None with legacy data.
        result["bot_owner_id"] = row["bot_owner_id"]
    elif custom_profile_field_data is not None:
        # Humans only: attach custom profile field values when provided.
        result["profile_data"] = custom_profile_field_data
    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-01-31 23:41:41 +01:00
|
|
|
def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
    """Simulate the dict that fetching this user via a QuerySet with
    `.values(*realm_user_dict_fields)` would have produced, given a
    full UserProfile object instead.  This is messier than it seems.

    We'd like to just call model_to_dict(user_profile,
    fields=realm_user_dict_fields), but model_to_dict uses a different
    convention than `.values()` for foreign keys, naming them
    e.g. `bot_owner` rather than `bot_owner_id`; we work around that
    here by renaming the key after the fact.

    This could potentially be simplified in the future by changing
    realm_user_dict_fields to name the bot owner with the less
    readable `bot_owner` (instead of `bot_owner_id`).
    """
    row = model_to_dict(user_profile, fields=[*realm_user_dict_fields, "bot_owner"])
    # Rename model_to_dict's `bot_owner` key to the `.values()`-style
    # `bot_owner_id` that callers expect.
    row["bot_owner_id"] = row.pop("bot_owner")
    return row
|
2020-01-14 18:19:35 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
cache: Use a single cache entry for cross-realm bots.
The cross-realm bots rarely change, and there are only
a few of them, so we just query them all at once and
put them in the cache.
Also, we put the dictionaries in the cache, instead of
the user objects, since there is nothing time-sensitive
about the dictionaries, and they are small. This saves
us a little time computing the avatar url and things
like that, not to mention marshalling costs.
This commit also fixes a theoretical bug where we would
have stale cache entries if somebody somehow modified
the cross-realm bots without bumping KEY_PREFIX.
Internally we no longer pre-fetch the realm objects for
the bots, but we don't get overly precise about picking
individual fields from UserProfile, since we rarely hit
the database and since we don't store raw ORM objects
in the cache.
The test diffs make it look like we are hitting the
cache an extra time, but the tests weren't counting
bulk fetches. Now we only use a single key for all
bots rather a key per bot.
2023-07-19 14:06:56 +02:00
|
|
|
@cache_with_key(get_cross_realm_dicts_key)
def get_cross_realm_dicts() -> List[Dict[str, Any]]:
    """Return API-format user rows for the cross-realm bots, sorted by
    full name.  The whole result is cached under a single key (see the
    cache_with_key decorator), since these bots rarely change.
    """
    bots_by_key = bulk_get_cross_realm_bots()
    sorted_bots = sorted(bots_by_key.values(), key=lambda bot: bot.full_name)

    result = []
    for bot in sorted_bots:
        row = user_profile_to_user_row(bot)
        # Because we want to avoid clients being exposed to the
        # implementation detail that these bots are self-owned, we
        # just set bot_owner_id=None.
        row["bot_owner_id"] = None

        formatted_row = format_user_row(
            bot.realm_id,
            acting_user=bot,
            row=row,
            client_gravatar=False,
            user_avatar_url_field_optional=False,
            custom_profile_field_data=None,
        )
        result.append(formatted_row)
    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_custom_profile_field_values(
    custom_profile_field_values: Iterable[CustomProfileFieldValue],
) -> Dict[int, Dict[str, Any]]:
    """Group custom profile field values by user.

    Returns a mapping from user id to a dict keyed by the field id (as
    a string); each entry always contains "value", and additionally
    contains "rendered_value" for field types that are renderable.

    The renderable and non-renderable branches previously duplicated
    the dict construction; we now build the common part once and add
    "rendered_value" only when applicable.
    """
    profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
    for profile_field in custom_profile_field_values:
        user_id = profile_field.user_profile_id
        field_data: Dict[str, Any] = {
            "value": profile_field.value,
        }
        if profile_field.field.is_renderable():
            field_data["rendered_value"] = profile_field.rendered_value
        profiles_by_user_id[user_id][str(profile_field.field_id)] = field_data
    return profiles_by_user_id
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_raw_user_data(
    realm: Realm,
    acting_user: Optional[UserProfile],
    *,
    target_user: Optional[UserProfile] = None,
    client_gravatar: bool,
    user_avatar_url_field_optional: bool,
    include_custom_profile_fields: bool = True,
) -> Dict[int, Dict[str, str]]:
    """Fetches data about the target user(s) appropriate for sending to
    acting_user via the standard format for the Zulip API.  If
    target_user is None, we fetch all users in the realm.
    """
    # target_user is an optional parameter which is passed when user
    # data of a specific user is required. It is 'None' otherwise.
    if target_user is None:
        user_dicts = get_realm_user_dicts(realm.id)
    else:
        user_dicts = [user_profile_to_user_row(target_user)]

    profiles_by_user_id = None
    if include_custom_profile_fields:
        # TODO: Consider optimizing this query away with caching.
        field_values = CustomProfileFieldValue.objects.select_related("field")
        if target_user is None:
            field_values = field_values.filter(field__realm_id=realm.id)
        else:
            field_values = field_values.filter(user_profile=target_user)
        profiles_by_user_id = get_custom_profile_field_values(field_values)

    result = {}
    for row in user_dicts:
        custom_profile_field_data = None
        if profiles_by_user_id is not None:
            custom_profile_field_data = profiles_by_user_id.get(row["id"], {})
        # Only allow client-side gravatar computation when the user's
        # email address is visible to everyone.
        client_gravatar_for_user = (
            client_gravatar
            and row["email_address_visibility"] == UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE
        )
        result[row["id"]] = format_user_row(
            realm.id,
            acting_user=acting_user,
            row=row,
            client_gravatar=client_gravatar_for_user,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            custom_profile_field_data=custom_profile_field_data,
        )
    return result
|
2022-04-14 23:43:58 +02:00
|
|
|
|
|
|
|
|
2022-06-23 22:50:57 +02:00
|
|
|
def get_active_bots_owned_by_user(user_profile: UserProfile) -> QuerySet[UserProfile]:
    """Return a queryset of the active bot accounts owned by this user."""
    return UserProfile.objects.filter(
        bot_owner=user_profile,
        is_bot=True,
        is_active=True,
    )
|
2022-07-08 23:06:28 +02:00
|
|
|
|
|
|
|
|
2022-07-23 00:02:23 +02:00
|
|
|
def is_2fa_verified(user: UserProfile) -> bool:
    """
    Safely check whether a user has completed two-factor authentication.

    Calling django_otp's is_verified directly on `request.user` is
    generally unsafe: an `AnonymousUser` has no `otp_device` attribute,
    and the result of `is_verified` is meaningless unless 2FA is
    actually enabled.  This wrapper encapsulates those assumptions so
    the call is safe.
    """
    # Explicitly require the caller to ensure that settings.TWO_FACTOR_AUTHENTICATION_ENABLED
    # is True before calling `is_2fa_verified`.
    assert settings.TWO_FACTOR_AUTHENTICATION_ENABLED
    return is_verified(user)
|
2022-05-25 13:13:31 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_users_with_access_to_real_email(user_profile: UserProfile) -> List[int]:
    """Return the ids of active users in the realm who are permitted to
    see user_profile's delivery (real) email address, per its
    email_address_visibility setting.
    """
    visibility = user_profile.email_address_visibility
    user_ids: List[int] = []
    for other_user in user_profile.realm.get_active_users():
        if can_access_delivery_email(other_user, user_profile.id, visibility):
            user_ids.append(other_user.id)
    return user_ids
|
2023-08-30 23:40:24 +02:00
|
|
|
|
|
|
|
|
|
|
|
def max_message_id_for_user(user_profile: Optional[UserProfile]) -> int:
    """Return the largest message id in the user's message history, or
    -1 if the user is None or has no messages.
    """
    if user_profile is None:
        return -1
    newest = (
        UserMessage.objects.filter(user_profile=user_profile)
        .order_by("-message_id")
        .only("message_id")
        .first()
    )
    return newest.message_id if newest else -1
|