2020-04-25 19:18:13 +02:00
|
|
|
import re
|
2019-06-29 04:41:13 +02:00
|
|
|
import unicodedata
|
2020-01-13 22:11:19 +01:00
|
|
|
from collections import defaultdict
|
2020-06-13 05:24:42 +02:00
|
|
|
from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
|
2019-06-29 04:41:13 +02:00
|
|
|
|
2020-01-13 22:11:19 +01:00
|
|
|
from django.conf import settings
|
2017-11-16 02:28:50 +01:00
|
|
|
from django.db.models.query import QuerySet
|
2020-01-14 18:19:35 +01:00
|
|
|
from django.forms.models import model_to_dict
|
2017-02-08 04:39:55 +01:00
|
|
|
from django.utils.translation import ugettext as _
|
2020-06-11 00:54:34 +02:00
|
|
|
from zulip_bots.custom_exceptions import ConfigValidationError
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2020-01-13 22:11:19 +01:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_field
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.cache import (
|
|
|
|
generic_bulk_cached_fetch,
|
|
|
|
realm_user_dict_fields,
|
|
|
|
user_profile_by_id_cache_key,
|
|
|
|
user_profile_cache_key_id,
|
|
|
|
)
|
2019-11-16 15:53:56 +01:00
|
|
|
from zerver.lib.exceptions import OrganizationAdministratorRequired
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
|
|
|
from zerver.models import (
|
|
|
|
CustomProfileField,
|
|
|
|
CustomProfileFieldValue,
|
|
|
|
Realm,
|
|
|
|
Service,
|
|
|
|
UserProfile,
|
|
|
|
get_realm_user_dicts,
|
|
|
|
get_user_profile_by_id_in_realm,
|
|
|
|
)
|
2017-02-08 04:39:55 +01:00
|
|
|
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def check_full_name(full_name_raw: str) -> str:
    """Validate and normalize a user's full name.

    Returns the whitespace-stripped name.  Raises JsonableError if the
    name is too long, too short, contains control characters or
    characters in UserProfile.NAME_INVALID_CHARS, or ends with a
    ``|<digits>`` suffix.
    """
    full_name = full_name_raw.strip()
    if len(full_name) > UserProfile.MAX_NAME_LENGTH:
        raise JsonableError(_("Name too long!"))
    if len(full_name) < UserProfile.MIN_NAME_LENGTH:
        raise JsonableError(_("Name too short!"))
    for character in full_name:
        # Category "C*" is the Unicode "Other" group (control,
        # format, surrogate, private-use, unassigned).
        if (unicodedata.category(character)[0] == 'C' or
                character in UserProfile.NAME_INVALID_CHARS):
            raise JsonableError(_("Invalid characters in name!"))
    # Names ending with e.g. `|15` could be ambiguous for
    # sloppily-written parsers of our markdown syntax for mentioning
    # users with ambiguous names, and likely have no real use, so we
    # ban them.
    #
    # Note: we check the *stripped* name here; checking full_name_raw
    # allowed names like "foo|15 " (with trailing whitespace) through
    # even though the returned, stripped name ends with "|15".
    if re.search(r"\|\d+$", full_name):
        raise JsonableError(_("Invalid format!"))
    return full_name
|
|
|
|
|
2018-10-25 02:01:34 +02:00
|
|
|
def check_bot_name_available(realm_id: int, full_name: str) -> None:
    """Raise JsonableError if an active user in the realm already has
    this full name.

    NOTE: We don't try to absolutely prevent 2 bots from having the
    same name (e.g. you can get there by reactivating a deactivated
    bot after making a new bot with the same name).  This is just a
    check designed to make it unlikely to happen by accident.
    """
    name_taken = UserProfile.objects.filter(
        realm_id=realm_id,
        full_name=full_name.strip(),
        is_active=True,
    ).exists()

    if name_taken:
        raise JsonableError(_("Name is already in use!"))
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def check_short_name(short_name_raw: str) -> str:
    """Strip surrounding whitespace from a short name and reject names
    that are empty after stripping."""
    stripped = short_name_raw.strip()
    if not stripped:
        raise JsonableError(_("Bad name or username"))
    return stripped
|
|
|
|
|
2019-08-17 19:06:51 +02:00
|
|
|
def check_valid_bot_config(bot_type: int, service_name: str,
                           config_data: Dict[str, str]) -> None:
    """Validate config_data for a bot of the given type and service.

    For incoming webhook bots, checks config_data against the
    integration's declared config_options (all keys present, every
    value passing its validator).  For embedded bots, delegates to the
    bot handler's own validate_config, if it defines one.  Raises
    JsonableError on any failure; returns None on success.
    """
    if bot_type == UserProfile.INCOMING_WEBHOOK_BOT:
        # Imported here rather than at module scope, presumably to
        # avoid an import cycle -- TODO confirm.
        from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
        config_options = None
        for integration in WEBHOOK_INTEGRATIONS:
            if integration.name == service_name:
                # key: validator
                config_options = {c[1]: c[2] for c in integration.config_options}
                break
        # Unknown service names (or integrations declaring no config
        # options) are rejected here.
        if not config_options:
            raise JsonableError(_("Invalid integration '%s'.") % (service_name,))

        # Every declared option must be supplied by the caller.
        missing_keys = set(config_options.keys()) - set(config_data.keys())
        if missing_keys:
            raise JsonableError(_("Missing configuration parameters: %s") % (
                missing_keys,))

        # Run each option's validator; a validator returns an error
        # string on failure and a falsy value on success.
        for key, validator in config_options.items():
            value = config_data[key]
            error = validator(key, value)
            if error:
                raise JsonableError(_("Invalid {} value {} ({})").format(
                    key, value, error))

    elif bot_type == UserProfile.EMBEDDED_BOT:
        try:
            from zerver.lib.bot_lib import get_bot_handler
            bot_handler = get_bot_handler(service_name)
            if hasattr(bot_handler, 'validate_config'):
                bot_handler.validate_config(config_data)
        except ConfigValidationError:
            # The exception provides a specific error message, but that
            # message is not tagged translatable, because it is
            # triggered in the external zulip_bots package.
            # TODO: Think of some clever way to provide a more specific
            # error message.
            raise JsonableError(_("Invalid configuration data!"))
|
2018-02-13 11:47:40 +01:00
|
|
|
|
2018-06-01 17:25:15 +02:00
|
|
|
def add_service(name: str, user_profile: UserProfile, base_url: Optional[str]=None,
                interface: Optional[int]=None, token: Optional[str]=None) -> None:
    """Create a Service row for an outgoing webhook or embedded bot."""
    Service.objects.create(
        name=name,
        user_profile=user_profile,
        base_url=base_url,
        interface=interface,
        token=token,
    )
|
|
|
|
|
2018-01-29 16:10:54 +01:00
|
|
|
def check_bot_creation_policy(user_profile: UserProfile, bot_type: int) -> None:
    """Raise OrganizationAdministratorRequired if the realm's
    bot_creation_policy forbids this user from creating a bot of the
    given type.

    Realm administrators can always add bots.
    """
    if user_profile.is_realm_admin:
        return

    policy = user_profile.realm.bot_creation_policy
    if policy == Realm.BOT_CREATION_EVERYONE:
        return
    if policy == Realm.BOT_CREATION_ADMINS_ONLY:
        raise OrganizationAdministratorRequired()
    if policy == Realm.BOT_CREATION_LIMIT_GENERIC_BOTS and \
            bot_type == UserProfile.DEFAULT_BOT:
        raise OrganizationAdministratorRequired()
|
2018-01-29 16:10:54 +01:00
|
|
|
|
2017-11-24 16:24:24 +01:00
|
|
|
def check_valid_bot_type(user_profile: UserProfile, bot_type: int) -> None:
    """Raise JsonableError unless bot_type is one the user may create."""
    allowed = user_profile.allowed_bot_types
    if bot_type not in allowed:
        raise JsonableError(_('Invalid bot type'))
|
2017-07-03 18:35:12 +02:00
|
|
|
|
2018-06-01 17:47:50 +02:00
|
|
|
def check_valid_interface_type(interface_type: Optional[int]) -> None:
    """Raise JsonableError unless interface_type is an allowed Service
    interface."""
    if interface_type in Service.ALLOWED_INTERFACE_TYPES:
        return
    raise JsonableError(_('Invalid interface type'))
|
2017-11-01 10:04:16 +01:00
|
|
|
|
2020-05-30 21:43:19 +02:00
|
|
|
def is_administrator_role(role: int) -> bool:
    """Return True for roles with realm-administrative rights
    (administrator or owner)."""
    admin_roles = (UserProfile.ROLE_REALM_ADMINISTRATOR, UserProfile.ROLE_REALM_OWNER)
    return role in admin_roles
|
|
|
|
|
2017-11-16 02:28:50 +01:00
|
|
|
def bulk_get_users(emails: List[str], realm: Optional[Realm],
                   base_query: 'QuerySet[UserProfile]'=None) -> Dict[str, UserProfile]:
    """Fetch active UserProfiles for the given emails, case-insensitively,
    keyed by lowercased email, going through the bulk cache.

    Either `realm` must be provided (the normal path, scoped to active
    users of that realm) or a prebuilt `base_query` (see WARNING below).
    """
    if base_query is None:
        assert realm is not None
        query = UserProfile.objects.filter(realm=realm, is_active=True)
        realm_id = realm.id
    else:
        # WARNING: Currently, this code path only really supports one
        # version of `base_query` being used (because otherwise,
        # they'll share the cache, which can screw up the filtering).
        # If you're using this flow, you'll need to re-do any filters
        # in base_query in the code itself; base_query is just a perf
        # optimization.
        query = base_query
        realm_id = 0

    def fetch_users_by_email(emails: List[str]) -> List[UserProfile]:
        # This should be just
        #
        # UserProfile.objects.select_related("realm").filter(email__iexact__in=emails,
        #                                                    realm=realm)
        #
        # But chaining __in and __iexact doesn't work with Django's
        # ORM, so we have the following hack to construct the relevant where clause
        where_clause = "upper(zerver_userprofile.email::text) IN (SELECT upper(email) FROM unnest(%s) AS email)"
        return query.select_related("realm").extra(
            where=[where_clause],
            params=(emails,))

    def user_to_email(user_profile: UserProfile) -> str:
        # id_fetcher for the bulk cache: keys results by lowercased email.
        return user_profile.email.lower()

    return generic_bulk_cached_fetch(
        # Use a separate cache key to protect us from conflicts with
        # the get_user cache.
        lambda email: 'bulk_get_users:' + user_profile_cache_key_id(email, realm_id),
        fetch_users_by_email,
        [email.lower() for email in emails],
        id_fetcher=user_to_email,
    )
|
|
|
|
|
2020-06-13 05:24:42 +02:00
|
|
|
def user_ids_to_users(user_ids: Sequence[int], realm: Realm) -> List[UserProfile]:
    """Fetch the UserProfile for each id via the bulk cache, raising
    JsonableError if any id is unknown or belongs to a different realm.

    TODO: Consider adding a flag to control whether deactivated
    users should be included.
    """
    def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]:
        return list(UserProfile.objects.filter(id__in=user_ids).select_related())

    user_profiles_by_id: Dict[int, UserProfile] = generic_bulk_cached_fetch(
        cache_key_function=user_profile_by_id_cache_key,
        query_function=fetch_users_by_id,
        object_ids=user_ids,
    )

    # Report the first requested id that didn't resolve to a user.
    for user_id in user_ids:
        if user_id not in user_profiles_by_id:
            raise JsonableError(_("Invalid user ID: %s") % (user_id,))

    user_profiles = list(user_profiles_by_id.values())
    for user_profile in user_profiles:
        if user_profile.realm != realm:
            raise JsonableError(_("Invalid user ID: %s") % (user_profile.id,))
    return user_profiles
|
2018-05-28 20:42:31 +02:00
|
|
|
|
|
|
|
def access_bot_by_id(user_profile: UserProfile, user_id: int) -> UserProfile:
    """Look up a bot by id in the acting user's realm.

    Raises JsonableError if the target doesn't exist, isn't a bot, or
    the acting user can't administer it.
    """
    try:
        target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such bot"))

    if not target.is_bot:
        raise JsonableError(_("No such bot"))
    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))
    return target
|
2018-06-04 07:04:19 +02:00
|
|
|
|
|
|
|
def access_user_by_id(user_profile: UserProfile, user_id: int,
                      allow_deactivated: bool=False, allow_bots: bool=False,
                      read_only: bool=False) -> UserProfile:
    """Look up a user by id in the acting user's realm, with access checks.

    Bots and deactivated users are rejected unless the corresponding
    allow_* flag is set; administrative permission over the target is
    required unless read_only is True.
    """
    try:
        target = get_user_profile_by_id_in_realm(user_id, user_profile.realm)
    except UserProfile.DoesNotExist:
        raise JsonableError(_("No such user"))

    if target.is_bot and not allow_bots:
        raise JsonableError(_("No such user"))
    if not (target.is_active or allow_deactivated):
        raise JsonableError(_("User is deactivated"))
    if read_only:
        # Administrative access is not required just to read a user.
        return target
    if not user_profile.can_admin_user(target):
        raise JsonableError(_("Insufficient permission"))
    return target
|
2018-06-19 10:55:56 +02:00
|
|
|
|
|
|
|
def get_accounts_for_email(email: str) -> List[Dict[str, Optional[str]]]:
    """Summarize every active, non-bot account on a live realm whose
    delivery email matches (case-insensitively), ordered oldest first."""
    profiles = UserProfile.objects.select_related('realm').filter(
        delivery_email__iexact=email.strip(),
        is_active=True,
        realm__deactivated=False,
        is_bot=False,
    ).order_by('date_joined')

    accounts = []
    for profile in profiles:
        accounts.append({
            "realm_name": profile.realm.name,
            "string_id": profile.realm.string_id,
            "full_name": profile.full_name,
            "avatar": avatar_url(profile),
        })
    return accounts
|
2018-08-01 10:53:40 +02:00
|
|
|
|
|
|
|
def get_api_key(user_profile: UserProfile) -> str:
    """Return the user's current API key."""
    api_key = user_profile.api_key
    return api_key
|
|
|
|
|
|
|
|
def get_all_api_keys(user_profile: UserProfile) -> List[str]:
    """Return all of the user's API keys.

    Users can only have one API key for now, so this is always a
    single-element list.
    """
    keys = [user_profile.api_key]
    return keys
|
2018-09-04 20:23:44 +02:00
|
|
|
|
2019-01-28 20:08:04 +01:00
|
|
|
def validate_user_custom_profile_field(realm_id: int, field: CustomProfileField,
                                       value: Union[int, str, List[int]]) -> Optional[str]:
    """Validate a single custom profile field value against the field's
    type, returning an error string on failure or None on success."""
    field_type = field.field_type
    var_name = f'{field.name}'

    simple_validators = CustomProfileField.FIELD_VALIDATORS
    if field_type in simple_validators:
        return simple_validators[field_type](var_name, value)

    if field_type == CustomProfileField.CHOICE:
        field_data = field.field_data
        # Put an assertion so that mypy doesn't complain.
        assert field_data is not None
        choice_field_validator = CustomProfileField.CHOICE_FIELD_VALIDATORS[field_type]
        return choice_field_validator(var_name, field_data, value)

    if field_type == CustomProfileField.USER:
        user_field_validator = CustomProfileField.USER_FIELD_VALIDATORS[field_type]
        return user_field_validator(realm_id, cast(List[int], value), False)

    raise AssertionError("Invalid field type")
|
|
|
|
|
2018-09-04 20:23:44 +02:00
|
|
|
def validate_user_custom_profile_data(realm_id: int,
                                      profile_data: List[Dict[str, Union[int, str, List[int]]]]) -> None:
    """Validate every custom profile field value according to its field
    type, raising JsonableError on the first invalid entry."""
    for item in profile_data:
        field_id = item['id']
        try:
            field = CustomProfileField.objects.get(id=field_id)
        except CustomProfileField.DoesNotExist:
            raise JsonableError(_('Field id {id} not found.').format(id=field_id))

        error = validate_user_custom_profile_field(realm_id, field, item['value'])
        if error is not None:
            raise JsonableError(error)
|
2020-01-13 18:47:30 +01:00
|
|
|
|
2019-09-14 02:22:41 +02:00
|
|
|
def compute_show_invites_and_add_streams(user_profile: Optional[UserProfile]) -> Tuple[bool, bool]:
    """Return (show_invites, show_add_streams) UI flags for the user.

    Logged-out users and guests get neither; admins get both; in
    invite-by-admins-only realms, non-admins see only "add streams".
    """
    if user_profile is None or user_profile.is_guest:
        return False, False

    if user_profile.is_realm_admin:
        return True, True

    if user_profile.realm.invite_by_admins_only:
        return False, True

    return True, True
|
2020-01-13 22:11:19 +01:00
|
|
|
|
|
|
|
def format_user_row(realm: Realm, acting_user: UserProfile, row: Dict[str, Any],
                    client_gravatar: bool,
                    custom_profile_field_data: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Formats a user row returned by a database fetch using
    .values(*realm_user_dict_fields) into a dictionary representation
    of that user for API delivery to clients.  The acting_user
    argument is used for permissions checks.
    """

    avatar_url = get_avatar_field(user_id=row['id'],
                                  realm_id=realm.id,
                                  email=row['delivery_email'],
                                  avatar_source=row['avatar_source'],
                                  avatar_version=row['avatar_version'],
                                  medium=False,
                                  client_gravatar=client_gravatar)

    is_admin = is_administrator_role(row['role'])
    is_owner = row['role'] == UserProfile.ROLE_REALM_OWNER
    is_guest = row['role'] == UserProfile.ROLE_GUEST
    is_bot = row['is_bot']
    # This format should align with get_cross_realm_dicts() and notify_created_user
    result = dict(
        email=row['email'],
        user_id=row['id'],
        avatar_url=avatar_url,
        avatar_version=row['avatar_version'],
        is_admin=is_admin,
        is_owner=is_owner,
        is_guest=is_guest,
        is_bot=is_bot,
        full_name=row['full_name'],
        timezone=row['timezone'],
        is_active=row['is_active'],
        date_joined=row['date_joined'].isoformat(),
    )
    # Expose the real delivery email only to realm admins when the
    # realm hides email addresses from regular users.
    if (realm.email_address_visibility == Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS and
            acting_user.is_realm_admin):
        result['delivery_email'] = row['delivery_email']

    if is_bot:
        result["bot_type"] = row["bot_type"]
        if row['email'] in settings.CROSS_REALM_BOT_EMAILS:
            result['is_cross_realm_bot'] = True

        # Note that bot_owner_id can be None with legacy data.
        result['bot_owner_id'] = row['bot_owner_id']
    elif custom_profile_field_data is not None:
        result['profile_data'] = custom_profile_field_data
    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2020-01-31 23:41:41 +01:00
|
|
|
def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
    """Convert a UserProfile object into the dict shape produced by a
    QuerySet `.values(*realm_user_dict_fields)` fetch.

    What we'd like to do is just call model_to_dict(user,
    fields=realm_user_dict_fields).  The problem with this is that
    model_to_dict has a different convention than `.values()` in its
    handling of foreign keys, naming them as e.g. `bot_owner`, not
    `bot_owner_id`; we work around that here.

    This could be potentially simplified in the future by changing
    realm_user_dict_fields to name the bot owner with the less
    readable `bot_owner` (instead of `bot_owner_id`).
    """
    user_row = model_to_dict(user_profile,
                             fields=realm_user_dict_fields + ['bot_owner'])
    # Rename model_to_dict's `bot_owner` key to the `.values()`-style
    # `bot_owner_id`.
    user_row['bot_owner_id'] = user_row.pop('bot_owner')
    return user_row
|
2020-01-14 18:19:35 +01:00
|
|
|
|
2020-01-31 23:41:41 +01:00
|
|
|
def get_cross_realm_dicts() -> List[Dict[str, Any]]:
    """Return API-format user dicts for the system (cross-realm) bots."""
    users = bulk_get_users(list(settings.CROSS_REALM_BOT_EMAILS), None,
                           base_query=UserProfile.objects.filter(
                               realm__string_id=settings.SYSTEM_BOT_REALM)).values()
    result = []
    for user in users:
        # Important: We filter here, in addition to in
        # `base_query`, because of how bulk_get_users shares its
        # cache with other UserProfile caches.
        if user.realm.string_id != settings.SYSTEM_BOT_REALM:  # nocoverage
            continue
        user_row = user_profile_to_user_row(user)
        # Because we want to avoid clients being exposed to the
        # implementation detail that these bots are self-owned, we
        # just set bot_owner_id=None.
        user_row['bot_owner_id'] = None

        result.append(format_user_row(user.realm,
                                      acting_user=user,
                                      row=user_row,
                                      client_gravatar=False,
                                      custom_profile_field_data=None))

    return result
|
2020-01-13 22:11:19 +01:00
|
|
|
|
2020-02-07 02:36:55 +01:00
|
|
|
def get_custom_profile_field_values(custom_profile_field_values:
                                    List[CustomProfileFieldValue]) -> Dict[int, Dict[str, Any]]:
    """Group the given CustomProfileFieldValue rows by user, returning a
    mapping of user_id -> {field_id: field data}.  Renderable fields
    additionally carry their pre-rendered HTML under "rendered_value".
    """
    profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict)
    for profile_field in custom_profile_field_values:
        # Build the per-field payload once; only renderable fields get
        # the extra "rendered_value" key.
        field_data: Dict[str, Any] = {
            "value": profile_field.value,
        }
        if profile_field.field.is_renderable():
            field_data["rendered_value"] = profile_field.rendered_value
        profiles_by_user_id[profile_field.user_profile_id][profile_field.field_id] = field_data
    return profiles_by_user_id
|
|
|
|
|
2020-02-07 02:33:15 +01:00
|
|
|
def get_raw_user_data(realm: Realm, acting_user: UserProfile, client_gravatar: bool,
                      target_user: Optional[UserProfile]=None,
                      include_custom_profile_fields: bool=True) -> Dict[int, Dict[str, str]]:
    """Fetches data about the target user(s) appropriate for sending to
    acting_user via the standard format for the Zulip API.  If
    target_user is None, we fetch all users in the realm.
    """
    profiles_by_user_id = None
    custom_profile_field_data = None
    # target_user is an optional parameter which is passed when user data of a specific user
    # is required. It is 'None' otherwise.
    if target_user is not None:
        user_dicts = [user_profile_to_user_row(target_user)]
    else:
        user_dicts = get_realm_user_dicts(realm.id)

    if include_custom_profile_fields:
        base_query = CustomProfileFieldValue.objects.select_related("field")
        # TODO: Consider optimizing this query away with caching.
        if target_user is not None:
            custom_profile_field_values = base_query.filter(user_profile=target_user)
        else:
            custom_profile_field_values = base_query.filter(field__realm_id=realm.id)
        profiles_by_user_id = get_custom_profile_field_values(custom_profile_field_values)

    result = {}
    for row in user_dicts:
        if profiles_by_user_id is not None:
            # Users with no custom profile field values get an empty dict,
            # not None, so clients can distinguish "no data" from
            # "custom profile fields not requested".
            custom_profile_field_data = profiles_by_user_id.get(row['id'], {})

        result[row['id']] = format_user_row(realm,
                                            acting_user=acting_user,
                                            row=row,
                                            client_gravatar=client_gravatar,
                                            custom_profile_field_data=custom_profile_field_data,
                                            )
    return result
|