from email.headerregistry import Address
from typing import Any, Dict, List, Mapping, Optional, Union

from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from django.core.files.uploadedfile import UploadedFile
from django.http import HttpRequest, HttpResponse
from django.shortcuts import redirect
from django.utils.translation import gettext as _

from zerver.actions.bots import (
    do_change_bot_owner,
    do_change_default_all_public_streams,
    do_change_default_events_register_stream,
    do_change_default_sending_stream,
)
from zerver.actions.create_user import do_create_user, do_reactivate_user, notify_created_bot
from zerver.actions.custom_profile_fields import (
    check_remove_custom_profile_field_value,
    do_update_user_custom_profile_data_if_changed,
)
from zerver.actions.user_settings import (
    check_change_bot_full_name,
    check_change_full_name,
    do_change_avatar_fields,
    do_regenerate_api_key,
)
from zerver.actions.users import (
    do_change_user_role,
    do_deactivate_user,
    do_update_bot_config_data,
    do_update_outgoing_webhook_service,
)
from zerver.context_processors import get_valid_realm_from_request
from zerver.decorator import require_member_or_admin, require_realm_admin
from zerver.forms import PASSWORD_TOO_WEAK_ERROR, CreateUserForm
from zerver.lib.avatar import avatar_url, get_avatar_for_inaccessible_user, get_gravatar_url
from zerver.lib.bot_config import set_bot_config
from zerver.lib.email_validation import email_allowed_for_realm
from zerver.lib.exceptions import (
    CannotDeactivateLastUserError,
    JsonableError,
    MissingAuthenticationError,
    OrganizationAdministratorRequiredError,
    OrganizationOwnerRequiredError,
)
from zerver.lib.integrations import EMBEDDED_BOTS
from zerver.lib.rate_limiter import rate_limit_spectator_attachment_access_by_file
from zerver.lib.request import REQ, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.send_email import FromAddress, send_email
from zerver.lib.streams import access_stream_by_id, access_stream_by_name, subscribed_to_stream
from zerver.lib.types import ProfileDataElementUpdateDict, ProfileDataElementValue, Validator
from zerver.lib.upload import upload_avatar_image
from zerver.lib.url_encoding import append_url_query_string
from zerver.lib.users import (
    APIUserDict,
    access_bot_by_id,
    access_user_by_email,
    access_user_by_id,
    add_service,
    check_bot_creation_policy,
    check_bot_name_available,
    check_can_access_user,
    check_full_name,
    check_short_name,
    check_valid_bot_config,
    check_valid_bot_type,
    check_valid_interface_type,
    get_api_key,
    get_users_for_api,
    max_message_id_for_user,
    validate_user_custom_profile_data,
)
from zerver.lib.utils import generate_api_key
from zerver.lib.validator import (
    check_bool,
    check_capped_string,
    check_dict,
    check_dict_only,
    check_int,
    check_int_in,
    check_list,
    check_none_or,
    check_string,
    check_union,
    check_url,
)
from zerver.models import (
    DisposableEmailError,
    DomainNotAllowedForRealmError,
    EmailContainsPlusError,
    InvalidFakeEmailDomainError,
    Service,
    Stream,
    UserProfile,
    get_user_by_delivery_email,
    get_user_by_id_in_realm_including_cross_realm,
    get_user_including_cross_realm,
    get_user_profile_by_id_in_realm,
)
from zproject.backends import check_password_strength


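# A user is the "last owner" if they are a human realm owner and the realm
# has exactly one human owner; deactivating or demoting such a user would
# leave the organization without any owner.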
def check_last_owner(user_profile: UserProfile) -> bool:
    owners = set(user_profile.realm.get_human_owner_users())
    return user_profile.is_realm_owner and not user_profile.is_bot and len(owners) == 1


@has_request_variables
def deactivate_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    deactivation_notification_comment: Optional[str] = REQ(
        str_validator=check_capped_string(max_length=2000), default=None
    ),
) -> HttpResponse:
    target = access_user_by_id(user_profile, user_id, for_admin=True)
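    # Only organization owners may deactivate other owners, and the only
    # remaining organization owner can never be deactivated.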
    if target.is_realm_owner and not user_profile.is_realm_owner:
        raise OrganizationOwnerRequiredError
    if check_last_owner(target):
        raise JsonableError(_("Cannot deactivate the only organization owner"))
    if deactivation_notification_comment is not None:
        deactivation_notification_comment = deactivation_notification_comment.strip()
    return _deactivate_user_profile_backend(
        request,
        user_profile,
        target,
        deactivation_notification_comment=deactivation_notification_comment,
    )


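# Self-deactivation endpoint; it refuses to deactivate the very last active
# user in the realm, and the last remaining organization owner.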
def deactivate_user_own_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    if UserProfile.objects.filter(realm=user_profile.realm, is_active=True).count() == 1:
        raise CannotDeactivateLastUserError(is_last_owner=False)
    if user_profile.is_realm_owner and check_last_owner(user_profile):
        raise CannotDeactivateLastUserError(is_last_owner=True)

    do_deactivate_user(user_profile, acting_user=user_profile)
    return json_success(request)


def deactivate_bot_backend(
    request: HttpRequest, user_profile: UserProfile, bot_id: int
) -> HttpResponse:
    target = access_bot_by_id(user_profile, bot_id)
    return _deactivate_user_profile_backend(
        request, user_profile, target, deactivation_notification_comment=None
    )


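# Shared helper for the user- and bot-deactivation endpoints above; the
# optional comment is included in the notification email sent to the
# deactivated user.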
def _deactivate_user_profile_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    target: UserProfile,
    *,
    deactivation_notification_comment: Optional[str],
) -> HttpResponse:
    do_deactivate_user(target, acting_user=user_profile)

    # It's important that we check for None explicitly here, since ""
    # encodes sending an email without a custom administrator comment.
    if deactivation_notification_comment is not None:
        send_email(
            "zerver/emails/deactivate",
            to_user_ids=[target.id],
            from_address=FromAddress.NOREPLY,
            context={
                "deactivation_notification_comment": deactivation_notification_comment,
                "realm_uri": target.realm.uri,
                "realm_name": target.realm.name,
            },
        )
    return json_success(request)


def reactivate_user_backend(
    request: HttpRequest, user_profile: UserProfile, user_id: int
) -> HttpResponse:
    target = access_user_by_id(
        user_profile, user_id, allow_deactivated=True, allow_bots=True, for_admin=True
    )
    if target.is_bot:
        assert target.bot_type is not None
        check_bot_creation_policy(user_profile, target.bot_type)
    do_reactivate_user(target, acting_user=user_profile)
    return json_success(request)


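# Validator for the profile_data parameter of update_user_backend: a list of
# {"id": ..., "value": ...} dicts, where each value may be a string, a list
# of integers, or None (update_user_backend treats None/"" as a request to
# clear the field).  Illustrative payload (the field IDs are made up):
#     [{"id": 7, "value": "Vim"}, {"id": 9, "value": [11, 12]}]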
check_profile_data: Validator[
    List[Dict[str, Optional[Union[int, ProfileDataElementValue]]]]
] = check_list(
    check_dict_only(
        [
            ("id", check_int),
            (
                "value",
                check_none_or(
                    check_union([check_string, check_list(check_int)]),
                ),
            ),
        ]
    ),
)


@has_request_variables
def update_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int,
    full_name: Optional[str] = REQ(default=None),
    role: Optional[int] = REQ(
        default=None,
        json_validator=check_int_in(
            UserProfile.ROLE_TYPES,
        ),
    ),
    profile_data: Optional[List[Dict[str, Optional[Union[int, ProfileDataElementValue]]]]] = REQ(
        default=None,
        json_validator=check_profile_data,
    ),
) -> HttpResponse:
    target = access_user_by_id(
        user_profile, user_id, allow_deactivated=True, allow_bots=True, for_admin=True
    )

    if role is not None and target.role != role:
        # Require that the current user has permissions to
        # grant/remove the role in question.
        #
        # Logic replicated in patch_bot_backend.
        if UserProfile.ROLE_REALM_OWNER in [role, target.role] and not user_profile.is_realm_owner:
            raise OrganizationOwnerRequiredError
        elif not user_profile.is_realm_admin:
            raise OrganizationAdministratorRequiredError

        if target.role == UserProfile.ROLE_REALM_OWNER and check_last_owner(target):
            raise JsonableError(
                _("The owner permission cannot be removed from the only organization owner.")
            )
        do_change_user_role(target, role, acting_user=user_profile)

    if full_name is not None and target.full_name != full_name and full_name.strip() != "":
        # We don't respect `name_changes_disabled` here because the request
        # is on behalf of the administrator.
        check_change_full_name(target, full_name, user_profile)

    if profile_data is not None:
        clean_profile_data: List[ProfileDataElementUpdateDict] = []
        for entry in profile_data:
            assert isinstance(entry["id"], int)
            assert not isinstance(entry["value"], int)
            if entry["value"] is None or not entry["value"]:
                field_id = entry["id"]
                check_remove_custom_profile_field_value(target, field_id)
            else:
                clean_profile_data.append(
                    {
                        "id": entry["id"],
                        "value": entry["value"],
                    }
                )
        validate_user_custom_profile_data(target.realm.id, clean_profile_data)
        do_update_user_custom_profile_data_if_changed(target, clean_profile_data)

    return json_success(request)


def avatar(
    request: HttpRequest,
    maybe_user_profile: Union[UserProfile, AnonymousUser],
    email_or_id: str,
    medium: bool = False,
) -> HttpResponse:
    """Accepts an email address or user ID and returns the avatar"""
    is_email = False
    try:
        int(email_or_id)
    except ValueError:
        is_email = True

    if not maybe_user_profile.is_authenticated:
        # Allow anonymous access to avatars only if spectators are
        # enabled in the organization.
        realm = get_valid_realm_from_request(request)
        if not realm.allow_web_public_streams_access():
            raise MissingAuthenticationError

        # We only allow the ID format for accessing a user's avatar
        # for spectators. This is mainly for defense in depth, since
        # email_address_visibility should mean spectators only
        # interact with fake email addresses anyway.
        if is_email:
            raise MissingAuthenticationError

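        # Rate limit unauthenticated (spectator) avatar fetches, keyed on the
        # specific avatar being requested.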
        if settings.RATE_LIMITING:
            unique_avatar_key = f"{realm.id}/{email_or_id}/{medium}"
            rate_limit_spectator_attachment_access_by_file(unique_avatar_key)
    else:
        realm = maybe_user_profile.realm

    try:
        if is_email:
            avatar_user_profile = get_user_including_cross_realm(email_or_id, realm)
        else:
            avatar_user_profile = get_user_by_id_in_realm_including_cross_realm(
                int(email_or_id), realm
            )

        url: Optional[str] = None
        if maybe_user_profile.is_authenticated and not check_can_access_user(
            avatar_user_profile, maybe_user_profile
        ):
            url = get_avatar_for_inaccessible_user()
        else:
            # If there is a valid user account passed in, use its avatar
            url = avatar_url(avatar_user_profile, medium=medium)
        assert url is not None
    except UserProfile.DoesNotExist:
        # If there is no such user, treat it as a new gravatar
        email = email_or_id
        avatar_version = 1
        url = get_gravatar_url(email, avatar_version, medium)

    # We can rely on the URL already having query parameters. Because
    # our templates depend on being able to use the ampersand to
    # add query parameters to our url, get_avatar_url does '?x=x'
    # hacks to prevent us from having to jump through decode/encode hoops.
    assert url is not None
    url = append_url_query_string(url, request.META["QUERY_STRING"])
    return redirect(url)


def avatar_medium(
    request: HttpRequest, maybe_user_profile: Union[UserProfile, AnonymousUser], email_or_id: str
) -> HttpResponse:
    return avatar(request, maybe_user_profile, email_or_id, medium=True)


def get_stream_name(stream: Optional[Stream]) -> Optional[str]:
    if stream:
        return stream.name
    return None


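# Edits an existing bot.  Every parameter below is optional; only the fields
# that are supplied in the request are changed.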
@require_member_or_admin
@has_request_variables
def patch_bot_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    bot_id: int,
    full_name: Optional[str] = REQ(default=None),
    role: Optional[int] = REQ(
        default=None,
        json_validator=check_int_in(
            UserProfile.ROLE_TYPES,
        ),
    ),
    bot_owner_id: Optional[int] = REQ(json_validator=check_int, default=None),
    config_data: Optional[Dict[str, str]] = REQ(
        default=None, json_validator=check_dict(value_validator=check_string)
    ),
    service_payload_url: Optional[str] = REQ(json_validator=check_url, default=None),
    service_interface: int = REQ(json_validator=check_int, default=1),
    default_sending_stream: Optional[str] = REQ(default=None),
    default_events_register_stream: Optional[str] = REQ(default=None),
    default_all_public_streams: Optional[bool] = REQ(default=None, json_validator=check_bool),
) -> HttpResponse:
    bot = access_bot_by_id(user_profile, bot_id)

    if full_name is not None:
        check_change_bot_full_name(bot, full_name, user_profile)

    if role is not None and bot.role != role:
        # Logic duplicated from update_user_backend.
        if UserProfile.ROLE_REALM_OWNER in [role, bot.role] and not user_profile.is_realm_owner:
            raise OrganizationOwnerRequiredError
        elif not user_profile.is_realm_admin:
            raise OrganizationAdministratorRequiredError

        do_change_user_role(bot, role, acting_user=user_profile)

    if bot_owner_id is not None:
        try:
            owner = get_user_profile_by_id_in_realm(bot_owner_id, user_profile.realm)
        except UserProfile.DoesNotExist:
            raise JsonableError(_("Failed to change owner, no such user"))
        if not owner.is_active:
            raise JsonableError(_("Failed to change owner, user is deactivated"))
        if owner.is_bot:
            raise JsonableError(_("Failed to change owner, bots can't own other bots"))

        previous_owner = bot.bot_owner
        if previous_owner != owner:
            do_change_bot_owner(bot, owner, user_profile)

    if default_sending_stream is not None:
        if default_sending_stream == "":
            stream: Optional[Stream] = None
        else:
            (stream, sub) = access_stream_by_name(user_profile, default_sending_stream)
        do_change_default_sending_stream(bot, stream, acting_user=user_profile)
    if default_events_register_stream is not None:
        if default_events_register_stream == "":
            stream = None
        else:
            (stream, sub) = access_stream_by_name(user_profile, default_events_register_stream)
        do_change_default_events_register_stream(bot, stream, acting_user=user_profile)
    if default_all_public_streams is not None:
        do_change_default_all_public_streams(
            bot, default_all_public_streams, acting_user=user_profile
        )

    if service_payload_url is not None:
        check_valid_interface_type(service_interface)
        assert service_interface is not None
        do_update_outgoing_webhook_service(bot, service_interface, service_payload_url)

    if config_data is not None:
        do_update_bot_config_data(bot, config_data)

    if len(request.FILES) == 0:
        pass
    elif len(request.FILES) == 1:
        [user_file] = request.FILES.values()
        assert isinstance(user_file, UploadedFile)
        assert user_file.size is not None
        upload_avatar_image(user_file, user_profile, bot)
        avatar_source = UserProfile.AVATAR_FROM_USER
        do_change_avatar_fields(bot, avatar_source, acting_user=user_profile)
    else:
        raise JsonableError(_("You may only upload one file at a time"))

    json_result = dict(
        full_name=bot.full_name,
        avatar_url=avatar_url(bot),
        service_interface=service_interface,
        service_payload_url=service_payload_url,
        config_data=config_data,
        default_sending_stream=get_stream_name(bot.default_sending_stream),
        default_events_register_stream=get_stream_name(bot.default_events_register_stream),
        default_all_public_streams=bot.default_all_public_streams,
    )

    # Don't include the bot owner in case it is not set.
    # Default bots have no owner.
    if bot.bot_owner is not None:
        json_result["bot_owner"] = bot.bot_owner.email

    return json_success(request, data=json_result)


@require_member_or_admin
@has_request_variables
def regenerate_bot_api_key(
    request: HttpRequest, user_profile: UserProfile, bot_id: int
) -> HttpResponse:
    bot = access_bot_by_id(user_profile, bot_id)

    new_api_key = do_regenerate_api_key(bot, user_profile)
    json_result = dict(
        api_key=new_api_key,
    )
    return json_success(request, data=json_result)


@require_member_or_admin
@has_request_variables
def add_bot_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    full_name_raw: str = REQ("full_name"),
    short_name_raw: str = REQ("short_name"),
    bot_type: int = REQ(json_validator=check_int, default=UserProfile.DEFAULT_BOT),
    payload_url: str = REQ(json_validator=check_url, default=""),
    service_name: Optional[str] = REQ(default=None),
    config_data: Mapping[str, str] = REQ(
        default={}, json_validator=check_dict(value_validator=check_string)
    ),
    interface_type: int = REQ(json_validator=check_int, default=Service.GENERIC),
    default_sending_stream_name: Optional[str] = REQ("default_sending_stream", default=None),
    default_events_register_stream_name: Optional[str] = REQ(
        "default_events_register_stream", default=None
    ),
    default_all_public_streams: Optional[bool] = REQ(json_validator=check_bool, default=None),
) -> HttpResponse:
    short_name = check_short_name(short_name_raw)
    if bot_type != UserProfile.INCOMING_WEBHOOK_BOT:
        service_name = service_name or short_name
    short_name += "-bot"
    full_name = check_full_name(full_name_raw)
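    # The bot's email is derived from the short name (already suffixed with
    # "-bot") and the realm's bot domain; building the address can fail if
    # FAKE_EMAIL_DOMAIN is misconfigured or the name is not a valid local part.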
    try:
        email = Address(username=short_name, domain=user_profile.realm.get_bot_domain()).addr_spec
    except InvalidFakeEmailDomainError:
        raise JsonableError(
            _(
                "Can't create bots until FAKE_EMAIL_DOMAIN is correctly configured.\n"
                "Please contact your server administrator."
            )
        )
    except ValueError:
        raise JsonableError(_("Bad name or username"))
    form = CreateUserForm({"full_name": full_name, "email": email})

    if bot_type == UserProfile.EMBEDDED_BOT:
        if not settings.EMBEDDED_BOTS_ENABLED:
            raise JsonableError(_("Embedded bots are not enabled."))
        if service_name not in [bot.name for bot in EMBEDDED_BOTS]:
            raise JsonableError(_("Invalid embedded bot name."))

    if not form.is_valid():  # nocoverage
        # coverage note: The similar block above covers the most
        # common situation where this might fail, but this case may be
        # still possible with an overly long username.
        raise JsonableError(_("Bad name or username"))
    try:
        get_user_by_delivery_email(email, user_profile.realm)
        raise JsonableError(_("Username already in use"))
    except UserProfile.DoesNotExist:
        pass

    check_bot_name_available(
        realm_id=user_profile.realm_id,
        full_name=full_name,
    )

    check_bot_creation_policy(user_profile, bot_type)
    check_valid_bot_type(user_profile, bot_type)
    check_valid_interface_type(interface_type)

    if len(request.FILES) == 0:
        avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
    elif len(request.FILES) != 1:
        raise JsonableError(_("You may only upload one file at a time"))
    else:
        avatar_source = UserProfile.AVATAR_FROM_USER

    default_sending_stream = None
    if default_sending_stream_name is not None:
        (default_sending_stream, ignored_sub) = access_stream_by_name(
            user_profile, default_sending_stream_name
        )

    default_events_register_stream = None
    if default_events_register_stream_name is not None:
        (default_events_register_stream, ignored_sub) = access_stream_by_name(
            user_profile, default_events_register_stream_name
        )

    if bot_type in (UserProfile.INCOMING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT) and service_name:
        check_valid_bot_config(bot_type, service_name, config_data)

    bot_profile = do_create_user(
        email=email,
        password=None,
        realm=user_profile.realm,
        full_name=full_name,
        bot_type=bot_type,
        bot_owner=user_profile,
        avatar_source=avatar_source,
        default_sending_stream=default_sending_stream,
        default_events_register_stream=default_events_register_stream,
        default_all_public_streams=default_all_public_streams,
        acting_user=user_profile,
    )
    if len(request.FILES) == 1:
        [user_file] = request.FILES.values()
        assert isinstance(user_file, UploadedFile)
        assert user_file.size is not None
        upload_avatar_image(user_file, user_profile, bot_profile)

    if bot_type in (UserProfile.OUTGOING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT):
        assert isinstance(service_name, str)
        add_service(
            name=service_name,
            user_profile=bot_profile,
            base_url=payload_url,
            interface=interface_type,
            token=generate_api_key(),
        )

    if bot_type == UserProfile.INCOMING_WEBHOOK_BOT and service_name:
        set_bot_config(bot_profile, "integration_id", service_name)

    if bot_type in (UserProfile.INCOMING_WEBHOOK_BOT, UserProfile.EMBEDDED_BOT):
        for key, value in config_data.items():
            set_bot_config(bot_profile, key, value)

    notify_created_bot(bot_profile)

    api_key = get_api_key(bot_profile)

    json_result = dict(
        user_id=bot_profile.id,
        api_key=api_key,
        avatar_url=avatar_url(bot_profile),
        default_sending_stream=get_stream_name(bot_profile.default_sending_stream),
        default_events_register_stream=get_stream_name(bot_profile.default_events_register_stream),
        default_all_public_streams=bot_profile.default_all_public_streams,
    )
    return json_success(request, data=json_result)


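# Lists the bots owned by the requesting user, including each bot's API key.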
@require_member_or_admin
def get_bots_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True, bot_owner=user_profile)
    bot_profiles = bot_profiles.select_related(
        "default_sending_stream", "default_events_register_stream"
    )
    bot_profiles = bot_profiles.order_by("date_joined")

    def bot_info(bot_profile: UserProfile) -> Dict[str, Any]:
        default_sending_stream = get_stream_name(bot_profile.default_sending_stream)
        default_events_register_stream = get_stream_name(bot_profile.default_events_register_stream)

        # Bots are supposed to have only one API key, at least for now.
        # Therefore we can safely assume that the one and only valid API key
        # is the first one.
        api_key = get_api_key(bot_profile)

        return dict(
            username=bot_profile.email,
            full_name=bot_profile.full_name,
            api_key=api_key,
            avatar_url=avatar_url(bot_profile),
            default_sending_stream=default_sending_stream,
            default_events_register_stream=default_events_register_stream,
            default_all_public_streams=bot_profile.default_all_public_streams,
        )

    return json_success(request, data={"bots": list(map(bot_info, bot_profiles))})


def get_user_data(
    user_profile: UserProfile,
    include_custom_profile_fields: bool,
    client_gravatar: bool,
    target_user: Optional[UserProfile] = None,
) -> Dict[str, Any]:
    """
    The client_gravatar field here is set to True by default, assuming that clients
    can compute their own gravatars, which saves bandwidth. This is a more important
    optimization than it might seem, because gravatar URLs contain MD5 hashes that
    compress very poorly compared to other data.
    """
    realm = user_profile.realm

    members = get_users_for_api(
        realm,
        user_profile,
        target_user=target_user,
        client_gravatar=client_gravatar,
        user_avatar_url_field_optional=False,
        include_custom_profile_fields=include_custom_profile_fields,
    )

    if target_user is not None:
        data: Dict[str, Any] = {"user": members[target_user.id]}
    else:
        data = {"members": [members[k] for k in members]}

    return data


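# When a user_id is supplied, returns just that user (deactivated users and
# bots are allowed); otherwise returns every member of the realm.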
@has_request_variables
def get_members_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: Optional[int] = None,
    include_custom_profile_fields: bool = REQ(json_validator=check_bool, default=False),
    client_gravatar: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    target_user = None
    if user_id is not None:
        target_user = access_user_by_id(
            user_profile, user_id, allow_deactivated=True, allow_bots=True, for_admin=False
        )

    data = get_user_data(user_profile, include_custom_profile_fields, client_gravatar, target_user)

    return json_success(request, data)


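# Creates a user directly (no invitation flow); restricted to realm
# administrators who additionally have the can_create_users permission.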
@require_realm_admin
@has_request_variables
def create_user_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    email: str = REQ(),
    password: str = REQ(),
    full_name_raw: str = REQ("full_name"),
) -> HttpResponse:
    if not user_profile.can_create_users:
        raise JsonableError(_("User not authorized to create users"))

    full_name = check_full_name(full_name_raw)
    form = CreateUserForm({"full_name": full_name, "email": email})
    if not form.is_valid():
        raise JsonableError(_("Bad name or username"))

    # Check that the new user's email address belongs to the admin's realm
    # (Since this is an admin API, we don't require the user to have been
    # invited first.)
    realm = user_profile.realm
    try:
        email_allowed_for_realm(email, user_profile.realm)
    except DomainNotAllowedForRealmError:
        raise JsonableError(
            _("Email '{email}' not allowed in this organization").format(
                email=email,
            )
        )
    except DisposableEmailError:
        raise JsonableError(_("Disposable email addresses are not allowed in this organization"))
    except EmailContainsPlusError:
        raise JsonableError(_("Email addresses containing + are not allowed."))

    try:
        get_user_by_delivery_email(email, user_profile.realm)
        raise JsonableError(_("Email '{email}' already in use").format(email=email))
    except UserProfile.DoesNotExist:
        pass

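    # Enforce password strength on the server side as well (zxcvbn-based),
    # so a frontend bug or a crafted request cannot set a too-weak password.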
    if not check_password_strength(password):
        raise JsonableError(str(PASSWORD_TOO_WEAK_ERROR))

    target_user = do_create_user(
        email,
        password,
        realm,
        full_name,
        # Explicitly set tos_version=-1. This means that users
        # created via this mechanism would be prompted to set
        # the email_address_visibility setting on first login.
        # For servers that have configured Terms of Service,
        # users will also be prompted to accept the Terms of
        # Service on first login.
        tos_version=UserProfile.TOS_VERSION_BEFORE_FIRST_LOGIN,
        acting_user=user_profile,
    )
    return json_success(request, data={"user_id": target_user.id})


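# Returns the requesting user's own profile record, plus a max_message_id
# field with the ID of the newest message the user has received.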
def get_profile_backend(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    raw_user_data = get_users_for_api(
        user_profile.realm,
        user_profile,
        target_user=user_profile,
        client_gravatar=False,
        user_avatar_url_field_optional=False,
    )
    result: APIUserDict = raw_user_data[user_profile.id]

    result["max_message_id"] = max_message_id_for_user(user_profile)

    return json_success(request, data=result)


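# Reports whether the given user is subscribed to the given stream.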
@has_request_variables
def get_subscription_backend(
    request: HttpRequest,
    user_profile: UserProfile,
    user_id: int = REQ(json_validator=check_int, path_only=True),
    stream_id: int = REQ(json_validator=check_int, path_only=True),
) -> HttpResponse:
    target_user = access_user_by_id(user_profile, user_id, for_admin=False)
    (stream, sub) = access_stream_by_id(user_profile, stream_id, allow_realm_admin=True)

    subscription_status = {"is_subscribed": subscribed_to_stream(target_user, stream_id)}

    return json_success(request, data=subscription_status)


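# Fetches a single user addressed by delivery email rather than by user ID;
# deactivated users and bots can be looked up as well.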
@has_request_variables
def get_user_by_email(
    request: HttpRequest,
    user_profile: UserProfile,
    email: str,
    include_custom_profile_fields: bool = REQ(json_validator=check_bool, default=False),
    client_gravatar: bool = REQ(json_validator=check_bool, default=True),
) -> HttpResponse:
    target_user = access_user_by_email(
        user_profile, email, allow_deactivated=True, allow_bots=True, for_admin=False
    )

    data = get_user_data(user_profile, include_custom_profile_fields, client_gravatar, target_user)
    return json_success(request, data)