2019-03-10 02:43:29 +01:00
|
|
|
# Documentation for Zulip's authentication backends is split across a few places:
|
|
|
|
#
|
|
|
|
# * https://zulip.readthedocs.io/en/latest/production/authentication-methods.html and
|
|
|
|
# zproject/prod_settings_template.py have user-level configuration documentation.
|
2019-11-07 18:29:05 +01:00
|
|
|
# * https://zulip.readthedocs.io/en/latest/development/authentication.html
|
|
|
|
# has developer-level documentation, especially on testing authentication backends
|
|
|
|
# in the Zulip development environment.
|
2019-03-10 02:43:29 +01:00
|
|
|
#
|
|
|
|
# Django upstream's documentation for authentication backends is also
|
|
|
|
# helpful background. The most important detail to understand for
|
|
|
|
# reading this file is that the Django authenticate() function will
|
|
|
|
# call the authenticate methods of all backends registered in
|
|
|
|
# settings.AUTHENTICATION_BACKENDS that have a function signature
|
|
|
|
# matching the args/kwargs passed in the authenticate() call.
|
2020-05-22 15:26:17 +02:00
|
|
|
import binascii
|
2020-10-09 02:17:33 +02:00
|
|
|
import json
|
2016-08-01 13:06:35 +02:00
|
|
|
import logging
|
2020-06-11 00:54:34 +02:00
|
|
|
from abc import ABC, abstractmethod
|
2022-07-27 23:33:49 +02:00
|
|
|
from email.headerregistry import Address
|
2022-04-27 02:23:56 +02:00
|
|
|
from typing import (
|
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
List,
|
|
|
|
Optional,
|
|
|
|
Set,
|
|
|
|
Tuple,
|
|
|
|
Type,
|
|
|
|
TypedDict,
|
|
|
|
TypeVar,
|
|
|
|
Union,
|
|
|
|
cast,
|
|
|
|
)
|
2021-10-14 01:56:37 +02:00
|
|
|
from urllib.parse import urlencode
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2019-06-07 23:36:19 +02:00
|
|
|
import magic
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2019-08-01 15:09:27 +02:00
|
|
|
from decorator import decorator
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.conf import settings
|
2020-02-23 18:58:08 +01:00
|
|
|
from django.contrib.auth import authenticate, get_backends
|
2013-11-04 23:16:46 +01:00
|
|
|
from django.contrib.auth.backends import RemoteUserBackend
|
2023-02-14 01:11:49 +01:00
|
|
|
from django.contrib.staticfiles.storage import staticfiles_storage
|
2018-05-31 00:12:39 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.core.validators import validate_email
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.dispatch import Signal, receiver
|
|
|
|
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
|
2018-07-18 23:45:49 +02:00
|
|
|
from django.shortcuts import render
|
2019-04-12 06:24:58 +02:00
|
|
|
from django.urls import reverse
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2021-08-25 23:15:27 +02:00
|
|
|
from django_auth_ldap.backend import LDAPBackend, _LDAPUser, ldap_error
|
2020-05-22 15:26:17 +02:00
|
|
|
from lxml.etree import XMLSyntaxError
|
2021-11-01 20:08:20 +01:00
|
|
|
from onelogin.saml2 import compat as onelogin_saml2_compat
|
2022-03-22 18:52:24 +01:00
|
|
|
from onelogin.saml2.auth import OneLogin_Saml2_Auth
|
2023-09-27 01:16:15 +02:00
|
|
|
from onelogin.saml2.errors import OneLogin_Saml2_Error, OneLogin_Saml2_ValidationError
|
2021-10-21 14:16:26 +02:00
|
|
|
from onelogin.saml2.logout_request import OneLogin_Saml2_Logout_Request
|
2021-11-01 20:08:20 +01:00
|
|
|
from onelogin.saml2.logout_response import OneLogin_Saml2_Logout_Response
|
2020-05-22 18:44:29 +02:00
|
|
|
from onelogin.saml2.response import OneLogin_Saml2_Response
|
2020-07-25 14:31:45 +02:00
|
|
|
from onelogin.saml2.settings import OneLogin_Saml2_Settings
|
2021-11-01 20:08:20 +01:00
|
|
|
from onelogin.saml2.utils import OneLogin_Saml2_Utils
|
|
|
|
from onelogin.saml2.xml_utils import OneLogin_Saml2_XML
|
2020-06-11 00:54:34 +02:00
|
|
|
from requests import HTTPError
|
|
|
|
from social_core.backends.apple import AppleIdAuth
|
2018-10-05 14:32:02 +02:00
|
|
|
from social_core.backends.azuread import AzureADOAuth2
|
2018-05-31 00:12:39 +02:00
|
|
|
from social_core.backends.base import BaseAuth
|
2020-06-11 00:54:34 +02:00
|
|
|
from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, GithubTeamOAuth2
|
|
|
|
from social_core.backends.gitlab import GitLabOAuth2
|
2019-02-02 16:51:26 +01:00
|
|
|
from social_core.backends.google import GoogleOAuth2
|
2021-05-21 16:45:43 +02:00
|
|
|
from social_core.backends.open_id_connect import OpenIdConnectAuth
|
2020-06-19 21:44:29 +02:00
|
|
|
from social_core.backends.saml import SAMLAuth, SAMLIdentityProvider
|
2020-06-11 00:54:34 +02:00
|
|
|
from social_core.exceptions import (
|
2020-06-09 18:17:32 +02:00
|
|
|
AuthCanceled,
|
2020-06-11 00:54:34 +02:00
|
|
|
AuthFailed,
|
|
|
|
AuthMissingParameter,
|
|
|
|
AuthStateForbidden,
|
|
|
|
SocialAuthBaseException,
|
|
|
|
)
|
2018-07-18 23:45:49 +02:00
|
|
|
from social_core.pipeline.partial import partial
|
2021-11-01 20:08:20 +01:00
|
|
|
from social_django.utils import load_backend, load_strategy
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import override
|
2020-06-11 00:54:34 +02:00
|
|
|
from zxcvbn import zxcvbn
|
2017-03-24 10:48:52 +01:00
|
|
|
|
2022-04-14 23:53:15 +02:00
|
|
|
from zerver.actions.create_user import do_create_user, do_reactivate_user
|
2022-04-14 23:46:56 +02:00
|
|
|
from zerver.actions.custom_profile_fields import do_update_user_custom_profile_data_if_changed
|
2023-07-15 22:25:36 +02:00
|
|
|
from zerver.actions.user_groups import (
|
|
|
|
bulk_add_members_to_user_groups,
|
|
|
|
bulk_remove_members_from_user_groups,
|
|
|
|
)
|
2022-04-14 23:49:26 +02:00
|
|
|
from zerver.actions.user_settings import do_regenerate_api_key
|
2022-04-14 23:48:28 +02:00
|
|
|
from zerver.actions.users import do_deactivate_user
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.avatar import avatar_url, is_avatar_new
|
2019-06-28 00:10:58 +02:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_content_hash
|
2019-01-12 18:12:11 +01:00
|
|
|
from zerver.lib.dev_ldap_directory import init_fakeldap
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.email_validation import email_allowed_for_realm, validate_email_not_already_in_realm
|
2021-07-16 22:11:10 +02:00
|
|
|
from zerver.lib.exceptions import JsonableError
|
2020-02-01 17:45:22 +01:00
|
|
|
from zerver.lib.mobile_auth_otp import is_valid_otp
|
2022-08-05 17:40:03 +02:00
|
|
|
from zerver.lib.rate_limiter import RateLimitedObject, client_is_exempt_from_rate_limiting
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.redis_utils import get_dict_from_redis, get_redis_client, put_dict_in_redis
|
2021-08-21 19:24:20 +02:00
|
|
|
from zerver.lib.request import RequestNotes
|
2021-10-21 14:16:26 +02:00
|
|
|
from zerver.lib.sessions import delete_user_sessions
|
2020-05-23 15:21:19 +02:00
|
|
|
from zerver.lib.subdomains import get_subdomain
|
2022-09-24 06:44:08 +02:00
|
|
|
from zerver.lib.types import ProfileDataElementUpdateDict
|
2021-10-14 01:45:34 +02:00
|
|
|
from zerver.lib.url_encoding import append_url_query_string
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.users import check_full_name, validate_user_custom_profile_field
|
|
|
|
from zerver.models import (
|
|
|
|
CustomProfileField,
|
|
|
|
DisposableEmailError,
|
|
|
|
DomainNotAllowedForRealmError,
|
|
|
|
EmailContainsPlusError,
|
2020-07-03 19:42:54 +02:00
|
|
|
PasswordTooWeakError,
|
2023-03-10 10:42:00 +01:00
|
|
|
PreregistrationRealm,
|
2020-06-11 00:54:34 +02:00
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
2023-07-15 22:25:36 +02:00
|
|
|
UserGroup,
|
|
|
|
UserGroupMembership,
|
2020-06-11 00:54:34 +02:00
|
|
|
UserProfile,
|
|
|
|
custom_profile_fields_for_realm,
|
|
|
|
get_realm,
|
|
|
|
get_user_by_delivery_email,
|
|
|
|
get_user_profile_by_id,
|
|
|
|
remote_user_to_email,
|
|
|
|
supported_auth_backends,
|
|
|
|
)
|
2022-09-24 06:44:08 +02:00
|
|
|
from zproject.settings_types import OIDCIdPConfigDict
|
2017-10-24 17:59:39 +02:00
|
|
|
|
2019-09-29 06:32:56 +02:00
|
|
|
# Module-level shared Redis connection, used by the rate-limiting and
# auth-state helpers in this file.
redis_client = get_redis_client()
|
|
|
|
|
2023-02-02 04:35:24 +01:00
|
|
|
|
2023-04-16 21:53:22 +02:00
|
|
|
def all_implemented_backend_names() -> List[str]:
    """Return the names of every auth backend implemented in this module,
    regardless of whether it is enabled on this server."""
    return [*AUTH_BACKEND_NAME_MAP]
|
|
|
|
|
|
|
|
|
2019-03-10 02:43:29 +01:00
|
|
|
# This first batch of methods is used by other code in Zulip to check
|
|
|
|
# whether a given authentication backend is enabled for a given realm.
|
|
|
|
# In each case, we both needs to check at the server level (via
|
|
|
|
# `settings.AUTHENTICATION_BACKENDS`, queried via
|
|
|
|
# `django.contrib.auth.get_backends`) and at the realm level (via the
|
2023-04-16 21:53:22 +02:00
|
|
|
# `RealmAuthenticationMethod` table).
|
2018-05-10 18:53:55 +02:00
|
|
|
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
    """Pads an authentication methods dict to contain all auth backends
    supported by the software, regardless of whether they are
    configured on this server.

    Mutates method_dict in place and returns it for convenience.
    """
    for backend_name in AUTH_BACKEND_NAME_MAP:
        method_dict.setdefault(backend_name, False)
    return method_dict
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def auth_enabled_helper(
    backends_to_check: List[str],
    realm: Optional[Realm],
    realm_authentication_methods: Optional[Dict[str, bool]] = None,
) -> bool:
    """
    realm_authentication_methods can be passed if already fetched to avoid
    a database query.
    """
    if realm is None:
        # Outside a realm context, treat every implemented backend as enabled.
        enabled_method_dict = {method: True for method in AUTH_BACKEND_NAME_MAP}
    elif realm_authentication_methods is not None:
        # Copy the dict to avoid mutating the original if it was passed in as argument.
        enabled_method_dict = realm_authentication_methods.copy()
    else:
        enabled_method_dict = realm.authentication_methods_dict()

    pad_method_dict(enabled_method_dict)

    # A backend counts as enabled only if it is both registered at the
    # server level (supported_auth_backends) and turned on for the realm.
    return any(
        enabled_method_dict[backend_name]
        and isinstance(active_backend, AUTH_BACKEND_NAME_MAP[backend_name])
        for active_backend in supported_auth_backends()
        for backend_name in backends_to_check
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def ldap_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the LDAP backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["LDAP"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def email_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if email+password authentication is enabled for this server and realm."""
    return auth_enabled_helper(
        ["Email"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def password_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if any password-based backend (LDAP or Email) is enabled."""
    if ldap_auth_enabled(realm, realm_authentication_methods):
        return True
    return email_auth_enabled(realm, realm_authentication_methods)
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def dev_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the development-environment login backend is enabled."""
    return auth_enabled_helper(
        ["Dev"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def google_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the Google OAuth backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["Google"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def github_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the GitHub OAuth backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["GitHub"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def gitlab_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the GitLab OAuth backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["GitLab"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2020-01-31 18:19:53 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def apple_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the Apple ID backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["Apple"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2020-06-09 12:04:21 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2023-04-17 22:14:14 +02:00
|
|
|
def saml_auth_enabled(
    realm: Optional[Realm] = None, realm_authentication_methods: Optional[Dict[str, bool]] = None
) -> bool:
    """True if the SAML backend is enabled for this server and realm."""
    return auth_enabled_helper(
        ["SAML"], realm, realm_authentication_methods=realm_authentication_methods
    )
|
2019-09-29 06:32:56 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def require_email_format_usernames(realm: Optional[Realm] = None) -> bool:
    """Whether usernames must be email addresses.

    False when LDAP auth is enabled with a configuration that maps
    non-email usernames to emails (LDAP_EMAIL_ATTR or LDAP_APPEND_DOMAIN);
    True otherwise.
    """
    ldap_maps_usernames = bool(settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN)
    return not (ldap_auth_enabled(realm) and ldap_maps_usernames)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def is_user_active(user_profile: UserProfile, return_data: Optional[Dict[str, Any]] = None) -> bool:
    """Whether this account may log in, handling both realm-level and
    user-level deactivation; records the failure reason in return_data
    (if provided)."""
    if user_profile.realm.deactivated:
        if return_data is not None:
            return_data["inactive_realm"] = True
        return False

    if user_profile.is_active:
        return True

    # The user account itself is deactivated.
    if return_data is not None:
        if user_profile.is_mirror_dummy:
            # Record whether it's a mirror dummy account
            return_data["is_mirror_dummy"] = True
        return_data["inactive_user"] = True
        return_data["inactive_user_id"] = user_profile.id
    return False
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def common_get_active_user(
    email: str, realm: Realm, return_data: Optional[Dict[str, Any]] = None
) -> Optional[UserProfile]:
    """This is the core common function used by essentially all
    authentication backends to check if there's an active user account
    with a given email address in the organization, handling both
    user-level and realm-level deactivation correctly.
    """
    try:
        user_profile = get_user_by_delivery_email(email, realm)
    except UserProfile.DoesNotExist:
        # No account in the target realm.  If the address exists in some
        # other realm, report `invalid_subdomain` so the caller can show
        # a helpful error message.
        same_email_elsewhere = UserProfile.objects.filter(delivery_email__iexact=email)
        if not same_email_elsewhere.exists():
            return None
        if return_data is not None:
            return_data["invalid_subdomain"] = True
            return_data["matching_user_ids_in_different_realms"] = list(
                same_email_elsewhere.values("realm_id", "id")
            )
        return None

    if is_user_active(user_profile, return_data):
        return user_profile
    return None
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-15 17:27:09 +02:00
|
|
|
def is_subdomain_in_allowed_subdomains_list(subdomain: str, allowed_subdomains: List[str]) -> bool:
    """Whether subdomain is authorized by the given allow-list.

    The root subdomain is a special case, as sending an empty string in
    the list of values of the attribute may not be viable.  So, any of
    the ROOT_SUBDOMAIN_ALIASES can be used to signify the user is
    authorized for the root subdomain.
    """
    if subdomain in allowed_subdomains:
        return True

    if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
        return False
    if settings.ROOT_DOMAIN_LANDING_PAGE:
        return False
    return any(alias in allowed_subdomains for alias in settings.ROOT_SUBDOMAIN_ALIASES)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Type variable for a backend authenticate() function: any callable
# returning an Optional[UserProfile]; used by the decorators below.
AuthFuncT = TypeVar("AuthFuncT", bound=Callable[..., Optional[UserProfile]])
|
2019-08-01 15:09:27 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
class RateLimitedAuthenticationByUsername(RateLimitedObject):
    """Rate-limiting bucket keyed on the username being authenticated, so
    repeated login attempts against one account are throttled."""

    def __init__(self, username: str) -> None:
        self.username = username
        super().__init__()

    @override
    def key(self) -> str:
        # Bucket key namespaced by this class's name.
        return ":".join([type(self).__name__, self.username])

    @override
    def rules(self) -> List[Tuple[int, int]]:
        return settings.RATE_LIMITING_RULES["authenticate_by_username"]
|
2019-08-01 15:09:27 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
def rate_limit_authentication_by_username(request: HttpRequest, username: str) -> None:
    """Charge this request against the per-username authentication rate limit."""
    limiter = RateLimitedAuthenticationByUsername(username)
    limiter.rate_limit_request(request)
|
2019-08-01 15:09:27 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
def auth_rate_limiting_already_applied(request: HttpRequest) -> bool:
    """Whether a per-username auth rate limit has already been charged to
    this request (Django tries backends in turn; we only charge once)."""
    request_notes = RequestNotes.get_notes(request)

    for applied_limit in request_notes.ratelimits_applied:
        if isinstance(applied_limit.entity, RateLimitedAuthenticationByUsername):
            return True
    return False
|
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
|
|
|
|
# Django's authentication mechanism uses introspection on the various authenticate() functions
|
|
|
|
# defined by backends, so we need a decorator that doesn't break function signatures.
|
|
|
|
# @decorator does this for us.
|
|
|
|
# The usual @wraps from functools breaks signatures, so it can't be used here.
|
|
|
|
@decorator
def rate_limit_auth(auth_func: AuthFuncT, *args: Any, **kwargs: Any) -> Optional[UserProfile]:
    """Wrap a backend's authenticate() with per-username rate limiting.

    Expects the wrapped call shape (self, request, *, username=..., ...).
    Raises the rate-limit exception instead of calling auth_func when the
    username is over its limit; clears the username's rate-limit history
    on successful authentication.
    """
    if not settings.RATE_LIMITING_AUTHENTICATE:
        return auth_func(*args, **kwargs)

    request = args[1]
    username = kwargs["username"]
    if RequestNotes.get_notes(request).client is None or not client_is_exempt_from_rate_limiting(
        request
    ):
        # Django cycles through enabled authentication backends until one succeeds,
        # or all of them fail. If multiple backends are tried like this, we only want
        # to execute rate_limit_authentication_* once, on the first attempt.
        if not auth_rate_limiting_already_applied(request):
            # Apply rate limiting. If this request is above the limit,
            # RateLimitedError will be raised, interrupting the authentication process.
            # From there, the code calling authenticate() can either catch the exception
            # and handle it on its own, or it will be processed by RateLimitMiddleware.
            rate_limit_authentication_by_username(request, username)

    result = auth_func(*args, **kwargs)
    if result is not None:
        # Authentication succeeded, clear the rate-limiting record.
        RateLimitedAuthenticationByUsername(username).clear_history()

    return result
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-09-01 20:17:33 +02:00
|
|
|
@decorator
def log_auth_attempts(auth_func: AuthFuncT, *args: Any, **kwargs: Any) -> Optional[UserProfile]:
    """Wrap a backend's authenticate() to log the outcome of every attempt
    via the backend's own logger."""
    result = auth_func(*args, **kwargs)

    # args follows the bound-method call shape: (backend_instance, request, ...).
    backend_instance, request = args[0], args[1]

    log_auth_attempt(
        backend_instance.logger,
        request,
        kwargs["realm"],
        kwargs["username"],
        succeeded=result is not None,
        return_data=kwargs["return_data"],
    )

    return result
|
|
|
|
|
|
|
|
|
|
|
|
def log_auth_attempt(
    logger: logging.Logger,
    request: HttpRequest,
    realm: Realm,
    username: str,
    succeeded: bool,
    return_data: Dict[str, Any],
) -> None:
    """Emit a single structured log line describing an authentication attempt."""
    if succeeded:
        outcome = "success"
    else:
        outcome = "failed"
    logger.info(
        "Authentication attempt from %s: subdomain=%s;username=%s;outcome=%s;return_data=%s",
        request.META.get("REMOTE_ADDR"),
        realm.subdomain,
        username,
        outcome,
        return_data,
    )
|
|
|
|
|
|
|
|
|
2017-11-05 11:31:53 +01:00
|
|
|
class ZulipAuthMixin:
    """This common mixin is used to override Django's default behavior for
    looking up a logged-in user by ID to use a version that fetches
    from memcached before checking the database (avoiding a database
    query in most cases).
    """

    # Subclasses override `name`; it namespaces the backend's logger.
    name = "undefined"
    _logger: Optional[logging.Logger] = None

    @property
    def logger(self) -> logging.Logger:
        """Lazily create and cache a logger named zulip.auth.<backend name>."""
        if self._logger is None:
            self._logger = logging.getLogger(f"zulip.auth.{self.name}")
        return self._logger

    def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
        """Override the Django method for getting a UserProfile object from
        the user_profile_id, returning None rather than raising when the
        user does not exist."""
        try:
            return get_user_profile_by_id(user_profile_id)
        except UserProfile.DoesNotExist:
            return None
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-11-21 04:57:23 +01:00
|
|
|
class ZulipDummyBackend(ZulipAuthMixin):
    """Used when we want to log you in without checking any
    authentication (i.e. new user registration or when otherwise
    authentication has already been checked earlier in the process).

    We ensure that this backend only ever successfully authenticates
    when explicitly requested by including the use_dummy_backend kwarg.
    """

    def authenticate(
        self,
        request: Optional[HttpRequest] = None,
        *,
        username: str,
        realm: Realm,
        use_dummy_backend: bool = False,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        # Guard: refuse unless the caller explicitly opted into this backend.
        if not use_dummy_backend:
            return None
        return common_get_active_user(username, realm, return_data)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
auth: Use zxcvbn to ensure password strength on server side.
For a long time, we've been only doing the zxcvbn password strength
checks on the browser, which is helpful, but means users could through
hackery (or a bug in the frontend validation code) manage to set a
too-weak password. We fix this by running our password strength
validation on the backend as well, using python-zxcvbn.
In theory, a bug in python-zxcvbn could result in it producing a
different opinion than the frontend version; if so, it'd be a pretty
bad bug in the library, and hopefully we'd hear about it from users,
report upstream, and get it fixed that way. Alternatively, we can
switch to shelling out to node like we do for KaTeX.
Fixes #6880.
2019-11-18 08:11:03 +01:00
|
|
|
def check_password_strength(password: str) -> bool:
    """
    Returns True if the password is strong enough,
    False otherwise.
    """
    if len(password) < settings.PASSWORD_MIN_LENGTH:
        return False

    if password == "":
        # zxcvbn throws an exception when passed the empty string, so
        # we need a special case for the empty string password here.
        return False

    # zxcvbn's "guesses" estimates how many attempts a guesser would need.
    estimated_guesses = int(zxcvbn(password)["guesses"])
    return estimated_guesses >= settings.PASSWORD_MIN_GUESSES
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-11-01 20:22:12 +01:00
|
|
|
class EmailAuthBackend(ZulipAuthMixin):
    """
    Email+Password authentication backend (the default).

    Allows a user to sign in using an email/password pair.
    """

    name = "email"

    # Decorator order matters: rate limiting runs first, so over-limit
    # requests are rejected before any attempt is logged or checked.
    @rate_limit_auth
    @log_auth_attempts
    def authenticate(
        self,
        request: HttpRequest,
        *,
        username: str,
        password: str,
        realm: Realm,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        """Authenticate a user based on email address as the user name."""
        # Refuse early if password-based auth is disabled at either the
        # server or realm level, recording why in return_data.
        if not password_auth_enabled(realm):
            if return_data is not None:
                return_data["password_auth_disabled"] = True
            return None
        if not email_auth_enabled(realm):
            if return_data is not None:
                return_data["email_auth_disabled"] = True
            return None
        if password == "":
            # Never allow an empty password. This is defensive code;
            # a user having password "" should only be possible
            # through a bug somewhere else.
            return None

        user_profile = common_get_active_user(username, realm, return_data=return_data)
        if user_profile is None:
            return None

        try:
            is_password_correct = user_profile.check_password(password)
        except PasswordTooWeakError:
            # In some rare cases when password hasher is changed and the user has
            # a weak password, PasswordTooWeakError will be raised.
            self.logger.info(
                "User %s password can't be rehashed due to being too weak.", user_profile.id
            )
            if return_data is not None:
                # Tell the caller the user must reset their password.
                return_data["password_reset_needed"] = True
                return None
            else:
                # Since we can't communicate the situation via return_data,
                # we have to raise an error - a silent failure would not be right
                # because the password actually is correct, just can't be re-hashed.
                raise JsonableError(_("You need to reset your password."))

        if is_password_correct:
            return user_profile
        return None
|
2013-08-06 22:51:47 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-09 17:58:39 +01:00
|
|
|
def is_valid_email(email: str) -> bool:
    """True if email passes Django's syntactic email validation."""
    try:
        validate_email(email)
    except ValidationError:
        return False
    else:
        return True
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-05 02:24:18 +01:00
|
|
|
def check_ldap_config() -> None:
    """Assert that the server's LDAP settings are internally consistent."""
    if not settings.LDAP_APPEND_DOMAIN:
        # Email search needs to be configured in this case.
        assert settings.AUTH_LDAP_USERNAME_ATTR and settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH

    # These two are alternatives approaches to deactivating users based on an ldap attribute
    # and thus don't make sense to have enabled together.
    attr_map = settings.AUTH_LDAP_USER_ATTR_MAP
    assert not (attr_map.get("userAccountControl") and attr_map.get("deactivated"))
|
|
|
|
|
|
|
|
|
|
|
|
def ldap_should_sync_active_status() -> bool:
    """Whether the LDAP attr map is configured to sync account active status
    (via either of the two supported deactivation attributes)."""
    return any(
        attr in settings.AUTH_LDAP_USER_ATTR_MAP
        for attr in ("userAccountControl", "deactivated")
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-25 23:15:27 +02:00
|
|
|
def find_ldap_users_by_email(email: str) -> List[_LDAPUser]:
    """
    Returns the list of _LDAPUsers whose directory entries match the
    configured AUTH_LDAP_REVERSE_EMAIL_SEARCH query for this email address.
    """
    return LDAPReverseEmailSearch().search_for_users(email)
|
2019-10-05 01:02:46 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-29 07:25:08 +02:00
|
|
|
def email_belongs_to_ldap(realm: Realm, email: str) -> bool:
    """Determine whether an email address is managed by LDAP in this realm.

    For environments using both LDAP and Email+Password authentication, we
    must not allow EmailAuthBackend logins for LDAP-managed addresses (to
    avoid a security issue where one creates separate credentials for an
    LDAP user); this function implements that determination.
    """
    if not ldap_auth_enabled(realm):
        return False

    check_ldap_config()
    if settings.LDAP_APPEND_DOMAIN:
        # With an append domain configured, membership is decided purely by
        # the domain part of the address.
        return Address(addr_spec=email).domain.lower() == settings.LDAP_APPEND_DOMAIN

    # Without an append domain, we have to actually query the directory.
    return bool(find_ldap_users_by_email(email))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-27 23:03:00 +01:00
|
|
|
ldap_logger = logging.getLogger("zulip.ldap")
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
|
2021-08-25 23:15:27 +02:00
|
|
|
class LDAPReverseEmailSearch(_LDAPUser):
    """Helper for querying the LDAP directory for entries by email address.

    This class is a workaround: we want django-auth-ldap to search the
    directory for users with a given email address, but it provides
    neither an API for that nor an isolated connection-handling class —
    connection handling is tightly integrated into _LDAPUser. So we
    subclass _LDAPUser purely to reuse its connection machinery for
    running the reverse email search.

    We may be able to get rid of this in the future if we can get
    https://github.com/django-auth-ldap/django-auth-ldap/pull/150 merged upstream.
    """

    def __init__(self) -> None:
        # The superclass requires a username argument; it's irrelevant for
        # this search-only subclass, so we pass an empty string.
        super().__init__(LDAPBackend(), username="")

    def search_for_users(self, email: str) -> List[_LDAPUser]:
        """Run the configured reverse email search and wrap each hit in an
        _LDAPUser with its DN and attributes pre-populated."""
        reverse_search = settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH
        username_attr = settings.AUTH_LDAP_USERNAME_ATTR

        assert reverse_search is not None
        matches = reverse_search.execute(self.connection, {"email": email})

        found_users = []
        for user_dn, user_attrs in matches:
            ldap_user = _LDAPUser(self.backend, username=user_attrs[username_attr][0])
            ldap_user._user_dn = user_dn
            ldap_user._user_attrs = user_attrs
            found_users.append(ldap_user)

        return found_users
|
|
|
|
|
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class ZulipLDAPError(_LDAPUser.AuthenticationFailed):
    """Base class for LDAP errors raised by Zulip's backend code.

    Because this inherits from _LDAPUser.AuthenticationFailed, instances
    are caught and logged at debug level inside django-auth-ldap's
    authenticate().
    """
|
2015-10-13 23:08:05 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class NoMatchingLDAPUserError(ZulipLDAPError):
    """Raised when no LDAP directory entry matches the given username/email."""
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class OutsideLDAPDomainError(NoMatchingLDAPUserError):
    """Raised when an email address falls outside the configured
    LDAP_APPEND_DOMAIN and thus cannot correspond to an LDAP user."""
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-09-22 10:58:12 +02:00
|
|
|
class ZulipLDAPConfigurationError(Exception):
    """Raised when the LDAP settings themselves are invalid or incomplete."""
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-12-13 23:58:26 +01:00
|
|
|
LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
    """Common code between LDAP authentication (ZulipLDAPAuthBackend) and
    using LDAP just to sync user data (ZulipLDAPUserPopulator).

    To fully understand our LDAP backend, you may want to skim
    django_auth_ldap/backend.py from the upstream django-auth-ldap
    library. It's not a lot of code, and searching around in that
    file makes the flow for LDAP authentication clear.
    """

    # Backend identifier used in Zulip's authentication-method bookkeeping.
    name = "ldap"
|
|
|
|
|
2018-12-12 20:06:10 +01:00
|
|
|
    def __init__(self) -> None:
        """Initialize the backend, validating the server's LDAP configuration."""
        # Used to initialize a fake LDAP directory for both manual
        # and automated testing in a development environment where
        # there is no actual LDAP server.
        if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE:  # nocoverage
            init_fakeldap()

        check_ldap_config()
|
|
|
|
|
2019-03-10 02:43:29 +01:00
|
|
|
# Disable django-auth-ldap's permissions functions -- we don't use
|
|
|
|
# the standard Django user/group permissions system because they
|
|
|
|
# are prone to performance issues.
|
2021-02-12 08:19:30 +01:00
|
|
|
    def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any = None) -> bool:
        """Always False: Zulip doesn't use Django's permission framework."""
        return False
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
    def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool:
        """Always False: Zulip doesn't use Django's permission framework."""
        return False
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def get_all_permissions(self, user: Optional[UserProfile], obj: Any = None) -> Set[Any]:
        """Always empty: Zulip doesn't use Django's permission framework."""
        return set()
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def get_group_permissions(self, user: Optional[UserProfile], obj: Any = None) -> Set[Any]:
        """Always empty: Zulip doesn't use Django's permission framework."""
        return set()
|
|
|
|
|
2018-05-10 18:53:55 +02:00
|
|
|
    def django_to_ldap_username(self, username: str) -> str:
        """
        Translates django username (user_profile.delivery_email or whatever the user typed in the login
        field when authenticating via the LDAP backend) into LDAP username.
        Guarantees that the username it returns actually has an entry in the LDAP directory.
        Raises NoMatchingLDAPUserError if that's not possible.
        """
        result = username
        if settings.LDAP_APPEND_DOMAIN:
            if is_valid_email(username):
                address = Address(addr_spec=username)
                if address.domain != settings.LDAP_APPEND_DOMAIN:
                    raise OutsideLDAPDomainError(
                        f"Email {username} does not match LDAP domain {settings.LDAP_APPEND_DOMAIN}."
                    )
                # The LDAP username is the local part of the email address.
                result = address.username
        else:
            # We can use find_ldap_users_by_email
            if is_valid_email(username):
                email_search_result = find_ldap_users_by_email(username)
                if not email_search_result:
                    result = username
                elif len(email_search_result) == 1:
                    # Exactly one match: return its username directly.  This
                    # skips the existence check below, since the search just
                    # proved the entry exists.
                    return email_search_result[0]._username
                elif len(email_search_result) > 1:
                    # This is possible, but strange, so worth logging a warning about.
                    # We can't translate the email to a unique username,
                    # so we don't do anything else here.
                    logging.warning("Multiple users with email %s found in LDAP.", username)
                    result = username

        if _LDAPUser(self, result).attrs is None:
            # Check that there actually is an LDAP entry matching the result username
            # we want to return. Otherwise, raise an exception.
            error_message = (
                "No LDAP user matching django_to_ldap_username result: {}. Input username: {}"
            )
            raise NoMatchingLDAPUserError(
                error_message.format(result, username),
            )

        return result
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2019-10-06 00:32:25 +02:00
|
|
|
def user_email_from_ldapuser(self, username: str, ldap_user: _LDAPUser) -> str:
|
2021-02-12 08:20:45 +01:00
|
|
|
if hasattr(ldap_user, "_username"):
|
2019-10-06 00:32:25 +02:00
|
|
|
# In tests, we sometimes pass a simplified _LDAPUser without _username attr,
|
|
|
|
# and with the intended username in the username argument.
|
|
|
|
username = ldap_user._username
|
|
|
|
|
2015-10-13 22:57:38 +02:00
|
|
|
if settings.LDAP_APPEND_DOMAIN:
|
2022-07-27 23:33:49 +02:00
|
|
|
return Address(username=username, domain=settings.LDAP_APPEND_DOMAIN).addr_spec
|
2019-10-06 00:32:25 +02:00
|
|
|
|
|
|
|
if settings.LDAP_EMAIL_ATTR is not None:
|
2020-10-23 02:43:28 +02:00
|
|
|
# Get email from LDAP attributes.
|
2019-10-06 00:32:25 +02:00
|
|
|
if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
|
2022-11-17 09:30:48 +01:00
|
|
|
raise ZulipLDAPError(
|
2021-02-12 08:19:30 +01:00
|
|
|
f"LDAP user doesn't have the needed {settings.LDAP_EMAIL_ATTR} attribute"
|
|
|
|
)
|
2019-10-06 00:32:25 +02:00
|
|
|
else:
|
|
|
|
return ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0]
|
|
|
|
|
|
|
|
return username
|
|
|
|
|
|
|
|
def ldap_to_django_username(self, username: str) -> str:
|
|
|
|
"""
|
|
|
|
This is called inside django_auth_ldap with only one role:
|
|
|
|
to convert _LDAPUser._username to django username (so in Zulip, the email)
|
|
|
|
and pass that as "username" argument to get_or_build_user(username, ldapuser).
|
|
|
|
In many cases, the email is stored in the _LDAPUser's attributes, so it can't be
|
|
|
|
constructed just from the username. We choose to do nothing in this function,
|
|
|
|
and our overrides of get_or_build_user() obtain that username from the _LDAPUser
|
|
|
|
object on their own, through our user_email_from_ldapuser function.
|
|
|
|
"""
|
2013-11-21 01:30:20 +01:00
|
|
|
return username
|
|
|
|
|
2018-12-12 19:46:37 +01:00
|
|
|
    def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None:
        """If an "avatar" attribute is mapped, upload the user's LDAP photo
        as their Zulip avatar (skipping no-op updates via a content hash)."""
        if "avatar" in settings.AUTH_LDAP_USER_ATTR_MAP:
            # We do local imports here to avoid import loops
            from io import BytesIO

            from zerver.actions.user_settings import do_change_avatar_fields
            from zerver.lib.upload import upload_avatar_image

            avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP["avatar"]
            if avatar_attr_name not in ldap_user.attrs:  # nocoverage
                # If this specific user doesn't have e.g. a
                # thumbnailPhoto set in LDAP, just skip that user.
                return

            ldap_avatar = ldap_user.attrs[avatar_attr_name][0]

            avatar_changed = is_avatar_new(ldap_avatar, user)
            if not avatar_changed:
                # Don't do work to replace the avatar with itself.
                return

            # Structurally, to make the S3 backend happy, we need to
            # provide a Content-Type; since that isn't specified in
            # any metadata, we auto-detect it.
            content_type = magic.from_buffer(ldap_avatar[:1024], mime=True)
            if content_type.startswith("image/"):
                upload_avatar_image(BytesIO(ldap_avatar), user, user, content_type=content_type)
                do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER, acting_user=None)
                # Update avatar hash so future syncs can skip identical data.
                user.avatar_hash = user_avatar_content_hash(ldap_avatar)
                user.save(update_fields=["avatar_hash"])
            else:
                logging.warning("Could not parse %s field for user %s", avatar_attr_name, user.id)
|
2018-12-12 19:46:37 +01:00
|
|
|
|
2021-09-16 20:04:19 +02:00
|
|
|
    def is_user_disabled_in_ldap(self, ldap_user: _LDAPUser) -> bool:
        """Implements checks for whether a user has been
        disabled in the LDAP server being integrated with
        Zulip."""
        # Active Directory style: a bitmask attribute, where bit 2 marks the
        # account as disabled.  Takes priority over "deactivated".
        if "userAccountControl" in settings.AUTH_LDAP_USER_ATTR_MAP:
            account_control_value = ldap_user.attrs[
                settings.AUTH_LDAP_USER_ATTR_MAP["userAccountControl"]
            ][0]
            return bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK)

        # check_ldap_config() guarantees exactly one of the two mechanisms is
        # configured, so "deactivated" must be present here.
        assert "deactivated" in settings.AUTH_LDAP_USER_ATTR_MAP
        attr_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP["deactivated"]][0]

        # In the LDAP specification, a Boolean attribute should be
        # *exactly* either "TRUE" or "FALSE". However,
        # https://www.freeipa.org/page/V4/User_Life-Cycle_Management suggests
        # that FreeIPA at least documents using Yes/No for booleans.
        true_values = ["TRUE", "YES"]
        false_values = ["FALSE", "NO"]
        attr_value_upper = attr_value.upper()
        assert (
            attr_value_upper in true_values or attr_value_upper in false_values
        ), f"Invalid value '{attr_value}' in the LDAP attribute mapped to deactivated"
        return attr_value_upper in true_values
|
2018-12-13 23:58:26 +01:00
|
|
|
|
2020-08-15 18:33:16 +02:00
|
|
|
def is_account_realm_access_forbidden(self, ldap_user: _LDAPUser, realm: Realm) -> bool:
|
2022-07-07 22:28:24 +02:00
|
|
|
realm_access_control = settings.AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL
|
2020-09-22 18:18:45 +02:00
|
|
|
# org_membership takes priority over AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL.
|
|
|
|
if "org_membership" in settings.AUTH_LDAP_USER_ATTR_MAP:
|
|
|
|
org_membership_attr = settings.AUTH_LDAP_USER_ATTR_MAP["org_membership"]
|
|
|
|
allowed_orgs: List[str] = ldap_user.attrs.get(org_membership_attr, [])
|
|
|
|
if is_subdomain_in_allowed_subdomains_list(realm.subdomain, allowed_orgs):
|
|
|
|
return False
|
|
|
|
# If Advanced is not configured, forbid access
|
2022-07-07 22:28:24 +02:00
|
|
|
if realm_access_control is None:
|
2020-09-22 18:18:45 +02:00
|
|
|
return True
|
|
|
|
|
|
|
|
# If neither setting is configured, allow access.
|
2022-07-07 22:28:24 +02:00
|
|
|
if realm_access_control is None:
|
2020-08-15 18:33:16 +02:00
|
|
|
return False
|
2023-10-08 22:53:54 +02:00
|
|
|
if realm.subdomain not in realm_access_control:
|
|
|
|
# If a realm is not configured in this setting, it shouldn't
|
|
|
|
# be affected by it - therefore, allow access.
|
|
|
|
return False
|
2020-08-15 18:33:16 +02:00
|
|
|
|
2020-09-22 18:18:45 +02:00
|
|
|
# With settings.AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL, we
|
|
|
|
# allow access if and only if one of the entries for the
|
|
|
|
# target subdomain matches the user's LDAP attributes.
|
2023-10-08 22:53:54 +02:00
|
|
|
|
|
|
|
# Make sure the format of the setting makes sense.
|
|
|
|
assert isinstance(realm_access_control[realm.subdomain], list)
|
|
|
|
assert len(realm_access_control[realm.subdomain]) > 0
|
2020-09-22 18:18:45 +02:00
|
|
|
|
|
|
|
# Go through every "or" check
|
|
|
|
for attribute_group in realm_access_control[realm.subdomain]:
|
|
|
|
access = True
|
|
|
|
for attribute in attribute_group:
|
|
|
|
if not (
|
|
|
|
attribute in ldap_user.attrs
|
|
|
|
and attribute_group[attribute] in ldap_user.attrs[attribute]
|
|
|
|
):
|
|
|
|
access = False
|
|
|
|
if access:
|
|
|
|
return False
|
|
|
|
|
|
|
|
return True
|
2020-08-15 18:33:16 +02:00
|
|
|
|
2019-01-16 09:06:11 +01:00
|
|
|
@classmethod
|
2020-07-17 20:22:10 +02:00
|
|
|
def get_mapped_name(cls, ldap_user: _LDAPUser) -> str:
|
|
|
|
"""Constructs the user's Zulip full_name from the LDAP data"""
|
2019-01-10 18:25:34 +01:00
|
|
|
if "full_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
|
|
|
|
full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"]
|
2020-07-17 20:22:10 +02:00
|
|
|
full_name = ldap_user.attrs[full_name_attr][0]
|
2023-06-06 22:54:42 +02:00
|
|
|
elif all(key in settings.AUTH_LDAP_USER_ATTR_MAP for key in ["first_name", "last_name"]):
|
2019-01-10 18:25:34 +01:00
|
|
|
first_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["first_name"]
|
|
|
|
last_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["last_name"]
|
2020-07-17 20:22:10 +02:00
|
|
|
first_name = ldap_user.attrs[first_name_attr][0]
|
|
|
|
last_name = ldap_user.attrs[last_name_attr][0]
|
|
|
|
full_name = f"{first_name} {last_name}"
|
2019-01-10 18:25:34 +01:00
|
|
|
else:
|
2022-11-17 09:30:48 +01:00
|
|
|
raise ZulipLDAPError("Missing required mapping for user's full name")
|
2019-01-10 18:25:34 +01:00
|
|
|
|
2020-07-17 20:22:10 +02:00
|
|
|
return full_name
|
2019-01-10 18:25:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def sync_full_name_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None:
|
2022-04-14 23:49:26 +02:00
|
|
|
from zerver.actions.user_settings import do_change_full_name
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-17 20:22:10 +02:00
|
|
|
full_name = self.get_mapped_name(ldap_user)
|
2019-01-10 18:25:34 +01:00
|
|
|
if full_name != user_profile.full_name:
|
|
|
|
try:
|
|
|
|
full_name = check_full_name(full_name)
|
|
|
|
except JsonableError as e:
|
2022-11-17 09:30:48 +01:00
|
|
|
raise ZulipLDAPError(e.msg)
|
2019-01-10 18:25:34 +01:00
|
|
|
do_change_full_name(user_profile, full_name, None)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def sync_custom_profile_fields_from_ldap(
        self, user_profile: UserProfile, ldap_user: _LDAPUser
    ) -> None:
        """Sync custom profile fields mapped via AUTH_LDAP_USER_ATTR_MAP
        entries of the form "custom_profile_field__<var_name>"."""
        values_by_var_name: Dict[str, Union[int, str, List[int]]] = {}
        for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items():
            if not attr.startswith("custom_profile_field__"):
                continue
            var_name = attr.split("custom_profile_field__")[1]
            try:
                value = ldap_user.attrs[ldap_attr][0]
            except KeyError:
                # If this user doesn't have this field set then ignore this
                # field and continue syncing other fields. `django-auth-ldap`
                # automatically logs error about missing field.
                continue
            values_by_var_name[var_name] = value

        try:
            sync_user_profile_custom_fields(user_profile, values_by_var_name)
        except SyncUserError as e:
            raise ZulipLDAPError(str(e)) from e
|
2019-01-29 13:39:21 +01:00
|
|
|
|
2023-07-15 22:25:36 +02:00
|
|
|
    def sync_groups_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None:
        """
        For the groups set up for syncing for the realm in LDAP_SYNCHRONIZED_GROUPS_BY_REALM:

        (1) Makes sure the user has membership in the Zulip UserGroups corresponding
        to the LDAP groups ldap_user belongs to.
        (2) Makes sure the user doesn't have membership in the Zulip UserGroups corresponding
        to the LDAP groups ldap_user doesn't belong to.
        """

        if user_profile.realm.string_id not in settings.LDAP_SYNCHRONIZED_GROUPS_BY_REALM:
            # no groups to sync for this realm
            return

        configured_ldap_group_names_for_sync = set(
            settings.LDAP_SYNCHRONIZED_GROUPS_BY_REALM[user_profile.realm.string_id]
        )

        try:
            ldap_logger.debug("Syncing groups for user: %s", user_profile.id)
            # Only LDAP groups configured for syncing matter; ignore the rest.
            intended_group_name_set_for_user = set(ldap_user.group_names).intersection(
                configured_ldap_group_names_for_sync
            )

            # The Zulip-side memberships the user currently has, restricted
            # to the groups configured for syncing.
            existing_group_name_set_for_user = set(
                UserGroupMembership.objects.filter(
                    user_group__realm=user_profile.realm,
                    user_group__name__in=set(
                        settings.LDAP_SYNCHRONIZED_GROUPS_BY_REALM[user_profile.realm.string_id]
                    ),
                    user_profile=user_profile,
                ).values_list("user_group__name", flat=True)
            )

            ldap_logger.debug(
                "intended groups: %s; zulip groups: %s",
                repr(intended_group_name_set_for_user),
                repr(existing_group_name_set_for_user),
            )

            # Groups the user should be in but isn't yet.
            new_groups = UserGroup.objects.filter(
                name__in=intended_group_name_set_for_user.difference(
                    existing_group_name_set_for_user
                ),
                realm=user_profile.realm,
            )
            if new_groups:
                ldap_logger.debug(
                    "add %s to %s", user_profile.id, [group.name for group in new_groups]
                )
                bulk_add_members_to_user_groups(new_groups, [user_profile.id], acting_user=None)

            # Groups the user is in but should no longer be.
            group_names_for_membership_deletion = existing_group_name_set_for_user.difference(
                intended_group_name_set_for_user
            )
            groups_for_membership_deletion = UserGroup.objects.filter(
                name__in=group_names_for_membership_deletion, realm=user_profile.realm
            )

            if group_names_for_membership_deletion:
                ldap_logger.debug(
                    "removing groups %s from %s",
                    group_names_for_membership_deletion,
                    user_profile.id,
                )
                bulk_remove_members_from_user_groups(
                    groups_for_membership_deletion, [user_profile.id], acting_user=None
                )

        # NOTE(review): this broad `except Exception` deliberately converts any
        # failure (including ORM errors) into a ZulipLDAPError so django-auth-ldap
        # logs it at debug level; the original exception is chained via `from e`.
        except Exception as e:
            raise ZulipLDAPError(str(e)) from e
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
|
2017-09-22 10:58:12 +02:00
|
|
|
REALM_IS_NONE_ERROR = 1
|
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
    @rate_limit_auth
    @log_auth_attempts
    def authenticate(
        self,
        request: Optional[HttpRequest] = None,
        *,
        username: str,
        password: str,
        realm: Realm,
        prereg_realm: Optional[PreregistrationRealm] = None,
        prereg_user: Optional[PreregistrationUser] = None,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        """Authenticate the username/password pair against LDAP for `realm`.

        Returns the UserProfile on success and None on failure; on failure,
        may record details (e.g. "no_matching_ldap_user") in return_data.
        """
        # Stash realm/prereg state on self; get_or_build_user (invoked later
        # by django-auth-ldap) reads these when deciding account creation.
        self._realm = realm
        self._prereg_user = prereg_user
        self._prereg_realm = prereg_realm
        if not ldap_auth_enabled(realm):
            return None

        try:
            # We want to pass the user's LDAP username into
            # authenticate() below. If an email address was entered
            # in the login form, we need to use
            # django_to_ldap_username to translate the email address
            # to the user's LDAP username before calling the
            # django-auth-ldap authenticate().
            username = self.django_to_ldap_username(username)
        except NoMatchingLDAPUserError as e:
            ldap_logger.debug("%s: %s", type(self).__name__, e)
            if return_data is not None:
                return_data["no_matching_ldap_user"] = True
            return None

        # Call into (ultimately) the django-auth-ldap authenticate
        # function. This will check the username/password pair
        # against the LDAP database, and assuming those are correct,
        # end up calling `self.get_or_build_user` with the
        # authenticated user's data from LDAP.
        return super().authenticate(request=request, username=username, password=password)
|
2018-05-31 23:10:22 +02:00
|
|
|
|
2018-05-22 08:33:56 +02:00
|
|
|
    def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
        """The main function of our authentication backend extension of
        django-auth-ldap.  When this is called (from `authenticate`),
        django-auth-ldap will already have verified that the provided
        username and password match those in the LDAP database.

        This function's responsibility is to check (1) whether the
        email address for this user obtained from LDAP has an active
        account in this Zulip realm.  If so, it will log them in.

        Otherwise, to provide a seamless single sign-on experience
        with LDAP, this function can automatically create a new Zulip
        user account in the realm (assuming the realm is configured to
        allow that email address to sign up).

        Returns a (user_profile, created) tuple, where `created` is
        True only if a brand-new account was provisioned here.  Raises
        ZulipLDAPError on any condition that should abort the login:
        forbidden realm access, deactivated user or realm, invalid or
        disallowed email, or an invalid full name.
        """
        return_data: Dict[str, Any] = {}

        # Map the LDAP username to the Zulip account's delivery email address.
        username = self.user_email_from_ldapuser(username, ldap_user)

        if self.is_account_realm_access_forbidden(ldap_user, self._realm):
            raise ZulipLDAPError("User not allowed to access realm")

        if ldap_should_sync_active_status():
            ldap_disabled = self.is_user_disabled_in_ldap(ldap_user)
            if ldap_disabled:
                # Treat disabled users as deactivated in Zulip.
                return_data["no_matching_ldap_user"] = True if False else return_data.get("no_matching_ldap_user", False)
                return_data["inactive_user"] = True
                raise ZulipLDAPError("User has been deactivated")

        user_profile = common_get_active_user(username, self._realm, return_data)
        if user_profile is not None:
            # An existing user, successfully authed; return it.
            return user_profile, False

        if return_data.get("inactive_realm"):
            # This happens if there is a user account in a deactivated realm
            raise ZulipLDAPError("Realm has been deactivated")
        if return_data.get("inactive_user"):
            raise ZulipLDAPError("User has been deactivated")
        # An invalid_subdomain `return_data` value here is ignored,
        # since that just means we're trying to create an account in a
        # second realm on the server (`ldap_auth_enabled(realm)` would
        # have been false if this user wasn't meant to have an account
        # in this second realm).
        if self._realm.deactivated:
            # This happens if no account exists, but the realm is
            # deactivated, so we shouldn't create a new user account
            raise ZulipLDAPError("Realm has been deactivated")

        try:
            validate_email(username)
        except ValidationError:
            error_message = f"{username} is not a valid email address."
            # This indicates a misconfiguration of ldap settings
            # or a malformed email value in the ldap directory,
            # so we should log a warning about this before failing.
            self.logger.warning(error_message)
            raise ZulipLDAPError(error_message)

        # Makes sure that the email domain hasn't been restricted for this
        # realm.  The main thing here is email_allowed_for_realm; but
        # we also call validate_email_not_already_in_realm just for consistency,
        # even though its checks were already done above.
        try:
            email_allowed_for_realm(username, self._realm)
            validate_email_not_already_in_realm(self._realm, username)
        except DomainNotAllowedForRealmError:
            raise ZulipLDAPError("This email domain isn't allowed in this organization.")
        except (DisposableEmailError, EmailContainsPlusError):
            raise ZulipLDAPError("Email validation failed.")

        # We have valid LDAP credentials; time to create an account.
        full_name = self.get_mapped_name(ldap_user)
        try:
            full_name = check_full_name(full_name)
        except JsonableError as e:
            raise ZulipLDAPError(e.msg)

        # Extra keyword arguments for do_create_user, filled in from the
        # invitation (self._prereg_user) or realm-creation
        # (self._prereg_realm) flow objects when this signup came through
        # one of those flows.
        opts: Dict[str, Any] = {}
        if self._prereg_user:
            invited_as = self._prereg_user.invited_as
            opts["prereg_user"] = self._prereg_user
            opts["role"] = invited_as
            opts["realm_creation"] = False
            # TODO: Ideally, we should add a mechanism for the user
            # entering which default stream groups they've selected in
            # the LDAP flow.
            opts["default_stream_groups"] = []

        if self._prereg_realm:
            opts["prereg_realm"] = self._prereg_realm
            opts["realm_creation"] = True
            opts["role"] = UserProfile.ROLE_REALM_OWNER
            opts["default_stream_groups"] = []

        user_profile = do_create_user(
            username,
            # password=None, since this user authenticates via LDAP.
            None,
            self._realm,
            full_name,
            tos_version=UserProfile.TOS_VERSION_BEFORE_FIRST_LOGIN,
            acting_user=None,
            **opts,
        )
        self.sync_avatar_from_ldap(user_profile, ldap_user)
        self.sync_custom_profile_fields_from_ldap(user_profile, ldap_user)

        return user_profile, True
|
2013-11-21 01:30:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-11-09 00:27:18 +01:00
|
|
|
class ZulipLDAPUser(_LDAPUser):
    """
    An extension of django-auth-ldap's _LDAPUser that carries a `realm`
    attribute.  Its purpose is to let the inherited populate_user() sync
    LDAP data into the UserProfile belonging to that specific realm; the
    realm uniquely identifies the target UserProfile when a single LDAP
    user is registered in multiple realms on the server.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Claim the `realm` keyword for ourselves before delegating;
        # _LDAPUser's constructor does not accept it.
        self.realm: Realm = kwargs.pop("realm")
        super().__init__(*args, **kwargs)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase):
    """Just like ZulipLDAPAuthBackend, but doesn't let you log in.  Used
    for syncing data like names, avatars, and custom profile fields
    from LDAP in `manage.py sync_ldap_user_data` as well as in
    registration for organizations that use a different SSO solution
    for managing login (often via RemoteUserBackend).
    """

    def authenticate(
        self,
        request: Optional[HttpRequest] = None,
        *,
        username: str,
        password: str,
        realm: Realm,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        # Deliberately a no-op: this backend exists only for data
        # syncing and must never authenticate anyone.
        return None

    def get_or_build_user(
        self, username: str, ldap_user: ZulipLDAPUser
    ) -> Tuple[UserProfile, bool]:
        """This is used only in non-authentication contexts such as:
        ./manage.py sync_ldap_user_data

        Returns (user, built), where `built` is always False here, since
        this codepath only syncs users that already exist in the database.
        """
        # Obtain the django username from the ldap_user object:
        username = self.user_email_from_ldapuser(username, ldap_user)

        # We set the built flag (which tells django-auth-ldap whether the user object
        # was taken from the database or freshly built) to False - because in this codepath
        # the user we're syncing of course already has to exist in the database.
        user = get_user_by_delivery_email(username, ldap_user.realm)
        built = False
        # Synchronise the UserProfile with its LDAP attributes:
        if ldap_should_sync_active_status():
            user_disabled_in_ldap = self.is_user_disabled_in_ldap(ldap_user)
            if user_disabled_in_ldap:
                if user.is_active:
                    ldap_logger.info(
                        "Deactivating user %s because they are disabled in LDAP.",
                        user.delivery_email,
                    )
                    do_deactivate_user(user, acting_user=None)
                # Do an early return to avoid trying to sync additional data.
                return (user, built)
            elif not user.is_active:
                ldap_logger.info(
                    "Reactivating user %s because they are not disabled in LDAP.",
                    user.delivery_email,
                )
                do_reactivate_user(user, acting_user=None)

        # Sync the remaining LDAP-managed attributes for active users.
        self.sync_avatar_from_ldap(user, ldap_user)
        self.sync_full_name_from_ldap(user, ldap_user)
        self.sync_custom_profile_fields_from_ldap(user, ldap_user)
        self.sync_groups_from_ldap(user, ldap_user)
        return (user, built)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class PopulateUserLDAPError(ZulipLDAPError):
    """Raised when django-auth-ldap reports an LDAPError while populating
    a user; the ldap_error signal handler below converts that signal into
    this exception so it propagates normally to the caller."""

    pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-09-04 10:11:25 +02:00
|
|
|
@receiver(ldap_error, sender=ZulipLDAPUserPopulator)
def catch_ldap_error(signal: Signal, **kwargs: Any) -> None:
    """
    Inside django_auth_ldap populate_user(), if LDAPError is raised,
    e.g. due to invalid connection credentials, the function catches it
    and emits a signal (ldap_error) to communicate this error to others.
    We normally don't use signals, but here there's no choice, so in this function
    we essentially convert the signal to a normal exception that will properly
    propagate out of django_auth_ldap internals.
    """
    if kwargs["context"] != "populate_user":
        return
    # The exception message can contain the password (if it was invalid),
    # so it seems better not to log that, and only use the original exception's name here.
    original_exception = kwargs["exception"]
    raise PopulateUserLDAPError(type(original_exception).__name__)
|
2019-09-04 10:11:25 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-08-26 21:13:23 +02:00
|
|
|
def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bool:
    """Sync one UserProfile from the LDAP directory.

    Returns True if the user was updated (or deactivated as
    non-matching), False if the user wasn't found in LDAP and
    deactivation of non-matching users is disabled.  Raises
    PopulateUserLDAPError if populate_user fails unexpectedly.
    """
    backend = ZulipLDAPUserPopulator()
    try:
        ldap_username = backend.django_to_ldap_username(user_profile.delivery_email)
    except NoMatchingLDAPUserError:
        # LDAP_DEACTIVATE_NON_MATCHING_USERS, when explicitly set,
        # overrides the default of deactivating non-matching users only
        # when LDAP is the sole authentication method (ONLY_LDAP).
        if (
            settings.ONLY_LDAP
            if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS is None
            else settings.LDAP_DEACTIVATE_NON_MATCHING_USERS
        ):
            do_deactivate_user(user_profile, acting_user=None)
            logger.info("Deactivated non-matching user: %s", user_profile.delivery_email)
            return True
        elif user_profile.is_active:
            logger.warning("Did not find %s in LDAP.", user_profile.delivery_email)
        return False

    # What one would expect to do here is just a call to
    # `backend.populate_user`, which in turn just creates the
    # `_LDAPUser` object and calls `ldap_user.populate_user()` on
    # that. Unfortunately, that will produce incorrect results in the
    # case that the server has multiple Zulip users in different
    # realms associated with a single LDAP user, because
    # `django-auth-ldap` isn't implemented with the possibility of
    # multiple realms on different subdomains in mind.
    #
    # To address this, we construct a version of the _LDAPUser class
    # extended to store the realm of the target user, and call its
    # `.populate_user` function directly.
    #
    # Ideally, we'd contribute changes to `django-auth-ldap` upstream
    # making this flow possible in a more directly supported fashion.
    updated_user = ZulipLDAPUser(backend, ldap_username, realm=user_profile.realm).populate_user()
    if updated_user:
        logger.info("Updated %s.", user_profile.delivery_email)
        return True

    raise PopulateUserLDAPError(f"populate_user unexpectedly returned {updated_user}")
|
2019-01-12 12:32:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-03-09 07:52:14 +01:00
|
|
|
# Quick tool to test whether you're correctly authenticating to LDAP
def query_ldap(email: str) -> List[str]:
    """Return human-readable lines describing what the configured LDAP
    directory knows about `email`, one line per mapped attribute."""
    values: List[str] = []

    backend = None
    for candidate in get_backends():
        if isinstance(candidate, LDAPBackend):
            backend = candidate
            break
    if backend is None:
        values.append("LDAP backend not configured on this server.")
        return values

    try:
        ldap_username = backend.django_to_ldap_username(email)
    except NoMatchingLDAPUserError as e:
        values.append(f"No such user found: {e}")
        return values

    ldap_attrs = _LDAPUser(backend, ldap_username).attrs

    for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
        value = ldap_attrs.get(ldap_field, ["LDAP field not present"])[0]
        if django_field == "avatar" and isinstance(value, bytes):
            # Don't dump raw image bytes into the report.
            value = "(An avatar image file)"
        values.append(f"{django_field}: {value}")

    if settings.LDAP_EMAIL_ATTR is not None:
        values.append("email: {}".format(ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
    return values
|
2019-03-09 07:52:14 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-08-19 02:58:20 +02:00
|
|
|
class DevAuthBackend(ZulipAuthMixin):
    """Allow logging in as any user without a password. This is used for
    convenience when developing Zulip, and is disabled in production."""

    name = "dev"

    def authenticate(
        self,
        request: Optional[HttpRequest] = None,
        *,
        dev_auth_username: str,
        realm: Realm,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        # Refuse to do anything unless the realm has dev login enabled.
        if dev_auth_enabled(realm):
            return common_get_active_user(dev_auth_username, realm, return_data=return_data)
        return None
|
2016-07-20 13:33:27 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-02 06:24:43 +02:00
|
|
|
class ExternalAuthMethodDictT(TypedDict):
    """Serializable description of one external authentication method,
    in the format produced by ExternalAuthMethod.dict_representation."""

    name: str
    display_name: str
    # Icon to display on the method's login/signup buttons, or None if
    # there isn't one.  (Presumably a URL/path string — confirm against
    # the consumers of this dict.)
    display_icon: Optional[str]
    login_url: str
    signup_url: str
|
2019-12-08 23:11:25 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
class ExternalAuthMethod(ABC):
    """
    To register a backend as an external_authentication_method, it should
    subclass ExternalAuthMethod and define its dict_representation
    classmethod, and finally use the external_auth_method class decorator to
    get added to the EXTERNAL_AUTH_METHODS list.
    """

    # Placeholder values; subclasses are expected to override these.
    auth_backend_name = "undeclared"
    name = "undeclared"
    display_icon: Optional[str] = None

    # Used to determine how to order buttons on login form, backend with
    # higher sort order are displayed first.
    sort_order = 0

    @classmethod
    @abstractmethod
    def dict_representation(cls, realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
        """
        Method returning dictionaries representing the authentication methods
        corresponding to the backend that subclasses this. The documentation
        for the external_authentication_methods field of the /server_settings endpoint
        explains the details of these dictionaries.
        This returns a list, because one backend can support configuring multiple methods,
        that are all serviced by that backend - our SAML backend is an example of that.
        """
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Registry of all external authentication methods available on this
# server; populated via the @external_auth_method class decorator.
EXTERNAL_AUTH_METHODS: List[Type[ExternalAuthMethod]] = []
|
2019-12-08 23:11:25 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
def external_auth_method(cls: Type[ExternalAuthMethod]) -> Type[ExternalAuthMethod]:
    """Class decorator that registers an ExternalAuthMethod subclass in
    EXTERNAL_AUTH_METHODS, making it available as an external
    authentication method.  Returns the class unchanged."""
    assert issubclass(cls, ExternalAuthMethod)

    EXTERNAL_AUTH_METHODS.append(cls)
    return cls
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-10-23 02:43:28 +02:00
|
|
|
# We want to be able to store this data in Redis, so it has to be easy to serialize.
# That's why we avoid having fields that could pose a problem for that.
class ExternalAuthDataDict(TypedDict, total=False):
    """State carried through an external authentication flow; all fields
    are optional (total=False) since different flows set different subsets."""

    subdomain: str
    full_name: str
    email: str
    is_signup: bool
    is_realm_creation: bool
    redirect_to: str
    # One-time passwords for the mobile/desktop app login flows.
    mobile_flow_otp: Optional[str]
    desktop_flow_otp: Optional[str]
    multiuse_object_key: str
    full_name_validated: bool
    # The mobile app doesn't actually use a session, so this
    # data is not applicable there.
    params_to_store_in_authenticated_session: Dict[str, str]
|
2020-02-23 18:58:08 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-02-23 18:58:08 +01:00
|
|
|
class ExternalAuthResult:
    """Represents the result of an external authentication attempt, pairing
    the authenticated UserProfile (which may be None, e.g. for signup flows)
    with an ExternalAuthDataDict of flow parameters.

    The result can be serialized into Redis via store_data(), which returns a
    short-lived token; a later request (possibly on a different subdomain)
    reconstructs the result by passing that token as login_token.
    """

    LOGIN_KEY_PREFIX = "login_key_"
    LOGIN_KEY_FORMAT = LOGIN_KEY_PREFIX + "{token}"
    # Tokens are single-use and only need to survive one redirect.
    LOGIN_KEY_EXPIRATION_SECONDS = 15
    LOGIN_TOKEN_LENGTH = UserProfile.API_KEY_LENGTH

    def __init__(
        self,
        *,
        user_profile: Optional[UserProfile] = None,
        data_dict: Optional[ExternalAuthDataDict] = None,
        login_token: Optional[str] = None,
        delete_stored_data: bool = True,
    ) -> None:
        if data_dict is None:
            data_dict = {}

        if login_token is not None:
            # Token-based construction is mutually exclusive with passing
            # the data directly.
            assert (not data_dict) and (
                user_profile is None
            ), "Passing in data_dict or user_profile with login_token is disallowed."
            self.instantiate_with_token(login_token, delete_stored_data)
        else:
            # Copy, so later normalization below doesn't mutate the caller's dict.
            self.data_dict = data_dict.copy()
            self.user_profile = user_profile

        if self.user_profile is not None:
            # Ensure data inconsistent with the user_profile wasn't passed in inside the data_dict argument.
            assert (
                "full_name" not in data_dict
                or data_dict["full_name"] == self.user_profile.full_name
            )
            assert (
                "email" not in data_dict
                or data_dict["email"].lower() == self.user_profile.delivery_email.lower()
            )
            # Update these data_dict fields to ensure consistency with self.user_profile. This is mostly
            # defensive code, but is useful in these scenarios:
            # 1. user_profile argument was passed in, and no full_name or email_data in the data_dict arg.
            # 2. We're instantiating from the login_token and the user has changed their full_name since
            #    the data was stored under the token.
            self.data_dict["full_name"] = self.user_profile.full_name
            self.data_dict["email"] = self.user_profile.delivery_email

            if "subdomain" not in self.data_dict:
                self.data_dict["subdomain"] = self.user_profile.realm.subdomain
            if not self.user_profile.is_mirror_dummy:
                self.data_dict["is_signup"] = False

    def store_data(self) -> str:
        """Serialize data_dict into Redis; return the token for retrieving it."""
        key = put_dict_in_redis(
            redis_client,
            self.LOGIN_KEY_FORMAT,
            self.data_dict,
            expiration_seconds=self.LOGIN_KEY_EXPIRATION_SECONDS,
            token_length=self.LOGIN_TOKEN_LENGTH,
        )
        token = key.split(self.LOGIN_KEY_PREFIX, 1)[1]  # remove the prefix
        return token

    def instantiate_with_token(self, token: str, delete_stored_data: bool = True) -> None:
        """Populate self from data previously stored under `token`.

        Raises InvalidTokenError if the token is unknown/expired or the
        stored data is missing required fields.
        """
        key = self.LOGIN_KEY_FORMAT.format(token=token)
        data = get_dict_from_redis(redis_client, self.LOGIN_KEY_FORMAT, key)
        if data is None or None in [data.get("email"), data.get("subdomain")]:
            raise self.InvalidTokenError

        if delete_stored_data:
            # Tokens are single-use; drop the data as soon as it's consumed.
            redis_client.delete(key)

        self.data_dict = cast(ExternalAuthDataDict, data)

        # Here we refetch the UserProfile object (if any) for this
        # ExternalAuthResult. Using authenticate() will re-check for
        # (unlikely) races like the realm or user having been deactivated
        # between generating this ExternalAuthResult and accessing it.
        #
        # In theory, we should return_data here so the caller can do
        # more customized error messages for those unlikely races, but
        # it's likely not worth implementing.
        realm = get_realm(data["subdomain"])
        auth_result = authenticate(username=data["email"], realm=realm, use_dummy_backend=True)
        if auth_result is not None:
            assert isinstance(auth_result, UserProfile)
        self.user_profile = auth_result

    class InvalidTokenError(Exception):
        # Raised when a login_token doesn't correspond to valid stored data.
        pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-11-17 09:30:48 +01:00
|
|
|
class SyncUserError(Exception):
    """Raised when externally-provided user data (e.g. custom profile field
    values supplied by an IdP) cannot be synced to the user."""

    pass
|
|
|
|
|
|
|
|
|
|
|
|
def sync_user_profile_custom_fields(
    user_profile: UserProfile, custom_field_name_to_value: Dict[str, Any]
) -> None:
    """Update the user's custom profile fields to the given values.

    Keys of custom_field_name_to_value are "var names": the field's name
    lowercased with spaces replaced by underscores.  Fields whose value
    already matches are skipped.

    Raises SyncUserError if a named field doesn't exist in the realm or a
    value fails validation.
    """
    # Map var name -> CustomProfileField for the user's realm.
    fields_by_var_name: Dict[str, CustomProfileField] = {}
    custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id)
    for field in custom_profile_fields:
        var_name = "_".join(field.name.lower().split(" "))
        fields_by_var_name[var_name] = field

    # Current values keyed the same way, so no-op updates can be skipped.
    existing_values: Dict[str, Any] = {}
    for data in user_profile.profile_data():
        var_name = "_".join(data["name"].lower().split(" "))
        existing_values[var_name] = data["value"]

    profile_data: List[ProfileDataElementUpdateDict] = []
    for var_name, value in custom_field_name_to_value.items():
        try:
            field = fields_by_var_name[var_name]
        except KeyError:
            # `from None`: the KeyError adds no information beyond the message.
            raise SyncUserError(f"Custom profile field with name {var_name} not found.") from None
        if existing_values.get(var_name) == value:
            continue
        try:
            validate_user_custom_profile_field(user_profile.realm.id, field, value)
        except ValidationError as error:
            # Chain the original ValidationError so tracebacks show the root cause.
            raise SyncUserError(f"Invalid data for {var_name} field: {error.message}") from error
        profile_data.append(
            {
                "id": field.id,
                "value": value,
            }
        )
    do_update_user_custom_profile_data_if_changed(user_profile, profile_data)
|
|
|
|
|
|
|
|
|
2019-12-10 00:42:12 +01:00
|
|
|
@external_auth_method
class ZulipRemoteUserBackend(RemoteUserBackend, ExternalAuthMethod):
    """Authentication backend that reads the Apache REMOTE_USER variable.
    Used primarily in enterprise environments with an SSO solution
    that has an Apache REMOTE_USER integration. For manual testing, see

      https://zulip.readthedocs.io/en/latest/production/authentication-methods.html

    See also remote_user_sso in zerver/views/auth.py.
    """

    auth_backend_name = "RemoteUser"
    name = "remoteuser"
    display_icon = None
    # If configured, this backend should have its button near the top of the list.
    sort_order = 9000

    # Don't auto-create accounts for unknown REMOTE_USER values; Zulip
    # requires going through its own registration flow.
    create_unknown_user = False

    @override
    def authenticate(  # type: ignore[override] # authenticate has an incompatible signature with ModelBackend and BaseBackend
        self,
        request: Optional[HttpRequest] = None,
        *,
        remote_user: str,
        realm: Realm,
        return_data: Optional[Dict[str, Any]] = None,
    ) -> Optional[UserProfile]:
        """Return the active user for remote_user's email in this realm, or
        None if this backend is disabled for the realm or no such user."""
        if not auth_enabled_helper(["RemoteUser"], realm):
            return None

        email = remote_user_to_email(remote_user)
        return common_get_active_user(email, realm, return_data=return_data)

    @classmethod
    @override
    def dict_representation(cls, realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
        # Serializable description of this auth method, for API consumers
        # (e.g. rendering login buttons).
        return [
            dict(
                name=cls.name,
                display_name="SSO",
                display_icon=cls.display_icon,
                # The user goes to the same URL for both login and signup:
                login_url=reverse("start-login-sso"),
                signup_url=reverse("start-login-sso"),
            )
        ]
|
|
|
|
|
2019-12-10 00:42:12 +01:00
|
|
|
|
2021-06-26 18:51:43 +02:00
|
|
|
def redirect_to_signup(realm: Realm) -> HttpResponseRedirect:
    """Send the browser to the registration page on the realm's own domain."""
    return HttpResponseRedirect(realm.uri + reverse("register"))
|
|
|
|
|
|
|
|
|
|
|
|
def redirect_to_login(realm: Realm) -> HttpResponseRedirect:
    """Send the browser to the regular login page on the realm's own domain."""
    path = reverse("login_page", kwargs={"template_name": "zerver/login.html"})
    return HttpResponseRedirect(realm.uri + path)
|
|
|
|
|
|
|
|
|
2021-08-23 15:14:05 +02:00
|
|
|
def redirect_deactivated_user_to_login(realm: Realm, email: str) -> HttpResponseRedirect:
    """Send a user with a deactivated account to the realm's login page,
    passing the account's email via the is_deactivated query parameter so
    the page can display an appropriate notice.
    """
    # Specifying the template name makes sure that the user is not redirected to dev_login in case of
    # a deactivated account on a test server.
    path = reverse("login_page", kwargs={"template_name": "zerver/login.html"})
    query = urlencode({"is_deactivated": email})
    return HttpResponseRedirect(append_url_query_string(realm.uri + path, query))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def social_associate_user_helper(
    backend: BaseAuth, return_data: Dict[str, Any], *args: Any, **kwargs: Any
) -> Union[HttpResponse, Optional[UserProfile]]:
    """Responsible for doing the Zulip account lookup and validation parts
    of the Zulip social auth pipeline (similar to the authenticate()
    methods in most other auth backends in this file).

    Returns a UserProfile object for successful authentication, and None
    otherwise (with the failure reason recorded in return_data).  May also
    return an HttpResponse directly, to show the email-selection page when
    the backend reports multiple verified emails.
    """
    subdomain = backend.strategy.session_get("subdomain")
    try:
        realm = get_realm(subdomain)
    except Realm.DoesNotExist:
        return_data["invalid_realm"] = True
        return None
    return_data["realm_id"] = realm.id
    return_data["realm_string_id"] = realm.string_id

    if not auth_enabled_helper([backend.auth_backend_name], realm):
        return_data["auth_backend_disabled"] = True
        return None

    if "auth_failed_reason" in kwargs.get("response", {}):
        return_data["social_auth_failed_reason"] = kwargs["response"]["auth_failed_reason"]
        return None
    elif hasattr(backend, "get_verified_emails"):
        # Some social backends, like GitHubAuthBackend, don't
        # guarantee that the `details` data is validated (i.e., it's
        # possible users can put any string they want in the "email"
        # field of the `details` object). For those backends, we have
        # custom per-backend code to properly fetch only verified
        # email addresses from the appropriate third-party API.
        verified_emails = backend.get_verified_emails(realm, *args, **kwargs)
        verified_emails_length = len(verified_emails)
        if verified_emails_length == 0:
            # TODO: Provide a nice error message screen to the user
            # for this case, rather than just logging a warning.
            backend.logger.warning(
                "Social auth (%s) failed because user has no verified emails",
                backend.auth_backend_name,
            )
            return_data["email_not_verified"] = True
            return None

        if verified_emails_length == 1:
            chosen_email = verified_emails[0]
        else:
            # Multiple verified emails: the user picks one via a form,
            # which submits it in the request data.
            chosen_email = backend.strategy.request_data().get("email")

        if not chosen_email:
            # No email chosen yet; build context for the selection page.
            avatars: Dict[str, str] = {}
            existing_account_emails = []
            for email in verified_emails:
                existing_account = common_get_active_user(email, realm, {})
                if existing_account is not None:
                    existing_account_emails.append(email)
                    avatars[email] = avatar_url(existing_account)

            if (
                len(existing_account_emails) != 1
                or backend.strategy.session_get("is_signup") == "1"
            ):
                unverified_emails = []
                if hasattr(backend, "get_unverified_emails"):
                    unverified_emails = backend.get_unverified_emails(realm, *args, **kwargs)
                return render(
                    backend.strategy.request,
                    "zerver/social_auth_select_email.html",
                    context={
                        "primary_email": verified_emails[0],
                        "verified_non_primary_emails": verified_emails[1:],
                        "unverified_emails": unverified_emails,
                        "backend": "github",
                        "avatar_urls": avatars,
                    },
                )
            else:
                # Exactly one existing account and not a signup: log into it.
                chosen_email = existing_account_emails[0]

        try:
            validate_email(chosen_email)
        except ValidationError:
            return_data["invalid_email"] = True
            return None

        if chosen_email not in verified_emails:
            # If a user edits the submit value for the choose email form, we might
            # end up with a wrong email associated with the account. The below code
            # takes care of that.
            backend.logger.warning(
                "Social auth (%s) failed because user has no verified"
                " emails associated with the account",
                backend.auth_backend_name,
            )
            return_data["email_not_associated"] = True
            return None

        validated_email = chosen_email
    else:
        # Backends without get_verified_emails are trusted to provide a
        # verified email in `details`.
        try:
            validate_email(kwargs["details"].get("email"))
        except ValidationError:
            return_data["invalid_email"] = True
            return None
        validated_email = kwargs["details"].get("email")

    if not validated_email:  # nocoverage
        # This code path isn't used with GitHubAuthBackend, but may be relevant for other
        # social auth backends.
        return_data["invalid_email"] = True
        return None

    return_data["valid_attestation"] = True
    return_data["validated_email"] = validated_email
    user_profile = common_get_active_user(validated_email, realm, return_data)

    full_name = kwargs["details"].get("fullname")
    first_name = kwargs["details"].get("first_name")
    last_name = kwargs["details"].get("last_name")

    if all(name is None for name in [full_name, first_name, last_name]) and backend.name not in [
        "apple",
        "saml",
        "oidc",
    ]:
        # (1) Apple authentication provides the user's name only the very first time a user tries to log in.
        # So if the user aborts login or otherwise is doing this the second time,
        # we won't have any name data.
        # (2) Some SAML or OIDC IdPs may not send any name value if the user doesn't
        # have them set in the IdP's directory.
        #
        # The name will just default to the empty string in the code below.

        # We need custom code here for any social auth backends
        # that don't provide name details feature.
        raise AssertionError("Social auth backend doesn't provide name")

    if full_name:
        return_data["full_name"] = full_name
    else:
        # Some authentications methods like Apple and SAML send
        # first name and last name as separate attributes. In that case
        # we construct the full name from them.
        # strip removes the unnecessary ' '
        return_data["full_name"] = f"{first_name or ''} {last_name or ''}".strip()

    return_data["extra_attrs"] = kwargs["details"].get("extra_attrs", {})

    return user_profile
|
2017-03-09 09:45:21 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-07-18 23:45:49 +02:00
|
|
|
@partial
def social_auth_associate_user(
    backend: BaseAuth, *args: Any, **kwargs: Any
) -> Union[HttpResponse, Dict[str, Any]]:
    """Wrap social_associate_user_helper, reformatting its result as the
    dictionary that python-social-auth passes (as kwargs) into later stages
    of settings.SOCIAL_AUTH_PIPELINE, such as social_auth_finish.

    If the helper short-circuits with an HttpResponse (e.g. the email
    selection page), that response is returned to the browser directly.
    """
    partial_token = backend.strategy.request_data().get("partial_token")
    return_data: Dict[str, Any] = {}
    helper_result = social_associate_user_helper(backend, return_data, *args, **kwargs)

    if isinstance(helper_result, HttpResponse):
        return helper_result

    return {
        "user_profile": helper_result,
        "return_data": return_data,
        "partial_token": partial_token,
        "partial_backend_name": backend,
    }
|
|
|
|
|
|
|
|
|
|
|
|
def social_auth_finish(
    backend: Any, details: Dict[str, Any], response: HttpResponse, *args: Any, **kwargs: Any
) -> Optional[HttpResponse]:
    """Given the determination in social_auth_associate_user for whether
    the user should be authenticated, this takes care of actually
    logging in the user (if appropriate) and redirecting the browser
    to the appropriate next page depending on the situation. Read the
    comments below as well as login_or_register_remote_user in
    `zerver/views/auth.py` for the details on how that dispatch works.
    """
    # Imported here to avoid an import cycle with zerver.views.auth.
    from zerver.views.auth import login_or_register_remote_user, redirect_and_log_into_subdomain

    user_profile = kwargs["user_profile"]
    return_data = kwargs["return_data"]

    # Failure flags populated by social_associate_user_helper.
    no_verified_email = return_data.get("email_not_verified")
    auth_backend_disabled = return_data.get("auth_backend_disabled")
    inactive_user = return_data.get("inactive_user")
    inactive_realm = return_data.get("inactive_realm")
    invalid_realm = return_data.get("invalid_realm")
    invalid_email = return_data.get("invalid_email")
    auth_failed_reason = return_data.get("social_auth_failed_reason")
    email_not_associated = return_data.get("email_not_associated")

    if invalid_realm:
        # User has passed an invalid subdomain param - this shouldn't happen in the normal flow,
        # unless the user manually edits the param. In any case, it's most appropriate to just take
        # them to find_account, as there isn't even an appropriate subdomain to take them to the login
        # form on.
        return HttpResponseRedirect(reverse("find_account"))

    realm = Realm.objects.get(id=return_data["realm_id"])
    if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated:
        # Redirect to login page. We can't send to registration
        # workflow with these errors. We will redirect to login page.
        return redirect_to_login(realm)
    if inactive_user:
        backend.logger.info(
            "Failed login attempt for deactivated account: %s@%s",
            return_data["inactive_user_id"],
            return_data["realm_string_id"],
        )
        return redirect_deactivated_user_to_login(realm, return_data["validated_email"])

    if invalid_email:
        # In case of invalid email, we will end up on registration page.
        # This seems better than redirecting to login page.
        backend.logger.warning(
            "%s got invalid email argument.",
            backend.auth_backend_name,
        )
        return redirect_to_signup(realm)

    if auth_failed_reason:
        backend.logger.info(auth_failed_reason)
        return redirect_to_login(realm)

    # Structurally, all the cases where we don't have an authenticated
    # email for the user should be handled above; this assertion helps
    # prevent any violations of that contract from resulting in a user
    # being incorrectly authenticated.
    assert return_data.get("valid_attestation") is True

    strategy = backend.strategy
    full_name_validated = backend.full_name_validated
    email_address = return_data["validated_email"]
    full_name = return_data["full_name"]
    redirect_to = strategy.session_get("next")
    multiuse_object_key = strategy.session_get("multiuse_object_key", "")

    mobile_flow_otp = strategy.session_get("mobile_flow_otp")
    desktop_flow_otp = strategy.session_get("desktop_flow_otp")
    validate_otp_params(mobile_flow_otp, desktop_flow_otp)

    if user_profile is None or user_profile.is_mirror_dummy:
        is_signup = strategy.session_get("is_signup") == "1" or backend.should_auto_signup()
    else:
        is_signup = False

    extra_attrs = return_data.get("extra_attrs", {})
    attrs_by_backend = settings.SOCIAL_AUTH_SYNC_CUSTOM_ATTRS_DICT.get(realm.subdomain, {})
    if user_profile is not None and extra_attrs and attrs_by_backend:
        # This is only supported for SAML right now, though the design
        # is meant to be easy to extend this to other backends if desired.
        # Unlike with LDAP, here we can only do syncing during the authentication
        # flow, as that's when the data is provided and we don't have a way to query
        # for it otherwise.
        assert backend.name == "saml"
        custom_profile_field_name_to_attr_name = attrs_by_backend.get(backend.name, {})
        custom_profile_field_name_to_value = {}
        for field_name, attr_name in custom_profile_field_name_to_attr_name.items():
            custom_profile_field_name_to_value[field_name] = extra_attrs.get(attr_name)
        try:
            sync_user_profile_custom_fields(user_profile, custom_profile_field_name_to_value)
        except SyncUserError as e:
            # Sync failures are logged but don't block the login itself.
            backend.logger.warning(
                "Exception while syncing custom profile fields for user %s: %s",
                user_profile.id,
                str(e),
            )

    # At this point, we have now confirmed that the user has
    # demonstrated control over the target email address.
    #
    # The next step is to call login_or_register_remote_user, but
    # there are two code paths here because of an optimization to save
    # a redirect on mobile and desktop.

    # Authentication failures happen on the external provider's side, so we don't get to log those,
    # but we should log the successes at least.
    log_auth_attempt(
        backend.logger,
        strategy.request,
        realm,
        username=email_address,
        succeeded=True,
        return_data={},
    )

    data_dict = ExternalAuthDataDict(
        subdomain=realm.subdomain,
        is_signup=is_signup,
        redirect_to=redirect_to,
        multiuse_object_key=multiuse_object_key,
        full_name_validated=full_name_validated,
        mobile_flow_otp=mobile_flow_otp,
        desktop_flow_otp=desktop_flow_otp,
        params_to_store_in_authenticated_session=backend.get_params_to_store_in_authenticated_session(),
    )
    if user_profile is None:
        data_dict.update(dict(full_name=full_name, email=email_address))

    result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)

    if mobile_flow_otp or desktop_flow_otp:
        if user_profile is not None and not user_profile.is_mirror_dummy:
            # For mobile and desktop app authentication, login_or_register_remote_user
            # will redirect to a special zulip:// URL that is handled by
            # the app after a successful authentication; so we can
            # redirect directly from here, saving a round trip over what
            # we need to do to create session cookies on the right domain
            # in the web login flow (below).
            return login_or_register_remote_user(strategy.request, result)
        else:
            # The user needs to register, so we need to go the realm's
            # subdomain for that.
            pass

    # If this authentication code were executing on
    # subdomain.zulip.example.com, we would just call
    # login_or_register_remote_user as in the mobile code path.
    # However, because third-party SSO providers generally don't allow
    # wildcard addresses in their redirect URLs, for multi-realm
    # servers, we will have just completed authentication on e.g.
    # auth.zulip.example.com (depending on
    # settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on
    # the subdomain.zulip.example.com domain. So instead we serve a
    # redirect (encoding the authentication result data in a
    # cryptographically signed token) to a route on
    # subdomain.zulip.example.com that will verify the signature and
    # then call login_or_register_remote_user.
    return redirect_and_log_into_subdomain(result)
|
2018-05-31 00:12:39 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-23 22:36:55 +02:00
|
|
|
class SocialAuthMixin(ZulipAuthMixin, ExternalAuthMethod, BaseAuth):
    # Shared behavior for all of Zulip's python-social-auth backends
    # (GitHub, Google, Apple, SAML, ...): error handling around
    # auth_complete, and the metadata used to render login buttons.

    # Whether we expect that the full_name value obtained by the
    # social backend is definitely how the user should be referred to
    # in Zulip, which in turn determines whether we should always show
    # a registration form in the event with a default value of the
    # user's name when using this social backend so they can change
    # it. For social backends like SAML that are expected to be a
    # central database, this should be True; for backends like GitHub
    # where the user might not have a name set or have it set to
    # something other than the name they will prefer to use in Zulip,
    # it should be False.
    full_name_validated = False

    # Parameters (e.g. mobile_flow_otp, "next") that must survive the
    # round trip to the external identity provider.
    standard_relay_params = [*settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION, "next"]

    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        """This is a small wrapper around the core `auth_complete` method of
        python-social-auth, designed primarily to prevent 500s for
        exceptions in the social auth code from situations that are
        really user errors. Returning `None` from this function will
        redirect the browser to the login page.
        """
        try:
            # Call the auth_complete method of social_core.backends.oauth.BaseOAuth2
            return super().auth_complete(*args, **kwargs)
        except (AuthFailed, HTTPError) as e:
            # When a user's social authentication fails (e.g. because
            # they did something funny with reloading in the middle of
            # the flow or the IdP is unreliable and returns a bad http response),
            # don't throw a 500, just send them back to the
            # login page and record the event at the info log level.
            self.logger.info("%s: %s", type(e).__name__, e)
            return None
        except SocialAuthBaseException as e:
            # Other python-social-auth exceptions are likely
            # interesting enough that we should log a warning.
            self.logger.warning("%s", e)
            return None

    def should_auto_signup(self) -> bool:
        """Whether to skip the registration form and create the account
        directly after a successful authentication; subclasses can override."""
        return False

    def get_params_to_store_in_authenticated_session(self) -> Dict[str, str]:
        """
        Specifies a dict of keys:values to be saved in the user's session
        after successfully authenticating.
        """
        return {"social_auth_backend": self.name}

    @classmethod
    @override
    def dict_representation(cls, realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
        """Serialize this backend's login-button metadata (name, display
        name/icon, login and signup URLs) for API/page consumers."""
        return [
            dict(
                name=cls.name,
                display_name=cls.auth_backend_name,
                display_icon=cls.display_icon,
                login_url=reverse("login-social", args=(cls.name,)),
                signup_url=reverse("signup-social", args=(cls.name,)),
            )
        ]
|
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
|
|
|
|
@external_auth_method
class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
    """GitHub OAuth2 login, optionally restricted via server settings to
    members of a specific GitHub team or organization."""

    name = "github"
    auth_backend_name = "GitHub"
    sort_order = 100
    display_icon = staticfiles_storage.url("images/authentication_backends/github-icon.png")

    def get_all_associated_email_objects(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
        """Fetch every email object GitHub has on file for the user."""
        token = kwargs["response"]["access_token"]
        try:
            return self._user_data(token, "/emails")
        except (HTTPError, json.JSONDecodeError):  # nocoverage
            # No explicit test is needed for this path, since the
            # outcome is the same as any other case without any
            # verified emails.
            return []

    def get_unverified_emails(self, realm: Realm, *args: Any, **kwargs: Any) -> List[str]:
        """Return the usable addresses that GitHub has NOT verified."""
        unverified: List[str] = []
        for email_obj in self.get_usable_email_objects(realm, *args, **kwargs):
            if not email_obj.get("verified"):
                unverified.append(email_obj["email"])
        return unverified

    def get_verified_emails(self, realm: Realm, *args: Any, **kwargs: Any) -> List[str]:
        # We only let users log in using email addresses that are
        # verified by GitHub, because the whole point is for the user
        # to demonstrate that they control the target email address.
        #
        # social_associate_user_helper assumes that the first email in
        # the returned list is the primary one.
        primary_first: List[str] = []
        rest: List[str] = []
        for email_obj in self.get_usable_email_objects(realm, *args, **kwargs):
            if not email_obj.get("verified"):
                continue
            if email_obj.get("primary"):
                primary_first.insert(0, email_obj["email"])
            else:
                rest.append(email_obj["email"])
        return primary_first + rest

    def get_usable_email_objects(
        self, realm: Realm, *args: Any, **kwargs: Any
    ) -> List[Dict[str, Any]]:
        # We disallow creation of new accounts with
        # @noreply.github.com/@users.noreply.github.com email
        # addresses, because structurally, we only want to allow email
        # addresses that can receive emails, and those cannot.
        #
        # However, if an account with this address already exists in
        # the realm (which could happen e.g. as a result of data
        # import from another chat tool), we will allow signing in to
        # it.
        usable: List[Dict[str, Any]] = []
        for email_obj in self.get_all_associated_email_objects(*args, **kwargs):
            address = email_obj["email"]
            if not address.endswith("@users.noreply.github.com"):
                usable.append(email_obj)
            elif common_get_active_user(address, realm) is not None:
                usable.append(email_obj)
        return usable

    def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]:
        """This patched user_data function lets us combine together the 3
        social auth backends into a single Zulip backend for GitHub OAuth2"""
        team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
        org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME

        if team_id is None and org_name is None:
            # I believe this can't raise AuthFailed, so we don't try to catch it here.
            return super().user_data(access_token, *args, **kwargs)

        # Restricted configuration: delegate to the membership-checking
        # upstream backend, translating AuthFailed into a reason dict.
        if team_id is not None:
            scoped_backend: Any = GithubTeamOAuth2(self.strategy, self.redirect_uri)
            failure_reason = "GitHub user is not member of required team"
        elif org_name is not None:
            scoped_backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri)
            failure_reason = "GitHub user is not member of required organization"
        else:
            raise AssertionError("Invalid configuration")

        try:
            return scoped_backend.user_data(access_token, *args, **kwargs)
        except AuthFailed:
            return dict(auth_failed_reason=failure_reason)
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2):
    # Microsoft Azure Active Directory OAuth2 login. The class body
    # only sets metadata; all authentication behavior comes from
    # SocialAuthMixin and the upstream AzureADOAuth2 backend.
    sort_order = 50
    name = "azuread-oauth2"
    auth_backend_name = "AzureAD"
    display_icon = staticfiles_storage.url("images/authentication_backends/azuread-icon.png")
|
2018-10-05 14:32:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-01-31 18:19:53 +01:00
|
|
|
@external_auth_method
class GitLabAuthBackend(SocialAuthMixin, GitLabOAuth2):
    # GitLab OAuth2 login. The class body only sets metadata; all
    # authentication behavior comes from SocialAuthMixin and the
    # upstream GitLabOAuth2 backend.
    sort_order = 75
    name = "gitlab"
    auth_backend_name = "GitLab"
    display_icon = staticfiles_storage.url("images/authentication_backends/gitlab-icon.png")

    # Note: GitLab as of early 2020 supports having multiple email
    # addresses connected with a GitLab account, and we could access
    # those emails, but its APIs don't indicate which of those email
    # addresses were verified, so we cannot use them for
    # authentication like we do for the GitHub integration. Instead,
    # we just use the primary email address, which is always verified.
    # (No code is required to do so, as that's the default behavior).
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class GoogleAuthBackend(SocialAuthMixin, GoogleOAuth2):
    """Google OAuth2 login."""

    sort_order = 150
    auth_backend_name = "Google"
    name = "google"
    # Fixed: the filename was "googl_e-icon.png" (typo). Every other
    # backend in this file follows the "<name>-icon.png" pattern
    # (github-icon.png, azuread-icon.png, gitlab-icon.png,
    # apple-icon.png), so the broken path produced a missing icon.
    display_icon = staticfiles_storage.url("images/authentication_backends/google-icon.png")

    def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]:
        """Return the user's email address, but only if Google reports it
        as verified (the OAuth2 response's email_verified flag)."""
        verified_emails: List[str] = []
        details = kwargs["response"]
        email_verified = details.get("email_verified")
        if email_verified:
            verified_emails.append(details["email"])
        return verified_emails
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-09 12:04:21 +02:00
|
|
|
@external_auth_method
class AppleAuthBackend(SocialAuthMixin, AppleIdAuth):
    """
    Authentication backend for "Sign in with Apple". This supports two flows:
    1. The web flow, usable in a browser, like our other social auth methods.
       It is a slightly modified Oauth2 authorization flow, where the response
       returning the access_token also contains a JWT id_token containing the user's
       identity, signed with Apple's private keys.
       https://developer.apple.com/documentation/sign_in_with_apple/tokenresponse
    2. The native flow, intended for users on an Apple device. In the native flow,
       the device handles authentication of the user with Apple's servers and ends up
       with the JWT id_token (like in the web flow). The client-side details aren't
       relevant to us; the app should simply send the id_token as a param to the
       /complete/apple/ endpoint, together with native_flow=true and any other
       appropriate params, such as mobile_flow_otp.
    """

    sort_order = 10
    name = "apple"
    auth_backend_name = "Apple"
    display_icon = staticfiles_storage.url("images/authentication_backends/apple-icon.png")

    # Apple only sends `name` in its response the first time a user
    # tries to sign up, so we won't have it in consecutive attempts.
    # But if Apple does send us the user's name, it will be validated,
    # so it's appropriate to set full_name_validated here.
    full_name_validated = True
    # How long the Redis-stored OAuth2 state survives before the user
    # must restart the flow (10 minutes).
    REDIS_EXPIRATION_SECONDS = 60 * 10

    SCOPE_SEPARATOR = "%20"  # https://github.com/python-social-auth/social-core/issues/470

    @classmethod
    def check_config(cls) -> bool:
        # Returns False unless all four required Apple settings are
        # configured to non-empty values.
        obligatory_apple_settings_list = [
            settings.SOCIAL_AUTH_APPLE_TEAM,
            settings.SOCIAL_AUTH_APPLE_SERVICES_ID,
            settings.SOCIAL_AUTH_APPLE_KEY,
            settings.SOCIAL_AUTH_APPLE_SECRET,
        ]
        if any(not setting for setting in obligatory_apple_settings_list):
            return False

        return True

    def is_native_flow(self) -> bool:
        # True when the client passed native_flow=true (mobile/desktop
        # app flow); absent means the browser-based web flow.
        return self.strategy.request_data().get("native_flow", False)

    # This method replaces a method from python-social-auth; it is adapted to store
    # the state_token data in Redis.
    def get_or_create_state(self) -> str:
        """Creates the Oauth2 state parameter in first step of the flow,
        before redirecting the user to the IdP (aka Apple).

        Apple will send the user back to us with a POST
        request. Normally, we rely on being able to store certain
        parameters in the user's session and use them after the
        redirect.  But because we've configured our session cookies to
        use the Django default of in SameSite Lax mode, the browser
        won't send the session cookies to our server in delivering the
        POST request coming from Apple.

        To work around this, we replace python-social-auth's default
        session-based storage with storing the parameters in Redis
        under a random token derived from the state. That will allow
        us to validate the state and retrieve the params after the
        redirect - by querying Redis for the key derived from the
        state sent in the POST redirect.
        """
        request_data = self.strategy.request_data().dict()
        # Only the whitelisted relay params are persisted across the trip.
        data_to_store = {
            key: request_data[key] for key in self.standard_relay_params if key in request_data
        }

        # Generate a random string of 32 alphanumeric characters.
        state = self.state_token()
        put_dict_in_redis(
            redis_client,
            "apple_auth_{token}",
            data_to_store,
            self.REDIS_EXPIRATION_SECONDS,
            token=state,
        )
        return state

    def validate_state(self) -> Optional[str]:
        """
        This method replaces a method from python-social-auth; it is
        adapted to retrieve the data stored in Redis, save it in
        the session so that it can be accessed by the social pipeline.

        Raises AuthMissingParameter if the state param is absent and
        AuthStateForbidden if it doesn't match a stored token.
        """
        request_state = self.get_request_state()

        if not request_state:
            self.logger.info("Sign in with Apple failed: missing state parameter.")
            raise AuthMissingParameter(self, "state")

        # Key format must match what get_or_create_state stored.
        formatted_request_state = "apple_auth_" + request_state
        redis_data = get_dict_from_redis(
            redis_client, "apple_auth_{token}", formatted_request_state
        )
        if redis_data is None:
            self.logger.info("Sign in with Apple failed: bad state token.")
            raise AuthStateForbidden(self)

        # Replay the stored relay params into the session for the pipeline.
        for param, value in redis_data.items():
            if param in self.standard_relay_params:
                self.strategy.session_set(param, value)
        return request_state

    def get_user_details(self, response: Dict[str, Any]) -> Dict[str, Any]:
        """
        Overridden to correctly grab the user's name from the request params,
        as current upstream code expects it in the id_token and Apple changed
        the API.
        Taken from https://github.com/python-social-auth/social-core/pull/483
        TODO: Remove this when the PR is merged.
        """
        name = response.get("name") or {}
        # The name actually arrives in the "user" request param (JSON),
        # not in the id_token response; this overwrites the value above.
        name = json.loads(self.data.get("user", "{}")).get("name", {})
        fullname, first_name, last_name = self.get_user_names(
            fullname="", first_name=name.get("firstName", ""), last_name=name.get("lastName", "")
        )
        email = response.get("email", "")
        # prevent updating User with empty strings
        user_details = {
            "fullname": fullname or None,
            "first_name": first_name or None,
            "last_name": last_name or None,
            "email": email,
        }
        user_details["username"] = email

        return user_details

    @override
    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        if not self.is_native_flow():
            # The default implementation in python-social-auth is the browser flow.
            return super().auth_complete(*args, **kwargs)

        # We handle the Apple's native flow on our own.  In this flow,
        # before contacting the server, the client obtains an id_token
        # from Apple directly, and then sends that to /complete/apple/
        # (the endpoint handled by this function), together with any
        # other desired parameters from self.standard_relay_params.
        #
        # What we'd like to do with the payload is just pass it into
        # the common code path for the web flow. In the web flow,
        # before sending a request to Apple, python-social-auth sets
        # various values about the intended authentication in the
        # session, before the redirect.
        #
        # Thus, we need to set those session variables here, before
        # processing the id_token we received using the common do_auth.
        request_data = self.strategy.request_data()
        if "id_token" not in request_data:
            raise JsonableError(_("Missing id_token parameter"))

        for param in self.standard_relay_params:
            self.strategy.session_set(param, request_data.get(param))

        # We should get the subdomain from the hostname of the request.
        self.strategy.session_set("subdomain", get_subdomain(self.strategy.request))

        try:
            # Things are now ready to be handled by the superclass code. It will
            # validate the id_token and push appropriate user data to the social pipeline.
            result = self.do_auth(request_data["id_token"], *args, **kwargs)
            return result
        except (AuthFailed, AuthCanceled) as e:
            # AuthFailed is a general "failure" exception from
            # python-social-auth that we should convert to None return
            # value here to avoid getting tracebacks.
            #
            # AuthCanceled is raised in the Apple backend
            # implementation in python-social-auth in certain cases,
            # though AuthFailed would have been more correct.
            #
            # We have an open PR to python-social-auth to clean this up.
            self.logger.info("/complete/apple/: %s", str(e))
            return None
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-05-31 14:48:12 +02:00
|
|
|
class ZulipSAMLIdentityProvider(SAMLIdentityProvider):
    def get_user_details(self, attributes: Dict[str, Any]) -> Dict[str, Any]:
        """
        Extends the upstream user details with any extra SAML Attributes
        configured for this IdP under the "extra_attrs" key.
        """
        details = super().get_user_details(attributes)

        details["extra_attrs"] = {
            attr_name: self.get_attr(
                attributes=attributes, conf_key=None, default_attribute=attr_name
            )
            for attr_name in self.conf.get("extra_attrs", [])
        }

        return details
|
|
|
|
|
|
|
|
|
2021-10-30 17:39:18 +02:00
|
|
|
class SAMLDocument:
    """
    Parent class, subclassed by SAMLRequest and SAMLResponse,
    for wrapping the fiddly logic of handling these SAML XML documents.
    """

    # Exceptions the underlying SAML/XML libraries can raise when fed a
    # malformed or tampered-with document.
    SAML_PARSING_EXCEPTIONS = (
        OneLogin_Saml2_Error,
        OneLogin_Saml2_ValidationError,
        binascii.Error,
        XMLSyntaxError,
    )

    def __init__(self, encoded_saml_message: str, backend: "SAMLAuthBackend") -> None:
        """
        encoded_saml_message is the base64-encoded XML string that's received
        in the SAMLRequest or SAMLResponse params. The underlying XML
        can be either deflated or not, both cases should be handled fine by the class.

        backend is an instance of the SAMLAuthBackend class, which is handling
        the HTTP request in which the SAMLRequest or SAMLResponse was delivered.
        """
        self.encoded_saml_message = encoded_saml_message
        self.backend = backend

        # Lazily-populated cache for the decoded XML; see the
        # decoded_saml_message property.
        self._decoded_saml_message: Optional[str] = None

    @property
    def logger(self) -> logging.Logger:
        """Log through the backend that received the document."""
        return self.backend.logger

    @property
    def decoded_saml_message(self) -> str:
        """
        Decode (and cache) the raw SAMLRequest/SAMLResponse XML.
        """
        if self._decoded_saml_message is None:
            # Mirrors how python3-saml itself decodes incoming
            # SAMLRequest and SAMLResponse parameters.
            inflated = OneLogin_Saml2_Utils.decode_base64_and_inflate(
                self.encoded_saml_message, ignore_zip=True
            )
            self._decoded_saml_message = onelogin_saml2_compat.to_string(inflated)

        return self._decoded_saml_message

    def document_type(self) -> str:
        """
        Returns whether the instance is a SAMLRequest or SAMLResponse.
        """
        return type(self).__name__

    def get_issuing_idp(self) -> Optional[str]:
        """
        Given a SAMLResponse or SAMLRequest, returns which of the configured IdPs
        is declared as the issuer.
        This value MUST NOT be trusted as the true issuer!
        The signatures are not validated, so it can be tampered with by the user.
        That's not a problem for this function,
        and true validation happens later in the underlying libraries, but it's important
        to note this detail. The purpose of this function is merely as a helper to figure out which
        of the configured IdPs' information to use for parsing and validating the request.
        """
        declared_issuers = self.get_issuers()

        return next(
            (
                idp_name
                for idp_name, idp_config in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items()
                if idp_config["entity_id"] in declared_issuers
            ),
            None,
        )

    @abstractmethod
    def get_issuers(self) -> List[str]:
        """
        Returns a list of the issuers of the SAML document.
        """
|
|
|
|
|
2021-10-30 17:39:18 +02:00
|
|
|
|
|
|
|
class SAMLRequest(SAMLDocument):
    @override
    def get_issuers(self) -> List[str]:
        """Extract the issuer declared in the request; [] on parse failure."""
        sp_config = self.backend.generate_saml_config()
        parsed_settings = OneLogin_Saml2_Settings(sp_config, sp_validation_only=True)

        try:
            # The only valid SAMLRequest we can receive is a LogoutRequest.
            request_xml = OneLogin_Saml2_Logout_Request(
                parsed_settings, self.encoded_saml_message
            ).get_xml()
            return [OneLogin_Saml2_Logout_Request.get_issuer(request_xml)]
        except self.SAML_PARSING_EXCEPTIONS as e:
            self.logger.error("Error parsing SAMLRequest: %s", str(e))
            return []
|
2021-10-30 17:39:18 +02:00
|
|
|
|
|
|
|
|
|
|
|
class SAMLResponse(SAMLDocument):
    @override
    def get_issuers(self) -> List[str]:
        """
        Returns the (unvalidated) issuers of the SAMLResponse as a list, or an
        empty list if the document cannot be parsed.
        """
        config = self.backend.generate_saml_config()
        saml_settings = OneLogin_Saml2_Settings(config, sp_validation_only=True)

        try:
            if not self.is_logout_response():
                resp = OneLogin_Saml2_Response(
                    settings=saml_settings, response=self.encoded_saml_message
                )
                return resp.get_issuers()
            else:
                logout_response = OneLogin_Saml2_Logout_Response(
                    settings=saml_settings, response=self.encoded_saml_message
                )
                # Unlike OneLogin_Saml2_Response.get_issuers(), which returns a
                # list, get_issuer() on a LogoutResponse returns a single issuer
                # (or None). Previously the bare value was returned here,
                # violating the declared List[str] return type and turning the
                # caller's `entity_id in issuers` membership check into a
                # substring check on a str. Wrap it to honor the contract.
                issuer = logout_response.get_issuer()
                return [issuer] if issuer is not None else []
        except self.SAML_PARSING_EXCEPTIONS as e:
            self.logger.error("Error parsing SAMLResponse: %s", str(e))
            return []

    def get_session_index(self) -> Optional[str]:
        """
        Returns the SessionIndex from the SAMLResponse, or None if the
        document cannot be parsed.
        """
        config = self.backend.generate_saml_config()
        saml_settings = OneLogin_Saml2_Settings(config, sp_validation_only=True)

        try:
            resp = OneLogin_Saml2_Response(
                settings=saml_settings, response=self.encoded_saml_message
            )
            return resp.get_session_index()
        except self.SAML_PARSING_EXCEPTIONS as e:
            self.logger.error("Error parsing SAMLResponse: %s", str(e))
            return None

    def is_logout_response(self) -> bool:
        """
        Checks whether the SAMLResponse is a LogoutResponse based on some
        basic XML parsing. Returns False if the document cannot be parsed,
        on the assumption that a malformed document should be treated as an
        (invalid) authentication response rather than a logout.
        """
        try:
            parsed_xml = OneLogin_Saml2_XML.to_etree(self.decoded_saml_message)
            return bool(OneLogin_Saml2_XML.query(parsed_xml, "/samlp:LogoutResponse"))
        except self.SAML_PARSING_EXCEPTIONS:
            return False
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
    """
    SAML authentication backend, built on top of python-social-auth's SAMLAuth
    with Zulip-specific behavior: relaying parameters through Redis keyed by a
    token in RelayState, per-subdomain IdP restrictions, entitlement checks,
    and support for SAML Single Logout (both IdP- and SP-initiated).
    """

    auth_backend_name = "SAML"
    # How long relayed parameters (mobile_flow_otp, next, etc.) stored in
    # Redis during the SP-initiated flow remain valid.
    REDIS_EXPIRATION_SECONDS = 60 * 15

    name = "saml"
    # Organizations that go through the trouble of setting up SAML are most likely
    # to have it as their main authentication method, so it seems appropriate to have
    # SAML buttons at the top.
    sort_order = 9999
    # There's no common default logo for SAML authentication.
    display_icon = None

    # The full_name provided by the IdP is very likely the standard
    # employee directory name for the user, and thus what they and
    # their organization want to use in Zulip. So don't unnecessarily
    # provide a registration flow prompt for them to set their name.
    full_name_validated = True

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # When SAML_REQUIRE_LIMIT_TO_SUBDOMAINS is set, drop (and loudly log)
        # any configured IdP missing a limit_to_subdomains restriction, so
        # that a misconfigured IdP can't be used on arbitrary subdomains.
        if settings.SAML_REQUIRE_LIMIT_TO_SUBDOMAINS:
            idps_without_limit_to_subdomains = [
                idp_name
                for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items()
                if "limit_to_subdomains" not in idp_dict
            ]
            if idps_without_limit_to_subdomains:
                self.logger.error(
                    "SAML_REQUIRE_LIMIT_TO_SUBDOMAINS is enabled and the following IdPs don't have"
                    " limit_to_subdomains specified and will be ignored: %r",
                    idps_without_limit_to_subdomains,
                )
                for idp_name in idps_without_limit_to_subdomains:
                    del settings.SOCIAL_AUTH_SAML_ENABLED_IDPS[idp_name]
        super().__init__(*args, **kwargs)

    def get_idp(self, idp_name: str) -> ZulipSAMLIdentityProvider:
        """Given the name of an IdP, get a SAMLIdentityProvider instance.
        Forked to use our subclass of SAMLIdentityProvider for more flexibility."""
        idp_config = self.setting("ENABLED_IDPS")[idp_name]
        return ZulipSAMLIdentityProvider(idp_name, **idp_config)

    def auth_url(self) -> str:
        """Get the URL to which we must redirect in order to
        authenticate the user. Overriding the original SAMLAuth.auth_url.
        Runs when someone accesses the /login/saml/ endpoint."""
        try:
            idp_name = self.strategy.request_data()["idp"]
            auth = self._create_saml_auth(idp=self.get_idp(idp_name))
        except KeyError as e:
            # If the above raise KeyError, it means invalid or no idp was specified,
            # we should log that and redirect to the login page.
            self.logger.info("/login/saml/ : Bad idp param: KeyError: %s.", str(e))
            return reverse("login_page", kwargs={"template_name": "zerver/login.html"})

        # This where we change things. We need to pass some params
        # (`mobile_flow_otp`, `next`, etc.) through RelayState, which
        # then the IdP will pass back to us so we can read those
        # parameters in the final part of the authentication flow, at
        # the /complete/saml/ endpoint.
        #
        # To protect against network eavesdropping of these
        # parameters, we send just a random token to the IdP in
        # RelayState, which is used as a key into our Redis data store
        # for fetching the actual parameters after the IdP has
        # returned a successful authentication.
        params_to_relay = self.standard_relay_params
        request_data = self.strategy.request_data().dict()
        data_to_relay = {key: request_data[key] for key in params_to_relay if key in request_data}
        relay_state = orjson.dumps({"state_token": self.put_data_in_redis(data_to_relay)}).decode()

        return auth.login(return_to=relay_state)

    @classmethod
    def put_data_in_redis(cls, data_to_relay: Dict[str, Any]) -> str:
        """Store the relayed parameters in Redis and return the random token
        under which they were stored (sent to the IdP via RelayState)."""
        return put_dict_in_redis(
            redis_client,
            "saml_token_{token}",
            data_to_store=data_to_relay,
            expiration_seconds=cls.REDIS_EXPIRATION_SECONDS,
        )

    @classmethod
    def get_data_from_redis(cls, key: str) -> Optional[Dict[str, Any]]:
        """Fetch relayed parameters previously stored under `key`, or None if
        the key is not a saml_token_ key or nothing is stored under it."""
        data = None
        if key.startswith("saml_token_"):
            # Safety if statement, to not allow someone to poke around arbitrary Redis keys here.
            data = get_dict_from_redis(redis_client, "saml_token_{token}", key)

        return data

    def get_relayed_params(self) -> Dict[str, Any]:
        """Extract the parameters relayed through RelayState, looking them up
        in Redis for SP-initiated flows; returns {} if RelayState is missing
        or unparseable."""
        request_data = self.strategy.request_data()
        if "RelayState" not in request_data:
            return {}

        relay_state = request_data["RelayState"]
        try:
            data = orjson.loads(relay_state)
            if "state_token" in data:
                # SP-initiated sign in. We stored relevant information in the first
                # step of the flow
                return self.get_data_from_redis(data["state_token"]) or {}
            else:
                # IdP-initiated sign in. Right now we only support transporting subdomain through json in
                # RelayState, but this format is nice in that it allows easy extensibility here.
                return {"subdomain": data.get("subdomain")}
        except orjson.JSONDecodeError:
            return {}

    def choose_subdomain(self, relayed_params: Dict[str, Any]) -> Optional[str]:
        """Determine the subdomain this authentication attempt targets:
        the relayed subdomain if present, otherwise the request's subdomain
        (if a realm exists there); None if neither works."""
        subdomain = relayed_params.get("subdomain")
        if subdomain is not None:
            return subdomain

        # If not specified otherwise, the intended subdomain for this
        # authentication attempt is the subdomain of the request.
        request_subdomain = get_subdomain(self.strategy.request)
        try:
            # We only want to do a basic sanity-check here for whether
            # this subdomain has a realm one could try to authenticate
            # to. True validation of whether the realm is active, the
            # IdP is appropriate for the subdomain, etc. happens
            # elsewhere in the flow and we shouldn't duplicate such
            # logic here.
            get_realm(request_subdomain)
        except Realm.DoesNotExist:
            return None
        else:
            return request_subdomain

    def _check_entitlements(
        self, idp: SAMLIdentityProvider, attributes: Dict[str, List[str]]
    ) -> None:
        """
        Below is the docstring from the social_core SAML backend.

        Additional verification of a SAML response before
        authenticating the user.

        Subclasses can override this method if they need custom
        validation code, such as requiring the presence of an
        eduPersonEntitlement.

        raise social_core.exceptions.AuthForbidden if the user should not
        be authenticated, or do nothing to allow the login pipeline to
        continue.
        """
        org_membership_attribute = idp.conf.get("attr_org_membership", None)
        if org_membership_attribute is None:
            # No entitlement attribute configured for this IdP; nothing to check.
            return

        subdomain = self.strategy.session_get("subdomain")
        entitlements: Union[str, List[str]] = attributes.get(org_membership_attribute, [])
        if isinstance(entitlements, str):  # nocoverage
            # This shouldn't happen as we'd always expect a list from this attribute even
            # if it only has one element, but it's safer to have this defensive code.
            entitlements = [
                entitlements,
            ]
        assert isinstance(entitlements, list)

        if is_subdomain_in_allowed_subdomains_list(subdomain, entitlements):
            return

        error_msg = (
            f"SAML user from IdP {idp.name} rejected due to missing entitlement for subdomain"
            f" '{subdomain}'. User entitlements: {entitlements}."
        )
        raise AuthFailed(self, error_msg)

    def process_logout(self, subdomain: str, idp_name: str) -> Optional[HttpResponse]:
        """
        We override process_logout, because we need to customize
        the way of revoking sessions and introduce NameID validation.

        The python-social-auth and python3-saml implementations expect a simple
        callback function without arguments, to delete the session. We're not
        happy with that for two reasons:
        1. These implementations don't look at the NameID in the LogoutRequest, which
           is not quite correct, as a LogoutRequest to log out user X can be delivered
           through any means, and doesn't need a session to be valid.
           E.g. a backchannel logout request sent by the IdP wouldn't have a session cookie.
           Also, hypothetically, a LogoutRequest to log out user Y shouldn't log out user X, even if the
           request is made with a session cookie belonging to user X.
        2. We want to revoke all sessions for the user, not just the current session
           of the request, so after validating the LogoutRequest, we need to identify
           the user by the NameID, do some validation and then revoke all sessions.

        TODO: This does not return a LogoutResponse in case of failure, like the spec requires.
        https://github.com/zulip/zulip/issues/20076 is the related issue with more detail
        on how to implement the desired behavior.
        """
        idp = self.get_idp(idp_name)
        auth = self._create_saml_auth(idp)

        # We only want to accept signed LogoutResponses - or potentially anyone
        # would be able to create a LogoutResponse to get an arbitrary user logged out.
        patch_saml_auth_require_messages_signed(auth)

        # This validates the LogoutRequest and prepares the response
        # (the URL to which to redirect the client to convey the response to the IdP)
        # but is a no-op otherwise because keep_local_session=True keeps it from
        # doing anything else. We want to take care of revoking session on our own.
        url = auth.process_slo(keep_local_session=True)
        errors = auth.get_errors()
        if errors:
            self.logger.info("/complete/saml/: LogoutRequest failed: %s", errors)
            return None

        logout_request_xml = auth.get_last_request_xml()
        name_id = OneLogin_Saml2_Logout_Request.get_nameid(logout_request_xml)
        # Zulip NameIDs are email addresses; reject anything else before
        # attempting a user lookup with it.
        try:
            validate_email(name_id)
        except ValidationError:
            self.logger.info(
                "/complete/saml/: LogoutRequest failed: NameID is not a valid email address: %s",
                name_id,
            )
            return None

        return_data: Dict[str, Any] = {}

        realm = get_realm(subdomain)
        user_profile = common_get_active_user(name_id, realm, return_data)
        if user_profile is None:
            self.logger.info(
                "/complete/saml/: LogoutRequest failed: No user with email specified in NameID found in realm %s. return_data=%s",
                realm.id,
                return_data,
            )
            return None

        self.logger.info(
            "/complete/saml/: LogoutRequest triggered deletion of all session for user %s",
            user_profile.id,
        )
        # Revoke all of the user's sessions and rotate their API key, fully
        # logging them out everywhere.
        delete_user_sessions(user_profile)
        do_regenerate_api_key(user_profile, user_profile)

        # Redirect the client to the IdP to convey the LogoutResponse.
        return HttpResponseRedirect(url)

    @override
    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        """
        Additional ugly wrapping on top of auth_complete in SocialAuthMixin.
        We handle two things for processing SAMLResponses here:
        1. Working around bad RelayState or SAMLResponse parameters in the request.
           Both parameters should be present if the user came to /complete/saml/ through
           the IdP as intended. The errors can happen if someone simply types the endpoint into
           their browsers, or generally tries messing with it in some ways.

        2. The first part of our SAML authentication flow will encode important parameters
           into the RelayState. We need to read them and set those values in the session,
           and then change the RelayState param to the idp_name, because that's what
           SAMLAuth.auth_complete() expects.

        Additionally, this handles incoming LogoutRequests for IdP-initiated logout.
        """

        encoded_saml_request = self.strategy.request_data().get("SAMLRequest")
        encoded_saml_response = self.strategy.request_data().get("SAMLResponse")
        if encoded_saml_response is None and encoded_saml_request is None:
            self.logger.info("/complete/saml/: No SAMLResponse or SAMLRequest in request.")
            return None
        elif encoded_saml_request is not None:
            saml_document: SAMLDocument = SAMLRequest(encoded_saml_request, self)
        elif encoded_saml_response is not None:
            saml_document = SAMLResponse(encoded_saml_response, self)

        relayed_params = self.get_relayed_params()

        subdomain = self.choose_subdomain(relayed_params)
        if subdomain is None:
            error_msg = (
                "/complete/saml/: Can't figure out subdomain for this %s. relayed_params: %s"
            )
            self.logger.info(error_msg, saml_document.document_type(), relayed_params)
            return None

        idp_name = saml_document.get_issuing_idp()
        if idp_name is None:
            self.logger.info(
                "/complete/saml/: No valid IdP as issuer of the %s.", saml_document.document_type()
            )
            return None

        idp_valid = self.validate_idp_for_subdomain(idp_name, subdomain)
        if not idp_valid:
            error_msg = (
                "/complete/saml/: Authentication request with IdP %s but this provider is not"
                " enabled for this subdomain %s."
            )
            self.logger.info(error_msg, idp_name, subdomain)
            return None

        # We have to branch here to do different things depending on the kind
        # of SAMLRequest/SAMLResponse we received. We do just basic heuristics here
        # to choose the right branch, since it's not our intent to do proper validation now.
        # We end up calling the appropriate process_*() function, which takes care of validation
        # in the python3-saml library, ensuring it received the correct kind of XML document
        # and finishes processing it.
        # (1) We received a SAMLRequest - the only SAMLRequest we accept is a LogoutRequest,
        #     so we call process_logout().
        # (2) We received a SAMLResponse and it looks like a LogoutResponse - we call
        #     process_logout_response()
        # (3) We received a SAMLResponse that's not a LogoutResponse. We proceed to treat it
        #     as an authentication response. We don't do anything security-sensitive here, just some setup
        #     before calling the super().auth_complete() method, which is where the actual validation
        #     and authentication will happen.
        #
        # If for any reason, an XML document that doesn't match the expected type is passed
        # to these *_process() functions, it will be rejected.
        if isinstance(saml_document, SAMLRequest):
            return self.process_logout(subdomain, idp_name)
        elif isinstance(saml_document, SAMLResponse) and saml_document.is_logout_response():
            return SAMLSPInitiatedLogout.process_logout_response(saml_document, idp_name)

        # IMPORTANT: The saml_document has not yet been validated at this point. We are
        # assuming it is to be treated as an authentication SAMLResponse, but it will only
        # be validated in the super().auth_complete() call below - and code until then
        # must not assume trust in the data.
        assert isinstance(saml_document, SAMLResponse)

        result = None
        try:
            params = relayed_params.copy()
            params["subdomain"] = subdomain
            for param, value in params.items():
                if param in self.standard_relay_params:
                    self.strategy.session_set(param, value)

            # We want the IdP name to be accessible from the social pipeline.
            self.strategy.session_set("saml_idp_name", idp_name)
            session_index = saml_document.get_session_index()
            if session_index is None:
                # In general IdPs will always provide a SessionIndex, but we can't know
                # if some providers might not send it, so we allow it but log the event.
                self.logger.info(
                    "/complete/saml/: IdP did not provide SessionIndex in the SAMLResponse."
                )
            self.strategy.session_set("saml_session_index", session_index)

            # super().auth_complete expects to have RelayState set to the idp_name,
            # so we need to replace this param.
            post_params = self.strategy.request.POST.copy()
            post_params["RelayState"] = idp_name
            self.strategy.request.POST = post_params

            # Call the auth_complete method of SocialAuthMixIn
            result = super().auth_complete(*args, **kwargs)
        except SAMLResponse.SAML_PARSING_EXCEPTIONS:
            # These can be raised if SAMLResponse is missing or badly formatted.
            self.logger.info("/complete/saml/: error while parsing SAMLResponse:", exc_info=True)
            # Fall through to returning None.
        finally:
            # We need a finally: block to ensure we don't keep around information in the session
            # if the authentication failed.
            if result is None:
                for param in [*self.standard_relay_params, "saml_idp_name", "saml_session_index"]:
                    # If an attacker managed to eavesdrop on the RelayState token,
                    # they may pass it here to the endpoint with an invalid SAMLResponse.
                    # We remove these potentially sensitive parameters that we have set in the session
                    # earlier, to avoid leaking their values.
                    self.strategy.session_set(param, None)

        return result

    @classmethod
    def validate_idp_for_subdomain(cls, idp_name: str, subdomain: str) -> bool:
        """Returns whether the named IdP is allowed to be used on the given
        subdomain, per its optional limit_to_subdomains configuration."""
        idp_dict = settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.get(idp_name)
        if idp_dict is None:
            raise AssertionError(f"IdP: {idp_name} not found")
        if "limit_to_subdomains" in idp_dict and subdomain not in idp_dict["limit_to_subdomains"]:
            return False

        return True

    @classmethod
    def check_config(cls) -> bool:
        """Returns whether all the settings obligatory for the SAML backend to
        function are non-empty."""
        obligatory_saml_settings_list = [
            settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
            settings.SOCIAL_AUTH_SAML_ORG_INFO,
            settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
            settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
            settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
        ]
        if any(not setting for setting in obligatory_saml_settings_list):
            return False

        return True

    @classmethod
    @override
    def dict_representation(cls, realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
        """Returns one auth-method dict per configured IdP usable on the given
        realm (or, with realm=None, per IdP usable on all realms)."""
        result: List[ExternalAuthMethodDictT] = []
        for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
            if realm and not cls.validate_idp_for_subdomain(idp_name, realm.subdomain):
                continue
            if realm is None and "limit_to_subdomains" in idp_dict:
                # If queried without a realm, only return IdPs that can be used on all realms.
                continue

            saml_dict: ExternalAuthMethodDictT = dict(
                name=f"saml:{idp_name}",
                display_name=idp_dict.get("display_name", cls.auth_backend_name),
                display_icon=idp_dict.get("display_icon", cls.display_icon),
                login_url=reverse("login-social", args=("saml", idp_name)),
                signup_url=reverse("signup-social", args=("saml", idp_name)),
            )
            result.append(saml_dict)

        return result

    @override
    def should_auto_signup(self) -> bool:
        """
        This function is meant to be called in the social pipeline or later,
        as it requires (validated) information about the IdP name to have
        already been stored in the session.
        """
        idp_name = self.strategy.session_get("saml_idp_name")
        assert isinstance(idp_name, str)
        auto_signup = settings.SOCIAL_AUTH_SAML_ENABLED_IDPS[idp_name].get("auto_signup", False)
        assert isinstance(auto_signup, bool)
        return auto_signup

    @override
    def get_params_to_store_in_authenticated_session(self) -> Dict[str, str]:
        """Returns the SAML-specific values to persist in the authenticated
        session, used later for matching SP-initiated logout to the session."""
        idp_name = self.strategy.session_get("saml_idp_name")
        saml_session_index = self.strategy.session_get("saml_session_index")

        # NOTE(review): saml_session_index can be None when the IdP did not
        # send a SessionIndex, despite the declared Dict[str, str] return
        # type - verify downstream consumers tolerate a None value here.
        return {
            "social_auth_backend": f"saml:{idp_name}",
            "saml_session_index": saml_session_index,
        }
2022-03-22 18:52:24 +01:00
|
|
|
def patch_saml_auth_require_messages_signed(auth: OneLogin_Saml2_Auth) -> None:
    """
    Force wantMessagesSigned=True on the given saml auth object.

    wantMessagesSigned controls whether requests processed by this saml
    auth object need to be signed. The python3-saml default of False is
    often not acceptable, because we don't want anyone to be able to
    submit such a request. Callers should use this to enforce the
    requirement of signatures.
    """
    security_settings = auth.get_settings().get_security_data()
    security_settings["wantMessagesSigned"] = True

    # Defensive check that the mutation above actually took effect on the
    # live settings of `auth` -- guards against python3-saml API changes
    # that might hand us only a copy of the dict.
    assert auth.get_settings().get_security_data()["wantMessagesSigned"] is True
|
|
|
|
|
|
|
|
|
2021-05-21 16:45:43 +02:00
|
|
|
@external_auth_method
class GenericOpenIdConnectBackend(SocialAuthMixin, OpenIdConnectAuth):
    """
    Generic OpenID Connect authentication backend, configured via the
    SOCIAL_AUTH_OIDC_ENABLED_IDPS setting. Exactly one IdP is supported
    at a time (see check_config).
    """

    name = "oidc"
    auth_backend_name = "OpenID Connect"
    sort_order = 100

    # Hack: We don't yet support multiple IdPs, but we want this
    # module to import if nothing has been configured yet.
    settings_dict: OIDCIdPConfigDict
    [settings_dict] = settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS.values() or [OIDCIdPConfigDict()]

    display_icon: Optional[str] = settings_dict.get("display_icon", None)
    display_name: str = settings_dict.get("display_name", "OIDC")

    full_name_validated = getattr(settings, "SOCIAL_AUTH_OIDC_FULL_NAME_VALIDATED", False)

    # Discovery endpoint for the superclass to read all the appropriate
    # configuration from.
    OIDC_ENDPOINT = settings_dict.get("oidc_url")

    def get_key_and_secret(self) -> Tuple[str, str]:
        """Return the (client_id, secret) OAuth credentials for the configured IdP."""
        client_id = self.settings_dict.get("client_id", "")
        assert isinstance(client_id, str)
        secret = self.settings_dict.get("secret", "")
        assert isinstance(secret, str)
        return client_id, secret

    @classmethod
    def check_config(cls) -> bool:
        """
        Verify that exactly one IdP is configured and that it provides
        all the mandatory settings; returns False if the backend is
        unusable as configured.
        """
        # Idiomatic len(dict) instead of len(dict.keys()) -- same result.
        if len(settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS) != 1:
            # Only one IdP supported for now.
            return False

        mandatory_config_keys = ["oidc_url", "client_id", "secret"]
        [idp_config_dict] = settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS.values()
        if not all(idp_config_dict.get(key) for key in mandatory_config_keys):
            return False

        return True

    @classmethod
    @override
    def dict_representation(cls, realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
        """Return the UI-facing description of this auth method (name, icon, URLs)."""
        return [
            dict(
                name=f"oidc:{cls.name}",
                display_name=cls.display_name,
                display_icon=cls.display_icon,
                login_url=reverse("login-social", args=(cls.name,)),
                signup_url=reverse("signup-social", args=(cls.name,)),
            )
        ]

    @override
    def should_auto_signup(self) -> bool:
        """Whether new users authenticated via this IdP are signed up automatically."""
        result = self.settings_dict.get("auto_signup", False)
        assert isinstance(result, bool)
        return result
|
2021-07-16 11:44:11 +02:00
|
|
|
|
2021-05-21 16:45:43 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def validate_otp_params(
    mobile_flow_otp: Optional[str] = None, desktop_flow_otp: Optional[str] = None
) -> None:
    """
    Validate the OTP parameters of a login request: each provided OTP
    must be well-formed, and at most one of the two flows may be used.

    Raises JsonableError on any violation.
    """
    for candidate in (mobile_flow_otp, desktop_flow_otp):
        if candidate is not None and not is_valid_otp(candidate):
            raise JsonableError(_("Invalid OTP"))

    if mobile_flow_otp and desktop_flow_otp:
        raise JsonableError(_("Can't use both mobile_flow_otp and desktop_flow_otp together."))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-11-01 20:08:20 +01:00
|
|
|
class SAMLSPInitiatedLogout:
    """
    Helpers implementing the SP-initiated SAML Single Logout flow:
    generating the LogoutRequest redirect to the IdP and processing the
    LogoutResponse that comes back.
    """

    @classmethod
    def get_logged_in_user_idp(cls, request: HttpRequest) -> Optional[str]:
        """
        Information about the authentication method which was used for
        this session is stored in social_auth_backend session attribute.
        If SAML was used, this extracts the IdP name and returns it.
        """
        # Some asserts to ensure this doesn't get called incorrectly:
        assert hasattr(request, "user")
        assert isinstance(request.user, UserProfile)

        authentication_method = request.session.get("social_auth_backend", "")
        if not authentication_method.startswith("saml:"):
            # Session was not obtained via SAML, so there is no IdP.
            return None

        return authentication_method.split("saml:")[1]

    @classmethod
    def get_logged_in_user_session_index(cls, request: HttpRequest) -> Optional[str]:
        """
        During SAML authentication, we obtain the SessionIndex value provided
        by the IdP and save it in the session. This function can be used
        to retrieve it.
        """
        # Some asserts to ensure this doesn't get called incorrectly:
        assert hasattr(request, "user")
        assert isinstance(request.user, UserProfile)

        session_index = request.session.get("saml_session_index")
        return session_index

    @classmethod
    def slo_request_to_idp(
        cls, request: HttpRequest, return_to: Optional[str] = None
    ) -> HttpResponse:
        """
        Generates the redirect to the IdP's SLO endpoint with
        the appropriately generated LogoutRequest. This should only be called
        on requests with a session that was indeed obtained via SAML.

        return_to is the URL the IdP should send the user back to after
        the logout completes.
        """

        user_profile = request.user
        assert isinstance(user_profile, UserProfile)

        realm = user_profile.realm
        assert saml_auth_enabled(realm)

        complete_url = reverse("social:complete", args=("saml",))
        saml_backend = load_backend(load_strategy(request), "saml", complete_url)

        idp_name = cls.get_logged_in_user_idp(request)
        if idp_name is None:
            raise AssertionError("User not logged in via SAML")
        session_index = cls.get_logged_in_user_session_index(request)

        # NOTE: _create_saml_auth is a private API of the social_core SAML
        # backend -- verify on python-social-auth upgrades.
        idp = saml_backend.get_idp(idp_name)
        auth = saml_backend._create_saml_auth(idp)
        slo_url = auth.logout(
            name_id=user_profile.delivery_email, return_to=return_to, session_index=session_index
        )

        return HttpResponseRedirect(slo_url)

    @classmethod
    def process_logout_response(cls, logout_response: SAMLResponse, idp_name: str) -> HttpResponse:
        """
        Validates the LogoutResponse and logs out the user if successful,
        finishing the SP-initiated logout flow.
        """
        from django.contrib.auth.views import logout_then_login as django_logout_then_login

        idp = logout_response.backend.get_idp(idp_name)
        auth = logout_response.backend._create_saml_auth(idp)
        # keep_local_session=True: python3-saml only validates here; the
        # local Django session is terminated below by logout_then_login.
        auth.process_slo(keep_local_session=True)
        errors = auth.get_errors()
        if errors:
            # These errors should essentially only happen in case of misconfiguration,
            # so we give a json error response with the direct error codes from python3-saml.
            # They're informative but generic enough to not leak any sensitive information.
            raise JsonableError(f"LogoutResponse error: {errors}")

        # We call Django's version of logout_then_login so that POST isn't required.
        return django_logout_then_login(logout_response.backend.strategy.request)
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_external_method_dicts(realm: Optional[Realm] = None) -> List[ExternalAuthMethodDictT]:
    """
    Return dict representations of the social/external auth backends that
    are enabled (for the given realm, or server-wide when realm is None),
    in the order in which they should be displayed.
    """
    # EXTERNAL_AUTH_METHODS is already sorted in display order, so no
    # additional sorting is needed here.
    dicts: List[ExternalAuthMethodDictT] = []
    enabled_backends = (
        backend
        for backend in EXTERNAL_AUTH_METHODS
        if auth_enabled_helper([backend.auth_backend_name], realm)
    )
    for backend in enabled_backends:
        dicts.extend(backend.dict_representation(realm))
    return dicts
|
2019-10-22 18:11:28 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Map human-readable backend names to backend classes, covering both the
# built-in backends and every registered external auth method.
AUTH_BACKEND_NAME_MAP: Dict[str, Any] = {
    "Dev": DevAuthBackend,
    "Email": EmailAuthBackend,
    "LDAP": ZulipLDAPAuthBackend,
}
AUTH_BACKEND_NAME_MAP.update(
    {external_method.auth_backend_name: external_method for external_method in EXTERNAL_AUTH_METHODS}
)

# Put the external methods into display order (highest sort_order first).
EXTERNAL_AUTH_METHODS = sorted(EXTERNAL_AUTH_METHODS, key=lambda x: x.sort_order, reverse=True)

# Provide this alternative name for backwards compatibility with
# installations that had the old backend enabled.
GoogleMobileOauth2Backend = GoogleAuthBackend
|