2019-03-10 02:43:29 +01:00
|
|
|
# Documentation for Zulip's authentication backends is split across a few places:
|
|
|
|
#
|
|
|
|
# * https://zulip.readthedocs.io/en/latest/production/authentication-methods.html and
|
|
|
|
# zproject/prod_settings_template.py have user-level configuration documentation.
|
2019-11-07 18:29:05 +01:00
|
|
|
# * https://zulip.readthedocs.io/en/latest/development/authentication.html
|
|
|
|
# has developer-level documentation, especially on testing authentication backends
|
|
|
|
# in the Zulip development environment.
|
2019-03-10 02:43:29 +01:00
|
|
|
#
|
|
|
|
# Django upstream's documentation for authentication backends is also
|
|
|
|
# helpful background. The most important detail to understand for
|
|
|
|
# reading this file is that the Django authenticate() function will
|
|
|
|
# call the authenticate methods of all backends registered in
|
|
|
|
# settings.AUTHENTICATION_BACKENDS that have a function signature
|
|
|
|
# matching the args/kwargs passed in the authenticate() call.
|
2020-05-22 15:26:17 +02:00
|
|
|
import binascii
|
2019-06-07 23:36:19 +02:00
|
|
|
import copy
|
2016-08-01 13:06:35 +02:00
|
|
|
import logging
|
2020-06-11 00:54:34 +02:00
|
|
|
from abc import ABC, abstractmethod
|
|
|
|
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Type, TypeVar, Union, cast
|
|
|
|
|
2019-06-07 23:36:19 +02:00
|
|
|
import magic
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2019-08-01 15:09:27 +02:00
|
|
|
from decorator import decorator
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.conf import settings
|
2020-02-23 18:58:08 +01:00
|
|
|
from django.contrib.auth import authenticate, get_backends
|
2013-11-04 23:16:46 +01:00
|
|
|
from django.contrib.auth.backends import RemoteUserBackend
|
2018-05-31 00:12:39 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.core.validators import validate_email
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.dispatch import Signal, receiver
|
|
|
|
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect
|
2018-07-18 23:45:49 +02:00
|
|
|
from django.shortcuts import render
|
2019-04-12 06:24:58 +02:00
|
|
|
from django.urls import reverse
|
2020-01-23 14:22:28 +01:00
|
|
|
from django.utils.translation import ugettext as _
|
2020-06-11 00:54:34 +02:00
|
|
|
from django_auth_ldap.backend import LDAPBackend, LDAPReverseEmailSearch, _LDAPUser, ldap_error
|
2020-05-22 15:26:17 +02:00
|
|
|
from lxml.etree import XMLSyntaxError
|
2019-09-29 06:32:56 +02:00
|
|
|
from onelogin.saml2.errors import OneLogin_Saml2_Error
|
2020-05-22 18:44:29 +02:00
|
|
|
from onelogin.saml2.response import OneLogin_Saml2_Response
|
2020-07-25 14:31:45 +02:00
|
|
|
from onelogin.saml2.settings import OneLogin_Saml2_Settings
|
2020-06-11 00:54:34 +02:00
|
|
|
from requests import HTTPError
|
|
|
|
from social_core.backends.apple import AppleIdAuth
|
2018-10-05 14:32:02 +02:00
|
|
|
from social_core.backends.azuread import AzureADOAuth2
|
2018-05-31 00:12:39 +02:00
|
|
|
from social_core.backends.base import BaseAuth
|
2020-06-11 00:54:34 +02:00
|
|
|
from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, GithubTeamOAuth2
|
|
|
|
from social_core.backends.gitlab import GitLabOAuth2
|
2019-02-02 16:51:26 +01:00
|
|
|
from social_core.backends.google import GoogleOAuth2
|
2020-06-19 21:44:29 +02:00
|
|
|
from social_core.backends.saml import SAMLAuth, SAMLIdentityProvider
|
2020-06-11 00:54:34 +02:00
|
|
|
from social_core.exceptions import (
|
2020-06-09 18:17:32 +02:00
|
|
|
AuthCanceled,
|
2020-06-11 00:54:34 +02:00
|
|
|
AuthFailed,
|
|
|
|
AuthMissingParameter,
|
|
|
|
AuthStateForbidden,
|
|
|
|
SocialAuthBaseException,
|
|
|
|
)
|
2018-07-18 23:45:49 +02:00
|
|
|
from social_core.pipeline.partial import partial
|
2020-06-11 00:54:34 +02:00
|
|
|
from typing_extensions import TypedDict
|
|
|
|
from zxcvbn import zxcvbn
|
2017-03-24 10:48:52 +01:00
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
from zerver.decorator import client_is_exempt_from_rate_limiting
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.actions import (
|
|
|
|
do_create_user,
|
|
|
|
do_deactivate_user,
|
|
|
|
do_reactivate_user,
|
|
|
|
do_update_user_custom_profile_data_if_changed,
|
|
|
|
)
|
|
|
|
from zerver.lib.avatar import avatar_url, is_avatar_new
|
2019-06-28 00:10:58 +02:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_content_hash
|
2020-06-03 01:11:36 +02:00
|
|
|
from zerver.lib.create_user import get_role_for_new_user
|
2019-01-12 18:12:11 +01:00
|
|
|
from zerver.lib.dev_ldap_directory import init_fakeldap
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.email_validation import email_allowed_for_realm, validate_email_not_already_in_realm
|
2020-02-01 17:45:22 +01:00
|
|
|
from zerver.lib.mobile_auth_otp import is_valid_otp
|
2020-03-04 14:05:25 +01:00
|
|
|
from zerver.lib.rate_limiter import RateLimitedObject
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.redis_utils import get_dict_from_redis, get_redis_client, put_dict_in_redis
|
2017-10-24 17:59:39 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
2020-05-23 15:21:19 +02:00
|
|
|
from zerver.lib.subdomains import get_subdomain
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.users import check_full_name, validate_user_custom_profile_field
|
|
|
|
from zerver.models import (
|
|
|
|
CustomProfileField,
|
|
|
|
DisposableEmailError,
|
|
|
|
DomainNotAllowedForRealmError,
|
|
|
|
EmailContainsPlusError,
|
|
|
|
PreregistrationUser,
|
|
|
|
Realm,
|
|
|
|
UserProfile,
|
|
|
|
custom_profile_fields_for_realm,
|
|
|
|
email_to_username,
|
|
|
|
get_realm,
|
|
|
|
get_user_by_delivery_email,
|
|
|
|
get_user_profile_by_id,
|
|
|
|
remote_user_to_email,
|
|
|
|
supported_auth_backends,
|
|
|
|
)
|
2017-10-24 17:59:39 +02:00
|
|
|
|
2019-09-29 06:32:56 +02:00
|
|
|
redis_client = get_redis_client()
|
|
|
|
|
2019-03-10 02:43:29 +01:00
|
|
|
# This first batch of methods is used by other code in Zulip to check
|
|
|
|
# whether a given authentication backend is enabled for a given realm.
|
|
|
|
# In each case, we both needs to check at the server level (via
|
|
|
|
# `settings.AUTHENTICATION_BACKENDS`, queried via
|
|
|
|
# `django.contrib.auth.get_backends`) and at the realm level (via the
|
|
|
|
# `Realm.authentication_methods` BitField).
|
2018-05-10 18:53:55 +02:00
|
|
|
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
    """Ensure method_dict has an entry for every auth backend the software
    supports; backends missing from the dict are recorded as disabled
    (False), regardless of this server's configuration.

    Mutates method_dict in place and returns it for convenience.
    """
    for backend_name in AUTH_BACKEND_NAME_MAP:
        method_dict.setdefault(backend_name, False)
    return method_dict
|
|
|
|
|
2018-05-10 18:53:55 +02:00
|
|
|
def auth_enabled_helper(backends_to_check: List[str], realm: Optional[Realm]) -> bool:
    """Return True if any of the named backends is enabled both at the
    server level (settings.AUTHENTICATION_BACKENDS, via
    supported_auth_backends) and at the realm level.  With realm=None,
    only the server-level configuration is consulted.
    """
    if realm is None:
        # No realm: treat every authentication flag as realm-enabled.
        enabled_method_dict = {method: True for method in Realm.AUTHENTICATION_FLAGS}
    else:
        enabled_method_dict = realm.authentication_methods_dict()
    pad_method_dict(enabled_method_dict)

    server_backends = supported_auth_backends()
    for backend_name in backends_to_check:
        if not enabled_method_dict[backend_name]:
            continue
        backend_class = AUTH_BACKEND_NAME_MAP[backend_name]
        if any(isinstance(server_backend, backend_class)
               for server_backend in server_backends):
            return True
    return False
|
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def ldap_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the LDAP backend is enabled for this server and realm."""
    return auth_enabled_helper(["LDAP"], realm)
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def email_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the Email+Password backend is enabled for this server and realm."""
    return auth_enabled_helper(["Email"], realm)
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def password_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether any password-based backend (Email or LDAP) is enabled."""
    return email_auth_enabled(realm) or ldap_auth_enabled(realm)
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def dev_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the development-environment backend is enabled."""
    return auth_enabled_helper(["Dev"], realm)
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def google_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the Google OAuth backend is enabled."""
    return auth_enabled_helper(["Google"], realm)
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def github_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the GitHub OAuth backend is enabled."""
    return auth_enabled_helper(["GitHub"], realm)
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2020-01-31 18:19:53 +01:00
|
|
|
def gitlab_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the GitLab OAuth backend is enabled."""
    return auth_enabled_helper(["GitLab"], realm)
|
|
|
|
|
2020-06-09 12:04:21 +02:00
|
|
|
def apple_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the Sign in with Apple backend is enabled."""
    return auth_enabled_helper(["Apple"], realm)
|
|
|
|
|
2019-09-29 06:32:56 +02:00
|
|
|
def saml_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the SAML backend is enabled."""
    return auth_enabled_helper(["SAML"], realm)
|
|
|
|
|
2019-09-19 03:19:45 +02:00
|
|
|
def any_social_backend_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether any external/social authentication method (Google, GitHub,
    SAML, etc. -- everything in EXTERNAL_AUTH_METHODS) is enabled.

    Used by the login page to decide whether to show the 'OR' separator
    between password login and the external authentication buttons.
    (The previous docstring mentioned only Google; the check covers all
    external auth methods.)
    """
    social_backend_names = [social_auth_subclass.auth_backend_name
                            for social_auth_subclass in EXTERNAL_AUTH_METHODS]
    return auth_enabled_helper(social_backend_names, realm)
|
2017-04-20 21:02:56 +02:00
|
|
|
|
2019-10-26 01:51:48 +02:00
|
|
|
def redirect_to_config_error(error_type: str) -> HttpResponseRedirect:
    """Redirect the browser to the /config-error/ page for error_type."""
    return HttpResponseRedirect("/config-error/" + error_type)
|
2019-10-26 01:51:48 +02:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def require_email_format_usernames(realm: Optional[Realm]=None) -> bool:
    """Whether login usernames must be email addresses.

    An LDAP configuration that can map a non-email LDAP username to an
    email address (via LDAP_EMAIL_ATTR or LDAP_APPEND_DOMAIN) is the
    only case where non-email usernames are permitted.
    """
    ldap_maps_usernames_to_emails = ldap_auth_enabled(realm) and (
        settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN)
    return not ldap_maps_usernames_to_emails
|
|
|
|
|
2019-09-21 01:32:59 +02:00
|
|
|
def is_user_active(user_profile: UserProfile, return_data: Optional[Dict[str, Any]]=None) -> bool:
    """Check that both the user account and its realm are active.

    When return_data is provided, the reason for an inactive result is
    recorded in it (inactive_user / inactive_user_id / is_mirror_dummy /
    inactive_realm) for the caller to report.
    """
    if not user_profile.is_active:
        if return_data is not None:
            if user_profile.is_mirror_dummy:
                # Record whether it's a mirror dummy account
                return_data['is_mirror_dummy'] = True
            return_data.update(
                inactive_user=True,
                inactive_user_id=user_profile.id,
            )
        return False

    if user_profile.realm.deactivated:
        if return_data is not None:
            return_data['inactive_realm'] = True
        return False

    return True
|
|
|
|
|
2017-11-17 22:43:16 +01:00
|
|
|
def common_get_active_user(email: str, realm: Realm,
                           return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
    """This is the core common function used by essentially all
    authentication backends to check if there's an active user account
    with a given email address in the organization, handling both
    user-level and realm-level deactivation correctly.

    Returns the UserProfile on success; returns None on failure, with
    the failure reason recorded in return_data (if provided).
    """
    try:
        user_profile = get_user_by_delivery_email(email, realm)
    except UserProfile.DoesNotExist:
        # If the user doesn't have an account in the target realm, we
        # check whether they might have an account in another realm,
        # and if so, provide a helpful error message via
        # `invalid_subdomain`.
        if not UserProfile.objects.filter(delivery_email__iexact=email).exists():
            return None
        if return_data is not None:
            return_data['invalid_subdomain'] = True
        return None
    # Reject deactivated users and users in deactivated realms;
    # is_user_active records the specific reason in return_data.
    if not is_user_active(user_profile, return_data):
        return None

    return user_profile
|
|
|
|
|
2019-08-01 15:09:27 +02:00
|
|
|
AuthFuncT = TypeVar('AuthFuncT', bound=Callable[..., Optional[UserProfile]])
|
2019-12-30 21:17:11 +01:00
|
|
|
rate_limiting_rules = settings.RATE_LIMITING_RULES['authenticate_by_username']
|
2019-08-01 15:09:27 +02:00
|
|
|
|
|
|
|
class RateLimitedAuthenticationByUsername(RateLimitedObject):
    """Rate-limits authentication attempts, keyed on the username the
    client is attempting to authenticate as."""

    def __init__(self, username: str) -> None:
        self.username = username
        super().__init__()

    def key(self) -> str:
        # Redis key: e.g. "RateLimitedAuthenticationByUsername:<username>"
        return "{}:{}".format(type(self).__name__, self.username)

    def rules(self) -> List[Tuple[int, int]]:
        # (seconds, max_attempts) pairs from RATE_LIMITING_RULES.
        return rate_limiting_rules
|
|
|
|
|
|
|
|
def rate_limit_authentication_by_username(request: HttpRequest, username: str) -> None:
    """Charge one authentication attempt for username against this request's
    rate limits (raising if the limit is exceeded)."""
    limiter = RateLimitedAuthenticationByUsername(username)
    limiter.rate_limit_request(request)
|
2019-08-01 15:09:27 +02:00
|
|
|
|
|
|
|
def auth_rate_limiting_already_applied(request: HttpRequest) -> bool:
    """True if a per-username authentication rate limit was already charged
    against this request (by an earlier backend in the same
    authenticate() call)."""
    try:
        applied_limits = request._ratelimits_applied
    except AttributeError:
        return False

    return any(isinstance(limit.entity, RateLimitedAuthenticationByUsername)
               for limit in applied_limits)
|
2019-08-01 15:09:27 +02:00
|
|
|
|
|
|
|
# Django's authentication mechanism uses introspection on the various authenticate() functions
|
|
|
|
# defined by backends, so we need a decorator that doesn't break function signatures.
|
|
|
|
# @decorator does this for us.
|
|
|
|
# The usual @wraps from functools breaks signatures, so it can't be used here.
|
|
|
|
@decorator
def rate_limit_auth(auth_func: AuthFuncT, *args: Any, **kwargs: Any) -> Optional[UserProfile]:
    """Decorator applying per-username rate limiting to a backend's
    authenticate() method.

    Uses @decorator (not functools.wraps) because Django selects backends
    by introspecting authenticate() signatures, so the wrapper must
    preserve the wrapped function's exact signature.

    Expects the request as the second positional argument (after self)
    and the username as a keyword argument.
    """
    if not settings.RATE_LIMITING_AUTHENTICATE:
        return auth_func(*args, **kwargs)

    # args[0] is `self` (the backend instance); args[1] is the request.
    request = args[1]
    username = kwargs['username']
    if not hasattr(request, 'client') or not client_is_exempt_from_rate_limiting(request):
        # Django cycles through enabled authentication backends until one succeeds,
        # or all of them fail. If multiple backends are tried like this, we only want
        # to execute rate_limit_authentication_* once, on the first attempt:
        if auth_rate_limiting_already_applied(request):
            pass
        else:
            # Apply rate limiting. If this request is above the limit,
            # RateLimited will be raised, interrupting the authentication process.
            # From there, the code calling authenticate() can either catch the exception
            # and handle it on its own, or it will be processed by RateLimitMiddleware.
            rate_limit_authentication_by_username(request, username)

    result = auth_func(*args, **kwargs)
    if result is not None:
        # Authentication succeeded, clear the rate-limiting record.
        RateLimitedAuthenticationByUsername(username).clear_history()

    return result
|
|
|
|
|
2017-11-05 11:31:53 +01:00
|
|
|
class ZulipAuthMixin:
    """This common mixin is used to override Django's default behavior for
    looking up a logged-in user by ID to use a version that fetches
    from memcached before checking the database (avoiding a database
    query in most cases).
    """
    # Short backend identifier; subclasses override (e.g. "email", "ldap").
    name = "undefined"
    # Lazily-created logger; see the `logger` property below.
    _logger: Optional[logging.Logger] = None

    @property
    def logger(self) -> logging.Logger:
        # Create the backend-specific logger ("zulip.auth.<name>") on
        # first access and cache it on the instance.
        if self._logger is None:
            self._logger = logging.getLogger(f"zulip.auth.{self.name}")
        return self._logger

    def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
        """Override the Django method for getting a UserProfile object from
        the user_profile_id; returns None for an unknown ID."""
        try:
            return get_user_profile_by_id(user_profile_id)
        except UserProfile.DoesNotExist:
            return None
|
|
|
|
|
2013-11-21 04:57:23 +01:00
|
|
|
class ZulipDummyBackend(ZulipAuthMixin):
    """Backend for logging a user in without checking any credentials --
    used for new user registration and for flows where authentication
    was already verified earlier in the process.

    As a safety measure, it only ever succeeds when the caller
    explicitly opts in by passing use_dummy_backend=True.
    """

    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     username: str, realm: Realm,
                     use_dummy_backend: bool=False,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        if not use_dummy_backend:
            return None
        return common_get_active_user(username, realm, return_data)
|
|
|
|
|
auth: Use zxcvbn to ensure password strength on server side.
For a long time, we've been only doing the zxcvbn password strength
checks on the browser, which is helpful, but means users could through
hackery (or a bug in the frontend validation code) manage to set a
too-weak password. We fix this by running our password strength
validation on the backend as well, using python-zxcvbn.
In theory, a bug in python-zxcvbn could result in it producing a
different opinion than the frontend version; if so, it'd be a pretty
bad bug in the library, and hopefully we'd hear about it from users,
report upstream, and get it fixed that way. Alternatively, we can
switch to shelling out to node like we do for KaTeX.
Fixes #6880.
2019-11-18 08:11:03 +01:00
|
|
|
def check_password_strength(password: str) -> bool:
    """
    Returns True if the password is strong enough,
    False otherwise.
    """
    # zxcvbn raises an exception on the empty string, so reject '' (and
    # anything below the configured minimum length) before calling it.
    if password == '' or len(password) < settings.PASSWORD_MIN_LENGTH:
        return False

    # Require the zxcvbn estimated guess count to meet the configured floor.
    return int(zxcvbn(password)['guesses']) >= settings.PASSWORD_MIN_GUESSES
|
|
|
|
|
2013-11-01 20:22:12 +01:00
|
|
|
class EmailAuthBackend(ZulipAuthMixin):
    """
    Email+Password Authentication Backend (the default).

    Allows a user to sign in using an email/password pair.
    """

    name = 'email'

    @rate_limit_auth
    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     username: str, password: str,
                     realm: Realm,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        """ Authenticate a user based on email address as the user name. """
        # Refuse to authenticate if password-based auth (or specifically
        # email auth) is disabled, recording why in return_data.
        if not password_auth_enabled(realm):
            if return_data is not None:
                return_data['password_auth_disabled'] = True
            return None
        if not email_auth_enabled(realm):
            if return_data is not None:
                return_data['email_auth_disabled'] = True
            return None
        if password == "":
            # Never allow an empty password. This is defensive code;
            # a user having password "" should only be possible
            # through a bug somewhere else.
            return None

        user_profile = common_get_active_user(username, realm, return_data=return_data)
        if user_profile is None:
            return None
        if user_profile.check_password(password):
            return user_profile
        # Wrong password: signal failure to Django by returning None.
        return None
|
2013-08-06 22:51:47 +02:00
|
|
|
|
2019-01-09 17:58:39 +01:00
|
|
|
def is_valid_email(email: str) -> bool:
    """Whether the string passes Django's email-address validation."""
    try:
        validate_email(email)
        return True
    except ValidationError:
        return False
|
|
|
|
|
2019-11-05 02:24:18 +01:00
|
|
|
def check_ldap_config() -> None:
    """Sanity-check the LDAP configuration.

    NOTE(review): this relies on `assert`, which is stripped under
    `python -O`; consider an explicit exception -- TODO confirm.
    """
    if not settings.LDAP_APPEND_DOMAIN:
        # Email search needs to be configured in this case.
        assert settings.AUTH_LDAP_USERNAME_ATTR and settings.AUTH_LDAP_REVERSE_EMAIL_SEARCH
|
|
|
|
|
2019-10-05 01:02:46 +02:00
|
|
|
def find_ldap_users_by_email(email: str) -> Optional[List[_LDAPUser]]:
    """
    Returns list of _LDAPUsers matching the email search,
    or None if no matches are found.
    """
    searcher = LDAPReverseEmailSearch(LDAPBackend(), email)
    # should_populate=False: we only need the matches, not synced attributes.
    return searcher.search_for_users(should_populate=False)
|
|
|
|
|
2018-05-29 07:25:08 +02:00
|
|
|
def email_belongs_to_ldap(realm: Realm, email: str) -> bool:
    """Used to make determinations on whether a user's email address is
    managed by LDAP. For environments using both LDAP and
    Email+Password authentication, we do not allow EmailAuthBackend
    authentication for email addresses managed by LDAP (to avoid a
    security issue where one could create separate credentials for an
    LDAP user), and this function is used to enforce that rule.
    """
    if not ldap_auth_enabled(realm):
        return False

    check_ldap_config()
    if settings.LDAP_APPEND_DOMAIN:
        # Check if the email ends with LDAP_APPEND_DOMAIN
        return email.strip().lower().endswith("@" + settings.LDAP_APPEND_DOMAIN)

    # If we don't have an LDAP domain, we have to do a lookup for the email.
    # find_ldap_users_by_email returns a non-empty list or None; bool()
    # collapses the verbose if/else the original used.
    return bool(find_ldap_users_by_email(email))
|
|
|
|
|
2019-12-27 23:03:00 +01:00
|
|
|
ldap_logger = logging.getLogger("zulip.ldap")
|
2017-09-27 00:56:34 +02:00
|
|
|
class ZulipLDAPException(_LDAPUser.AuthenticationFailed):
    """Since this inherits from _LDAPUser.AuthenticationFailed, these will
    be caught and logged at debug level inside django-auth-ldap's authenticate()"""
|
2015-10-13 23:08:05 +02:00
|
|
|
|
2019-10-25 02:26:05 +02:00
|
|
|
class ZulipLDAPExceptionNoMatchingLDAPUser(ZulipLDAPException):
    """Raised when a username cannot be matched to any entry in the
    LDAP directory."""
    pass
|
|
|
|
|
|
|
|
class ZulipLDAPExceptionOutsideDomain(ZulipLDAPExceptionNoMatchingLDAPUser):
    """Raised when LDAP_APPEND_DOMAIN is configured and the email being
    authenticated lies outside that domain."""
    pass
|
|
|
|
|
2017-09-22 10:58:12 +02:00
|
|
|
class ZulipLDAPConfigurationError(Exception):
    """Raised for invalid/inconsistent LDAP server configuration."""
    pass
|
|
|
|
|
2018-12-13 23:58:26 +01:00
|
|
|
LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2
|
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
|
2019-03-10 02:43:29 +01:00
|
|
|
"""Common code between LDAP authentication (ZulipLDAPAuthBackend) and
|
|
|
|
using LDAP just to sync user data (ZulipLDAPUserPopulator).
|
|
|
|
|
|
|
|
To fully understand our LDAP backend, you may want to skim
|
|
|
|
django_auth_ldap/backend.py from the upstream django-auth-ldap
|
|
|
|
library. It's not a lot of code, and searching around in that
|
|
|
|
file makes the flow for LDAP authentication clear.
|
|
|
|
"""
|
2020-04-22 01:45:30 +02:00
|
|
|
|
2020-06-03 13:18:08 +02:00
|
|
|
name = "ldap"
|
|
|
|
|
2018-12-12 20:06:10 +01:00
|
|
|
def __init__(self) -> None:
    # Used to initialize a fake LDAP directory for both manual
    # and automated testing in a development environment where
    # there is no actual LDAP server.
    if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE:  # nocoverage
        init_fakeldap()

    # Fail fast on inconsistent LDAP settings, rather than at
    # authentication time.
    check_ldap_config()
|
|
|
|
|
2019-03-10 02:43:29 +01:00
|
|
|
# Disable django-auth-ldap's permissions functions -- we don't use
|
|
|
|
# the standard Django user/group permissions system because they
|
|
|
|
# are prone to performance issues.
|
2017-11-27 14:35:36 +01:00
|
|
|
def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any=None) -> bool:
    # Zulip doesn't use Django's permission system; always deny.
    return False
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool:
    # Zulip doesn't use Django's permission system; always deny.
    return False
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def get_all_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
    # Zulip doesn't use Django's permission system; no permissions.
    return set()
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def get_group_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
    # Zulip doesn't use Django's permission system; no permissions.
    return set()
|
|
|
|
|
2018-05-10 18:53:55 +02:00
|
|
|
def django_to_ldap_username(self, username: str) -> str:
    """
    Translates django username (user_profile.email or whatever the user typed in the login
    field when authenticating via the ldap backend) into ldap username.
    Guarantees that the username it returns actually has an entry in the ldap directory.
    Raises ZulipLDAPExceptionNoMatchingLDAPUser if that's not possible.
    """
    result = username
    if settings.LDAP_APPEND_DOMAIN:
        if is_valid_email(username):
            if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN):
                raise ZulipLDAPExceptionOutsideDomain(f"Email {username} does not match LDAP domain {settings.LDAP_APPEND_DOMAIN}.")
            # Strip the domain: the LDAP username is the email's local part.
            result = email_to_username(username)
    else:
        # We can use find_ldap_users_by_email
        if is_valid_email(username):
            email_search_result = find_ldap_users_by_email(username)
            if email_search_result is None:
                result = username
            elif len(email_search_result) == 1:
                # Exactly one LDAP user with this email: its LDAP
                # username is authoritative, so return it directly.
                return email_search_result[0]._username
            elif len(email_search_result) > 1:
                # This is possible, but strange, so worth logging a warning about.
                # We can't translate the email to a unique username,
                # so we don't do anything else here.
                logging.warning("Multiple users with email %s found in LDAP.", username)
                result = username

    if _LDAPUser(self, result).attrs is None:
        # Check that there actually is an ldap entry matching the result username
        # we want to return. Otherwise, raise an exception.
        error_message = "No ldap user matching django_to_ldap_username result: {}. Input username: {}"
        raise ZulipLDAPExceptionNoMatchingLDAPUser(
            error_message.format(result, username),
        )

    return result
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2019-10-06 00:32:25 +02:00
|
|
|
def user_email_from_ldapuser(self, username: str, ldap_user: _LDAPUser) -> str:
    """Compute the Zulip email address for an LDAP user: from
    LDAP_APPEND_DOMAIN, from the LDAP_EMAIL_ATTR attribute, or falling
    back to the LDAP username itself."""
    if hasattr(ldap_user, '_username'):
        # In tests, we sometimes pass a simplified _LDAPUser without _username attr,
        # and with the intended username in the username argument.
        username = ldap_user._username

    if settings.LDAP_APPEND_DOMAIN:
        return "@".join((username, settings.LDAP_APPEND_DOMAIN))

    if settings.LDAP_EMAIL_ATTR is not None:
        # Get email from ldap attributes.
        if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
            raise ZulipLDAPException(f"LDAP user doesn't have the needed {settings.LDAP_EMAIL_ATTR} attribute")
        else:
            return ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0]

    return username
|
|
|
|
|
|
|
|
def ldap_to_django_username(self, username: str) -> str:
|
|
|
|
"""
|
|
|
|
This is called inside django_auth_ldap with only one role:
|
|
|
|
to convert _LDAPUser._username to django username (so in Zulip, the email)
|
|
|
|
and pass that as "username" argument to get_or_build_user(username, ldapuser).
|
|
|
|
In many cases, the email is stored in the _LDAPUser's attributes, so it can't be
|
|
|
|
constructed just from the username. We choose to do nothing in this function,
|
|
|
|
and our overrides of get_or_build_user() obtain that username from the _LDAPUser
|
|
|
|
object on their own, through our user_email_from_ldapuser function.
|
|
|
|
"""
|
2013-11-21 01:30:20 +01:00
|
|
|
return username
|
|
|
|
|
2018-12-12 19:46:37 +01:00
|
|
|
def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None:
|
|
|
|
if 'avatar' in settings.AUTH_LDAP_USER_ATTR_MAP:
|
|
|
|
# We do local imports here to avoid import loops
|
|
|
|
from io import BytesIO
|
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.actions import do_change_avatar_fields
|
|
|
|
from zerver.lib.upload import upload_avatar_image
|
|
|
|
|
2018-12-12 19:46:37 +01:00
|
|
|
avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP['avatar']
|
2018-12-30 01:32:16 +01:00
|
|
|
if avatar_attr_name not in ldap_user.attrs: # nocoverage
|
|
|
|
# If this specific user doesn't have e.g. a
|
|
|
|
# thumbnailPhoto set in LDAP, just skip that user.
|
|
|
|
return
|
2019-06-07 23:36:19 +02:00
|
|
|
|
2019-06-28 00:10:58 +02:00
|
|
|
ldap_avatar = ldap_user.attrs[avatar_attr_name][0]
|
|
|
|
|
|
|
|
avatar_changed = is_avatar_new(ldap_avatar, user)
|
|
|
|
if not avatar_changed:
|
|
|
|
# Don't do work to replace the avatar with itself.
|
|
|
|
return
|
|
|
|
|
|
|
|
io = BytesIO(ldap_avatar)
|
2019-06-07 23:36:19 +02:00
|
|
|
# Structurally, to make the S3 backend happy, we need to
|
|
|
|
# provide a Content-Type; since that isn't specified in
|
|
|
|
# any metadata, we auto-detect it.
|
|
|
|
content_type = magic.from_buffer(copy.deepcopy(io).read()[0:1024], mime=True)
|
|
|
|
if content_type.startswith("image/"):
|
|
|
|
upload_avatar_image(io, user, user, content_type=content_type)
|
2020-06-29 12:47:44 +02:00
|
|
|
do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER, acting_user=None)
|
2019-06-28 00:10:58 +02:00
|
|
|
# Update avatar hash.
|
|
|
|
user.avatar_hash = user_avatar_content_hash(ldap_avatar)
|
|
|
|
user.save(update_fields=["avatar_hash"])
|
2019-06-07 23:36:19 +02:00
|
|
|
else:
|
2020-05-02 08:44:14 +02:00
|
|
|
logging.warning("Could not parse %s field for user %s",
|
|
|
|
avatar_attr_name, user.id)
|
2018-12-12 19:46:37 +01:00
|
|
|
|
2019-01-12 17:15:14 +01:00
|
|
|
def is_account_control_disabled_user(self, ldap_user: _LDAPUser) -> bool:
|
2019-03-10 02:43:29 +01:00
|
|
|
"""Implements the userAccountControl check for whether a user has been
|
|
|
|
disabled in an Active Directory server being integrated with
|
|
|
|
Zulip via LDAP."""
|
2018-12-13 23:58:26 +01:00
|
|
|
account_control_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP['userAccountControl']][0]
|
2018-12-30 01:33:11 +01:00
|
|
|
ldap_disabled = bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK)
|
2018-12-13 23:58:26 +01:00
|
|
|
return ldap_disabled
|
|
|
|
|
2019-01-16 09:06:11 +01:00
|
|
|
@classmethod
|
2020-07-17 20:22:10 +02:00
|
|
|
def get_mapped_name(cls, ldap_user: _LDAPUser) -> str:
|
|
|
|
"""Constructs the user's Zulip full_name from the LDAP data"""
|
2019-01-10 18:25:34 +01:00
|
|
|
if "full_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
|
|
|
|
full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"]
|
2020-07-17 20:22:10 +02:00
|
|
|
full_name = ldap_user.attrs[full_name_attr][0]
|
2019-01-10 18:25:34 +01:00
|
|
|
elif all(key in settings.AUTH_LDAP_USER_ATTR_MAP for key in {"first_name", "last_name"}):
|
|
|
|
first_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["first_name"]
|
|
|
|
last_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["last_name"]
|
2020-07-17 20:22:10 +02:00
|
|
|
first_name = ldap_user.attrs[first_name_attr][0]
|
|
|
|
last_name = ldap_user.attrs[last_name_attr][0]
|
|
|
|
full_name = f"{first_name} {last_name}"
|
2019-01-10 18:25:34 +01:00
|
|
|
else:
|
|
|
|
raise ZulipLDAPException("Missing required mapping for user's full name")
|
|
|
|
|
2020-07-17 20:22:10 +02:00
|
|
|
return full_name
|
2019-01-10 18:25:34 +01:00
|
|
|
|
|
|
|
def sync_full_name_from_ldap(self, user_profile: UserProfile,
|
2019-01-12 17:15:14 +01:00
|
|
|
ldap_user: _LDAPUser) -> None:
|
2019-01-10 18:25:34 +01:00
|
|
|
from zerver.lib.actions import do_change_full_name
|
2020-07-17 20:22:10 +02:00
|
|
|
full_name = self.get_mapped_name(ldap_user)
|
2019-01-10 18:25:34 +01:00
|
|
|
if full_name != user_profile.full_name:
|
|
|
|
try:
|
|
|
|
full_name = check_full_name(full_name)
|
|
|
|
except JsonableError as e:
|
|
|
|
raise ZulipLDAPException(e.msg)
|
|
|
|
do_change_full_name(user_profile, full_name, None)
|
|
|
|
|
2019-01-29 13:39:21 +01:00
|
|
|
    def sync_custom_profile_fields_from_ldap(self, user_profile: UserProfile,
                                             ldap_user: _LDAPUser) -> None:
        """Sync the realm's custom profile fields from LDAP attributes.

        LDAP attributes are mapped via AUTH_LDAP_USER_ATTR_MAP entries of
        the form 'custom_profile_field__<var_name>', where <var_name> is
        the field's name lowercased with spaces replaced by underscores.

        Raises ZulipLDAPException when a mapped field doesn't exist in the
        realm or when a value fails validation.
        """
        # Collect the LDAP-provided values, keyed by normalized field name.
        values_by_var_name: Dict[str, Union[int, str, List[int]]] = {}
        for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items():
            if not attr.startswith('custom_profile_field__'):
                continue
            var_name = attr.split('custom_profile_field__')[1]
            try:
                value = ldap_user.attrs[ldap_attr][0]
            except KeyError:
                # If this user doesn't have this field set then ignore this
                # field and continue syncing other fields. `django-auth-ldap`
                # automatically logs error about missing field.
                continue
            values_by_var_name[var_name] = value

        # Index the realm's custom profile fields by the same normalized name.
        fields_by_var_name: Dict[str, CustomProfileField] = {}
        custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id)
        for field in custom_profile_fields:
            var_name = '_'.join(field.name.lower().split(' '))
            fields_by_var_name[var_name] = field

        # Snapshot the user's current values so unchanged fields can be skipped.
        existing_values = {}
        for data in user_profile.profile_data:
            var_name = '_'.join(data['name'].lower().split(' '))
            existing_values[var_name] = data['value']

        # Build the update payload: only changed, validated values.
        profile_data: List[Dict[str, Union[int, str, List[int]]]] = []
        for var_name, value in values_by_var_name.items():
            try:
                field = fields_by_var_name[var_name]
            except KeyError:
                raise ZulipLDAPException(f'Custom profile field with name {var_name} not found.')
            if existing_values.get(var_name) == value:
                continue
            try:
                validate_user_custom_profile_field(user_profile.realm.id, field, value)
            except ValidationError as error:
                raise ZulipLDAPException(f'Invalid data for {var_name} field: {error.message}')
            profile_data.append({
                'id': field.id,
                'value': value,
            })
        do_update_user_custom_profile_data_if_changed(user_profile, profile_data)
|
2019-01-29 13:39:21 +01:00
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
    """The LDAP authentication backend for interactive logins: verifies a
    username/password pair against the LDAP server and, on success,
    either logs in the matching Zulip account or (when permitted)
    creates a new account in the realm via get_or_build_user."""

    # Sentinel error code; NOTE(review): no use of this constant is
    # visible in this chunk — presumably referenced elsewhere in the file.
    REALM_IS_NONE_ERROR = 1

    @rate_limit_auth
    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     username: str, password: str, realm: Realm,
                     prereg_user: Optional[PreregistrationUser]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        """Entry point called by Django's authenticate(); returns the
        authenticated UserProfile, or None so other backends can try.

        Stores realm/prereg_user on self for get_or_build_user, which
        django-auth-ldap calls later without passing them through.
        """
        self._realm = realm
        self._prereg_user = prereg_user
        if not ldap_auth_enabled(realm):
            # LDAP isn't an enabled auth method for this realm.
            return None

        try:
            # We want to pass the user's LDAP username into
            # authenticate() below. If an email address was entered
            # in the login form, we need to use
            # django_to_ldap_username to translate the email address
            # to the user's LDAP username before calling the
            # django-auth-ldap authenticate().
            username = self.django_to_ldap_username(username)
        except ZulipLDAPExceptionNoMatchingLDAPUser as e:
            ldap_logger.debug("%s: %s", self.__class__.__name__, e)
            if return_data is not None:
                return_data['no_matching_ldap_user'] = True
            return None

        # Call into (ultimately) the django-auth-ldap authenticate
        # function. This will check the username/password pair
        # against the LDAP database, and assuming those are correct,
        # end up calling `self.get_or_build_user` with the
        # authenticated user's data from LDAP.
        return super().authenticate(request=request, username=username, password=password)

    def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
        """The main function of our authentication backend extension of
        django-auth-ldap. When this is called (from `authenticate`),
        django-auth-ldap will already have verified that the provided
        username and password match those in the LDAP database.

        This function's responsibility is to check (1) whether the
        email address for this user obtained from LDAP has an active
        account in this Zulip realm. If so, it will log them in.

        Otherwise, to provide a seamless Single Sign-On experience
        with LDAP, this function can automatically create a new Zulip
        user account in the realm (assuming the realm is configured to
        allow that email address to sign up).

        Returns a tuple of (user_profile, created), matching the
        django-auth-ldap get_or_build_user contract.
        """
        return_data: Dict[str, Any] = {}

        # Derive the Zulip email address from the LDAP data.
        username = self.user_email_from_ldapuser(username, ldap_user)

        if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP:  # nocoverage
            ldap_disabled = self.is_account_control_disabled_user(ldap_user)
            if ldap_disabled:
                # Treat disabled users as deactivated in Zulip.
                return_data["inactive_user"] = True
                raise ZulipLDAPException("User has been deactivated")

        user_profile = common_get_active_user(username, self._realm, return_data)
        if user_profile is not None:
            # An existing user, successfully authed; return it.
            return user_profile, False

        if return_data.get("inactive_realm"):
            # This happens if there is a user account in a deactivated realm
            raise ZulipLDAPException("Realm has been deactivated")
        if return_data.get("inactive_user"):
            raise ZulipLDAPException("User has been deactivated")
        # An invalid_subdomain `return_data` value here is ignored,
        # since that just means we're trying to create an account in a
        # second realm on the server (`ldap_auth_enabled(realm)` would
        # have been false if this user wasn't meant to have an account
        # in this second realm).
        if self._realm.deactivated:
            # This happens if no account exists, but the realm is
            # deactivated, so we shouldn't create a new user account
            raise ZulipLDAPException("Realm has been deactivated")

        # Makes sure that email domain hasn't be restricted for this
        # realm. The main thing here is email_allowed_for_realm; but
        # we also call validate_email_not_already_in_realm just for consistency,
        # even though its checks were already done above.
        try:
            email_allowed_for_realm(username, self._realm)
            validate_email_not_already_in_realm(self._realm, username)
        except DomainNotAllowedForRealmError:
            raise ZulipLDAPException("This email domain isn't allowed in this organization.")
        except (DisposableEmailError, EmailContainsPlusError):
            raise ZulipLDAPException("Email validation failed.")

        # We have valid LDAP credentials; time to create an account.
        full_name = self.get_mapped_name(ldap_user)
        try:
            full_name = check_full_name(full_name)
        except JsonableError as e:
            # Surface name-validation problems as LDAP errors.
            raise ZulipLDAPException(e.msg)

        # Carry over invitation details (role, prereg state) when this
        # signup came from an invitation or realm-creation flow.
        opts: Dict[str, Any] = {}
        if self._prereg_user:
            invited_as = self._prereg_user.invited_as
            realm_creation = self._prereg_user.realm_creation
            opts['prereg_user'] = self._prereg_user
            opts['role'] = get_role_for_new_user(invited_as, realm_creation)
            opts['realm_creation'] = realm_creation
            # TODO: Ideally, we should add a mechanism for the user
            # entering which default stream groups they've selected in
            # the LDAP flow.
            opts['default_stream_groups'] = []

        user_profile = do_create_user(username, None, self._realm, full_name, acting_user=None, **opts)
        # Populate avatar and custom profile fields from the LDAP record
        # for the freshly created account.
        self.sync_avatar_from_ldap(user_profile, ldap_user)
        self.sync_custom_profile_fields_from_ldap(user_profile, ldap_user)

        return user_profile, True
|
2013-11-21 01:30:20 +01:00
|
|
|
|
2019-11-09 00:27:18 +01:00
|
|
|
class ZulipLDAPUser(_LDAPUser):
    """
    This is an extension of the _LDAPUser class, with a realm attribute
    attached to it. Its purpose is to call its inherited method
    populate_user() which will sync the ldap data with the corresponding
    UserProfile. The realm attribute serves to uniquely identify the UserProfile
    in case the ldap user is registered to multiple realms.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # Pull the realm out of kwargs before delegating to _LDAPUser,
        # which doesn't know about this extra keyword argument.
        self.realm: Realm = kwargs.pop('realm')
        super().__init__(*args, **kwargs)
|
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase):
    """Just like ZulipLDAPAuthBackend, but doesn't let you log in.  Used
    for syncing data like names, avatars, and custom profile fields
    from LDAP in `manage.py sync_ldap_user_data` as well as in
    registration for organizations that use a different SSO solution
    for managing login (often via RemoteUserBackend).
    """
    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     username: str, password: str, realm: Realm,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        # This backend is sync-only: always refuse authentication so it
        # can never be used to log in, even when listed in
        # AUTHENTICATION_BACKENDS.
        return None

    def get_or_build_user(self, username: str,
                          ldap_user: ZulipLDAPUser) -> Tuple[UserProfile, bool]:
        """This is used only in non-authentication contexts such as:
             ./manage.py sync_ldap_user_data

        Looks up the existing UserProfile for the LDAP user and syncs
        its activation state, avatar, full name, and custom profile
        fields from LDAP.  Returns (user_profile, built) where built is
        always False, since the user must already exist in this flow.
        """
        # Obtain the django username from the ldap_user object:
        username = self.user_email_from_ldapuser(username, ldap_user)

        # We set the built flag (which tells django-auth-ldap whether the user object
        # was taken from the database or freshly built) to False - because in this codepath
        # the user we're syncing of course already has to exist in the database.
        user = get_user_by_delivery_email(username, ldap_user.realm)
        built = False
        # Synchronise the UserProfile with its LDAP attributes:
        if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP:
            user_disabled_in_ldap = self.is_account_control_disabled_user(ldap_user)
            if user_disabled_in_ldap:
                if user.is_active:
                    ldap_logger.info("Deactivating user %s because they are disabled in LDAP.",
                                     user.delivery_email)
                    do_deactivate_user(user)
                # Do an early return to avoid trying to sync additional data.
                return (user, built)
            elif not user.is_active:
                ldap_logger.info("Reactivating user %s because they are not disabled in LDAP.",
                                 user.delivery_email)
                do_reactivate_user(user)

        self.sync_avatar_from_ldap(user, ldap_user)
        self.sync_full_name_from_ldap(user, ldap_user)
        self.sync_custom_profile_fields_from_ldap(user, ldap_user)
        return (user, built)
|
|
|
|
|
2019-09-04 10:11:25 +02:00
|
|
|
class PopulateUserLDAPError(ZulipLDAPException):
    """Raised when an LDAPError occurs inside django-auth-ldap's
    populate_user flow (see catch_ldap_error below)."""
|
|
|
|
|
|
|
|
@receiver(ldap_error, sender=ZulipLDAPUserPopulator)
def catch_ldap_error(signal: Signal, **kwargs: Any) -> None:
    """
    Inside django_auth_ldap populate_user(), if LDAPError is raised,
    e.g. due to invalid connection credentials, the function catches it
    and emits a signal (ldap_error) to communicate this error to others.
    We normally don't use signals, but here there's no choice, so in this function
    we essentially convert the signal to a normal exception that will properly
    propagate out of django_auth_ldap internals.
    """
    if kwargs['context'] != 'populate_user':
        return
    # The exception message can contain the password (if it was invalid),
    # so it seems better not to log that, and only use the original exception's name here.
    raise PopulateUserLDAPError(type(kwargs['exception']).__name__)
|
|
|
|
|
2019-08-26 21:13:23 +02:00
|
|
|
def sync_user_from_ldap(user_profile: UserProfile, logger: logging.Logger) -> bool:
    """Sync a single UserProfile's data from LDAP.

    Returns True if the user was updated (or deactivated for not
    matching any LDAP user), False if the user wasn't found in LDAP and
    was left alone.  Raises PopulateUserLDAPError if populate_user()
    fails unexpectedly.
    """
    backend = ZulipLDAPUserPopulator()
    try:
        ldap_username = backend.django_to_ldap_username(user_profile.delivery_email)
    except ZulipLDAPExceptionNoMatchingLDAPUser:
        # No LDAP account matches this user.  Whether we deactivate them
        # is controlled by LDAP_DEACTIVATE_NON_MATCHING_USERS, falling
        # back to ONLY_LDAP (i.e. deactivate-by-default when LDAP is the
        # only authentication method) when that setting is unset.
        if (
            settings.ONLY_LDAP
            if settings.LDAP_DEACTIVATE_NON_MATCHING_USERS is None
            else settings.LDAP_DEACTIVATE_NON_MATCHING_USERS
        ):
            do_deactivate_user(user_profile)
            logger.info("Deactivated non-matching user: %s", user_profile.delivery_email)
            return True
        elif user_profile.is_active:
            logger.warning("Did not find %s in LDAP.", user_profile.delivery_email)
        return False

    # What one would expect to do here is just a call to
    # `backend.populate_user`, which in turn just creates the
    # `_LDAPUser` object and calls `ldap_user.populate_user()` on
    # that. Unfortunately, that will produce incorrect results in the
    # case that the server has multiple Zulip users in different
    # realms associated with a single LDAP user, because
    # `django-auth-ldap` isn't implemented with the possibility of
    # multiple realms on different subdomains in mind.
    #
    # To address this, we construct a version of the _LDAPUser class
    # extended to store the realm of the target user, and call its
    # `.populate_user` function directly.
    #
    # Ideally, we'd contribute changes to `django-auth-ldap` upstream
    # making this flow possible in a more directly supported fashion.
    updated_user = ZulipLDAPUser(backend, ldap_username, realm=user_profile.realm).populate_user()
    if updated_user:
        logger.info("Updated %s.", user_profile.delivery_email)
        return True

    raise PopulateUserLDAPError(f"populate_user unexpectedly returned {updated_user}")
|
2019-01-12 12:32:54 +01:00
|
|
|
|
2019-03-09 07:52:14 +01:00
|
|
|
# Quick tool to test whether you're correctly authenticating to LDAP
def query_ldap(email: str) -> List[str]:
    """Look up `email` in LDAP and return human-readable lines describing
    the attributes that would be synced, or a diagnostic message if the
    lookup cannot be done."""
    values: List[str] = []
    backend = next((b for b in get_backends() if isinstance(b, LDAPBackend)), None)
    if backend is None:
        values.append("LDAP backend not configured on this server.")
        return values

    try:
        ldap_username = backend.django_to_ldap_username(email)
    except ZulipLDAPExceptionNoMatchingLDAPUser as e:
        values.append(f"No such user found: {e}")
        return values

    ldap_attrs = _LDAPUser(backend, ldap_username).attrs

    for django_field, ldap_field in settings.AUTH_LDAP_USER_ATTR_MAP.items():
        value = ldap_attrs.get(ldap_field, ["LDAP field not present"])[0]
        # Avatar attributes are raw image data; don't dump bytes to the console.
        if django_field == "avatar" and isinstance(value, bytes):
            value = "(An avatar image file)"
        values.append(f"{django_field}: {value}")
    if settings.LDAP_EMAIL_ATTR is not None:
        values.append("{}: {}".format('email', ldap_attrs[settings.LDAP_EMAIL_ATTR][0]))
    return values
|
2019-03-09 07:52:14 +01:00
|
|
|
|
2015-08-19 02:58:20 +02:00
|
|
|
class DevAuthBackend(ZulipAuthMixin):
    """Allow logging in as any user without a password. This is used for
    convenience when developing Zulip, and is disabled in production."""

    name = 'dev'

    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     dev_auth_username: str, realm: Realm,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        # Only usable on realms/servers where dev auth is explicitly enabled.
        if dev_auth_enabled(realm):
            return common_get_active_user(dev_auth_username, realm, return_data=return_data)
        return None
|
2016-07-20 13:33:27 +02:00
|
|
|
|
2020-05-02 06:24:43 +02:00
|
|
|
class ExternalAuthMethodDictT(TypedDict):
    # Serialized description of one external authentication method, as
    # exposed (via ExternalAuthMethod.dict_representation) in the
    # external_authentication_methods field of the /server_settings API.
    name: str
    display_name: str
    # Icon URL shown on the login button; None when the method has no icon.
    display_icon: Optional[str]
    login_url: str
    signup_url: str
|
2019-12-08 23:11:25 +01:00
|
|
|
|
|
|
|
class ExternalAuthMethod(ABC):
    """
    To register a backend as an external_authentication_method, it should
    subclass ExternalAuthMethod and define its dict_representation
    classmethod, and finally use the external_auth_method class decorator to
    get added to the EXTERNAL_AUTH_METHODS list.
    """
    # Subclasses override these placeholders with their real identifiers.
    auth_backend_name = "undeclared"
    name = "undeclared"
    # Icon URL for the login/signup buttons; None means no icon.
    display_icon: Optional[str] = None

    # Used to determine how to order buttons on login form, backend with
    # higher sort order are displayed first.
    sort_order = 0

    @classmethod
    @abstractmethod
    def dict_representation(cls, realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]:
        """
        Method returning dictionaries representing the authentication methods
        corresponding to the backend that subclasses this. The documentation
        for the external_authentication_methods field of the /server_settings endpoint
        explains the details of these dictionaries.
        This returns a list, because one backend can support configuring multiple methods,
        that are all serviced by that backend - our SAML backend is an example of that.
        """
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Registry of all external authentication methods on this server;
# populated by the @external_auth_method class decorator below.
EXTERNAL_AUTH_METHODS: List[Type[ExternalAuthMethod]] = []
|
2019-12-08 23:11:25 +01:00
|
|
|
|
|
|
|
def external_auth_method(cls: Type[ExternalAuthMethod]) -> Type[ExternalAuthMethod]:
    """Class decorator registering an ExternalAuthMethod subclass in
    EXTERNAL_AUTH_METHODS.  Returns the class unchanged."""
    assert issubclass(cls, ExternalAuthMethod)

    EXTERNAL_AUTH_METHODS.append(cls)
    return cls
|
|
|
|
|
2020-02-23 18:58:08 +01:00
|
|
|
# We want to be able to store this data in redis, so it has to be easy to serialize.
# That's why we avoid having fields that could pose a problem for that.
class ExternalAuthDataDict(TypedDict, total=False):
    # State carried through an external (social/SSO) authentication flow.
    # total=False: any subset of these keys may be present.
    subdomain: str
    full_name: str
    email: str
    is_signup: bool
    is_realm_creation: bool
    redirect_to: str
    # One-time passwords for the mobile/desktop app login flows.
    mobile_flow_otp: Optional[str]
    desktop_flow_otp: Optional[str]
    multiuse_object_key: str
    full_name_validated: bool
|
2020-02-23 18:58:08 +01:00
|
|
|
|
|
|
|
class ExternalAuthResult:
    """Represents the result of an external authentication attempt.

    Wraps an optional UserProfile plus an ExternalAuthDataDict, and can
    round-trip that data through redis via a short-lived one-time login
    token (store_data / instantiate_with_token).
    """
    LOGIN_KEY_PREFIX = "login_key_"
    LOGIN_KEY_FORMAT = LOGIN_KEY_PREFIX + "{token}"
    # Tokens are single-use and expire quickly, since they grant login.
    LOGIN_KEY_EXPIRATION_SECONDS = 15
    LOGIN_TOKEN_LENGTH = UserProfile.API_KEY_LENGTH

    def __init__(self, *, user_profile: Optional[UserProfile]=None,
                 data_dict: Optional[ExternalAuthDataDict]=None,
                 login_token: Optional[str]=None,
                 delete_stored_data: bool=True) -> None:
        if data_dict is None:
            data_dict = {}

        if login_token is not None:
            # A login_token fully determines the state, so it is mutually
            # exclusive with passing explicit data.
            assert (not data_dict) and (user_profile is None), ("Passing in data_dict or user_profile " +
                                                                "with login_token is disallowed.")
            self.instantiate_with_token(login_token, delete_stored_data)
        else:
            # Copy to avoid mutating the caller's dict below.
            self.data_dict = data_dict.copy()
            self.user_profile = user_profile

        if self.user_profile is not None:
            # Ensure data inconsistent with the user_profile wasn't passed in inside the data_dict argument.
            assert 'full_name' not in data_dict or data_dict['full_name'] == self.user_profile.full_name
            assert 'email' not in data_dict or data_dict['email'].lower() == self.user_profile.delivery_email.lower()
            # Update these data_dict fields to ensure consistency with self.user_profile. This is mostly
            # defensive code, but is useful in these scenarios:
            # 1. user_profile argument was passed in, and no full_name or email_data in the data_dict arg.
            # 2. We're instantiating from the login_token and the user has changed their full_name since
            #    the data was stored under the token.
            self.data_dict['full_name'] = self.user_profile.full_name
            self.data_dict['email'] = self.user_profile.delivery_email

            if 'subdomain' not in self.data_dict:
                self.data_dict['subdomain'] = self.user_profile.realm.subdomain
            if not self.user_profile.is_mirror_dummy:
                self.data_dict['is_signup'] = False

    def store_data(self) -> str:
        """Store data_dict in redis under a fresh random token and return
        the token (without the redis key prefix)."""
        key = put_dict_in_redis(redis_client, self.LOGIN_KEY_FORMAT, self.data_dict,
                                expiration_seconds=self.LOGIN_KEY_EXPIRATION_SECONDS,
                                token_length=self.LOGIN_TOKEN_LENGTH)
        token = key.split(self.LOGIN_KEY_PREFIX, 1)[1]  # remove the prefix
        return token

    def instantiate_with_token(self, token: str, delete_stored_data: bool=True) -> None:
        """Load data_dict and user_profile from the redis entry for `token`.

        Raises InvalidTokenError if the token is unknown, expired, or the
        stored data is missing the required email/subdomain fields.  By
        default the stored entry is deleted, making tokens single-use.
        """
        key = self.LOGIN_KEY_FORMAT.format(token=token)
        data = get_dict_from_redis(redis_client, self.LOGIN_KEY_FORMAT, key)
        if data is None or None in [data.get("email"), data.get("subdomain")]:
            raise self.InvalidTokenError

        if delete_stored_data:
            redis_client.delete(key)

        self.data_dict = cast(ExternalAuthDataDict, data)

        # Here we refetch the UserProfile object (if any) for this
        # ExternalAuthResult.  Using authenticate() will re-check for
        # (unlikely) races like the realm or user having been deactivated
        # between generating this ExternalAuthResult and accessing it.
        #
        # In theory, we should return_data here so the caller can do
        # more customized error messages for those unlikely races, but
        # it's likely not worth implementing.
        realm = get_realm(data['subdomain'])
        self.user_profile = authenticate(username=data['email'], realm=realm,
                                         use_dummy_backend=True)

    class InvalidTokenError(Exception):
        # Raised by instantiate_with_token for unknown/expired tokens.
        pass
|
|
|
|
|
2019-12-10 00:42:12 +01:00
|
|
|
@external_auth_method
class ZulipRemoteUserBackend(RemoteUserBackend, ExternalAuthMethod):
    """Authentication backend that reads the Apache REMOTE_USER variable.
    Used primarily in enterprise environments with an SSO solution
    that has an Apache REMOTE_USER integration.  For manual testing, see

      https://zulip.readthedocs.io/en/latest/production/authentication-methods.html

    See also remote_user_sso in zerver/views/auth.py.
    """
    auth_backend_name = "RemoteUser"
    name = "remoteuser"
    display_icon = None
    sort_order = 9000  # If configured, this backend should have its button near the top of the list.

    # Don't auto-create accounts for unknown REMOTE_USER values.
    create_unknown_user = False

    def authenticate(self, request: Optional[HttpRequest]=None, *,
                     remote_user: str, realm: Realm,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        if not auth_enabled_helper(["RemoteUser"], realm):
            return None

        return common_get_active_user(remote_user_to_email(remote_user),
                                      realm, return_data=return_data)

    @classmethod
    def dict_representation(cls, realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]:
        # The user goes to the same URL for both login and signup:
        sso_url = reverse('start-login-sso')
        return [dict(
            name=cls.name,
            display_name="SSO",
            display_icon=cls.display_icon,
            login_url=sso_url,
            signup_url=sso_url,
        )]
|
|
|
|
|
2019-04-12 06:24:58 +02:00
|
|
|
def redirect_deactivated_user_to_login() -> HttpResponseRedirect:
    """Build a redirect to the login page, flagged so the page can show a
    'your account is deactivated' notice."""
    # Specifying the template name makes sure that the user is not redirected to dev_login in case of
    # a deactivated account on a test server.
    login_url = reverse('zerver.views.auth.login_page', kwargs={'template_name': 'zerver/login.html'})
    return HttpResponseRedirect(login_url + '?is_deactivated=true')
|
|
|
|
|
2018-05-31 00:12:39 +02:00
|
|
|
def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any],
                                 *args: Any, **kwargs: Any) -> Union[HttpResponse, Optional[UserProfile]]:
    """Responsible for doing the Zulip-account lookup and validation parts
    of the Zulip Social auth pipeline (similar to the authenticate()
    methods in most other auth backends in this file).

    Returns a UserProfile object for successful authentication, and None otherwise
    (with failure details recorded in return_data).  May instead return an
    HttpResponse rendering the email-selection page when the user must pick
    among multiple verified email addresses.
    """
    # The target realm comes from the subdomain stashed in the session at
    # the start of the social auth flow.
    subdomain = backend.strategy.session_get('subdomain')
    try:
        realm = get_realm(subdomain)
    except Realm.DoesNotExist:
        return_data["invalid_realm"] = True
        return None
    return_data["realm_id"] = realm.id
    return_data["realm_string_id"] = realm.string_id

    if not auth_enabled_helper([backend.auth_backend_name], realm):
        return_data["auth_backend_disabled"] = True
        return None

    if 'auth_failed_reason' in kwargs.get('response', {}):
        return_data["social_auth_failed_reason"] = kwargs['response']["auth_failed_reason"]
        return None
    elif hasattr(backend, 'get_verified_emails'):
        # Some social backends, like GitHubAuthBackend, don't
        # guarantee that the `details` data is validated (i.e., it's
        # possible users can put any string they want in the "email"
        # field of the `details` object). For those backends, we have
        # custom per-backend code to properly fetch only verified
        # email addresses from the appropriate third-party API.
        verified_emails = backend.get_verified_emails(realm, *args, **kwargs)
        verified_emails_length = len(verified_emails)
        if verified_emails_length == 0:
            # TODO: Provide a nice error message screen to the user
            # for this case, rather than just logging a warning.
            backend.logger.warning("Social auth (%s) failed because user has no verified emails",
                                   backend.auth_backend_name)
            return_data["email_not_verified"] = True
            return None

        if verified_emails_length == 1:
            chosen_email = verified_emails[0]
        else:
            # With multiple verified emails, the user's choice (if any)
            # arrives in the request data from the selection form.
            chosen_email = backend.strategy.request_data().get('email')

        if not chosen_email:
            # No email chosen yet: figure out which verified emails already
            # have active accounts in this realm, to help the user choose.
            avatars: Dict[str, str] = {}
            existing_account_emails: List[str] = []
            for email in verified_emails:
                existing_account = common_get_active_user(email, realm, {})
                if existing_account is not None:
                    existing_account_emails.append(email)
                    avatars[email] = avatar_url(existing_account)

            if (len(existing_account_emails) != 1 or backend.strategy.session_get('is_signup') == '1'):
                # Ambiguous (or explicit signup): render the page asking the
                # user to pick which email address to use.
                unverified_emails: List[str] = []
                if hasattr(backend, 'get_unverified_emails'):
                    unverified_emails = backend.get_unverified_emails(realm, *args, **kwargs)
                return render(backend.strategy.request, 'zerver/social_auth_select_email.html', context = {
                    'primary_email': verified_emails[0],
                    'verified_non_primary_emails': verified_emails[1:],
                    'unverified_emails': unverified_emails,
                    'backend': 'github',
                    'avatar_urls': avatars,
                })
            else:
                # Exactly one email has an existing account; use it.
                chosen_email = existing_account_emails[0]

        try:
            validate_email(chosen_email)
        except ValidationError:
            return_data['invalid_email'] = True
            return None

        if chosen_email not in verified_emails:
            # If a user edits the submit value for the choose email form, we might
            # end up with a wrong email associated with the account. The below code
            # takes care of that.
            backend.logger.warning("Social auth (%s) failed because user has no verified"
                                   " emails associated with the account",
                                   backend.auth_backend_name)
            return_data["email_not_associated"] = True
            return None

        validated_email = chosen_email
    else:
        # Backends without get_verified_emails are trusted to provide a
        # valid email in the `details` data; still validate its format.
        try:
            validate_email(kwargs["details"].get("email"))
        except ValidationError:
            return_data['invalid_email'] = True
            return None
        validated_email = kwargs["details"].get("email")

    if not validated_email:  # nocoverage
        # This code path isn't used with GitHubAuthBackend, but may be relevant for other
        # social auth backends.
        return_data['invalid_email'] = True
        return None

    return_data["valid_attestation"] = True
    return_data['validated_email'] = validated_email
    user_profile = common_get_active_user(validated_email, realm, return_data)

    full_name = kwargs['details'].get('fullname')
    first_name = kwargs['details'].get('first_name')
    last_name = kwargs['details'].get('last_name')
    if all(name is None for name in [full_name, first_name, last_name]) and backend.name != "apple":
        # Apple authentication provides the user's name only the very first time a user tries to log in.
        # So if the user aborts login or otherwise is doing this the second time,
        # we won't have any name data. So, this case is handled with the code below
        # setting full name to empty string.

        # We need custom code here for any social auth backends
        # that don't provide name details feature.
        raise AssertionError("Social auth backend doesn't provide name")

    if full_name:
        return_data["full_name"] = full_name
    else:
        # Some authentications methods like Apple and SAML send
        # first name and last name as separate attributes. In that case
        # we construct the full name from them.
        return_data["full_name"] = f"{first_name or ''} {last_name or ''}".strip()  # strip removes the unnecessary ' '

    return user_profile
|
2017-03-09 09:45:21 +01:00
|
|
|
|
2018-07-18 23:45:49 +02:00
|
|
|
@partial
def social_auth_associate_user(
        backend: BaseAuth,
        *args: Any,
        **kwargs: Any) -> Union[HttpResponse, Dict[str, Any]]:
    """Reformat the result of social_associate_user_helper into the
    dictionary shape that python-social-auth expects, so that its
    values are passed as kwargs into later stages of
    settings.SOCIAL_AUTH_PIPELINE (e.g. social_auth_finish).
    """
    # Grab the partial-pipeline token up front so it can be relayed to
    # later pipeline stages.
    partial_token = backend.strategy.request_data().get('partial_token')
    return_data: Dict[str, Any] = {}
    user_profile = social_associate_user_helper(
        backend, return_data, *args, **kwargs)

    if isinstance(user_profile, HttpResponse):
        # The helper chose to short-circuit with its own response
        # (e.g. a redirect); pass it straight through.
        return user_profile

    return dict(
        user_profile=user_profile,
        return_data=return_data,
        partial_token=partial_token,
        partial_backend_name=backend,
    )
|
2018-05-31 00:12:39 +02:00
|
|
|
|
|
|
|
def social_auth_finish(backend: Any,
                       details: Dict[str, Any],
                       response: HttpResponse,
                       *args: Any,
                       **kwargs: Any) -> Optional[HttpResponse]:
    """Given the determination in social_auth_associate_user for whether
    the user should be authenticated, this takes care of actually
    logging in the user (if appropriate) and redirecting the browser
    to the appropriate next page depending on the situation.  Read the
    comments below as well as login_or_register_remote_user in
    `zerver/views/auth.py` for the details on how that dispatch works.
    """
    # Imported here (not at module level) to avoid an import cycle
    # between this module and zerver.views.auth.
    from zerver.views.auth import login_or_register_remote_user, redirect_and_log_into_subdomain

    # These were placed in kwargs by social_auth_associate_user, the
    # previous stage in settings.SOCIAL_AUTH_PIPELINE.
    user_profile = kwargs['user_profile']
    return_data = kwargs['return_data']

    # Unpack the various failure flags that earlier pipeline stages
    # may have recorded in return_data.
    no_verified_email = return_data.get("email_not_verified")
    auth_backend_disabled = return_data.get('auth_backend_disabled')
    inactive_user = return_data.get('inactive_user')
    inactive_realm = return_data.get('inactive_realm')
    invalid_realm = return_data.get('invalid_realm')
    invalid_email = return_data.get('invalid_email')
    auth_failed_reason = return_data.get("social_auth_failed_reason")
    email_not_associated = return_data.get("email_not_associated")

    if invalid_realm:
        # User has passed an invalid subdomain param - this shouldn't happen in the normal flow,
        # unless the user manually edits the param. In any case, it's most appropriate to just take
        # them to find_account, as there isn't even an appropriate subdomain to take them to the login
        # form on.
        return HttpResponseRedirect(reverse('zerver.views.registration.find_account'))

    if inactive_user:
        backend.logger.info("Failed login attempt for deactivated account: %s@%s",
                            return_data['inactive_user_id'], return_data['realm_string_id'])
        return redirect_deactivated_user_to_login()

    if auth_backend_disabled or inactive_realm or no_verified_email or email_not_associated:
        # Redirect to login page. We can't send to registration
        # workflow with these errors. We will redirect to login page.
        return None

    if invalid_email:
        # In case of invalid email, we will end up on registration page.
        # This seems better than redirecting to login page.
        backend.logger.warning(
            "%s got invalid email argument.", backend.auth_backend_name,
        )
        return None

    if auth_failed_reason:
        backend.logger.info(auth_failed_reason)
        return None

    # Structurally, all the cases where we don't have an authenticated
    # email for the user should be handled above; this assertion helps
    # prevent any violations of that contract from resulting in a user
    # being incorrectly authenticated.
    assert return_data.get('valid_attestation') is True

    strategy = backend.strategy
    full_name_validated = backend.full_name_validated
    email_address = return_data['validated_email']
    full_name = return_data['full_name']
    redirect_to = strategy.session_get('next')
    realm = Realm.objects.get(id=return_data["realm_id"])
    multiuse_object_key = strategy.session_get('multiuse_object_key', '')

    # At most one of these OTP flows may be active; validate_otp_params
    # raises if both are set.
    mobile_flow_otp = strategy.session_get('mobile_flow_otp')
    desktop_flow_otp = strategy.session_get('desktop_flow_otp')
    validate_otp_params(mobile_flow_otp, desktop_flow_otp)

    if user_profile is None or user_profile.is_mirror_dummy:
        is_signup = strategy.session_get('is_signup') == '1'
    else:
        is_signup = False

    # At this point, we have now confirmed that the user has
    # demonstrated control over the target email address.
    #
    # The next step is to call login_or_register_remote_user, but
    # there are two code paths here because of an optimization to save
    # a redirect on mobile and desktop.
    data_dict = ExternalAuthDataDict(
        subdomain=realm.subdomain,
        is_signup=is_signup,
        redirect_to=redirect_to,
        multiuse_object_key=multiuse_object_key,
        full_name_validated=full_name_validated,
        mobile_flow_otp=mobile_flow_otp,
        desktop_flow_otp=desktop_flow_otp,
    )
    if user_profile is None:
        # New account: include the identity details needed by the
        # registration flow.
        data_dict.update(dict(full_name=full_name, email=email_address))

    result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)

    if mobile_flow_otp or desktop_flow_otp:
        if user_profile is not None and not user_profile.is_mirror_dummy:
            # For mobile and desktop app authentication, login_or_register_remote_user
            # will redirect to a special zulip:// URL that is handled by
            # the app after a successful authentication; so we can
            # redirect directly from here, saving a round trip over what
            # we need to do to create session cookies on the right domain
            # in the web login flow (below).
            return login_or_register_remote_user(strategy.request, result)
        else:
            # The user needs to register, so we need to go the realm's
            # subdomain for that.
            pass

    # If this authentication code were executing on
    # subdomain.zulip.example.com, we would just call
    # login_or_register_remote_user as in the mobile code path.
    # However, because third-party SSO providers generally don't allow
    # wildcard addresses in their redirect URLs, for multi-realm
    # servers, we will have just completed authentication on e.g.
    # auth.zulip.example.com (depending on
    # settings.SOCIAL_AUTH_SUBDOMAIN), which cannot store cookies on
    # the subdomain.zulip.example.com domain.  So instead we serve a
    # redirect (encoding the authentication result data in a
    # cryptographically signed token) to a route on
    # subdomain.zulip.example.com that will verify the signature and
    # then call login_or_register_remote_user.
    return redirect_and_log_into_subdomain(result)
|
2018-05-31 00:12:39 +02:00
|
|
|
|
2020-06-23 22:36:55 +02:00
|
|
|
class SocialAuthMixin(ZulipAuthMixin, ExternalAuthMethod, BaseAuth):
    """Shared infrastructure for the python-social-auth based backends:
    error handling around the auth_complete entry point, and the
    generic dict_representation used to advertise the backend.
    """
    # Whether we expect that the full_name value obtained by the
    # social backend is definitely how the user should be referred to
    # in Zulip, which in turn determines whether we should always show
    # a registration form in the event with a default value of the
    # user's name when using this social backend so they can change
    # it.  For social backends like SAML that are expected to be a
    # central database, this should be True; for backends like GitHub
    # where the user might not have a name set or have it set to
    # something other than the name they will prefer to use in Zulip,
    # it should be False.
    full_name_validated = False

    # Request parameters to relay through the authentication flow
    # (e.g. mobile_flow_otp), plus the 'next' redirect target.
    standard_relay_params = settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION + ['next']

    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        """This is a small wrapper around the core `auth_complete` method of
        python-social-auth, designed primarily to prevent 500s for
        exceptions in the social auth code from situations that are
        really user errors.  Returning `None` from this function will
        redirect the browser to the login page.
        """
        try:
            # Call the auth_complete method of social_core.backends.oauth.BaseOAuth2
            return super().auth_complete(*args, **kwargs)
        except (AuthFailed, HTTPError) as e:
            # When a user's social authentication fails (e.g. because
            # they did something funny with reloading in the middle of
            # the flow or the IdP is unreliable and returns a bad http response),
            # don't throw a 500, just send them back to the
            # login page and record the event at the info log level.
            self.logger.info("%s: %s", e.__class__.__name__, str(e))
            return None
        except SocialAuthBaseException as e:
            # Other python-social-auth exceptions are likely
            # interesting enough that we should log a warning.
            self.logger.warning(str(e))
            return None

    @classmethod
    def dict_representation(cls, realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]:
        """Return the metadata entry describing this backend (name, display
        details, login/signup URLs) for the authentication-methods API.
        """
        return [dict(
            name=cls.name,
            display_name=cls.auth_backend_name,
            display_icon=cls.display_icon,
            login_url=reverse('login-social', args=(cls.name,)),
            signup_url=reverse('signup-social', args=(cls.name,)),
        )]
|
|
|
|
|
|
|
|
@external_auth_method
class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
    name = "github"
    auth_backend_name = "GitHub"
    sort_order = 100
    display_icon = "/static/images/landing-page/logos/github-icon.png"

    def get_all_associated_email_objects(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
        """Fetch all email-address objects associated with the GitHub
        account, via GitHub's /emails API; returns [] on API failure.
        """
        access_token = kwargs["response"]["access_token"]
        try:
            emails = self._user_data(access_token, '/emails')
        except (HTTPError, ValueError, TypeError):  # nocoverage
            # We don't really need an explicit test for this code
            # path, since the outcome will be the same as any other
            # case without any verified emails
            emails = []
        return emails

    def get_unverified_emails(self, realm: Realm, *args: Any, **kwargs: Any) -> List[str]:
        """Return the usable email addresses GitHub has NOT verified."""
        return [
            email_obj['email'] for email_obj in self.get_usable_email_objects(realm, *args, **kwargs)
            if not email_obj.get('verified')
        ]

    def get_verified_emails(self, realm: Realm, *args: Any, **kwargs: Any) -> List[str]:
        """Return the GitHub-verified email addresses, primary first."""
        # We only let users login using email addresses that are
        # verified by GitHub, because the whole point is for the user
        # to demonstrate that they control the target email address.
        verified_emails: List[str] = []
        for email_obj in [obj for obj in self.get_usable_email_objects(realm, *args, **kwargs)
                          if obj.get('verified')]:
            # social_associate_user_helper assumes that the first email in
            # verified_emails is primary.
            if email_obj.get("primary"):
                verified_emails.insert(0, email_obj["email"])
            else:
                verified_emails.append(email_obj["email"])

        return verified_emails

    def get_usable_email_objects(self, realm: Realm, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
        """Filter the associated email objects down to those Zulip is
        willing to use for authentication in this realm.
        """
        # We disallow creation of new accounts with
        # @noreply.github.com/@users.noreply.github.com email
        # addresses, because structurally, we only want to allow email
        # addresses that can receive emails, and those cannot.

        # However, if an account with this address already exists in
        # the realm (which could happen e.g. as a result of data
        # import from another chat tool), we will allow signing in to
        # it.
        email_objs = self.get_all_associated_email_objects(*args, **kwargs)
        return [
            email for email in email_objs
            if (not email["email"].endswith("@users.noreply.github.com")
                or common_get_active_user(email["email"], realm) is not None)
        ]

    def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]:
        """This patched user_data function lets us combine together the 3
        social auth backends into a single Zulip backend for GitHub OAuth2"""
        team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
        org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME

        if team_id is None and org_name is None:
            # I believe this can't raise AuthFailed, so we don't try to catch it here.
            return super().user_data(
                access_token, *args, **kwargs,
            )
        elif team_id is not None:
            backend = GithubTeamOAuth2(self.strategy, self.redirect_uri)
            try:
                return backend.user_data(access_token, *args, **kwargs)
            except AuthFailed:
                return dict(auth_failed_reason="GitHub user is not member of required team")
        elif org_name is not None:
            backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri)
            try:
                return backend.user_data(access_token, *args, **kwargs)
            except AuthFailed:
                return dict(auth_failed_reason="GitHub user is not member of required organization")

        # Unreachable given the branches above; kept as a guard against
        # future edits breaking the team/org dispatch logic.
        raise AssertionError("Invalid configuration")
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2):
    """OAuth2 authentication via Microsoft Azure Active Directory; all
    behavior comes from AzureADOAuth2 plus the SocialAuthMixin
    error-handling wrapper.
    """
    sort_order = 50
    name = "azuread-oauth2"
    auth_backend_name = "AzureAD"
    display_icon = "/static/images/landing-page/logos/azuread-icon.png"
|
2018-10-05 14:32:02 +02:00
|
|
|
|
2020-01-31 18:19:53 +01:00
|
|
|
@external_auth_method
class GitLabAuthBackend(SocialAuthMixin, GitLabOAuth2):
    """OAuth2 authentication via GitLab; all behavior comes from
    GitLabOAuth2 plus the SocialAuthMixin error-handling wrapper.
    """
    sort_order = 75
    name = "gitlab"
    auth_backend_name = "GitLab"
    display_icon = "/static/images/landing-page/logos/gitlab-icon.png"

    # Note: GitLab as of early 2020 supports having multiple email
    # addresses connected with a GitLab account, and we could access
    # those emails, but its APIs don't indicate which of those email
    # addresses were verified, so we cannot use them for
    # authentication like we do for the GitHub integration.  Instead,
    # we just use the primary email address, which is always verified.
    # (No code is required to do so, as that's the default behavior).
|
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class GoogleAuthBackend(SocialAuthMixin, GoogleOAuth2):
    """OAuth2 authentication via Google, with email verification
    enforced via the email_verified claim in Google's response.
    """
    sort_order = 150
    auth_backend_name = "Google"
    name = "google"
    display_icon = "/static/images/landing-page/logos/googl_e-icon.png"

    def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]:
        # Google reports a single email address for the account along
        # with whether it has been verified; only a verified address
        # may be used to authenticate.
        account_details = kwargs["response"]
        if account_details.get("email_verified"):
            return [account_details["email"]]
        return []
|
|
|
|
|
2020-06-09 12:04:21 +02:00
|
|
|
@external_auth_method
class AppleAuthBackend(SocialAuthMixin, AppleIdAuth):
    """
    Authentication backend for "Sign in with Apple".  This supports two flows:
    1. The web flow, usable in a browser, like our other social auth methods.
       It is a slightly modified Oauth2 authorization flow, where the response
       returning the access_token also contains a JWT id_token containing the user's
       identity, signed with Apple's private keys.
       https://developer.apple.com/documentation/sign_in_with_apple/tokenresponse
    2. The native flow, intended for users on an Apple device.  In the native flow,
       the device handles authentication of the user with Apple's servers and ends up
       with the JWT id_token (like in the web flow).  The client-side details aren't
       relevant to us; the app should simply send the id_token as a param to the
       /complete/apple/ endpoint, together with native_flow=true and any other
       appropriate params, such as mobile_flow_otp.
    """
    sort_order = 10
    name = "apple"
    auth_backend_name = "Apple"
    display_icon = "/static/images/landing-page/logos/apple-icon.png"

    # Apple only sends `name` in its response the first time a user
    # tries to sign up, so we won't have it in consecutive attempts.
    # But if Apple does send us the user's name, it will be validated,
    # so it's appropriate to set full_name_validated here.
    full_name_validated = True
    # How long the relay parameters stored in redis (see
    # get_or_create_state below) remain valid: 10 minutes.
    REDIS_EXPIRATION_SECONDS = 60*10

    SCOPE_SEPARATOR = "%20"  # https://github.com/python-social-auth/social-core/issues/470
|
|
|
|
|
2020-07-04 19:09:01 +02:00
|
|
|
@classmethod
|
|
|
|
def check_config(cls) -> Optional[HttpResponse]:
|
|
|
|
obligatory_apple_settings_list = [
|
|
|
|
settings.SOCIAL_AUTH_APPLE_TEAM,
|
|
|
|
settings.SOCIAL_AUTH_APPLE_SERVICES_ID,
|
|
|
|
settings.SOCIAL_AUTH_APPLE_KEY,
|
|
|
|
settings.SOCIAL_AUTH_APPLE_SECRET,
|
|
|
|
]
|
|
|
|
if any(not setting for setting in obligatory_apple_settings_list):
|
|
|
|
return redirect_to_config_error("apple")
|
|
|
|
|
|
|
|
return None
|
|
|
|
|
2020-06-09 18:17:32 +02:00
|
|
|
def is_native_flow(self) -> bool:
|
|
|
|
return self.strategy.request_data().get('native_flow', False)
|
|
|
|
|
2020-06-09 12:04:21 +02:00
|
|
|
# This method replaces a method from python-social-auth; it is adapted to store
|
|
|
|
# the state_token data in redis.
|
|
|
|
def get_or_create_state(self) -> str:
|
|
|
|
'''Creates the Oauth2 state parameter in first step of the flow,
|
|
|
|
before redirecting the user to the IdP (aka Apple).
|
|
|
|
|
|
|
|
Apple will send the user back to us with a POST
|
|
|
|
request. Normally, we rely on being able to store certain
|
|
|
|
parameters in the user's session and use them after the
|
|
|
|
redirect. But because we've configured our session cookies to
|
|
|
|
use the Django default of in SameSite Lax mode, the browser
|
|
|
|
won't send the session cookies to our server in delivering the
|
|
|
|
POST request coming from Apple.
|
|
|
|
|
|
|
|
To work around this, we replace python-social-auth's default
|
|
|
|
session-based storage with storing the parameters in redis
|
|
|
|
under a random token derived from the state. That will allow
|
|
|
|
us to validate the state and retrieve the params after the
|
|
|
|
redirect - by querying redis for the key derived from the
|
|
|
|
state sent in the POST redirect.
|
|
|
|
'''
|
|
|
|
request_data = self.strategy.request_data().dict()
|
|
|
|
data_to_store = {
|
|
|
|
key: request_data[key] for key in self.standard_relay_params
|
|
|
|
if key in request_data
|
|
|
|
}
|
|
|
|
|
|
|
|
# Generate a random string of 32 alphanumeric characters.
|
|
|
|
state = self.state_token()
|
|
|
|
put_dict_in_redis(redis_client, 'apple_auth_{token}',
|
|
|
|
data_to_store, self.REDIS_EXPIRATION_SECONDS,
|
|
|
|
token=state)
|
|
|
|
return state
|
|
|
|
|
|
|
|
def validate_state(self) -> Optional[str]:
|
2020-06-09 18:17:32 +02:00
|
|
|
"""
|
|
|
|
This method replaces a method from python-social-auth; it is
|
|
|
|
adapted to retrieve the data stored in redis, save it in
|
|
|
|
the session so that it can be accessed by the social pipeline.
|
|
|
|
"""
|
2020-06-09 12:04:21 +02:00
|
|
|
request_state = self.get_request_state()
|
|
|
|
|
|
|
|
if not request_state:
|
|
|
|
self.logger.info("Sign in with Apple failed: missing state parameter.")
|
|
|
|
raise AuthMissingParameter(self, 'state')
|
|
|
|
|
|
|
|
formatted_request_state = "apple_auth_" + request_state
|
|
|
|
redis_data = get_dict_from_redis(redis_client, "apple_auth_{token}",
|
|
|
|
formatted_request_state)
|
|
|
|
if redis_data is None:
|
|
|
|
self.logger.info("Sign in with Apple failed: bad state token.")
|
|
|
|
raise AuthStateForbidden(self)
|
|
|
|
|
|
|
|
for param, value in redis_data.items():
|
|
|
|
if param in self.standard_relay_params:
|
|
|
|
self.strategy.session_set(param, value)
|
|
|
|
return request_state
|
|
|
|
|
2020-06-09 18:17:32 +02:00
|
|
|
def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
|
|
|
|
if not self.is_native_flow():
|
|
|
|
# The default implementation in python-social-auth is the browser flow.
|
|
|
|
return super().auth_complete(*args, **kwargs)
|
|
|
|
|
|
|
|
# We handle the Apple's native flow on our own. In this flow,
|
|
|
|
# before contacting the server, the client obtains an id_token
|
|
|
|
# from Apple directly, and then sends that to /complete/apple/
|
|
|
|
# (the endpoint handled by this function), together with any
|
|
|
|
# other desired parameters from self.standard_relay_params.
|
|
|
|
#
|
|
|
|
# What we'd like to do with the payload is just pass it into
|
|
|
|
# the common code path for the web flow. In the web flow,
|
|
|
|
# before sending a request to Apple, python-social-auth sets
|
|
|
|
# various values about the intended authentication in the
|
|
|
|
# session, before the redirect.
|
|
|
|
#
|
|
|
|
# Thus, we need to set those session variables here, before
|
|
|
|
# processing the id_token we received using the common do_auth.
|
|
|
|
request_data = self.strategy.request_data()
|
|
|
|
if 'id_token' not in request_data:
|
|
|
|
raise JsonableError(_("Missing id_token parameter"))
|
|
|
|
|
|
|
|
for param in self.standard_relay_params:
|
|
|
|
self.strategy.session_set(param, request_data.get(param))
|
|
|
|
|
|
|
|
# We should get the subdomain from the hostname of the request.
|
|
|
|
self.strategy.session_set('subdomain', get_subdomain(self.strategy.request))
|
|
|
|
|
|
|
|
try:
|
|
|
|
# Things are now ready to be handled by the superclass code. It will
|
|
|
|
# validate the id_token and push appropriate user data to the social pipeline.
|
|
|
|
result = self.do_auth(request_data['id_token'], *args, **kwargs)
|
|
|
|
return result
|
|
|
|
except (AuthFailed, AuthCanceled) as e:
|
|
|
|
# AuthFailed is a general "failure" exception from
|
|
|
|
# python-social-auth that we should convert to None return
|
|
|
|
# value here to avoid getting tracebacks.
|
|
|
|
#
|
|
|
|
# AuthCanceled is raised in the Apple backend
|
|
|
|
# implementation in python-social-auth in certain cases,
|
|
|
|
# though AuthFailed would have been more correct.
|
|
|
|
#
|
|
|
|
# We have an open PR to python-social-auth to clean this up.
|
|
|
|
logging.info("/complete/apple/: %s", str(e))
|
|
|
|
return None
|
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
@external_auth_method
class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
    """SAML authentication backend.

    Wraps python-social-auth's SAMLAuth with Zulip-specific behavior:
    relaying auth parameters through redis keyed by a random RelayState
    token, per-subdomain IdP restrictions, and defensive parsing of the
    SAMLResponse before handing it to the underlying libraries.
    """

    auth_backend_name = "SAML"
    REDIS_EXPIRATION_SECONDS = 60 * 15

    # Exceptions the underlying SAML/base64/XML libraries raise when
    # handed a missing or malformed SAMLResponse payload.
    SAMLRESPONSE_PARSING_EXCEPTIONS = (OneLogin_Saml2_Error, binascii.Error, XMLSyntaxError)

    name = "saml"

    # Organizations which go through the trouble of setting up SAML are most
    # likely to have it as their main authentication method, so it seems
    # appropriate to have SAML buttons at the top.
    sort_order = 9999

    # There's no common default logo for SAML authentication.
    display_icon = None

    # The full_name provided by the IdP is very likely the standard
    # employee directory name for the user, and thus what they and
    # their organization want to use in Zulip. So don't unnecessarily
    # provide a registration flow prompt for them to set their name.
    full_name_validated = True

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        if settings.SAML_REQUIRE_LIMIT_TO_SUBDOMAINS:
            # With this setting enabled, every configured IdP must carry an
            # explicit limit_to_subdomains; drop (and log) any that don't.
            unrestricted_idps = [
                idp_name for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items()
                if 'limit_to_subdomains' not in idp_dict
            ]
            if unrestricted_idps:
                self.logger.error(
                    "SAML_REQUIRE_LIMIT_TO_SUBDOMAINS is enabled and the following "
                    "IdPs don't have limit_to_subdomains specified and will be ignored: "
                    f"{unrestricted_idps}")
                for idp_name in unrestricted_idps:
                    del settings.SOCIAL_AUTH_SAML_ENABLED_IDPS[idp_name]
        super().__init__(*args, **kwargs)

    def auth_url(self) -> str:
        """Get the URL to which we must redirect in order to
        authenticate the user. Overriding the original SAMLAuth.auth_url.
        Runs when someone accesses the /login/saml/ endpoint."""
        try:
            idp_name = self.strategy.request_data()['idp']
            auth = self._create_saml_auth(idp=self.get_idp(idp_name))
        except KeyError as e:
            # A KeyError above means an invalid or no idp was specified;
            # we should log that and redirect to the login page.
            self.logger.info("/login/saml/ : Bad idp param: KeyError: %s.", str(e))
            return reverse('zerver.views.auth.login_page',
                           kwargs={'template_name': 'zerver/login.html'})

        # This is where we change things. We need to pass some params
        # (`mobile_flow_otp`, `next`, etc.) through RelayState, which
        # the IdP will pass back to us, so we can read those parameters
        # in the final part of the authentication flow at the
        # /complete/saml/ endpoint.
        #
        # To protect against network eavesdropping of these parameters,
        # we send just a random token to the IdP in RelayState; that
        # token is a key into our redis data store, from which the
        # actual parameters are fetched after the IdP has returned a
        # successful authentication.
        request_data = self.strategy.request_data().dict()
        data_to_relay = {
            key: request_data[key]
            for key in self.standard_relay_params
            if key in request_data
        }
        relay_state = orjson.dumps({"state_token": self.put_data_in_redis(data_to_relay)}).decode()

        return auth.login(return_to=relay_state)

    @classmethod
    def put_data_in_redis(cls, data_to_relay: Dict[str, Any]) -> str:
        # Returns the random token under which the relayed data was stored.
        return put_dict_in_redis(redis_client, "saml_token_{token}",
                                 data_to_store=data_to_relay,
                                 expiration_seconds=cls.REDIS_EXPIRATION_SECONDS)

    @classmethod
    def get_data_from_redis(cls, key: str) -> Optional[Dict[str, Any]]:
        # Safety guard, to not allow someone to poke around arbitrary
        # redis keys through this code path.
        if not key.startswith('saml_token_'):
            return None
        return get_dict_from_redis(redis_client, "saml_token_{token}", key)

    def get_issuing_idp(self, SAMLResponse: str) -> Optional[str]:
        """
        Given a SAMLResponse, returns which of the configured IdPs is declared as the issuer.
        This value MUST NOT be trusted as the true issuer!
        The signatures are not validated, so it can be tampered with by the user.
        That's not a problem for this function,
        and true validation happens later in the underlying libraries, but it's important
        to note this detail. The purpose of this function is merely as a helper to figure out which
        of the configured IdPs' information to use for parsing and validating the response.
        """
        try:
            config = self.generate_saml_config()
            saml_settings = OneLogin_Saml2_Settings(config, sp_validation_only=True)
            resp = OneLogin_Saml2_Response(settings=saml_settings, response=SAMLResponse)
            issuers = resp.get_issuers()
        except self.SAMLRESPONSE_PARSING_EXCEPTIONS:
            self.logger.info("Error while parsing SAMLResponse:", exc_info=True)
            return None

        # Match the (unvalidated) issuer against the configured IdPs.
        for idp_name, idp_config in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
            if idp_config['entity_id'] in issuers:
                return idp_name
        return None

    def get_relayed_params(self) -> Dict[str, Any]:
        """Recover the parameters relayed through RelayState (see auth_url).

        Returns {} whenever RelayState is absent or unparseable.
        """
        request_data = self.strategy.request_data()
        if 'RelayState' not in request_data:
            return {}

        relay_state = request_data['RelayState']
        try:
            # Keep the dict accesses inside the try: orjson may return a
            # non-dict value, making `in`/`.get` raise TypeError.
            data = orjson.loads(relay_state)
            if 'state_token' in data:
                # SP-initiated sign in. We stored relevant information in the first
                # step of the flow
                return self.get_data_from_redis(data['state_token']) or {}
            else:
                # IdP-initiated sign in. Right now we only support transporting subdomain through json in
                # RelayState, but this format is nice in that it allows easy extensibility here.
                return {'subdomain': data.get('subdomain')}
        except (ValueError, TypeError):
            return {}

    def choose_subdomain(self, relayed_params: Dict[str, Any]) -> Optional[str]:
        """Determine the realm subdomain targeted by this authentication.

        Prefers an explicitly relayed subdomain; otherwise falls back to
        the subdomain of the request, sanity-checked for a realm.
        """
        explicit_subdomain = relayed_params.get("subdomain")
        if explicit_subdomain is not None:
            return explicit_subdomain

        # If not specified otherwise, the intended subdomain for this
        # authentication attempt is the subdomain of the request.
        request_subdomain = get_subdomain(self.strategy.request)
        try:
            # We only want to do a basic sanity-check here for whether
            # this subdomain has a realm one could try to authenticate
            # to. True validation of whether the realm is active, the
            # IdP is appropriate for the subdomain, etc. happens
            # elsewhere in the flow and we shouldn't duplicate such
            # logic here.
            get_realm(request_subdomain)
        except Realm.DoesNotExist:
            return None
        return request_subdomain

    def _check_entitlements(self, idp: SAMLIdentityProvider, attributes: Dict[str, List[str]]) -> None:
        """
        Below is the docstring from the social_core SAML backend.

        Additional verification of a SAML response before
        authenticating the user.

        Subclasses can override this method if they need custom
        validation code, such as requiring the presence of an
        eduPersonEntitlement.

        raise social_core.exceptions.AuthForbidden if the user should not
        be authenticated, or do nothing to allow the login pipeline to
        continue.
        """
        org_membership_attribute = idp.conf.get('attr_org_membership', None)
        if org_membership_attribute is None:
            # No entitlement checking configured for this IdP.
            return

        subdomain = self.strategy.session_get('subdomain')
        entitlements: Union[str, List[str]] = attributes.get(org_membership_attribute, [])
        if subdomain in entitlements:
            return

        # The root subdomain is a special case, as sending an
        # empty string in the list of values of the attribute may
        # not be viable. So, any of the ROOT_SUBDOMAIN_ALIASES can
        # be used to signify the user is authorized for the root
        # subdomain.
        if (subdomain == Realm.SUBDOMAIN_FOR_ROOT_DOMAIN
                and not settings.ROOT_DOMAIN_LANDING_PAGE
                and any(alias in entitlements for alias in settings.ROOT_SUBDOMAIN_ALIASES)):
            return

        error_msg = (f"SAML user from IdP {idp.name} rejected due to missing entitlement "
                     f"for subdomain '{subdomain}'. User entitlements: {entitlements}.")
        raise AuthFailed(self, error_msg)

    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        """
        Additional ugly wrapping on top of auth_complete in SocialAuthMixin.
        We handle two things here:
        1. Working around bad RelayState or SAMLResponse parameters in the request.
        Both parameters should be present if the user came to /complete/saml/ through
        the IdP as intended. The errors can happen if someone simply types the endpoint into
        their browsers, or generally tries messing with it in some ways.

        2. The first part of our SAML authentication flow will encode important parameters
        into the RelayState. We need to read them and set those values in the session,
        and then change the RelayState param to the idp_name, because that's what
        SAMLAuth.auth_complete() expects.
        """
        SAMLResponse = self.strategy.request_data().get('SAMLResponse')
        if SAMLResponse is None:
            self.logger.info("/complete/saml/: No SAMLResponse in request.")
            return None

        relayed_params = self.get_relayed_params()

        subdomain = self.choose_subdomain(relayed_params)
        if subdomain is None:
            error_msg = ("/complete/saml/: Can't figure out subdomain for this authentication request. "
                         "relayed_params: %s")
            self.logger.info(error_msg, relayed_params)
            return None

        idp_name = self.get_issuing_idp(SAMLResponse)
        if idp_name is None:
            self.logger.info("/complete/saml/: No valid IdP as issuer of the SAMLResponse.")
            return None

        if not self.validate_idp_for_subdomain(idp_name, subdomain):
            error_msg = ("/complete/saml/: Authentication request with IdP %s but this provider is not "
                         "enabled for this subdomain %s.")
            self.logger.info(error_msg, idp_name, subdomain)
            return None

        result = None
        try:
            relay_data = relayed_params.copy()
            relay_data['subdomain'] = subdomain
            for param, value in relay_data.items():
                if param in self.standard_relay_params:
                    self.strategy.session_set(param, value)

            # super().auth_complete expects to have RelayState set to the idp_name,
            # so we need to replace this param.
            post_params = self.strategy.request.POST.copy()
            post_params['RelayState'] = idp_name
            self.strategy.request.POST = post_params

            # Call the auth_complete method of SocialAuthMixIn
            result = super().auth_complete(*args, **kwargs)
        except self.SAMLRESPONSE_PARSING_EXCEPTIONS:
            # These can be raised if SAMLResponse is missing or badly formatted.
            self.logger.info("/complete/saml/: error while parsing SAMLResponse:", exc_info=True)
            # Fall through to returning None.
        finally:
            if result is None:
                for param in self.standard_relay_params:
                    # If an attacker managed to eavesdrop on the RelayState token,
                    # they may pass it here to the endpoint with an invalid SAMLResponse.
                    # We remove these potentially sensitive parameters that we have set in the session
                    # earlier, to avoid leaking their values.
                    self.strategy.session_set(param, None)

        return result

    @classmethod
    def validate_idp_for_subdomain(cls, idp_name: str, subdomain: str) -> bool:
        """Whether the named (configured) IdP may be used on this subdomain."""
        idp_dict = settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.get(idp_name)
        if idp_dict is None:
            raise AssertionError(f"IdP: {idp_name} not found")
        if 'limit_to_subdomains' in idp_dict and subdomain not in idp_dict['limit_to_subdomains']:
            return False
        return True

    @classmethod
    def check_config(cls) -> Optional[HttpResponse]:
        """Verify that SAML is fully configured.

        Returns a redirect to the configuration-error page if any of the
        required settings is unset, and None when the config is complete.
        """
        obligatory_saml_settings_list = [
            settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID,
            settings.SOCIAL_AUTH_SAML_ORG_INFO,
            settings.SOCIAL_AUTH_SAML_TECHNICAL_CONTACT,
            settings.SOCIAL_AUTH_SAML_SUPPORT_CONTACT,
            settings.SOCIAL_AUTH_SAML_ENABLED_IDPS,
        ]
        if any(not setting for setting in obligatory_saml_settings_list):
            return redirect_to_config_error("saml")

        return None

    @classmethod
    def dict_representation(cls, realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]:
        """One dict per enabled IdP usable on the given realm (or on all
        realms, when queried without a realm)."""
        result: List[ExternalAuthMethodDictT] = []
        for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
            if realm and not cls.validate_idp_for_subdomain(idp_name, realm.subdomain):
                continue
            if realm is None and 'limit_to_subdomains' in idp_dict:
                # If queried without a realm, only return IdPs that can be used on all realms.
                continue

            saml_dict: ExternalAuthMethodDictT = dict(
                name=f'saml:{idp_name}',
                display_name=idp_dict.get('display_name', cls.auth_backend_name),
                display_icon=idp_dict.get('display_icon', cls.display_icon),
                login_url=reverse('login-social-extra-arg', args=('saml', idp_name)),
                signup_url=reverse('signup-social-extra-arg', args=('saml', idp_name)),
            )
            result.append(saml_dict)

        return result
2020-02-01 17:45:22 +01:00
|
|
|
def validate_otp_params(mobile_flow_otp: Optional[str]=None,
                        desktop_flow_otp: Optional[str]=None) -> None:
    """Validate the OTP parameters of a login request.

    Raises JsonableError if either OTP is malformed, or if both the
    mobile and desktop flow OTPs were supplied at once.
    """
    for otp in (mobile_flow_otp, desktop_flow_otp):
        if otp is not None and not is_valid_otp(otp):
            raise JsonableError(_("Invalid OTP"))

    # The two flows are mutually exclusive.
    if mobile_flow_otp and desktop_flow_otp:
        raise JsonableError(_("Can't use both mobile_flow_otp and desktop_flow_otp together."))
|
|
|
|
|
2019-12-08 23:11:25 +01:00
|
|
|
def get_external_method_dicts(realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]:
    """
    Returns a list of dictionaries that represent social backends, sorted
    in the order in which they should be displayed.
    """
    result: List[ExternalAuthMethodDictT] = []
    # EXTERNAL_AUTH_METHODS is already sorted in the correct display
    # order, so no additional sorting is needed here.
    for method in EXTERNAL_AUTH_METHODS:
        if not auth_enabled_helper([method.auth_backend_name], realm):
            continue
        result.extend(method.dict_representation(realm))
    return result
|
2019-10-22 18:11:28 +02:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Map from the human-readable backend name (as used in realm settings
# and API responses) to the backend class or external-auth method that
# implements it.  The built-in username/password-style backends are
# listed explicitly; external (social) methods are added below.
AUTH_BACKEND_NAME_MAP: Dict[str, Any] = {
    'Dev': DevAuthBackend,
    'Email': EmailAuthBackend,
    'LDAP': ZulipLDAPAuthBackend,
}

# Register every external authentication method under its declared
# backend name.  Membership is what matters here, so it is fine that
# this runs before EXTERNAL_AUTH_METHODS is re-sorted below.
for external_method in EXTERNAL_AUTH_METHODS:
    AUTH_BACKEND_NAME_MAP[external_method.auth_backend_name] = external_method

# Sort the external methods by their declared sort_order (descending),
# which determines the order they are displayed to users.
EXTERNAL_AUTH_METHODS = sorted(EXTERNAL_AUTH_METHODS, key=lambda x: x.sort_order, reverse=True)

# Provide this alternative name for backwards compatibility with
# installations that had the old backend enabled.
GoogleMobileOauth2Backend = GoogleAuthBackend
|