2016-08-01 13:06:35 +02:00
|
|
|
import logging
|
2018-08-03 20:05:19 +02:00
|
|
|
from typing import Any, Dict, List, Set, Tuple, Optional, Sequence
|
2016-08-01 13:06:35 +02:00
|
|
|
|
2017-10-24 17:59:39 +02:00
|
|
|
from django_auth_ldap.backend import LDAPBackend, _LDAPUser
|
|
|
|
import django.contrib.auth
|
2013-11-04 23:16:46 +01:00
|
|
|
from django.contrib.auth.backends import RemoteUserBackend
|
2013-11-21 01:30:20 +01:00
|
|
|
from django.conf import settings
|
2018-05-31 00:12:39 +02:00
|
|
|
from django.core.exceptions import ValidationError
|
|
|
|
from django.core.validators import validate_email
|
2017-10-24 17:59:39 +02:00
|
|
|
from django.http import HttpResponse
|
2018-06-07 00:19:06 +02:00
|
|
|
from requests import HTTPError
|
2017-01-21 16:52:59 +01:00
|
|
|
from social_core.backends.github import GithubOAuth2, GithubOrganizationOAuth2, \
|
2016-08-01 13:06:35 +02:00
|
|
|
GithubTeamOAuth2
|
2018-10-05 14:32:02 +02:00
|
|
|
from social_core.backends.azuread import AzureADOAuth2
|
2018-05-31 00:12:39 +02:00
|
|
|
from social_core.backends.base import BaseAuth
|
2018-10-12 02:08:33 +02:00
|
|
|
from social_core.backends.oauth import BaseOAuth2
|
2017-11-17 07:53:52 +01:00
|
|
|
from social_core.utils import handle_http_errors
|
2017-03-07 08:32:40 +01:00
|
|
|
from social_core.exceptions import AuthFailed, SocialAuthBaseException
|
2017-03-24 10:48:52 +01:00
|
|
|
from social_django.models import DjangoStorage
|
|
|
|
from social_django.strategy import DjangoStrategy
|
|
|
|
|
2018-12-13 23:58:26 +01:00
|
|
|
from zerver.lib.actions import do_create_user, do_reactivate_user, do_deactivate_user
|
2017-10-24 17:59:39 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
2017-10-20 02:53:24 +02:00
|
|
|
from zerver.lib.subdomains import user_matches_subdomain, get_subdomain
|
2017-10-24 17:59:39 +02:00
|
|
|
from zerver.lib.users import check_full_name
|
|
|
|
from zerver.models import UserProfile, Realm, get_user_profile_by_id, \
|
2018-12-07 00:05:57 +01:00
|
|
|
remote_user_to_email, email_to_username, get_realm, get_user_by_delivery_email
|
2017-10-24 17:59:39 +02:00
|
|
|
|
2018-05-10 18:53:55 +02:00
|
|
|
def pad_method_dict(method_dict: Dict[str, bool]) -> Dict[str, bool]:
    """Pads an authentication methods dict to contain all auth backends
    supported by the software, regardless of whether they are
    configured on this server.  Mutates and returns `method_dict`."""
    for backend_name in AUTH_BACKEND_NAME_MAP:
        method_dict.setdefault(backend_name, False)
    return method_dict
|
|
|
|
|
2018-05-10 18:53:55 +02:00
|
|
|
def auth_enabled_helper(backends_to_check: List[str], realm: Optional[Realm]) -> bool:
    """Return True if any backend named in `backends_to_check` (keys of
    AUTH_BACKEND_NAME_MAP, e.g. 'Email') is both enabled for `realm`
    (or enabled in general, when realm is None) and actually installed
    in this server's AUTHENTICATION_BACKENDS.
    """
    if realm is not None:
        enabled_method_dict = realm.authentication_methods_dict()
    else:
        # No realm: treat every supported authentication flag as enabled.
        enabled_method_dict = {method: True for method in Realm.AUTHENTICATION_FLAGS}
    # Ensure every known backend has an entry so the lookup below can't
    # raise KeyError.  (Previously this call was duplicated in both
    # branches above.)
    pad_method_dict(enabled_method_dict)
    for supported_backend in django.contrib.auth.get_backends():
        for backend_name in backends_to_check:
            backend = AUTH_BACKEND_NAME_MAP[backend_name]
            if enabled_method_dict[backend_name] and isinstance(supported_backend, backend):
                return True
    return False
|
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def ldap_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the LDAP backend is enabled (for `realm`, if given)."""
    return auth_enabled_helper(['LDAP'], realm)
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def email_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the email/password backend is enabled (for `realm`, if given)."""
    return auth_enabled_helper(['Email'], realm)
|
2016-11-07 00:04:59 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def password_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether any password-based backend (LDAP or email) is enabled."""
    return ldap_auth_enabled(realm) or email_auth_enabled(realm)
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def dev_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether the development no-password backend is enabled."""
    return auth_enabled_helper(['Dev'], realm)
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def google_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether Google authentication is enabled (for `realm`, if given)."""
    return auth_enabled_helper(['Google'], realm)
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def github_auth_enabled(realm: Optional[Realm]=None) -> bool:
    """Whether GitHub authentication is enabled (for `realm`, if given)."""
    return auth_enabled_helper(['GitHub'], realm)
|
2015-08-19 02:58:20 +02:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def any_oauth_backend_enabled(realm: Optional[Realm]=None) -> bool:
    """Used by the login page to decide whether to show the 'OR' divider
    for logging in with one of the OAuth backends (OAUTH_BACKEND_NAMES)."""
    return auth_enabled_helper(OAUTH_BACKEND_NAMES, realm)
|
2017-04-20 21:02:56 +02:00
|
|
|
|
2017-11-27 14:35:36 +01:00
|
|
|
def require_email_format_usernames(realm: Optional[Realm]=None) -> bool:
    """Whether login usernames must look like email addresses.  Only an
    LDAP configuration that either derives emails from another attribute
    (LDAP_EMAIL_ATTR) or appends a domain (LDAP_APPEND_DOMAIN) allows
    non-email usernames."""
    if ldap_auth_enabled(realm) and (
            settings.LDAP_EMAIL_ATTR or settings.LDAP_APPEND_DOMAIN):
        return False
    return True
|
|
|
|
|
2017-11-17 22:43:16 +01:00
|
|
|
def common_get_active_user(email: str, realm: Realm,
                           return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
    """Look up the active account for `email` in `realm`.

    Returns the UserProfile on success; returns None on failure,
    recording the reason (invalid_subdomain / inactive_user /
    inactive_realm / is_mirror_dummy) in `return_data` when provided.
    """
    try:
        user_profile = get_user_by_delivery_email(email, realm)
    except UserProfile.DoesNotExist:
        # If the user doesn't have an account in the target realm, we
        # check whether they might have an account in another realm,
        # and if so, provide a helpful error message via
        # `invalid_subdomain`.
        if not UserProfile.objects.filter(delivery_email__iexact=email).exists():
            # No account anywhere with this email; plain failure.
            return None
        if return_data is not None:
            return_data['invalid_subdomain'] = True
        return None
    if not user_profile.is_active:
        if return_data is not None:
            if user_profile.is_mirror_dummy:
                # Record whether it's a mirror dummy account
                return_data['is_mirror_dummy'] = True
            return_data['inactive_user'] = True
        return None
    if user_profile.realm.deactivated:
        if return_data is not None:
            return_data['inactive_realm'] = True
        return None
    return user_profile
|
|
|
|
|
2017-11-05 11:31:53 +01:00
|
|
|
class ZulipAuthMixin:
    """Shared helper for Zulip's auth backends: Django calls get_user()
    to turn the user id stored in the session back into a UserProfile."""

    def get_user(self, user_profile_id: int) -> Optional[UserProfile]:
        """Return the UserProfile for `user_profile_id`, or None if no
        such user exists."""
        try:
            return get_user_profile_by_id(user_profile_id)
        except UserProfile.DoesNotExist:
            return None
|
|
|
|
|
2013-11-21 04:57:23 +01:00
|
|
|
class ZulipDummyBackend(ZulipAuthMixin):
    """
    Used when we want to log you in without checking any
    authentication (i.e. new user registration or when otherwise
    authentication has already been checked earlier in the process).
    """

    def authenticate(self, username: Optional[str]=None, realm: Optional[Realm]=None,
                     use_dummy_backend: bool=False,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        # Refuse to do anything unless explicitly asked for via
        # use_dummy_backend, so this backend can't be hit accidentally.
        if not use_dummy_backend:
            return None
        # These are kwargs only for readability; they should never be None
        assert username is not None
        assert realm is not None
        return common_get_active_user(username, realm, return_data)
|
|
|
|
|
2013-11-01 20:22:12 +01:00
|
|
|
class EmailAuthBackend(ZulipAuthMixin):
    """
    Email Authentication Backend

    Allows a user to sign in using an email/password pair rather than
    a username/password pair.
    """

    def authenticate(self, username: Optional[str]=None, password: Optional[str]=None,
                     realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        """ Authenticate a user based on email address as the user name. """
        if username is None or password is None:
            # Because of how we structure our auth calls to always
            # specify which backend to use when not using
            # EmailAuthBackend, username and password should always be set.
            raise AssertionError("Invalid call to authenticate for EmailAuthBackend")
        if realm is None:
            return None

        # Report precisely which configuration is blocking the login.
        if not password_auth_enabled(realm):
            if return_data is not None:
                return_data['password_auth_disabled'] = True
            return None
        if not email_auth_enabled(realm):
            if return_data is not None:
                return_data['email_auth_disabled'] = True
            return None

        user_profile = common_get_active_user(username, realm, return_data=return_data)
        if user_profile is None or not user_profile.check_password(password):
            return None
        return user_profile
|
2013-08-06 22:51:47 +02:00
|
|
|
|
2014-01-10 23:48:05 +01:00
|
|
|
class GoogleMobileOauth2Backend(ZulipAuthMixin):
    """
    Google Apps authentication for the legacy Android app.
    ZulipDummyBackend is what's actually used for our modern Google auth,
    both for web and mobile (the latter via the mobile_flow_otp feature).

    Allows a user to sign in using a Google-issued OAuth2 token.

    Ref:
        https://developers.google.com/+/mobile/android/sign-in#server-side_access_for_your_app
        https://developers.google.com/accounts/docs/CrossClientAuth#offlineAccess
    """

    def authenticate(self, google_oauth2_token: Optional[str]=None, realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        """Verify the Google OAuth2 token and look up the matching active
        Zulip account; failure reasons are recorded in `return_data`."""
        # We lazily import apiclient as part of optimizing the base
        # import time for a Zulip management command, since it's only
        # used in this one code path and takes 30-50ms to import.
        from apiclient.sample_tools import client as googleapiclient
        from oauth2client.crypt import AppIdentityError
        if realm is None:
            return None
        if return_data is None:
            return_data = {}

        if not google_auth_enabled(realm=realm):
            return_data["google_auth_disabled"] = True
            return None

        try:
            token_payload = googleapiclient.verify_id_token(google_oauth2_token, settings.GOOGLE_CLIENT_ID)
        except AppIdentityError:
            # Token failed Google's signature/audience verification.
            return None

        # Google may report email_verified as a bool or the string "true".
        if token_payload["email_verified"] not in (True, "true"):
            return_data["valid_attestation"] = False
            return None

        return_data["valid_attestation"] = True
        return common_get_active_user(token_payload["email"], realm, return_data)
|
2017-11-21 21:25:58 +01:00
|
|
|
|
2013-11-04 23:16:46 +01:00
|
|
|
class ZulipRemoteUserBackend(RemoteUserBackend):
    """Authentication backend for deployments where an upstream layer
    supplies the already-authenticated username (Django's REMOTE_USER
    mechanism, via RemoteUserBackend)."""

    # Zulip handles account creation itself; never auto-create here.
    create_unknown_user = False

    def authenticate(self, remote_user: Optional[str], realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        assert remote_user is not None
        if realm is None:
            return None
        if not auth_enabled_helper(["RemoteUser"], realm):
            return None

        email = remote_user_to_email(remote_user)
        return common_get_active_user(email, realm, return_data=return_data)
|
2015-02-06 18:30:28 +01:00
|
|
|
|
2018-05-29 07:25:08 +02:00
|
|
|
def email_belongs_to_ldap(realm: Realm, email: str) -> bool:
    """Whether accounts with this email address are managed by the
    realm's LDAP configuration (and thus must authenticate via LDAP).
    """
    if not ldap_auth_enabled(realm):
        return False

    # If we don't have an LDAP domain, it's impossible to tell which
    # accounts are LDAP accounts, so treat all of them as LDAP
    # accounts
    if not settings.LDAP_APPEND_DOMAIN:
        return True

    # Otherwise, check if the email ends with LDAP_APPEND_DOMAIN.
    # Lowercase both sides: the email is lowercased for comparison, so a
    # mixed-case configured LDAP_APPEND_DOMAIN would otherwise never
    # match (email domains are case-insensitive anyway).
    return email.strip().lower().endswith("@" + settings.LDAP_APPEND_DOMAIN.lower())
|
|
|
|
|
2017-09-27 00:56:34 +02:00
|
|
|
class ZulipLDAPException(_LDAPUser.AuthenticationFailed):
    """Since this inherits from _LDAPUser.AuthenticationFailed, these will
    be caught and logged at debug level inside django-auth-ldap's
    authenticate()."""
|
|
|
|
|
auth: Improve interactions between LDAPAuthBackend and EmailAuthBackend.
Previously, if you had LDAPAuthBackend enabled, we basically blocked
any other auth backends from working at all, by requiring the user's
login flow include verifying the user's LDAP password.
We still want to enforce that in the case that the account email
matches LDAP_APPEND_DOMAIN, but there's a reasonable corner case:
Having effectively guest users from outside the LDAP domain.
We don't want to allow creating a Zulip-level password for a user
inside the LDAP domain, so we still verify the LDAP password in that
flow, but if the email is allowed to register (due to invite or
whatever) but is outside the LDAP domain for the organization, we
allow it to create an account and set a password.
For the moment, this solution only covers EmailAuthBackend. It's
likely that just extending the list of other backends we check for in
the new conditional on `email_auth_backend` would be correct, but we
haven't done any testing for those cases, and with auth code paths,
it's better to disallow than allow untested code paths.
Fixes #9422.
2018-05-29 06:52:06 +02:00
|
|
|
class ZulipLDAPExceptionOutsideDomain(ZulipLDAPException):
    """Raised when the login email does not end with the configured
    LDAP_APPEND_DOMAIN (see django_to_ldap_username)."""
|
|
|
|
|
2017-09-22 10:58:12 +02:00
|
|
|
class ZulipLDAPConfigurationError(Exception):
    """A problem with the server's LDAP configuration itself (not an
    authentication failure; note this does not inherit from
    _LDAPUser.AuthenticationFailed)."""
|
|
|
|
|
2018-12-13 23:58:26 +01:00
|
|
|
# Bit mask applied to the LDAP 'userAccountControl' attribute: bit 0x2
# marks the account as disabled (used by is_account_control_disabled_user).
LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK = 2
|
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend):
    """Shared machinery for Zulip's LDAP backends: development fakeldap
    wiring, username<->email translation, and avatar / disabled-account
    synchronization.  Subclasses define whether authenticate() actually
    logs users in."""

    def __init__(self) -> None:
        # In development with FAKE_LDAP_MODE set, replace the real LDAP
        # connection with fakeldap's MockLDAP, populated by a generated
        # directory.
        if settings.DEVELOPMENT and settings.FAKE_LDAP_MODE: # nocoverage
            # We only use this in development.  Importing mock inside
            # this function is an import time optimization, which
            # avoids the expensive import of the mock module (slow
            # because its dependency pbr uses pkgresources, which is
            # really slow to import.)
            import mock
            from fakeldap import MockLDAP
            from zerver.lib.dev_ldap_directory import generate_dev_ldap_dir

            ldap_patcher = mock.patch('django_auth_ldap.config.ldap.initialize')
            self.mock_initialize = ldap_patcher.start()
            self.mock_ldap = MockLDAP()
            self.mock_initialize.return_value = self.mock_ldap

            self.mock_ldap.directory = generate_dev_ldap_dir(settings.FAKE_LDAP_MODE,
                                                             settings.FAKE_LDAP_NUM_USERS)

    # Don't use Django LDAP's permissions functions
    def has_perm(self, user: Optional[UserProfile], perm: Any, obj: Any=None) -> bool:
        # Using Any type is safe because we are not doing anything with
        # the arguments.
        return False

    def has_module_perms(self, user: Optional[UserProfile], app_label: Optional[str]) -> bool:
        return False

    def get_all_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
        # Using Any type is safe because we are not doing anything with
        # the arguments and always return empty set.
        return set()

    def get_group_permissions(self, user: Optional[UserProfile], obj: Any=None) -> Set[Any]:
        # Using Any type is safe because we are not doing anything with
        # the arguments and always return empty set.
        return set()

    def django_to_ldap_username(self, username: str) -> str:
        """Strip LDAP_APPEND_DOMAIN from a Zulip email address to get the
        LDAP username; raises ZulipLDAPExceptionOutsideDomain if the email
        is outside that domain.  With no LDAP_APPEND_DOMAIN configured,
        usernames pass through unchanged."""
        if settings.LDAP_APPEND_DOMAIN:
            if not username.endswith("@" + settings.LDAP_APPEND_DOMAIN):
                raise ZulipLDAPExceptionOutsideDomain("Email %s does not match LDAP domain %s." % (
                    username, settings.LDAP_APPEND_DOMAIN))
            return email_to_username(username)
        return username

    def ldap_to_django_username(self, username: str) -> str:
        """Inverse of django_to_ldap_username: re-append
        LDAP_APPEND_DOMAIN when one is configured."""
        if settings.LDAP_APPEND_DOMAIN:
            return "@".join((username, settings.LDAP_APPEND_DOMAIN))
        return username

    def sync_avatar_from_ldap(self, user: UserProfile, ldap_user: _LDAPUser) -> None:
        """If an 'avatar' attribute is configured in
        AUTH_LDAP_USER_ATTR_MAP, upload its binary value as the user's
        Zulip avatar."""
        if 'avatar' in settings.AUTH_LDAP_USER_ATTR_MAP:
            # We do local imports here to avoid import loops
            from zerver.lib.upload import upload_avatar_image
            from zerver.lib.actions import do_change_avatar_fields
            from io import BytesIO

            avatar_attr_name = settings.AUTH_LDAP_USER_ATTR_MAP['avatar']
            if avatar_attr_name not in ldap_user.attrs: # nocoverage
                # If this specific user doesn't have e.g. a
                # thumbnailPhoto set in LDAP, just skip that user.
                return
            upload_avatar_image(BytesIO(ldap_user.attrs[avatar_attr_name][0]), user, user)
            do_change_avatar_fields(user, UserProfile.AVATAR_FROM_USER)

    def is_account_control_disabled_user(self, ldap_user: _LDAPUser) -> bool: # nocoverage
        """Whether the LDAP entry's userAccountControl attribute has the
        'disabled' bit (LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK) set."""
        account_control_value = ldap_user.attrs[settings.AUTH_LDAP_USER_ATTR_MAP['userAccountControl']][0]
        ldap_disabled = bool(int(account_control_value) & LDAP_USER_ACCOUNT_CONTROL_DISABLED_MASK)
        return ldap_disabled

    def get_or_build_user(self, username: str,
                          ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]: # nocoverage
        """Extends django-auth-ldap's get_or_build_user to also sync the
        avatar and mirror LDAP's disabled/enabled account state into
        Zulip deactivation/reactivation."""
        (user, built) = super().get_or_build_user(username, ldap_user)
        self.sync_avatar_from_ldap(user, ldap_user)
        if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP:
            user_disabled_in_ldap = self.is_account_control_disabled_user(ldap_user)
            if user_disabled_in_ldap and user.is_active:
                logging.info("Deactivating user %s because they are disabled in LDAP." %
                             (user.email,))
                do_deactivate_user(user)
                return (user, built)
            if not user_disabled_in_ldap and not user.is_active:
                logging.info("Reactivating user %s because they are not disabled in LDAP." %
                             (user.email,))
                do_reactivate_user(user)
        return (user, built)
|
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase):
    """The LDAP backend that actually authenticates users."""

    REALM_IS_NONE_ERROR = 1

    def authenticate(self, username: str, password: str, realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        """Translate the Zulip email into an LDAP username and delegate
        to django-auth-ldap's authenticate()."""
        if realm is None:
            return None
        # Stash the realm for get_or_build_user(), which django-auth-ldap
        # calls without passing it through.
        self._realm = realm
        if not ldap_auth_enabled(realm):
            return None

        try:
            username = self.django_to_ldap_username(username)
        except ZulipLDAPExceptionOutsideDomain:
            # Email outside LDAP_APPEND_DOMAIN: let another backend
            # (e.g. EmailAuthBackend) handle this login instead.
            if return_data is not None:
                return_data['outside_ldap_domain'] = True
            return None

        return ZulipLDAPAuthBackendBase.authenticate(self,
                                                     request=None,
                                                     username=username,
                                                     password=password)

    def get_or_build_user(self, username: str, ldap_user: _LDAPUser) -> Tuple[UserProfile, bool]:
        """Called by django-auth-ldap after the LDAP bind succeeds;
        returns (user_profile, created).  Raises ZulipLDAPException (a
        flavor of AuthenticationFailed) on any account-level problem."""
        return_data = {}  # type: Dict[str, Any]

        if settings.LDAP_EMAIL_ATTR is not None:
            # Get email from ldap attributes.
            if settings.LDAP_EMAIL_ATTR not in ldap_user.attrs:
                return_data["ldap_missing_attribute"] = settings.LDAP_EMAIL_ATTR
                raise ZulipLDAPException("LDAP user doesn't have the needed %s attribute" % (
                    settings.LDAP_EMAIL_ATTR,))

            username = ldap_user.attrs[settings.LDAP_EMAIL_ATTR][0]

        if 'userAccountControl' in settings.AUTH_LDAP_USER_ATTR_MAP: # nocoverage
            ldap_disabled = self.is_account_control_disabled_user(ldap_user)
            if ldap_disabled:
                # Treat disabled users as deactivated in Zulip.
                return_data["inactive_user"] = True
                raise ZulipLDAPException("User has been deactivated")

        user_profile = common_get_active_user(username, self._realm, return_data)
        if user_profile is not None:
            # An existing user, successfully authed; return it.
            return user_profile, False

        if return_data.get("inactive_realm"):
            # This happens if there is a user account in a deactivated realm
            raise ZulipLDAPException("Realm has been deactivated")
        if return_data.get("inactive_user"):
            raise ZulipLDAPException("User has been deactivated")
        if return_data.get("invalid_subdomain"):
            # TODO: Implement something in the caller for this to
            # provide a nice user-facing error message for this
            # situation (right now it just acts like any other auth
            # failure).
            raise ZulipLDAPException("Wrong subdomain")
        if self._realm.deactivated:
            # This happens if no account exists, but the realm is
            # deactivated, so we shouldn't create a new user account
            raise ZulipLDAPException("Realm has been deactivated")

        # We have valid LDAP credentials; time to create an account.
        full_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["full_name"]
        short_name = full_name = ldap_user.attrs[full_name_attr][0]
        try:
            full_name = check_full_name(full_name)
        except JsonableError as e:
            raise ZulipLDAPException(e.msg)
        if "short_name" in settings.AUTH_LDAP_USER_ATTR_MAP:
            short_name_attr = settings.AUTH_LDAP_USER_ATTR_MAP["short_name"]
            short_name = ldap_user.attrs[short_name_attr][0]

        user_profile = do_create_user(username, None, self._realm, full_name, short_name)
        self.sync_avatar_from_ldap(user_profile, ldap_user)

        return user_profile, True
|
2013-11-21 01:30:20 +01:00
|
|
|
|
2015-10-13 23:08:05 +02:00
|
|
|
# Just like ZulipLDAPAuthBackend, but doesn't let you log in.
|
|
|
|
class ZulipLDAPUserPopulator(ZulipLDAPAuthBackendBase):
    """Variant of the LDAP backend used only to sync user data; its
    authenticate() always fails, so nobody can log in through it."""

    def authenticate(self, username: str, password: str, realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> None:
        return None
|
2015-08-19 02:58:20 +02:00
|
|
|
|
|
|
|
class DevAuthBackend(ZulipAuthMixin):
    """Allows logging in as any user without a password; used for
    convenience when developing Zulip (only active when the 'Dev'
    backend is enabled)."""

    def authenticate(self, dev_auth_username: Optional[str]=None, realm: Optional[Realm]=None,
                     return_data: Optional[Dict[str, Any]]=None) -> Optional[UserProfile]:
        assert dev_auth_username is not None
        if realm is None or not dev_auth_enabled(realm):
            return None
        return common_get_active_user(dev_auth_username, realm, return_data=return_data)
|
2016-07-20 13:33:27 +02:00
|
|
|
|
2018-05-31 00:12:39 +02:00
|
|
|
def social_associate_user_helper(backend: BaseAuth, return_data: Dict[str, Any],
                                 *args: Any, **kwargs: Any) -> Optional[UserProfile]:
    """Responsible for doing the Zulip-account lookup and validation parts
    of the Zulip Social auth pipeline (similar to the authenticate()
    methods in most other auth backends in this file).

    Returns the matching UserProfile (or None), with failure details
    recorded in `return_data`.
    """
    # The subdomain the login started on was stashed in the session by
    # the earlier pipeline stages.
    subdomain = backend.strategy.session_get('subdomain')
    realm = get_realm(subdomain)
    if realm is None:
        return_data["invalid_realm"] = True
        return None
    return_data["realm_id"] = realm.id

    if not auth_enabled_helper([backend.auth_backend_name], realm):
        return_data["auth_backend_disabled"] = True
        return None

    if 'auth_failed_reason' in kwargs.get('response', {}):
        # The provider itself reported a failure; pass the reason along.
        return_data["social_auth_failed_reason"] = kwargs['response']["auth_failed_reason"]
        return None
    elif hasattr(backend, 'get_verified_emails'):
        # Some social backends, like GitHubAuthBackend, don't guarantee that
        # the `details` data is validated.
        verified_emails = backend.get_verified_emails(*args, **kwargs)
        if len(verified_emails) == 0:
            # TODO: Provide a nice error message screen to the user
            # for this case, rather than just logging a warning.
            logging.warning("Social auth (%s) failed because user has no verified emails" %
                            (backend.auth_backend_name,))
            return_data["email_not_verified"] = True
            return None
        # TODO: ideally, we'd prompt the user for which email they
        # want to use with another pipeline stage here.
        validated_email = verified_emails[0]
    else:  # nocoverage
        # This code path isn't used by GitHubAuthBackend
        validated_email = kwargs["details"].get("email")

    if not validated_email:  # nocoverage
        # This code path isn't used with GitHubAuthBackend, but may be relevant for other
        # social auth backends.
        return_data['invalid_email'] = True
        return None
    try:
        validate_email(validated_email)
    except ValidationError:
        return_data['invalid_email'] = True
        return None

    return_data["valid_attestation"] = True
    return_data['validated_email'] = validated_email
    user_profile = common_get_active_user(validated_email, realm, return_data)

    if 'fullname' in kwargs["details"]:
        return_data["full_name"] = kwargs["details"]["fullname"]
    else:
        # If we add support for any of the social auth backends that
        # don't provide this feature, we'll need to add code here.
        raise AssertionError("Social auth backend doesn't provide fullname")

    return user_profile
|
2017-03-09 09:45:21 +01:00
|
|
|
|
2018-05-31 00:12:39 +02:00
|
|
|
def social_auth_associate_user(
        backend: BaseAuth,
        *args: Any,
        **kwargs: Any) -> Dict[str, Any]:
    """Pipeline stage wrapping social_associate_user_helper; packages the
    looked-up user (possibly None) together with the diagnostic
    `return_data` for the following pipeline stages."""
    return_data = {}  # type: Dict[str, Any]
    user_profile = social_associate_user_helper(backend, return_data, *args, **kwargs)
    return {'user_profile': user_profile,
            'return_data': return_data}
|
|
|
|
|
|
|
|
def social_auth_finish(backend: Any,
                       details: Dict[str, Any],
                       response: HttpResponse,
                       *args: Any,
                       **kwargs: Any) -> Optional[UserProfile]:
    """Final stage of the social auth pipeline.

    Inspects the diagnostic flags accumulated in ``return_data`` by the
    earlier pipeline stages and decides what to do with the login
    attempt.  Returning ``None`` sends the browser back to the login
    page; otherwise we hand off to the login/registration flow.
    """
    # Imported here (not at module level) — presumably to avoid an
    # import cycle with zerver.views.auth; confirm before moving.
    from zerver.views.auth import (login_or_register_remote_user,
                                   redirect_and_log_into_subdomain)

    # These two keys are set by the social_auth_associate_user stage.
    user_profile = kwargs['user_profile']
    return_data = kwargs['return_data']

    # Unpack the individual failure flags for readability below.
    no_verified_email = return_data.get("email_not_verified")
    auth_backend_disabled = return_data.get('auth_backend_disabled')
    inactive_user = return_data.get('inactive_user')
    inactive_realm = return_data.get('inactive_realm')
    invalid_realm = return_data.get('invalid_realm')
    invalid_subdomain = return_data.get('invalid_subdomain')
    invalid_email = return_data.get('invalid_email')
    auth_failed_reason = return_data.get("social_auth_failed_reason")

    if invalid_realm:
        # No realm matches this subdomain; send the user to pick one.
        from zerver.views.auth import redirect_to_subdomain_login_url
        return redirect_to_subdomain_login_url()
    if auth_backend_disabled or inactive_user or inactive_realm or no_verified_email:
        # Redirect to login page. We can't send to registration
        # workflow with these errors. We will redirect to login page.
        return None

    if invalid_email:
        # In case of invalid email, we will end up on registration page.
        # This seems better than redirecting to login page.
        logging.warning(
            "{} got invalid email argument.".format(backend.auth_backend_name)
        )
        return None

    if auth_failed_reason:
        logging.info(auth_failed_reason)
        return None

    # Structurally, all the cases where we don't have an authenticated
    # email for the user should be handled above; this assertion helps
    # prevent any violations of that contract from resulting in a user
    # being incorrectly authenticated.
    assert return_data.get('valid_attestation') is True

    strategy = backend.strategy  # type: ignore # This comes from Python Social Auth.
    email_address = return_data['validated_email']
    full_name = return_data['full_name']
    is_signup = strategy.session_get('is_signup') == '1'
    redirect_to = strategy.session_get('next')
    realm = Realm.objects.get(id=return_data["realm_id"])

    # mobile_flow_otp is present when the flow was initiated by a
    # mobile app; presumably consumed by login_or_register_remote_user
    # to return credentials to the app — confirm there.
    mobile_flow_otp = strategy.session_get('mobile_flow_otp')
    if mobile_flow_otp is not None:
        return login_or_register_remote_user(strategy.request, email_address,
                                             user_profile, full_name,
                                             invalid_subdomain=bool(invalid_subdomain),
                                             mobile_flow_otp=mobile_flow_otp,
                                             is_signup=is_signup,
                                             redirect_to=redirect_to)
    return redirect_and_log_into_subdomain(realm, full_name, email_address,
                                           is_signup=is_signup,
                                           redirect_to=redirect_to)
|
|
|
|
|
2018-07-03 18:47:20 +02:00
|
|
|
class SocialAuthMixin(ZulipAuthMixin):
    # Overridden by each concrete subclass; used in logging and in the
    # auto-registration loop at the bottom of this module.
    auth_backend_name = "undeclared"

    def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
        """Thin wrapper around python-social-auth's `auth_complete`.

        Its sole purpose is to convert exceptions raised by the social
        auth machinery for what are really user errors into a `None`
        return value, which sends the browser back to the login page
        rather than serving a 500.
        """
        try:
            # The real work happens in the python-social-auth backend
            # (e.g. social_core.backends.oauth.BaseOAuth2).
            return super().auth_complete(*args, **kwargs)  # type: ignore # monkey-patching
        except AuthFailed as err:
            # Expected failure mode (e.g. the user reloaded in the
            # middle of the flow): log quietly and bounce to login.
            logging.info(str(err))
            return None
        except SocialAuthBaseException as err:
            # Any other python-social-auth exception is unusual enough
            # to merit a warning before redirecting to login.
            logging.warning(str(err))
            return None
|
|
|
|
|
|
|
|
class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2):
    auth_backend_name = "GitHub"

    def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]:
        """Return the user's verified GitHub email addresses.

        The primary address, when verified, is placed first, because
        social_associate_user_helper treats the first element of this
        list as the address to use.
        """
        access_token = kwargs["response"]["access_token"]
        try:
            email_data = self._user_data(access_token, '/emails')
        except (HTTPError, ValueError, TypeError):  # nocoverage
            # We don't really need an explicit test for this code
            # path, since the outcome will be the same as any other
            # case without any verified emails
            email_data = []

        addresses = []  # type: List[str]
        for entry in email_data:
            if not entry.get("verified"):
                continue
            if entry.get("primary"):
                # Keep the primary address at the front of the list.
                addresses.insert(0, entry["email"])
            else:
                addresses.append(entry["email"])

        return addresses

    def user_data(self, access_token: str, *args: Any, **kwargs: Any) -> Dict[str, str]:
        """Fold the three upstream GitHub OAuth2 backends (plain, team,
        and organization) into this single Zulip backend, dispatching
        on the server's settings."""
        team_id = settings.SOCIAL_AUTH_GITHUB_TEAM_ID
        org_name = settings.SOCIAL_AUTH_GITHUB_ORG_NAME

        if team_id is not None:
            # Membership in a specific team is required.
            team_backend = GithubTeamOAuth2(self.strategy, self.redirect_uri)
            try:
                return team_backend.user_data(access_token, *args, **kwargs)
            except AuthFailed:
                return dict(auth_failed_reason="GitHub user is not member of required team")

        if org_name is not None:
            # Membership in a specific organization is required.
            org_backend = GithubOrganizationOAuth2(self.strategy, self.redirect_uri)
            try:
                return org_backend.user_data(access_token, *args, **kwargs)
            except AuthFailed:
                return dict(auth_failed_reason="GitHub user is not member of required organization")

        if team_id is None and org_name is None:
            # No restriction configured; use the stock GitHub backend.
            # I believe this can't raise AuthFailed, so we don't try to catch it here.
            return super().user_data(access_token, *args, **kwargs)

        raise AssertionError("Invalid configuration")
|
2016-11-06 23:44:45 +01:00
|
|
|
|
2018-10-05 14:32:02 +02:00
|
|
|
class AzureADAuthBackend(SocialAuthMixin, AzureADOAuth2):
    """Social auth backend for Azure Active Directory OAuth2."""
    # Name used in logging and as the key in AUTH_BACKEND_NAME_MAP,
    # which is populated automatically via SocialAuthMixin.__subclasses__().
    auth_backend_name = "AzureAD"
|
|
|
|
|
2016-11-06 23:44:45 +01:00
|
|
|
# Map from a backend's human-readable name to its class, used for
# looking backends up by name.
AUTH_BACKEND_NAME_MAP = {
    'Dev': DevAuthBackend,
    'Email': EmailAuthBackend,
    'Google': GoogleMobileOauth2Backend,
    'LDAP': ZulipLDAPAuthBackend,
    'RemoteUser': ZulipRemoteUserBackend,
}  # type: Dict[str, Any]

# Names of the backends that implement an OAuth flow.
OAUTH_BACKEND_NAMES = ["Google"]  # type: List[str]

# Automatically add all of our social auth backends to relevant data structures.
for social_auth_subclass in SocialAuthMixin.__subclasses__():
    AUTH_BACKEND_NAME_MAP[social_auth_subclass.auth_backend_name] = social_auth_subclass
    if issubclass(social_auth_subclass, BaseOAuth2):
        OAUTH_BACKEND_NAMES.append(social_auth_subclass.auth_backend_name)
|