import base64
import copy
import datetime
import json
import os
import re
import secrets
import time
import urllib
from abc import ABC, abstractmethod
from contextlib import contextmanager
from email.headerregistry import Address
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Sequence,
    Tuple,
    Type,
)
from unittest import mock
from urllib.parse import urlencode

import jwt
import ldap
import orjson
import requests
import responses
from bs4 import BeautifulSoup
from bs4.element import Tag
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from django.conf import settings
from django.contrib.auth import authenticate
from django.core import mail
from django.http import HttpRequest
from django.test import override_settings
from django.urls import reverse
from django.utils.timezone import now as timezone_now
from django_auth_ldap.backend import LDAPSearch, _LDAPUser
from jwt.exceptions import PyJWTError
from onelogin.saml2.auth import OneLogin_Saml2_Auth
from onelogin.saml2.logout_request import OneLogin_Saml2_Logout_Request
from onelogin.saml2.response import OneLogin_Saml2_Response
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from social_core.exceptions import AuthFailed, AuthStateForbidden
from social_django.storage import BaseDjangoStorage
from social_django.strategy import DjangoStrategy

from confirmation.models import Confirmation, create_confirmation_link
from zerver.actions.create_realm import do_create_realm
from zerver.actions.create_user import do_create_user, do_reactivate_user
from zerver.actions.invites import do_invite_users
from zerver.actions.realm_settings import (
    do_deactivate_realm,
    do_reactivate_realm,
    do_set_realm_property,
)
from zerver.actions.user_settings import do_change_password
from zerver.actions.users import change_user_is_active, do_deactivate_user
from zerver.lib.avatar import avatar_url
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.dev_ldap_directory import generate_dev_ldap_dir
from zerver.lib.email_validation import (
    get_existing_user_errors,
    get_realm_email_validator,
    validate_email_is_valid,
)
from zerver.lib.exceptions import JsonableError, RateLimitedError
from zerver.lib.initial_password import initial_password
from zerver.lib.mobile_auth_otp import otp_decrypt_api_key
from zerver.lib.rate_limiter import add_ratelimit_rule, remove_ratelimit_rule
from zerver.lib.storage import static_path
from zerver.lib.streams import ensure_stream
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    HostRequestMock,
    create_s3_buckets,
    load_subdomain_token,
    read_test_image_file,
    use_s3_backend,
)
from zerver.lib.types import Validator
from zerver.lib.upload import DEFAULT_AVATAR_SIZE, MEDIUM_AVATAR_SIZE, resize_avatar
from zerver.lib.users import get_all_api_keys
from zerver.lib.utils import assert_is_not_none
from zerver.lib.validator import (
    check_bool,
    check_dict_only,
    check_int,
    check_list,
    check_none_or,
    check_string,
    validate_login_email,
)
from zerver.models import (
    CustomProfileField,
    CustomProfileFieldValue,
    MultiuseInvite,
    PasswordTooWeakError,
    PreregistrationUser,
    Realm,
    RealmDomain,
    Stream,
    UserProfile,
    clear_supported_auth_backends_cache,
    get_realm,
    get_user_by_delivery_email,
)
from zerver.signals import JUST_CREATED_THRESHOLD
from zerver.views.auth import log_into_subdomain, maybe_send_to_registration
from zproject.backends import (
    AUTH_BACKEND_NAME_MAP,
    AppleAuthBackend,
    AzureADAuthBackend,
    DevAuthBackend,
    EmailAuthBackend,
    ExternalAuthDataDict,
    ExternalAuthMethod,
    ExternalAuthResult,
    GenericOpenIdConnectBackend,
    GitHubAuthBackend,
    GitLabAuthBackend,
    GoogleAuthBackend,
    NoMatchingLDAPUserError,
    OutsideLDAPDomainError,
    PopulateUserLDAPError,
    RateLimitedAuthenticationByUsername,
    SAMLAuthBackend,
    SAMLDocument,
    SocialAuthMixin,
    ZulipAuthMixin,
    ZulipDummyBackend,
    ZulipLDAPAuthBackend,
    ZulipLDAPConfigurationError,
    ZulipLDAPError,
    ZulipLDAPUser,
    ZulipLDAPUserPopulator,
    ZulipRemoteUserBackend,
    apple_auth_enabled,
    check_password_strength,
    dev_auth_enabled,
    email_belongs_to_ldap,
    get_external_method_dicts,
    github_auth_enabled,
    gitlab_auth_enabled,
    google_auth_enabled,
    password_auth_enabled,
    query_ldap,
    require_email_format_usernames,
    saml_auth_enabled,
    sync_user_from_ldap,
)
from zproject.config import get_from_file_if_exists

if TYPE_CHECKING:
    from django.test.client import _MonkeyPatchedWSGIResponse as TestHttpResponse

APPLE_ID_TOKEN_GENERATION_KEY = get_from_file_if_exists(
    "zerver/tests/fixtures/apple/token_gen_private_key"
)
EXAMPLE_JWK = get_from_file_if_exists("zerver/tests/fixtures/example_jwk")


class AuthBackendTest(ZulipTestCase):
    def get_email(self) -> str:
        return self.example_email("hamlet")

    def verify_backend(
        self,
        backend: Any,
        *,
        good_kwargs: Dict[str, Any],
        bad_kwargs: Optional[Dict[str, Any]] = None,
    ) -> None:
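        """Helper for testing a single authentication backend.

        Verifies that authentication succeeds with good_kwargs and fails with
        bad_kwargs, and that deactivating the user, deactivating the realm, or
        disabling the backend (on the server or for the realm) makes
        authentication fail.
        """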
        clear_supported_auth_backends_cache()
        user_profile = self.example_user("hamlet")

        # If bad_kwargs was specified, verify auth fails in that case
        if bad_kwargs is not None:
            self.assertIsNone(backend.authenticate(**bad_kwargs))

        # Verify auth works
        result = backend.authenticate(**good_kwargs)
        self.assertEqual(user_profile, result)

        # Verify auth fails with a deactivated user
        do_deactivate_user(user_profile, acting_user=None)
        result = backend.authenticate(**good_kwargs)
        if isinstance(backend, SocialAuthMixin):
            # Returns a redirect to login page with an error.
            self.assertEqual(result.status_code, 302)
            self.assertEqual(
                result["Location"],
                f"{user_profile.realm.uri}/login/?"
                + urlencode({"is_deactivated": user_profile.delivery_email}),
            )
        else:
            # Just takes you back to the login page treating as
            # invalid auth; this is correct because the form will
            # provide the appropriate validation error for deactivated
            # account.
            self.assertIsNone(result)

        # Reactivate the user and verify auth works again
        do_reactivate_user(user_profile, acting_user=None)
        result = backend.authenticate(**good_kwargs)
        self.assertEqual(user_profile, result)

        # Verify auth fails with a deactivated realm
        do_deactivate_realm(user_profile.realm, acting_user=None)
        result = backend.authenticate(**good_kwargs)
        if isinstance(backend, SocialAuthMixin):
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], user_profile.realm.uri + "/login/")
        else:
            self.assertIsNone(result)

        # Verify auth works again after reactivating the realm
        do_reactivate_realm(user_profile.realm)
        result = backend.authenticate(**good_kwargs)
        self.assertEqual(user_profile, result)

        # ZulipDummyBackend isn't a real backend so the remainder
        # doesn't make sense for it
        if isinstance(backend, ZulipDummyBackend):
            return

        # Verify auth fails if the auth backend is disabled on server
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipDummyBackend",)):
            clear_supported_auth_backends_cache()
            result = backend.authenticate(**good_kwargs)
            if isinstance(backend, SocialAuthMixin):
                self.assertEqual(result.status_code, 302)
                self.assertEqual(result["Location"], user_profile.realm.uri + "/login/")
            else:
                self.assertIsNone(result)
        clear_supported_auth_backends_cache()

        # Verify auth fails if the auth backend is disabled for the realm
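        # realm.authentication_methods is a bitfield with one flag per backend
        # name; clearing a backend's bit disables that backend for this realm only.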
        for backend_name in AUTH_BACKEND_NAME_MAP:
            if isinstance(backend, AUTH_BACKEND_NAME_MAP[backend_name]):
                break

        index = getattr(user_profile.realm.authentication_methods, backend_name).number
        user_profile.realm.authentication_methods.set_bit(index, False)
        user_profile.realm.save()
        if "realm" in good_kwargs:
            # Because this test is a little unfaithful to the ordering
            # (i.e. we fetched the realm object before this function
            # was called, when in fact it should be fetched after we
            # changed the allowed authentication methods), we need to
            # propagate the changes we just made to the actual realm
            # object in good_kwargs.
            good_kwargs["realm"] = user_profile.realm

        result = backend.authenticate(**good_kwargs)
        if isinstance(backend, SocialAuthMixin):
            self.assertEqual(result.status_code, 302)
            self.assertEqual(result["Location"], user_profile.realm.uri + "/login/")
        else:
            self.assertIsNone(result)
        user_profile.realm.authentication_methods.set_bit(index, True)
        user_profile.realm.save()

    def test_dummy_backend(self) -> None:
        realm = get_realm("zulip")
        username = self.get_email()
        self.verify_backend(
            ZulipDummyBackend(),
            good_kwargs=dict(username=username, realm=realm, use_dummy_backend=True),
            bad_kwargs=dict(username=username, realm=realm, use_dummy_backend=False),
        )

    def setup_subdomain(self, user_profile: UserProfile) -> None:
        realm = user_profile.realm
        realm.string_id = "zulip"
        realm.save()

    def test_email_auth_backend(self) -> None:
        username = self.get_email()
        user_profile = self.example_user("hamlet")
        password = "testpassword"
        user_profile.set_password(password)
        user_profile.save()

        with mock.patch("zproject.backends.email_auth_enabled", return_value=False), mock.patch(
            "zproject.backends.password_auth_enabled", return_value=True
        ):
            return_data: Dict[str, bool] = {}
            user = EmailAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=user_profile.delivery_email,
                realm=get_realm("zulip"),
                password=password,
                return_data=return_data,
            )
            self.assertEqual(user, None)
            self.assertTrue(return_data["email_auth_disabled"])

        self.verify_backend(
            EmailAuthBackend(),
            good_kwargs=dict(
                request=mock.MagicMock(),
                password=password,
                username=username,
                realm=get_realm("zulip"),
                return_data={},
            ),
            bad_kwargs=dict(
                request=mock.MagicMock(),
                password=password,
                username=username,
                realm=get_realm("zephyr"),
                return_data={},
            ),
        )

    def test_email_auth_backend_empty_password(self) -> None:
        user_profile = self.example_user("hamlet")
        password = "testpassword"
        user_profile.set_password(password)
        user_profile.save()

        # First, verify authentication works with a nonempty
        # password so we know we've set up the test correctly.
        self.assertIsNotNone(
            EmailAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=password,
                realm=get_realm("zulip"),
            )
        )

        # Now do the same test with the empty string as the password.
        password = ""
        with self.assertRaises(PasswordTooWeakError):
            # UserProfile.set_password protects against setting an empty password.
            user_profile.set_password(password)
        # We do want to force an empty password for this test, so we bypass the protection
        # by using Django's version of this method.
        super(UserProfile, user_profile).set_password(password)
        user_profile.save()
        self.assertIsNone(
            EmailAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=password,
                realm=get_realm("zulip"),
            )
        )

    def test_email_auth_backend_disabled_password_auth(self) -> None:
        user_profile = self.example_user("hamlet")
        password = "testpassword"
        user_profile.set_password(password)
        user_profile.save()
        # Verify if a realm has password auth disabled, correct password is rejected
        with mock.patch("zproject.backends.password_auth_enabled", return_value=False):
            self.assertIsNone(
                EmailAuthBackend().authenticate(
                    request=mock.MagicMock(),
                    username=self.example_email("hamlet"),
                    password=password,
                    realm=get_realm("zulip"),
                )
            )

    def test_email_auth_backend_password_hasher_change(self) -> None:
        user_profile = self.example_user("hamlet")
        password = "a_password_of_22_chars"

        with self.settings(PASSWORD_HASHERS=("django.contrib.auth.hashers.SHA1PasswordHasher",)):
            user_profile.set_password(password)
            user_profile.save()

        with self.settings(
            PASSWORD_HASHERS=(
                "django.contrib.auth.hashers.MD5PasswordHasher",
                "django.contrib.auth.hashers.SHA1PasswordHasher",
            ),
            PASSWORD_MIN_LENGTH=30,
        ), self.assertLogs("zulip.auth.email", level="INFO"), self.assertRaises(JsonableError) as m:
            EmailAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=password,
                realm=get_realm("zulip"),
            )
        self.assertEqual(str(m.exception), "You need to reset your password.")

    def test_login_preview(self) -> None:
        # Test preview=true displays organization login page
        # instead of redirecting to app
        self.login("iago")
        realm = get_realm("zulip")
        result = self.client_get("/login/", {"preview": "true"})
        self.assertEqual(result.status_code, 200)
        self.assert_in_response(realm.description, result)
        assert realm.name is not None
        self.assert_in_response(realm.name, result)
        self.assert_in_response("Log in to Zulip", result)

        data = dict(
            description=orjson.dumps("New realm description").decode(),
            name=orjson.dumps("New Zulip").decode(),
        )
        result = self.client_patch("/json/realm", data)
        self.assert_json_success(result)

        result = self.client_get("/login/", {"preview": "true"})
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("New realm description", result)
        self.assert_in_response("New Zulip", result)

        result = self.client_get("/login/")
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "http://zulip.testserver")

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipDummyBackend",))
    def test_no_backend_enabled(self) -> None:
        result = self.client_get("/login/")
        self.assert_in_success_response(["No authentication backends are enabled"], result)

        result = self.client_get("/register/")
        self.assert_in_success_response(["No authentication backends are enabled"], result)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.GoogleAuthBackend",))
    def test_any_backend_enabled(self) -> None:
        # Testing to avoid false error messages: with a backend enabled, the
        # "No authentication backends are enabled" error should not appear.
        result = self.client_get("/login/")
        self.assert_not_in_success_response(["No authentication backends are enabled"], result)

        result = self.client_get("/register/")
        self.assert_not_in_success_response(["No authentication backends are enabled"], result)

    @override_settings(
        AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",), LDAP_EMAIL_ATTR="mail"
    )
    def test_ldap_backend(self) -> None:
        self.init_default_ldap_database()
        user_profile = self.example_user("hamlet")
        email = user_profile.delivery_email
        password = self.ldap_password("hamlet")
        self.setup_subdomain(user_profile)

        username = self.get_email()
        backend = ZulipLDAPAuthBackend()

        # Test LDAP auth fails when LDAP server rejects password
        self.assertIsNone(
            backend.authenticate(
                request=mock.MagicMock(),
                username=email,
                password="wrongpass",
                realm=get_realm("zulip"),
            )
        )

        self.verify_backend(
            backend,
            bad_kwargs=dict(
                request=mock.MagicMock(),
                username=username,
                password=password,
                realm=get_realm("zephyr"),
            ),
            good_kwargs=dict(
                request=mock.MagicMock(),
                username=username,
                password=password,
                realm=get_realm("zulip"),
            ),
        )

    def test_devauth_backend(self) -> None:
        self.verify_backend(
            DevAuthBackend(),
            good_kwargs=dict(dev_auth_username=self.get_email(), realm=get_realm("zulip")),
            bad_kwargs=dict(dev_auth_username=self.get_email(), realm=get_realm("zephyr")),
        )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",))
    def test_remote_user_backend(self) -> None:
        username = self.get_email()
        self.verify_backend(
            ZulipRemoteUserBackend(),
            good_kwargs=dict(remote_user=username, realm=get_realm("zulip")),
            bad_kwargs=dict(remote_user=username, realm=get_realm("zephyr")),
        )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",))
    def test_remote_user_backend_invalid_realm(self) -> None:
        username = self.get_email()
        self.verify_backend(
            ZulipRemoteUserBackend(),
            good_kwargs=dict(remote_user=username, realm=get_realm("zulip")),
            bad_kwargs=dict(remote_user=username, realm=get_realm("zephyr")),
        )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",))
    @override_settings(SSO_APPEND_DOMAIN="zulip.com")
    def test_remote_user_backend_sso_append_domain(self) -> None:
        username = Address(addr_spec=self.get_email()).username
        self.verify_backend(
            ZulipRemoteUserBackend(),
            good_kwargs=dict(remote_user=username, realm=get_realm("zulip")),
            bad_kwargs=dict(remote_user=username, realm=get_realm("zephyr")),
        )

    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "zproject.backends.GitHubAuthBackend",
            "zproject.backends.GoogleAuthBackend",
        )
    )
    def test_social_auth_backends(self) -> None:
        user = self.example_user("hamlet")
        token_data_dict = {
            "access_token": "foobar",
            "token_type": "bearer",
        }
        github_email_data = [
            dict(email=user.delivery_email, verified=True, primary=True),
            dict(email="nonprimary@zulip.com", verified=True),
            dict(email="ignored@example.com", verified=False),
        ]
        google_email_data = dict(
            email=user.delivery_email, name=user.full_name, email_verified=True
        )
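        # For each backend under test, list the HTTP endpoints that need to be
        # mocked (via the responses library) and the backend class itself.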
        backends_to_test: Dict[str, Any] = {
            "google": {
                "urls": [
                    # The limited process that we test here doesn't require mocking any urls.
                ],
                "backend": GoogleAuthBackend,
            },
            "github": {
                "urls": [
                    {
                        "url": "https://api.github.com/user/emails",
                        "method": responses.GET,
                        "status": 200,
                        "body": json.dumps(github_email_data),
                    },
                ],
                "backend": GitHubAuthBackend,
            },
        }

        def patched_authenticate(
            request: Optional["TestHttpResponse"] = None,
            **kwargs: Any,
        ) -> Any:
            # This is how we pass the subdomain to the authentication
            # backend in production code, so we need to do this setup
            # here.
            if "subdomain" in kwargs:
                backend.strategy.session_set("subdomain", kwargs["subdomain"])
                del kwargs["subdomain"]

            # Because we're not simulating the full python-social-auth
            # pipeline here, we need to provide the user's choice of
            # which email to select in the partial phase of the
            # pipeline when we display an email picker for the GitHub
            # authentication backend. We do that here.
            def return_email() -> Dict[str, str]:
                return {"email": user.delivery_email}

            backend.strategy.request_data = return_email

            if request is None:
                request = mock.MagicMock()
                request.META = dict(REMOTE_ADDR="127.0.0.1")
            backend.strategy.request = request

            result = orig_authenticate(backend, request, **kwargs)
            return result

        def patched_get_verified_emails(*args: Any, **kwargs: Any) -> Any:
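            # Bypass the Google backend's real verified-email lookup and return
            # the fixture email directly.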
            return google_email_data["email"]

        for backend_name in backends_to_test:
            with responses.RequestsMock(assert_all_requests_are_fired=True) as requests_mock:
                urls: List[Dict[str, Any]] = backends_to_test[backend_name]["urls"]
                for details in urls:
                    requests_mock.add(
                        details["method"],
                        details["url"],
                        status=details["status"],
                        body=details["body"],
                    )
                backend_class = backends_to_test[backend_name]["backend"]

                # We're creating a new class instance here, so the
                # monkey-patching of the instance that we're about to
                # do will be discarded at the end of this test.
                backend = backend_class()
                backend.strategy = DjangoStrategy(storage=BaseDjangoStorage())

                orig_authenticate = backend_class.authenticate
                backend.authenticate = patched_authenticate
                if backend_name == "google":
                    backend.get_verified_emails = patched_get_verified_emails

                good_kwargs = dict(
                    backend=backend,
                    strategy=backend.strategy,
                    storage=backend.strategy.storage,
                    response=token_data_dict,
                    subdomain="zulip",
                )
                bad_kwargs = dict(subdomain="acme")
                logger_name = f"zulip.auth.{backend.name}"

                with mock.patch(
                    "zerver.views.auth.redirect_and_log_into_subdomain", return_value=user
                ), self.assertLogs(logger_name, level="INFO") as info_log:
                    self.verify_backend(backend, good_kwargs=good_kwargs, bad_kwargs=bad_kwargs)
                    bad_kwargs["subdomain"] = "zephyr"
                    self.verify_backend(backend, good_kwargs=good_kwargs, bad_kwargs=bad_kwargs)
                # Verify logging for deactivated users
                self.assertEqual(
                    # Filter out noisy logs:
                    [
                        output
                        for output in info_log.output
                        if "Authentication attempt from 127.0.0.1" not in output
                    ],
                    [
                        f"INFO:{logger_name}:Failed login attempt for deactivated account: {user.id}@{user.realm.string_id}",
                        f"INFO:{logger_name}:Failed login attempt for deactivated account: {user.id}@{user.realm.string_id}",
                    ],
                )


class RateLimitAuthenticationTests(ZulipTestCase):
    @override_settings(RATE_LIMITING_AUTHENTICATE=True)
    def do_test_auth_rate_limiting(
        self,
        attempt_authentication_func: Callable[[HttpRequest, str, str], Optional[UserProfile]],
        username: str,
        correct_password: str,
        wrong_password: str,
        expected_user_profile: UserProfile,
    ) -> None:
        # We have to mock RateLimitedAuthenticationByUsername.key to avoid key collisions
        # if tests run in parallel.
        original_key_method = RateLimitedAuthenticationByUsername.key
        salt = secrets.token_hex(16)

        def _mock_key(self: RateLimitedAuthenticationByUsername) -> str:
            return f"{salt}:{original_key_method(self)}"

        def attempt_authentication(username: str, password: str) -> Optional[UserProfile]:
            request = HttpRequest()
            request.session = mock.MagicMock()
            return attempt_authentication_func(request, username, password)

        add_ratelimit_rule(10, 2, domain="authenticate_by_username")
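        # The rule added above allows at most 2 authentication attempts per
        # 10-second window in the "authenticate_by_username" rate-limiting domain.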
        with mock.patch.object(RateLimitedAuthenticationByUsername, "key", new=_mock_key):
            try:
                start_time = time.time()
                with mock.patch("time.time", return_value=start_time):
                    self.assertIsNone(attempt_authentication(username, wrong_password))
                    self.assertIsNone(attempt_authentication(username, wrong_password))
                    # 2 failed attempts is the limit, so the next ones should get blocked,
                    # even with the correct password.
                    with self.assertRaises(RateLimitedError):
                        attempt_authentication(username, correct_password)
                    with self.assertRaises(RateLimitedError):
                        attempt_authentication(username, wrong_password)

                # After enough time passes, more authentication attempts can be made:
                with mock.patch("time.time", return_value=start_time + 11.0):
                    self.assertIsNone(attempt_authentication(username, wrong_password))

                    # Correct password
                    self.assertEqual(
                        attempt_authentication(username, correct_password), expected_user_profile
                    )
                    # A correct login attempt should reset the rate limits for this user profile,
                    # so the next two attempts shouldn't get limited:
                    self.assertIsNone(attempt_authentication(username, wrong_password))
                    self.assertIsNone(attempt_authentication(username, wrong_password))
                    # But the third attempt goes over the limit:
                    with self.assertRaises(RateLimitedError):
                        attempt_authentication(username, wrong_password)

                    # Resetting the password also clears the rate-limit
                    do_change_password(expected_user_profile, correct_password)
                    self.assertIsNone(attempt_authentication(username, wrong_password))

            finally:
                # Clean up to avoid affecting other tests.
                RateLimitedAuthenticationByUsername(username).clear_history()
                remove_ratelimit_rule(10, 2, domain="authenticate_by_username")

    def test_email_auth_backend_user_based_rate_limiting(self) -> None:
        user_profile = self.example_user("hamlet")
        password = "testpassword"
        user_profile.set_password(password)
        user_profile.save()

        def attempt_authentication(
            request: HttpRequest, username: str, password: str
        ) -> Optional[UserProfile]:
            return EmailAuthBackend().authenticate(
                request=request,
                username=username,
                realm=get_realm("zulip"),
                password=password,
                return_data={},
            )

        self.do_test_auth_rate_limiting(
            attempt_authentication,
            user_profile.delivery_email,
            password,
            "wrong_password",
            user_profile,
        )

    @override_settings(
        AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",), LDAP_EMAIL_ATTR="mail"
    )
    def test_ldap_backend_user_based_rate_limiting(self) -> None:
        self.init_default_ldap_database()
        user_profile = self.example_user("hamlet")
        password = self.ldap_password("hamlet")

        def attempt_authentication(
            request: HttpRequest, username: str, password: str
        ) -> Optional[UserProfile]:
            return ZulipLDAPAuthBackend().authenticate(
                request=request,
                username=username,
                realm=get_realm("zulip"),
                password=password,
                return_data={},
            )

        self.do_test_auth_rate_limiting(
            attempt_authentication,
            user_profile.delivery_email,
            password,
            "wrong_password",
            user_profile,
        )

    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "zproject.backends.EmailAuthBackend",
            "zproject.backends.ZulipLDAPAuthBackend",
        ),
        LDAP_EMAIL_ATTR="mail",
    )
    def test_email_and_ldap_backends_user_based_rate_limiting(self) -> None:
        self.init_default_ldap_database()
        user_profile = self.example_user("hamlet")
        ldap_password = self.ldap_password("hamlet")

        email_password = "email_password"
        user_profile.set_password(email_password)
        user_profile.save()

        def attempt_authentication(
            request: HttpRequest, username: str, password: str
        ) -> Optional[UserProfile]:
            user = authenticate(
                request=request,
                username=username,
                realm=get_realm("zulip"),
                password=password,
                return_data={},
            )
            if user is not None:
                assert isinstance(user, UserProfile)
            return user

        self.do_test_auth_rate_limiting(
            attempt_authentication,
            user_profile.delivery_email,
            email_password,
            "wrong_password",
            user_profile,
        )
        self.do_test_auth_rate_limiting(
            attempt_authentication,
            user_profile.delivery_email,
            ldap_password,
            "wrong_password",
            user_profile,
        )


class CheckPasswordStrengthTest(ZulipTestCase):
    def test_check_password_strength(self) -> None:
        with self.settings(PASSWORD_MIN_LENGTH=0, PASSWORD_MIN_GUESSES=0):
            # Never allow empty password.
            self.assertFalse(check_password_strength(""))

        with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000):
            self.assertFalse(check_password_strength(""))
            self.assertFalse(check_password_strength("short"))
            # Long enough, but too easy:
            self.assertFalse(check_password_strength("longer"))
            # Good password:
            self.assertTrue(check_password_strength("f657gdGGk9"))


class DesktopFlowTestingLib(ZulipTestCase):
    def verify_desktop_flow_app_page(self, response: "TestHttpResponse") -> None:
        self.assertEqual(response.status_code, 200)
        self.assertIn(b"<h1>Finish desktop login</h1>", response.content)

    def verify_desktop_flow_end_page(
        self, response: "TestHttpResponse", email: str, desktop_flow_otp: str
    ) -> None:
        self.assertEqual(response.status_code, 200)

        soup = BeautifulSoup(response.content, "html.parser")
        input = soup.find("input", value=True)
        assert isinstance(input, Tag)
        desktop_data = input["value"]
        assert isinstance(desktop_data, str)
        a = soup.find("a", href=True)
        assert isinstance(a, Tag)
        browser_url = a["href"]

        self.assertEqual(browser_url, "/login/")
        decrypted_key = self.verify_desktop_data_and_return_key(desktop_data, desktop_flow_otp)

        result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
f"http://zulip.testserver/accounts/login/subdomain/{decrypted_key}"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-17 16:18:09 +01:00
|
|
|
self.assertEqual(result.status_code, 302)
|
|
|
|
realm = get_realm("zulip")
|
2020-03-12 14:17:25 +01:00
|
|
|
user_profile = get_user_by_delivery_email(email, realm)
|
2020-02-17 16:18:09 +01:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
|
|
|
|
2020-04-25 06:49:19 +02:00
|
|
|
def verify_desktop_data_and_return_key(self, desktop_data: str, desktop_flow_otp: str) -> str:
|
|
|
|
key = bytes.fromhex(desktop_flow_otp)
|
|
|
|
data = bytes.fromhex(desktop_data)
|
|
|
|
iv = data[:12]
|
|
|
|
ciphertext = data[12:]
|
|
|
|
return AESGCM(key).decrypt(iv, ciphertext, b"").decode()
|
2020-02-17 16:18:09 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-03-08 00:48:15 +01:00
|
|
|
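

# Illustrative sketch of the producing side of the payload that
# verify_desktop_data_and_return_key() above takes apart: hex-decode the
# desktop_flow_otp as the AES key, prepend a random 12-byte nonce to the AES-GCM
# ciphertext, and hex-encode the result.  The helper name and the use of
# secrets.token_bytes() are assumptions for the example; this is not a copy of
# the server-side implementation.
def example_encrypt_desktop_login_key(login_key: str, desktop_flow_otp: str) -> str:
    key = bytes.fromhex(desktop_flow_otp)
    nonce = secrets.token_bytes(12)
    ciphertext = AESGCM(key).encrypt(nonce, login_key.encode(), b"")
    return (nonce + ciphertext).hex()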


class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase, ABC):
    """This is a base class for testing social-auth backends. These
    methods are often overridden by subclasses:

        register_extra_endpoints() - If the backend being tested calls some extra
                                     endpoints then they can be added here.

        get_account_data_dict() - Return the data returned by the user info endpoint
                                  according to the respective backend.
    """

    BACKEND_CLASS: "Type[SocialAuthMixin]"
    LOGIN_URL: str
    SIGNUP_URL: str
    AUTHORIZATION_URL: str
    AUTH_FINISH_URL: str
    ACCESS_TOKEN_URL: str
    USER_INFO_URL: str
    CLIENT_KEY_SETTING: str
    CLIENT_SECRET_SETTING: str
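
    # Each concrete subclass fills in the attributes above for the backend it
    # tests.  Purely as a hypothetical illustration (these names and values are
    # placeholders, not taken from any real backend configuration):
    #
    #     class ExampleOAuthBackendTest(SocialAuthBase):
    #         BACKEND_CLASS = ExampleOAuthBackend
    #         LOGIN_URL = "/accounts/login/social/example"
    #         SIGNUP_URL = "/accounts/register/social/example"
    #         AUTHORIZATION_URL = "https://auth.example.com/authorize"
    #         AUTH_FINISH_URL = "/complete/example/"
    #         ACCESS_TOKEN_URL = "https://auth.example.com/token"
    #         USER_INFO_URL = "https://api.example.com/user"
    #         CLIENT_KEY_SETTING = "SOCIAL_AUTH_EXAMPLE_KEY"
    #         CLIENT_SECRET_SETTING = "SOCIAL_AUTH_EXAMPLE_SECRET"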

    @abstractmethod
    def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
        raise NotImplementedError

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.email = self.user_profile.delivery_email
        self.name = self.user_profile.full_name
        self.backend = self.BACKEND_CLASS
        self.backend.strategy = DjangoStrategy(storage=BaseDjangoStorage())
        self.logger_string = f"zulip.auth.{self.backend.name}"

        # This is a workaround for the fact that Python social auth
        # caches the set of authentication backends that are enabled
        # the first time that `social_django.utils` is imported. See
        # https://github.com/python-social-auth/social-app-django/pull/162
        # for details.
        from social_core.backends.utils import load_backends

        load_backends(settings.AUTHENTICATION_BACKENDS, force_load=True)

    def logger_output(self, output_string: str, type: str) -> str:
        return f"{type.upper()}:zulip.auth.{self.backend.name}:{output_string}"

    def register_extra_endpoints(
        self,
        requests_mock: responses.RequestsMock,
        account_data_dict: Dict[str, str],
        **extra_data: Any,
    ) -> None:
        pass

    def prepare_login_url_and_headers(
        self,
        subdomain: str,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        is_signup: bool = False,
        next: str = "",
        multiuse_object_key: str = "",
        alternative_start_url: Optional[str] = None,
        *,
        user_agent: Optional[str] = None,
    ) -> Tuple[str, Dict[str, Any]]:
        url = self.LOGIN_URL
        if alternative_start_url is not None:
            url = alternative_start_url

        params = {}
        headers = {}
        if subdomain == "":
            # "testserver" may trip up some libraries' URL validation,
            # so let's use the equivalent www. version.
            headers["HTTP_HOST"] = "www.testserver"
        else:
            headers["HTTP_HOST"] = subdomain + ".testserver"
        if mobile_flow_otp is not None:
            params["mobile_flow_otp"] = mobile_flow_otp
            headers["HTTP_USER_AGENT"] = "ZulipAndroid"
        if desktop_flow_otp is not None:
            params["desktop_flow_otp"] = desktop_flow_otp
        if is_signup:
            url = self.SIGNUP_URL
        params["next"] = next
        params["multiuse_object_key"] = multiuse_object_key
        if len(params) > 0:
            url += f"?{urlencode(params)}"
        if user_agent is not None:
            headers["HTTP_USER_AGENT"] = user_agent

        return url, headers
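
    # For illustration only (hypothetical argument values, not asserted by any
    # test here): prepare_login_url_and_headers(subdomain="zulip",
    # mobile_flow_otp="1234abcd" * 8) returns roughly
    #     (f"{LOGIN_URL}?mobile_flow_otp=1234abcd...&next=&multiuse_object_key=",
    #      {"HTTP_HOST": "zulip.testserver", "HTTP_USER_AGENT": "ZulipAndroid"}),
    # i.e. the OTP and redirect parameters travel in the query string while the
    # target organization is selected via the HTTP_HOST header.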

    def social_auth_test_finish(
        self,
        result: "TestHttpResponse",
        account_data_dict: Dict[str, str],
        expect_choose_email_screen: bool,
        headers: Any,
        **extra_data: Any,
    ) -> "TestHttpResponse":
        parsed_url = urllib.parse.urlparse(result["Location"])
        csrf_state = urllib.parse.parse_qs(parsed_url.query)["state"]
        result = self.client_get(self.AUTH_FINISH_URL, dict(state=csrf_state), **headers)
        return result

    def generate_access_token_url_payload(self, account_data_dict: Dict[str, str]) -> str:
        return json.dumps(
            {
                "access_token": "foobar",
                "token_type": "bearer",
            }
        )

    def social_auth_test(
        self,
        account_data_dict: Dict[str, str],
        *,
        subdomain: str,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        is_signup: bool = False,
        next: str = "",
        multiuse_object_key: str = "",
        expect_choose_email_screen: bool = False,
        alternative_start_url: Optional[str] = None,
        user_agent: Optional[str] = None,
        **extra_data: Any,
    ) -> "TestHttpResponse":
        """Main entry point for all social authentication tests.

        * account_data_dict: Dictionary containing the name/email data
          that should be returned by the social auth backend.
        * subdomain: Which organization's login page is being accessed.
        * desktop_flow_otp / mobile_flow_otp: Token to be used for
          mobile or desktop authentication flow testing.
        * is_signup: Whether we're testing the social flow for
          /register (True) or /login (False). This is important
          because we need to verify behavior like the
          "Continue to registration" prompt if you try to log in using an
          account that doesn't exist but is allowed to sign up.
        * next: Parameter passed through in production authentication
          to redirect the user to (e.g.) the specific page in the web app
          that they clicked a link to before being presented with the login
          page.
        * expect_choose_email_screen: Some social auth backends, like
          GitHub, simultaneously authenticate for multiple email addresses.
          Set this to True if we expect to show the "Choose Email" screen
          in this test should the backend have that feature.
        * multiuse_object_key: Used when the user has clicked a multi-use
          reusable invitation link.
        * alternative_start_url: Used to test legacy mobile app behavior.
        * user_agent: What user-agent to use for the HTTP requests.
        """

        url, headers = self.prepare_login_url_and_headers(
            subdomain,
            mobile_flow_otp,
            desktop_flow_otp,
            is_signup,
            next,
            multiuse_object_key,
            alternative_start_url,
            user_agent=user_agent,
        )

        result = self.client_get(url, **headers)

        expected_result_url_prefix = f"http://testserver/login/{self.backend.name}/"
        if settings.SOCIAL_AUTH_SUBDOMAIN is not None:
            expected_result_url_prefix = (
                f"http://{settings.SOCIAL_AUTH_SUBDOMAIN}.testserver/login/{self.backend.name}/"
            )

        if result.status_code != 302 or not result["Location"].startswith(
            expected_result_url_prefix
        ):
            return result

        result = self.client_get(result["Location"], **headers)
        self.assertEqual(result.status_code, 302)
        assert self.AUTHORIZATION_URL in result["Location"]

        self.client.cookies = result.cookies

        # Next, the browser requests result["Location"], and gets
        # redirected back to the registered redirect uri.

        # We register callbacks for the key URLs on the Identity Provider that
        # the auth completion URL will call.
        with responses.RequestsMock(assert_all_requests_are_fired=False) as requests_mock:
            requests_mock.add(
                requests_mock.POST,
                self.ACCESS_TOKEN_URL,
                status=200,
                body=self.generate_access_token_url_payload(account_data_dict),
            )
            requests_mock.add(
                requests_mock.GET,
                self.USER_INFO_URL,
                status=200,
                body=json.dumps(account_data_dict),
            )
            self.register_extra_endpoints(requests_mock, account_data_dict, **extra_data)

            result = self.social_auth_test_finish(
                result, account_data_dict, expect_choose_email_screen, headers=headers, **extra_data
            )
        return result
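
    # The tests below drive social_auth_test() end to end: the login/signup start
    # URL eventually redirects to the backend's AUTHORIZATION_URL, the mocked
    # ACCESS_TOKEN_URL and USER_INFO_URL endpoints feed back account_data_dict,
    # and social_auth_test_finish() completes the flow against AUTH_FINISH_URL,
    # normally ending in a redirect to /accounts/login/subdomain/ whose payload
    # the assertions read via load_subdomain_token().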

    def test_social_auth_no_key(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with self.settings(**{self.CLIENT_KEY_SETTING: None}):
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", next="/user_uploads/image"
            )
            self.assert_in_success_response(["Configuration error"], result)

    def test_config_error_development(self) -> None:
        if hasattr(self, "CLIENT_KEY_SETTING") and hasattr(self, "CLIENT_SECRET_SETTING"):
            with self.settings(**{self.CLIENT_KEY_SETTING: None}):
                result = self.client_get(self.LOGIN_URL)
                self.assert_in_success_response(["Configuration error"], result)
                self.assert_in_success_response([self.CLIENT_KEY_SETTING.lower()], result)
                self.assert_in_success_response([self.CLIENT_SECRET_SETTING.lower()], result)
                self.assert_in_success_response(["zproject/dev-secrets.conf"], result)
                self.assert_not_in_success_response([self.CLIENT_KEY_SETTING], result)
                self.assert_not_in_success_response(["zproject/dev_settings.py"], result)
                self.assert_not_in_success_response(["/etc/zulip/settings.py"], result)
                self.assert_not_in_success_response(["/etc/zulip/zulip-secrets.conf"], result)

    @override_settings(DEVELOPMENT=False)
    def test_config_error_production(self) -> None:
        if hasattr(self, "CLIENT_KEY_SETTING") and hasattr(self, "CLIENT_SECRET_SETTING"):
            with self.settings(**{self.CLIENT_KEY_SETTING: None}):
                result = self.client_get(self.LOGIN_URL)
                self.assert_in_success_response(["Configuration error"], result)
                self.assert_in_success_response([self.CLIENT_KEY_SETTING], result)
                self.assert_in_success_response(["/etc/zulip/settings.py"], result)
                self.assert_in_success_response([self.CLIENT_SECRET_SETTING.lower()], result)
                self.assert_in_success_response(["/etc/zulip/zulip-secrets.conf"], result)
                self.assert_not_in_success_response([self.CLIENT_KEY_SETTING.lower()], result)
                self.assert_not_in_success_response(["zproject/dev_settings.py"], result)
                self.assert_not_in_success_response(["zproject/dev-secrets.conf"], result)

    def test_social_auth_success(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=False,
                subdomain="zulip",
                next="/user_uploads/image",
            )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        self.assertIn(
            f"INFO:{self.logger_string}:Authentication attempt from 127.0.0.1: subdomain=zulip;username=hamlet@zulip.com;outcome=success",
            m.output[0],
        )

    @override_settings(SOCIAL_AUTH_SUBDOMAIN=None)
    def test_when_social_auth_subdomain_is_not_set(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=False,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_social_auth_deactivated_user(self) -> None:
        user_profile = self.example_user("hamlet")
        do_deactivate_user(user_profile, acting_user=None)
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        # We expect to go through the "choose email" screen here,
        # because there won't be an existing user account we can
        # auto-select for the user.
        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.social_auth_test(
                account_data_dict, expect_choose_email_screen=True, subdomain="zulip"
            )
            self.assertEqual(result.status_code, 302)
            self.assertEqual(
                result["Location"],
                f"{user_profile.realm.uri}/login/?"
                + urlencode({"is_deactivated": user_profile.delivery_email}),
            )
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    f"Failed login attempt for deactivated account: {user_profile.id}@zulip", "info"
                )
            ],
        )

        result = self.client_get(result["Location"])
        self.assert_in_success_response(
            [f"Your account {user_profile.delivery_email} has been deactivated."], result
        )

    def test_social_auth_invalid_realm(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with mock.patch("zerver.middleware.get_realm", return_value=get_realm("zulip")):
            # This mock.patch case somewhat hackishly arranges it so
            # that we switch realms halfway through the test
            result = self.social_auth_test(
                account_data_dict, subdomain="invalid", next="/user_uploads/image"
            )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/accounts/find/")

    def test_social_auth_invalid_email(self) -> None:
        account_data_dict = self.get_account_data_dict(email="invalid", name=self.name)
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                next="/user_uploads/image",
            )
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    f"{self.backend.auth_backend_name} got invalid email argument.",
                    "warning",
                )
            ],
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/register/")

    def test_user_cannot_log_into_nonexisting_realm(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        result = self.social_auth_test(account_data_dict, subdomain="nonexistent")
        self.assert_in_response("There is no Zulip organization hosted at this subdomain.", result)
        self.assertEqual(result.status_code, 404)

    def test_user_cannot_log_into_wrong_subdomain(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain="zephyr"
        )
        self.assertTrue(
            result["Location"].startswith("http://zephyr.testserver/accounts/login/subdomain/")
        )
        result = self.client_get(
            result["Location"].replace("http://zephyr.testserver", ""), subdomain="zephyr"
        )
        self.assert_in_success_response(
            [
                "Your email address, hamlet@zulip.com, is not in one of the domains ",
                "that are allowed to register for accounts in this organization.",
            ],
            result,
        )

    def test_social_auth_mobile_success(self) -> None:
        mobile_flow_otp = "1234abcd" * 8
        hamlet = self.example_user("hamlet")
        account_data_dict = self.get_account_data_dict(
            email=hamlet.delivery_email, name="Full Name"
        )
        self.assert_length(mail.outbox, 0)
        self.user_profile.date_joined = timezone_now() - datetime.timedelta(
            seconds=JUST_CREATED_THRESHOLD + 1
        )
        self.user_profile.save()

        with self.settings(SEND_LOGIN_EMAILS=True):
            # Verify that the right thing happens with an invalid-format OTP
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", mobile_flow_otp="1234"
            )
            self.assert_json_error(result, "Invalid OTP")
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", mobile_flow_otp="invalido" * 8
            )
            self.assert_json_error(result, "Invalid OTP")

            # Now do it correctly
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                expect_choose_email_screen=False,
                mobile_flow_otp=mobile_flow_otp,
            )
        self.assertEqual(result.status_code, 302)
        redirect_url = result["Location"]
        parsed_url = urllib.parse.urlparse(redirect_url)
        query_params = urllib.parse.parse_qs(parsed_url.query)
        self.assertEqual(parsed_url.scheme, "zulip")
        self.assertEqual(query_params["realm"], ["http://zulip.testserver"])
        self.assertEqual(query_params["email"], [hamlet.delivery_email])
        self.assertEqual(query_params["user_id"], [str(hamlet.id)])

        encrypted_api_key = query_params["otp_encrypted_api_key"][0]
        hamlet_api_keys = get_all_api_keys(self.example_user("hamlet"))
        self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), hamlet_api_keys)
        self.assert_length(mail.outbox, 1)
        self.assertIn("Zulip on Android", mail.outbox[0].body)
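
    # Note on the mobile flow checked above: otp_decrypt_api_key() undoes a
    # simple symmetric, pad-style encoding that the server applies to the API
    # key using the mobile_flow_otp, so the app that generated the OTP can
    # recover the key from the otp_encrypted_api_key query parameter.
    # (Descriptive note only; the helper lives outside this file, so treat the
    # mechanism described here as an assumption.)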

    def test_social_auth_desktop_success(self) -> None:
        desktop_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=self.email, name="Full Name")

        # Verify that the right thing happens with an invalid-format OTP
        result = self.social_auth_test(
            account_data_dict, subdomain="zulip", desktop_flow_otp="1234"
        )
        self.assert_json_error(result, "Invalid OTP")
        result = self.social_auth_test(
            account_data_dict, subdomain="zulip", desktop_flow_otp="invalido" * 8
        )
        self.assert_json_error(result, "Invalid OTP")

        # Now do it correctly
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=False,
            desktop_flow_otp=desktop_flow_otp,
            user_agent="ZulipElectron/5.0.0",
        )
        self.verify_desktop_flow_app_page(result)
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=False,
            desktop_flow_otp=desktop_flow_otp,
        )
        self.verify_desktop_flow_end_page(result, self.email, desktop_flow_otp)

    def test_social_auth_session_fields_cleared_correctly(self) -> None:
        mobile_flow_otp = "1234abcd" * 8

        def initiate_auth(mobile_flow_otp: Optional[str] = None) -> None:
            url, headers = self.prepare_login_url_and_headers(
                subdomain="zulip", mobile_flow_otp=mobile_flow_otp
            )
            result = self.client_get(url, **headers)
            self.assertEqual(result.status_code, 302)

            result = self.client_get(result["Location"], **headers)
            self.assertEqual(result.status_code, 302)

        # Start social auth with mobile_flow_otp param. It should get saved into the session
        # on SOCIAL_AUTH_SUBDOMAIN.
        initiate_auth(mobile_flow_otp)
        self.assertEqual(self.client.session["mobile_flow_otp"], mobile_flow_otp)

        # Make a request without mobile_flow_otp param and verify the field doesn't persist
        # in the session from the previous request.
        initiate_auth()
        self.assertEqual(self.client.session.get("mobile_flow_otp"), None)

    def test_social_auth_mobile_and_desktop_flow_in_one_request_error(self) -> None:
        otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=self.email, name="Full Name")

        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=False,
            desktop_flow_otp=otp,
            mobile_flow_otp=otp,
        )
        self.assert_json_error(
            result, "Can't use both mobile_flow_otp and desktop_flow_otp together."
        )

    def test_social_auth_registration_existing_account(self) -> None:
        """If the user already exists, signup flow just logs them in"""
        email = "hamlet@zulip.com"
        name = "Full Name"
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain="zulip", is_signup=True
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        # Verify data has the full_name consistent with the user we're logging in as.
        self.assertEqual(data["full_name"], self.example_user("hamlet").full_name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))
        hamlet = self.example_user("hamlet")
        # Name wasn't changed at all
        self.assertEqual(hamlet.full_name, "King Hamlet")

    def stage_two_of_registration(
        self,
        result: "TestHttpResponse",
        realm: Realm,
        subdomain: str,
        email: str,
        name: str,
        expected_final_name: str,
        skip_registration_form: bool,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        expect_confirm_registration_page: bool = False,
        expect_full_name_prepopulated: bool = True,
    ) -> None:
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], email)
        self.assertEqual(data["full_name"], name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        result = self.client_get(result["Location"])

        if expect_confirm_registration_page:
            self.assertEqual(result.status_code, 200)
        else:
            self.assertEqual(result.status_code, 302)
        confirmation = Confirmation.objects.all().last()
        assert confirmation is not None
        confirmation_key = confirmation.confirmation_key
        if expect_confirm_registration_page:
            self.assert_in_success_response(["do_confirm/" + confirmation_key], result)
            do_confirm_url = "/accounts/do_confirm/" + confirmation_key
        else:
            self.assertIn("do_confirm/" + confirmation_key, result["Location"])
            do_confirm_url = result["Location"]
        result = self.client_get(do_confirm_url, name=name)
        self.assert_in_response('action="/accounts/register/"', result)
        confirmation_data = {"from_confirmation": "1", "key": confirmation_key}
        result = self.client_post("/accounts/register/", confirmation_data)
        if not skip_registration_form:
            self.assert_in_response("We just need you to do one last thing", result)

            # Verify that the user is asked for name but not password
            self.assert_not_in_success_response(["id_password"], result)
            self.assert_in_success_response(["id_full_name"], result)
            if expect_full_name_prepopulated:
                # Verify the name field gets correctly pre-populated:
                self.assert_in_success_response([expected_final_name], result)

            # Click confirm registration button.
            result = self.client_post(
                "/accounts/register/",
                {"full_name": expected_final_name, "key": confirmation_key, "terms": True},
            )

        # Mobile and desktop flow have additional steps:
        if mobile_flow_otp:
            self.assertEqual(result.status_code, 302)
            redirect_url = result["Location"]
            parsed_url = urllib.parse.urlparse(redirect_url)
            query_params = urllib.parse.parse_qs(parsed_url.query)
            self.assertEqual(parsed_url.scheme, "zulip")
            self.assertEqual(query_params["realm"], ["http://zulip.testserver"])
            self.assertEqual(query_params["email"], [email])
            encrypted_api_key = query_params["otp_encrypted_api_key"][0]
            user_api_keys = get_all_api_keys(get_user_by_delivery_email(email, realm))
            self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), user_api_keys)
            return
        elif desktop_flow_otp:
            self.verify_desktop_flow_end_page(result, email, desktop_flow_otp)
            # Now the desktop app is logged in, continue with the logged in check.
        else:
            self.assertEqual(result.status_code, 302)

        user_profile = get_user_by_delivery_email(email, realm)
        self.assert_logged_in_user_id(user_profile.id)
        self.assertEqual(user_profile.full_name, expected_final_name)
        self.assertFalse(user_profile.has_usable_password())
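
    # stage_two_of_registration() above covers everything after the social
    # backend hands the browser back to Zulip: following the
    # /accounts/login/subdomain/ redirect, confirming the registration link,
    # submitting the registration form when needed, and, for the mobile and
    # desktop flows, validating the final app-specific redirect.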

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration(self) -> None:
        """If the user doesn't exist yet, social auth can be used to register an account"""
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain=subdomain, is_signup=True
        )
        self.stage_two_of_registration(
            result, realm, subdomain, email, name, name, self.BACKEND_CLASS.full_name_validated
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_mobile_registration(self) -> None:
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        mobile_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=True,
            is_signup=True,
            mobile_flow_otp=mobile_flow_otp,
        )
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            name,
            name,
            self.BACKEND_CLASS.full_name_validated,
            mobile_flow_otp=mobile_flow_otp,
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_desktop_registration(self) -> None:
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        desktop_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=True,
            is_signup=True,
            desktop_flow_otp=desktop_flow_otp,
        )
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            name,
            name,
            self.BACKEND_CLASS.full_name_validated,
            desktop_flow_otp=desktop_flow_otp,
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration_invitation_exists(self) -> None:
        """
        This tests the registration flow in the case where an invitation for the user
        was generated.
        """
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")

        iago = self.example_user("iago")
        do_invite_users(iago, [email], [], invite_expires_in_minutes=2 * 24 * 60)

        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain=subdomain, is_signup=True
        )
        self.stage_two_of_registration(
            result, realm, subdomain, email, name, name, self.BACKEND_CLASS.full_name_validated
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_with_invalid_multiuse_invite(self) -> None:
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"

        multiuse_object_key = "invalid"
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict,
            subdomain=subdomain,
            is_signup=True,
            expect_choose_email_screen=True,
            multiuse_object_key=multiuse_object_key,
        )
        self.assertEqual(result.status_code, 302)
        result = self.client_get(result["Location"])

        self.assertEqual(result.status_code, 404)
        self.assert_in_response("Whoops. The confirmation link is malformed.", result)

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration_using_multiuse_invite(self) -> None:
        """If the user doesn't exist yet, social auth can be used to register an account"""
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        realm.invite_required = True
        realm.save()

        stream_names = ["new_stream_1", "new_stream_2"]
        streams = []
        for stream_name in set(stream_names):
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams.append(stream)

        referrer = self.example_user("hamlet")
        multiuse_obj = MultiuseInvite.objects.create(realm=realm, referred_by=referrer)
        multiuse_obj.streams.set(streams)
        validity_in_minutes = 2 * 24 * 60
        create_confirmation_link(
            multiuse_obj, Confirmation.MULTIUSE_INVITE, validity_in_minutes=validity_in_minutes
        )
        multiuse_confirmation = Confirmation.objects.all().last()
        assert multiuse_confirmation is not None
        multiuse_object_key = multiuse_confirmation.confirmation_key
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        # First, try to sign up for closed realm without using an invitation
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain=subdomain, is_signup=True
        )
        result = self.client_get(result["Location"])
        # Verify that we're unable to sign up, since this is a closed realm
        self.assertEqual(result.status_code, 200)
        self.assert_in_success_response(["Sign up"], result)

        result = self.social_auth_test(
            account_data_dict,
            subdomain=subdomain,
            is_signup=True,
            expect_choose_email_screen=True,
            multiuse_object_key=multiuse_object_key,
        )
        self.stage_two_of_registration(
            result, realm, subdomain, email, name, name, self.BACKEND_CLASS.full_name_validated
        )

        # Verify the PreregistrationUser object was set up as expected.
        prereg_user = PreregistrationUser.objects.last()
        assert prereg_user is not None
        self.assertEqual(prereg_user.email, email)
        self.assertEqual(prereg_user.multiuse_invite, multiuse_obj)

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration_using_multiuse_invite_realm_validation(self) -> None:
        """A multiuse invitation generated for one realm can't be used to sign up in another."""
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        realm.invite_required = True
        realm.save()

        streams: List[Stream] = []

        # Generate an invitation for a different realm than the one we'll attempt to join:
        lear_realm = get_realm("lear")
        multiuse_obj = MultiuseInvite.objects.create(
            realm=lear_realm,
            referred_by=assert_is_not_none(UserProfile.objects.filter(realm=lear_realm).first()),
        )
        multiuse_obj.streams.set(streams)
        validity_in_minutes = 2 * 24 * 60
        create_confirmation_link(
            multiuse_obj, Confirmation.MULTIUSE_INVITE, validity_in_minutes=validity_in_minutes
        )
        multiuse_confirmation = Confirmation.objects.all().last()
        assert multiuse_confirmation is not None
        multiuse_object_key = multiuse_confirmation.confirmation_key
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        # Now we try to use the invitation for the lear realm to join the zulip realm,
        # which should fail.
        result = self.social_auth_test(
            account_data_dict,
            subdomain=subdomain,
            is_signup=True,
            expect_choose_email_screen=True,
            multiuse_object_key=multiuse_object_key,
        )

        result = self.client_get(result["Location"])
        self.assert_in_response(
            "Whoops. We couldn't find your confirmation link in the system.", result
        )

    def test_social_auth_registration_without_is_signup(self) -> None:
        """If `is_signup` is not set then a new account isn't created"""
        email = "newuser@zulip.com"
        name = "Full Name"
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain="zulip"
        )
        self.assertEqual(result.status_code, 302)
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], email)
        self.assertEqual(data["full_name"], name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        result = self.client_get(result["Location"])
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("No account found for newuser@zulip.com.", result)

    def test_social_auth_registration_without_is_signup_closed_realm(self) -> None:
        """If the user doesn't exist yet in a closed realm, give an error"""
        realm = get_realm("zulip")
        do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
        email = "nonexisting@phantom.com"
        name = "Full Name"
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain="zulip"
        )
        self.assertEqual(result.status_code, 302)
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], email)
        self.assertEqual(data["full_name"], name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        result = self.client_get(result["Location"])
        self.assertEqual(result.status_code, 200)
        self.assert_in_response('action="/register/"', result)
        self.assert_in_response(
            "Your email address, {}, is not "
            "in one of the domains that are allowed to register "
            "for accounts in this organization.".format(email),
            result,
        )
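
    # The two tests below combine a social backend with Zulip's LDAP backends:
    # POPULATE_PROFILE_VIA_LDAP together with LDAP_APPEND_DOMAIN (or
    # LDAP_EMAIL_ATTR) controls whether the new user's full name is filled in
    # from the LDAP directory during registration, or falls back to the name
    # supplied by the social backend when the account isn't found in LDAP.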
@override_settings(TERMS_OF_SERVICE_VERSION=None)
|
2019-11-01 00:33:56 +01:00
|
|
|
def test_social_auth_with_ldap_populate_registration_from_confirmation(self) -> None:
|
|
|
|
self.init_default_ldap_database()
|
|
|
|
email = "newuser@zulip.com"
|
|
|
|
name = "Full Name"
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
subdomain = "zulip"
|
2021-02-12 08:20:45 +01:00
|
|
|
ldap_user_attr_map = {"full_name": "cn"}
|
2019-11-01 00:33:56 +01:00
|
|
|
account_data_dict = self.get_account_data_dict(email=email, name=name)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
backend_path = f"zproject.backends.{self.BACKEND_CLASS.__name__}"
|
2019-11-01 00:33:56 +01:00
|
|
|
with self.settings(
|
            POPULATE_PROFILE_VIA_LDAP=True,
            LDAP_APPEND_DOMAIN="zulip.com",
            AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
            AUTHENTICATION_BACKENDS=(
                backend_path,
                "zproject.backends.ZulipLDAPUserPopulator",
                "zproject.backends.ZulipDummyBackend",
            ),
        ), self.assertLogs(level="WARNING") as log_warn:
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                is_signup=True,
            )
            # Full name should get populated from LDAP:
            self.stage_two_of_registration(
                result,
                realm,
                subdomain,
                email,
                name,
                "New LDAP fullname",
                skip_registration_form=True,
            )

            # Now try a user that doesn't exist in LDAP:
            email = self.nonreg_email("alice")
            name = "Alice Social"
            account_data_dict = self.get_account_data_dict(email=email, name=name)
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                is_signup=True,
            )
            # Full name should get populated as provided by the social backend, because
            # this user isn't in the LDAP dictionary:
            self.stage_two_of_registration(
                result,
                realm,
                subdomain,
                email,
                name,
                name,
                skip_registration_form=self.BACKEND_CLASS.full_name_validated,
            )
        self.assertEqual(
            log_warn.output, [f"WARNING:root:New account email {email} could not be found in LDAP"]
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_with_ldap_auth_registration_from_confirmation(self) -> None:
        """
        This test checks that in configurations that use the LDAP authentication backend
        and a social backend, it is possible to create non-LDAP users via the social backend.
        """
        self.init_default_ldap_database()
        email = self.nonreg_email("alice")
        name = "Alice Social"
        realm = get_realm("zulip")
        subdomain = "zulip"
        ldap_user_attr_map = {"full_name": "cn"}
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        backend_path = f"zproject.backends.{self.BACKEND_CLASS.__name__}"
        with self.settings(
            POPULATE_PROFILE_VIA_LDAP=True,
            LDAP_EMAIL_ATTR="mail",
            AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
            AUTHENTICATION_BACKENDS=(
                backend_path,
                "zproject.backends.ZulipLDAPAuthBackend",
                "zproject.backends.ZulipDummyBackend",
            ),
        ), self.assertLogs("zulip.ldap", level="DEBUG") as log_debug, self.assertLogs(
            level="WARNING"
        ) as log_warn:
            account_data_dict = self.get_account_data_dict(email=email, name=name)
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                is_signup=True,
            )
            # Full name should get populated as provided by the social backend, because
            # this user isn't in the LDAP dictionary:
            self.stage_two_of_registration(
                result,
                realm,
                subdomain,
                email,
                name,
                name,
                skip_registration_form=self.BACKEND_CLASS.full_name_validated,
            )
        self.assertEqual(
            log_warn.output, [f"WARNING:root:New account email {email} could not be found in LDAP"]
        )
        self.assertEqual(
            log_debug.output,
            [
                f"DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: {email}. Input username: {email}"
            ],
        )

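    # The next two tests exercise error handling in the /complete/<backend>/ endpoint:
    # when the upstream provider reports a failure (or the token exchange raises), the
    # user should be redirected back to a login page rather than seeing a server error.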
    def test_social_auth_complete(self) -> None:
        with mock.patch(
            "social_core.backends.oauth.BaseOAuth2.process_error",
            side_effect=AuthFailed("Not found"),
        ), self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.client_get(reverse("social:complete", args=[self.backend.name]))
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output("AuthFailed: Authentication failed: ", "info"),
            ],
        )

        with mock.patch(
            "social_core.backends.oauth.BaseOAuth2.auth_complete",
            side_effect=requests.exceptions.HTTPError,
        ), self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.client_get(reverse("social:complete", args=[self.backend.name]))
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output("HTTPError: ", "info"),
            ],
        )

    def test_social_auth_complete_when_base_exc_is_raised(self) -> None:
        with mock.patch(
            "social_core.backends.oauth.BaseOAuth2.auth_complete",
            side_effect=AuthStateForbidden("State forbidden"),
        ), self.assertLogs(self.logger_string, level="WARNING"):
            result = self.client_get(reverse("social:complete", args=[self.backend.name]))
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_invited_as_admin_but_expired(self) -> None:
        iago = self.example_user("iago")
        email = self.nonreg_email("alice")
        name = "Alice Jones"

        invite_expires_in_minutes = 2 * 24 * 60
        do_invite_users(
            iago,
            [email],
            [],
            invite_expires_in_minutes=invite_expires_in_minutes,
            invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"],
        )
        now = timezone_now() + datetime.timedelta(days=3)

        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain=subdomain, is_signup=True
        )
        with mock.patch("zerver.models.timezone_now", return_value=now):
            self.stage_two_of_registration(
                result, realm, subdomain, email, name, name, self.BACKEND_CLASS.full_name_validated
            )

        # The invitation is expired, so the user should be created as normal member only.
        created_user = get_user_by_delivery_email(email, realm)
        self.assertEqual(created_user.role, UserProfile.ROLE_MEMBER)


class SAMLAuthBackendTest(SocialAuthBase):
    BACKEND_CLASS = SAMLAuthBackend
    LOGIN_URL = "/accounts/login/social/saml/test_idp"
    SIGNUP_URL = "/accounts/register/social/saml/test_idp"
    AUTHORIZATION_URL = "https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO"
    AUTH_FINISH_URL = "/complete/saml/"

    # We have to define our own social_auth_test as the flow of SAML authentication
    # is different from the other social backends.
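    # Roughly: the initial GET is redirected to the SOCIAL_AUTH_SUBDOMAIN server, which in
    # turn redirects to the IdP's SSO URL with a SAMLRequest and a RelayState carrying a
    # state token stored in Redis; we then simulate the IdP's response by POSTing a
    # fixture-based SAMLResponse back to AUTH_FINISH_URL with that same RelayState.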
    def social_auth_test(
        self,
        account_data_dict: Dict[str, str],
        *,
        subdomain: str,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        is_signup: bool = False,
        next: str = "",
        multiuse_object_key: str = "",
        user_agent: Optional[str] = None,
        extra_attributes: Mapping[str, List[str]] = {},
        **extra_data: Any,
    ) -> "TestHttpResponse":
        url, headers = self.prepare_login_url_and_headers(
            subdomain,
            mobile_flow_otp,
            desktop_flow_otp,
            is_signup,
            next,
            multiuse_object_key,
            user_agent=user_agent,
        )

        result = self.client_get(url, **headers)

        expected_result_url_prefix = f"http://testserver/login/{self.backend.name}/"
        if settings.SOCIAL_AUTH_SUBDOMAIN is not None:
            expected_result_url_prefix = (
                f"http://{settings.SOCIAL_AUTH_SUBDOMAIN}.testserver/login/{self.backend.name}/"
            )

        if result.status_code != 302 or not result["Location"].startswith(
            expected_result_url_prefix
        ):
            return result

        result = self.client_get(result["Location"], **headers)

        self.assertEqual(result.status_code, 302)
        assert self.AUTHORIZATION_URL in result["Location"]
        assert "samlrequest" in result["Location"].lower()

        self.client.cookies = result.cookies
        parsed_url = urllib.parse.urlparse(result["Location"])
        relay_state = urllib.parse.parse_qs(parsed_url.query)["RelayState"][0]
        # Make sure params are getting encoded into RelayState:
        data = SAMLAuthBackend.get_data_from_redis(orjson.loads(relay_state)["state_token"])
        assert data is not None
        if next:
            self.assertEqual(data["next"], next)
        if is_signup:
            self.assertEqual(data["is_signup"], "1")

        saml_response = self.generate_saml_response(
            email=account_data_dict["email"],
            name=account_data_dict["name"],
            extra_attributes=extra_attributes,
        )
        post_params = {"SAMLResponse": saml_response, "RelayState": relay_state}
        # The mock below is necessary, so that python3-saml accepts our SAMLResponse,
        # and doesn't verify the cryptographic signatures etc., since generating
        # a perfectly valid SAMLResponse for the purpose of these tests would be too complex,
        # and we simply use one loaded from a fixture file.
        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            # We are simulating a cross-domain POST request here. Session is a Lax cookie, meaning
            # it won't be sent by the browser in this request. To simulate that effect with the django
            # test client, we flush the session before the request.
            self.client.session.flush()
            result = self.client_post(self.AUTH_FINISH_URL, post_params, **headers)

        return result

    def generate_saml_response(
        self, email: str, name: str, extra_attributes: Mapping[str, List[str]] = {}
    ) -> str:
        """
        The samlresponse.txt fixture has a pre-generated SAMLResponse,
        with {email}, {first_name}, {last_name} placeholders, that can
        be filled out with the data we want.
        """
        if name:
            name_parts = name.split(" ")
            first_name = name_parts[0]
            last_name = name_parts[1]
        else:
            first_name = ""
            last_name = ""

        extra_attrs = ""
        for extra_attr_name, extra_attr_values in extra_attributes.items():
            values = "".join(
                '<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" '
                + 'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">'
                + f"{value}</saml2:AttributeValue>"
                for value in extra_attr_values
            )
            extra_attrs += (
                f'<saml2:Attribute Name="{extra_attr_name}" '
                + 'NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified">'
                + f"{values}</saml2:Attribute>"
            )

        unencoded_saml_response = self.fixture_data("samlresponse.txt", type="saml").format(
            email=email,
            first_name=first_name,
            last_name=last_name,
            extra_attrs=extra_attrs,
        )
        # SAMLResponse needs to be base64-encoded.
        saml_response: str = base64.b64encode(unencoded_saml_response.encode()).decode()

        return saml_response

    def generate_saml_logout_request_from_idp(self, email: str) -> str:
        """
        The logoutrequest.txt fixture has a pre-generated LogoutRequest,
        with an {email} placeholder that can be filled out with the data we want.
        """
        unencoded_logout_request = self.fixture_data("logoutrequest.txt", type="saml").format(
            email=email,
        )
        logout_request: str = base64.b64encode(unencoded_logout_request.encode()).decode()

        return logout_request

    def make_idp_initiated_logout_request(
        self, email: str, make_validity_checks_pass: bool = True
    ) -> "TestHttpResponse":
        samlrequest = self.generate_saml_logout_request_from_idp(email)
        parameters = {"SAMLRequest": samlrequest}

        if make_validity_checks_pass:
            # It's hard to create fully-correct LogoutRequests with signatures in tests,
            # so we rely on mocking the validating functions instead.
            with mock.patch.object(
                OneLogin_Saml2_Logout_Request, "is_valid", return_value=True
            ), mock.patch.object(
                OneLogin_Saml2_Auth,
                "validate_request_signature",
                return_value=True,
            ):
                result = self.client_get("http://zulip.testserver/complete/saml/", parameters)
        else:
            result = self.client_get("http://zulip.testserver/complete/saml/", parameters)
        return result

    def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
        return dict(email=email, name=name)

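    # The tests below cover IdP-initiated Single Logout (SLO): the IdP delivers a
    # LogoutRequest to /complete/saml/, and on success the session for the user named
    # in the NameID is terminated, that user's API key is rotated, and we redirect back
    # to the IdP's slo_url with a success LogoutResponse.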
    def test_saml_idp_initiated_logout_success(self) -> None:
        hamlet = self.example_user("hamlet")
        old_api_key = hamlet.api_key
        self.login("hamlet")

        self.assert_logged_in_user_id(hamlet.id)
        result = self.make_idp_initiated_logout_request(hamlet.delivery_email)
        self.assert_logged_in_user_id(None)

        # The expected response is a redirect to the IdP's slo_url endpoint
        # with a SAMLResponse announcing success.
        self.assertEqual(result.status_code, 302)
        redirect_to = result["Location"]
        self.assertIn(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS["test_idp"]["slo_url"], redirect_to)

        parsed = urllib.parse.urlparse(redirect_to)
        query_dict = urllib.parse.parse_qs(parsed.query)

        self.assertIn("SAMLResponse", query_dict)
        # Do some very basic parsing of the SAMLResponse to verify it's a success response.
        saml_response_encoded = query_dict["SAMLResponse"][0]
        saml_response = OneLogin_Saml2_Utils.decode_base64_and_inflate(
            saml_response_encoded
        ).decode()
        self.assertIn(
            '<samlp:StatusCode Value="urn:oasis:names:tc:SAML:2.0:status:Success" />', saml_response
        )

        hamlet.refresh_from_db()
        # Ensure that the user's api_key was rotated:
        self.assertNotEqual(hamlet.api_key, old_api_key)

    def test_saml_idp_initiated_logout_request_for_different_user(self) -> None:
        """
        This test verifies that sessions are revoked based on the NameID
        in the LogoutRequest rather than just the logged in session cookie.
        """
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        cordelia_old_api_key = cordelia.api_key
        self.login("hamlet")

        self.assert_logged_in_user_id(hamlet.id)
        # We're logged in as hamlet, but deliver a LogoutRequest for cordelia.
        # This means our session should not be affected.
        self.make_idp_initiated_logout_request(cordelia.delivery_email)
        self.assert_logged_in_user_id(hamlet.id)

        cordelia.refresh_from_db()
        # Cordelia's api_key should have been rotated:
        self.assertNotEqual(cordelia.api_key, cordelia_old_api_key)

    def test_saml_idp_initiated_logout_invalid_nameid_format(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login("hamlet")

        self.assert_logged_in_user_id(hamlet.id)
        with self.assertLogs("zulip.auth.saml") as mock_logger:
            # LogoutRequests need to have the email address in NameID
            # so putting "hamlet" there is invalid.
            result = self.make_idp_initiated_logout_request("hamlet")
        self.assert_logged_in_user_id(hamlet.id)

        self.assertEqual(
            mock_logger.output,
            [
                "INFO:zulip.auth.saml:/complete/saml/: LogoutRequest failed: NameID is not a valid email address: hamlet"
            ],
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/")

    def test_saml_idp_initiated_logout_user_not_in_realm(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login("hamlet")

        self.assert_logged_in_user_id(hamlet.id)
        with self.assertLogs("zulip.auth.saml") as mock_logger:
            result = self.make_idp_initiated_logout_request("nonexistent@zulip.com")
        self.assert_logged_in_user_id(hamlet.id)

        self.assertEqual(
            mock_logger.output,
            [
                "INFO:zulip.auth.saml:/complete/saml/: LogoutRequest failed: No user with email specified in NameID found in realm 2. return_data={}"
            ],
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/")

    def test_saml_idp_initiated_logout_invalid_signature(self) -> None:
        hamlet = self.example_user("hamlet")
        self.login("hamlet")

        self.assert_logged_in_user_id(hamlet.id)
        with self.assertLogs("zulip.auth.saml") as mock_logger:
            # LogoutRequests we generate in tests don't have signatures. We can use
            # the make_validity_checks_pass argument to disable mocking of python3-saml
            # internal validation functions to make validation of our LogoutRequest fail
            # and test our error-handling of that.
            result = self.make_idp_initiated_logout_request(
                hamlet.delivery_email, make_validity_checks_pass=False
            )
        self.assert_logged_in_user_id(hamlet.id)

        self.assertEqual(
            mock_logger.output,
            [
                "INFO:zulip.auth.saml:/complete/saml/: LogoutRequest failed: ['invalid_logout_request_signature', 'Signature validation failed. Logout Request rejected']"
            ],
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/")

    def test_saml_idp_initiate_logout_invalid_logout_response(self) -> None:
        parameters = {"SAMLRequest": "this is not a valid SAMLRequest string."}
        with self.assertLogs("zulip.auth.saml") as mock_logger:
            result = self.client_get("http://zulip.testserver/complete/saml/", parameters)

        self.assertIn(
            "ERROR:zulip.auth.saml:Error parsing SAMLRequest: Start tag expected, '<' not found",
            mock_logger.output[0],
        )

        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")

    def test_auth_registration_with_no_name_provided(self) -> None:
        """
        The SAMLResponse may not actually provide name values, which is considered
        unexpected behavior for most social backends, but SAML is an exception. The
        signup flow should proceed normally, without pre-filling the name in the
        registration form.
        """
        email = "newuser@zulip.com"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name="")
        result = self.social_auth_test(account_data_dict, subdomain=subdomain, is_signup=True)
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            "",
            "Full Name",
            skip_registration_form=False,
            expect_full_name_prepopulated=False,
        )

    def test_social_auth_no_key(self) -> None:
        """
        Since in the case of SAML there isn't a direct equivalent of CLIENT_KEY_SETTING,
        we override this test to cover the case where the obligatory
        SOCIAL_AUTH_SAML_ENABLED_IDPS isn't configured.
        """
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=None):
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", next="/user_uploads/image"
            )
            self.assert_in_success_response(["Configuration error", "SAML authentication"], result)

            # Test the signup path too:
            result = self.social_auth_test(
                account_data_dict, is_signup=True, subdomain="zulip", next="/user_uploads/image"
            )
            self.assert_in_success_response(["Configuration error", "SAML authentication"], result)

    def test_config_error_page(self) -> None:
        with self.assertLogs(level="INFO") as info_log:
            result = self.client_get("/accounts/login/social/saml")
        self.assertEqual(
            info_log.output,
            ["INFO:root:Attempted to initiate SAML authentication with wrong idp argument: None"],
        )
        self.assert_in_success_response(["Configuration error", "SAML authentication"], result)

    def test_saml_auth_works_without_private_public_keys(self) -> None:
        with self.settings(SOCIAL_AUTH_SAML_SP_PUBLIC_CERT="", SOCIAL_AUTH_SAML_SP_PRIVATE_KEY=""):
            self.test_social_auth_success()

    def test_saml_auth_enabled(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.SAMLAuthBackend",)):
            self.assertTrue(saml_auth_enabled())
            result = self.client_get("/saml/metadata.xml")
            self.assert_in_success_response(
                [f'entityID="{settings.SOCIAL_AUTH_SAML_SP_ENTITY_ID}"'],
                result,
            )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration_auto_signup(self) -> None:
        """
        Verify that with SAML auto signup enabled, a user coming from the /login page
        (so without the is_signup param) will be taken straight to registration, without
        the extra step of having to confirm that they do want to sign up.
        """
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["auto_signup"] = True

        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                is_signup=False,
            )
            self.stage_two_of_registration(
                result,
                realm,
                subdomain,
                email,
                name,
                name,
                self.BACKEND_CLASS.full_name_validated,
                expect_confirm_registration_page=False,
            )

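    # In the error-handling tests below, RelayState is a JSON object whose state_token
    # key references request metadata (e.g. the subdomain) that put_data_in_redis
    # stored in Redis for the duration of the authentication attempt.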
    def test_social_auth_complete(self) -> None:
        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            with mock.patch.object(
                OneLogin_Saml2_Auth, "is_authenticated", return_value=False
            ), self.assertLogs(self.logger_string, level="INFO") as m:
                # This mock causes AuthFailed to be raised.
                saml_response = self.generate_saml_response(self.email, self.name)
                relay_state = orjson.dumps(
                    dict(
                        state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                    )
                ).decode()
                post_params = {"SAMLResponse": saml_response, "RelayState": relay_state}
                result = self.client_post("/complete/saml/", post_params)
            self.assertEqual(result.status_code, 302)
            self.assertIn("login", result["Location"])
            self.assertEqual(
                m.output,
                [
                    self.logger_output(
                        "AuthFailed: Authentication failed: SAML login failed: [] (None)", "info"
                    )
                ],
            )

    def test_social_auth_complete_when_base_exc_is_raised(self) -> None:
        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            with mock.patch(
                "social_core.backends.saml.SAMLAuth.auth_complete",
                side_effect=AuthStateForbidden("State forbidden"),
            ), self.assertLogs(self.logger_string, level="WARNING") as m:
                saml_response = self.generate_saml_response(self.email, self.name)
                relay_state = orjson.dumps(
                    dict(
                        state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                    )
                ).decode()
                post_params = {"SAMLResponse": saml_response, "RelayState": relay_state}
                result = self.client_post("/complete/saml/", post_params)
            self.assertEqual(result.status_code, 302)
            self.assertIn("login", result["Location"])
            self.assertEqual(
                m.output, [self.logger_output("Wrong state parameter given.", "warning")]
            )

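    # Malformed or incomplete requests to /complete/saml/ (missing SAMLResponse, empty
    # or undecodable payloads) should never produce a 500; each case below should just
    # log the problem and redirect the user to a login page.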
    def test_social_auth_complete_bad_params(self) -> None:
        # Simple GET for /complete/saml without the required parameters.
        # This tests the auth_complete wrapped in our SAMLAuthBackend,
        # ensuring it prevents such requests from causing an internal server error.
        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.client_get("/complete/saml/")
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/saml/: No SAMLResponse or SAMLRequest in request.", "info"
                )
            ],
        )

        # Check that POSTing the RelayState, but with missing SAMLResponse,
        # doesn't cause errors either:
        with self.assertLogs(self.logger_string, level="INFO") as m:
            relay_state = orjson.dumps(
                dict(
                    state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                )
            ).decode()
            post_params = {"RelayState": relay_state}
            result = self.client_post("/complete/saml/", post_params)
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/saml/: No SAMLResponse or SAMLRequest in request.", "info"
                )
            ],
        )

        # Now test bad SAMLResponses.
        with self.assertLogs(self.logger_string, level="INFO") as m:
            relay_state = orjson.dumps(
                dict(
                    state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                )
            ).decode()
            post_params = {"RelayState": relay_state, "SAMLResponse": ""}
            result = self.client_post("/complete/saml/", post_params)
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assert_length(m.output, 2)

        with self.assertLogs(self.logger_string, level="INFO") as m:
            relay_state = orjson.dumps(
                dict(
                    state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                )
            ).decode()
            post_params = {"RelayState": relay_state, "SAMLResponse": "b"}
            result = self.client_post("/complete/saml/", post_params)
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assert_length(m.output, 2)

        with self.assertLogs(self.logger_string, level="INFO") as m:
            relay_state = orjson.dumps(
                dict(
                    state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                )
            ).decode()
            post_params = {
                "RelayState": relay_state,
                "SAMLResponse": base64.b64encode(b"test").decode(),
            }
            result = self.client_post("/complete/saml/", post_params)
        self.assertEqual(result.status_code, 302)
        self.assertIn("login", result["Location"])
        self.assert_length(m.output, 2)

    def test_social_auth_complete_no_subdomain(self) -> None:
        with self.assertLogs(self.logger_string, level="INFO") as m:
            post_params = {
                "RelayState": "",
                "SAMLResponse": self.generate_saml_response(
                    email=self.example_email("hamlet"), name="King Hamlet"
                ),
            }
            with mock.patch.object(SAMLAuthBackend, "choose_subdomain", return_value=None):
                result = self.client_post("/complete/saml/", post_params)
            self.assertEqual(result.status_code, 302)
            self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/saml/: Can't figure out subdomain for this SAMLResponse. relayed_params: {}".format(
                        "{}"
                    ),
                    "info",
                )
            ],
        )

    def test_social_auth_complete_wrong_issuing_idp(self) -> None:
        relay_state = orjson.dumps(
            dict(
                state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
            )
        ).decode()
        saml_response = self.generate_saml_response(
            email=self.example_email("hamlet"), name="King Hamlet"
        )

        # We change the entity_id of the configured test IdP, which means it won't match
        # the Entity ID in the SAMLResponse generated above.
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["entity_id"] = "https://different.idp.example.com/"
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            with self.assertLogs(self.logger_string, level="INFO") as m:
                post_params = {"RelayState": relay_state, "SAMLResponse": saml_response}
                result = self.client_post("/complete/saml/", post_params)
            self.assertEqual(result.status_code, 302)
            self.assertEqual("/login/", result["Location"])
            self.assertEqual(
                m.output,
                [
                    self.logger_output(
                        "/complete/saml/: No valid IdP as issuer of the SAMLResponse.", "info"
                    )
                ],
            )

    def test_social_auth_complete_valid_get_idp_bad_samlresponse(self) -> None:
        """
        This tests for a hypothetical scenario where our basic parsing of the SAMLResponse
        successfully returns the issuing IdP, but it fails further down the line, during proper
        validation in the underlying libraries.
        """

        with self.assertLogs(self.logger_string, level="INFO") as m, mock.patch.object(
            SAMLDocument, "get_issuing_idp", return_value="test_idp"
        ):
            relay_state = orjson.dumps(
                dict(
                    state_token=SAMLAuthBackend.put_data_in_redis({"subdomain": "zulip"}),
                )
            ).decode()
            post_params = {
                "RelayState": relay_state,
                "SAMLResponse": base64.b64encode(b"test").decode(),
            }
            result = self.client_post("/complete/saml/", post_params)
            self.assertEqual(result.status_code, 302)
            self.assertIn("login", result["Location"])

        self.assert_length(m.output, 1)

    def test_social_auth_saml_bad_idp_param_on_login_page(self) -> None:
        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.client_get("/login/saml/")
        self.assertEqual(result.status_code, 302)
        self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/login/saml/ : Bad idp param: KeyError: {}.".format("'idp'"), "info"
                )
            ],
        )

        with self.assertLogs(self.logger_string, level="INFO") as m:
            result = self.client_get("/login/saml/", {"idp": "bad_idp"})
        self.assertEqual(result.status_code, 302)
        self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/login/saml/ : Bad idp param: KeyError: {}.".format("'bad_idp'"), "info"
                )
            ],
        )

    def test_social_auth_invalid_email(self) -> None:
        """
        This test needs an override from the original class. For security reasons,
        the 'next' and 'mobile_flow_otp' params don't get passed on in the session
        if the authentication attempt failed. See SAMLAuthBackend.auth_complete for details.
        """
        account_data_dict = self.get_account_data_dict(email="invalid", name=self.name)
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, "WARNING") as warn_log:
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                next="/user_uploads/image",
            )
        self.assertEqual(
            warn_log.output, [self.logger_output("SAML got invalid email argument.", "warning")]
        )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/register/")

    def test_social_auth_saml_multiple_idps_configured(self) -> None:
        # Set up a new SOCIAL_AUTH_SAML_ENABLED_IDPS dict with two idps.
        # We deepcopy() dictionaries around for the sake of brevity,
        # to avoid having to spell them out explicitly here.
        # The second idp's configuration is a copy of the first one,
        # with name test_idp2 and altered url. It is also configured to be
        # limited to the zulip realm, so that we get to test both types
        # of configs here.
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp2"] = copy.deepcopy(idps_dict["test_idp"])
        idps_dict["test_idp2"]["url"] = "https://idp2.example.com/idp/profile/SAML2/Redirect/SSO"
        idps_dict["test_idp2"]["display_name"] = "Second Test IdP"
        idps_dict["test_idp2"]["limit_to_subdomains"] = ["zulip"]

        # Run tests with multiple idps configured:
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            # Go to the login page and check that buttons to log in show up for both IdPs:
            result = self.client_get("/accounts/login/")
            self.assert_in_success_response(["Log in with Test IdP"], result)
            self.assert_in_success_response(["/accounts/login/social/saml/test_idp"], result)
            self.assert_in_success_response(["Log in with Second Test IdP"], result)
            self.assert_in_success_response(["/accounts/login/social/saml/test_idp2"], result)

            # Try successful authentication with the regular idp from all previous tests:
            self.test_social_auth_success()

            # Now test with the second idp:
            original_LOGIN_URL = self.LOGIN_URL
            original_SIGNUP_URL = self.SIGNUP_URL
            original_AUTHORIZATION_URL = self.AUTHORIZATION_URL
            self.LOGIN_URL = "/accounts/login/social/saml/test_idp2"
            self.SIGNUP_URL = "/accounts/register/social/saml/test_idp2"
            self.AUTHORIZATION_URL = idps_dict["test_idp2"]["url"]

            try:
                self.test_social_auth_success()
            finally:
                # Restore original values at the end, regardless of what happens
                # in the block above, to avoid affecting other tests in unpredictable
                # ways.
                self.LOGIN_URL = original_LOGIN_URL
                self.SIGNUP_URL = original_SIGNUP_URL
                self.AUTHORIZATION_URL = original_AUTHORIZATION_URL

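    # The tests below tweak individual keys of a configured IdP entry. Based only on the
    # keys exercised in this file, an entry in SOCIAL_AUTH_SAML_ENABLED_IDPS looks roughly
    # like the following (illustrative sketch, not the full set of supported keys):
    #   "test_idp": {"entity_id": ..., "url": ..., "slo_url": ..., "display_name": ...,
    #                "limit_to_subdomains": [...], "auto_signup": ...}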
    def test_social_auth_saml_idp_limited_to_subdomains_success(self) -> None:
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["limit_to_subdomains"] = ["zulip"]
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            self.test_social_auth_success()

    def test_social_auth_saml_idp_limited_to_subdomains_attempt_wrong_realm(self) -> None:
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["limit_to_subdomains"] = ["zulip"]
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            with self.assertLogs(self.logger_string, level="INFO") as m:
                result = self.social_auth_test(account_data_dict, subdomain="zephyr")
        self.assertEqual(result.status_code, 302)
        self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/saml/: Authentication request with IdP test_idp but this provider is not enabled "
                    "for this subdomain zephyr.",
                    "info",
                )
            ],
        )

    def test_social_auth_saml_login_bad_idp_arg(self) -> None:
        for action in ["login", "register"]:
            with self.assertLogs(level="INFO") as info_log:
                result = self.client_get(f"/accounts/{action}/social/saml")
            # Missing idp argument.
            self.assert_in_success_response(["Configuration error", "SAML authentication"], result)
            self.assertEqual(
                info_log.output,
                [
                    "INFO:root:Attempted to initiate SAML authentication with wrong idp argument: None"
                ],
            )

            with self.assertLogs(level="INFO") as info_log:
                result = self.client_get(f"/accounts/{action}/social/saml/nonexistent_idp")
            # No such IdP is configured.
            self.assertEqual(
                info_log.output,
                [
                    "INFO:root:Attempted to initiate SAML authentication with wrong idp argument: nonexistent_idp"
                ],
            )
            self.assert_in_success_response(["Configuration error", "SAML authentication"], result)

            result = self.client_get(f"/accounts/{action}/social/saml/")
            # No matching URL pattern.
            self.assertEqual(result.status_code, 404)

    def test_social_auth_saml_require_limit_to_subdomains(self) -> None:
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp2"] = copy.deepcopy(idps_dict["test_idp"])
        idps_dict["test_idp2"]["url"] = "https://idp2.example.com/idp/profile/SAML2/Redirect/SSO"
        idps_dict["test_idp2"]["display_name"] = "Second Test IdP"
        idps_dict["test_idp2"]["limit_to_subdomains"] = ["zulip"]

        with self.settings(
            SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict, SAML_REQUIRE_LIMIT_TO_SUBDOMAINS=True
        ):
            with self.assertLogs(self.logger_string, level="ERROR") as m:
                # Initialization of the backend should validate the configured IdPs
                # with respect to the SAML_REQUIRE_LIMIT_TO_SUBDOMAINS setting and remove
                # the non-compliant ones.
                SAMLAuthBackend()
            self.assertEqual(list(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.keys()), ["test_idp2"])
            self.assertEqual(
                m.output,
                [
                    self.logger_output(
                        "SAML_REQUIRE_LIMIT_TO_SUBDOMAINS is enabled and the following "
                        "IdPs don't have limit_to_subdomains specified and will be ignored: "
                        "['test_idp']",
                        "error",
                    )
                ],
            )

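    # IdP-initiated sign-in: the IdP POSTs a SAMLResponse directly to /complete/saml/,
    # optionally naming the target realm in RelayState. On success we redirect to
    # /accounts/login/subdomain/ on the realm's host with a token that logs the user
    # in; an unrecognized subdomain sends the user to /accounts/find/ instead.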
    def test_idp_initiated_signin_subdomain_specified(self) -> None:
        post_params = {
            "RelayState": '{"subdomain": "zulip"}',
            "SAMLResponse": self.generate_saml_response(email=self.email, name=self.name),
        }

        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            # We're not able to generate valid signatures in tests, so we need the mock.
            result = self.client_post("/complete/saml/", post_params)

        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        self.client_get(uri)
        self.assert_logged_in_user_id(self.example_user("hamlet").id)

    def test_choose_subdomain_invalid_subdomain_specified(self) -> None:
        post_params = {
            "RelayState": '{"subdomain": "invalid"}',
            "SAMLResponse": self.generate_saml_response(email=self.email, name=self.name),
        }

        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            # We're not able to generate valid signatures in tests, so we need the mock.
            result = self.client_post("/complete/saml/", post_params)

        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/accounts/find/")

    def test_idp_initiated_signin_subdomain_implicit(self) -> None:
        post_params = {
            "RelayState": "",
            "SAMLResponse": self.generate_saml_response(email=self.email, name=self.name),
        }

        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            # We're not able to generate valid signatures in tests, so we need the mock.
            result = self.client_post("http://zulip.testserver/complete/saml/", post_params)

        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        self.client_get(uri)
        self.assert_logged_in_user_id(self.example_user("hamlet").id)

    def test_idp_initiated_signin_subdomain_implicit_no_relaystate_param(self) -> None:
        post_params = {
            "SAMLResponse": self.generate_saml_response(email=self.email, name=self.name),
        }

        with mock.patch.object(OneLogin_Saml2_Response, "is_valid", return_value=True):
            # We're not able to generate valid signatures in tests, so we need the mock.
            result = self.client_post("http://zulip.testserver/complete/saml/", post_params)

        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

        self.client_get(uri)
        self.assert_logged_in_user_id(self.example_user("hamlet").id)


    def test_idp_initiated_signin_subdomain_implicit_invalid(self) -> None:
        post_params = {
            "RelayState": "",
            "SAMLResponse": self.generate_saml_response(email=self.email, name=self.name),
        }

        with self.assertLogs(self.logger_string, level="INFO") as m:
            with mock.patch("zproject.backends.get_subdomain", return_value="invalid"):
                # Due to the quirks of our test setup, get_subdomain on all these `some_subdomain.testserver`
                # requests returns 'zulip', so we need to mock it here.
                result = self.client_post("http://invalid.testserver/complete/saml/", post_params)
        self.assertEqual(result.status_code, 302)
        self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/saml/: Can't figure out subdomain for this SAMLResponse. relayed_params: {}",
                    "info",
                )
            ],
        )
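
    # Taken together, the three tests above exercise how the SAML backend picks
    # the realm for IdP-initiated logins: a subdomain carried in RelayState (when
    # the IdP provides one) wins, otherwise the subdomain of the /complete/saml/
    # request URL is used, and if neither resolves to a valid realm the user is
    # sent back to /login/.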

    def test_social_auth_saml_idp_org_membership_success(self) -> None:
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["attr_org_membership"] = "member"
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                expect_choose_email_screen=False,
                extra_attributes=dict(member=["zulip"]),
            )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.email)
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)

    def test_social_auth_saml_idp_org_membership_root_subdomain(self) -> None:
        realm = get_realm("zulip")
        realm.string_id = ""
        realm.save()

        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["attr_org_membership"] = "member"
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            # Having one of the settings.ROOT_SUBDOMAIN_ALIASES in the membership attributes
            # authorizes the user to access the root subdomain.
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            result = self.social_auth_test(
                account_data_dict,
                subdomain="",
                expect_choose_email_screen=False,
                extra_attributes=dict(member=["www"]),
            )
            data = load_subdomain_token(result)
            self.assertEqual(data["email"], self.email)
            self.assertEqual(data["full_name"], self.name)
            self.assertEqual(data["subdomain"], "")
            self.assertEqual(result.status_code, 302)

            # Failure, the user doesn't have entitlements for the root subdomain.
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            with self.assertLogs(self.logger_string, level="INFO") as m:
                result = self.social_auth_test(
                    account_data_dict,
                    subdomain="",
                    expect_choose_email_screen=False,
                    extra_attributes=dict(member=["zephyr"]),
                )
            self.assertEqual(result.status_code, 302)
            self.assertEqual(
                m.output,
                [
                    self.logger_output(
                        "AuthFailed: Authentication failed: SAML user from IdP test_idp rejected due to "
                        + "missing entitlement for subdomain ''. User entitlements: ['zephyr'].",
                        "info",
                    )
                ],
            )

    def test_social_auth_saml_idp_org_membership_failed(self) -> None:
        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["attr_org_membership"] = "member"
        with self.settings(SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict):
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            with self.assertLogs(self.logger_string, level="INFO") as m:
                result = self.social_auth_test(
                    account_data_dict,
                    subdomain="zulip",
                    extra_attributes=dict(member=["zephyr", "othersubdomain"]),
                )
        self.assertEqual(result.status_code, 302)
        self.assertEqual("/login/", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "AuthFailed: Authentication failed: SAML user from IdP test_idp rejected due to "
                    + "missing entitlement for subdomain 'zulip'. User entitlements: ['zephyr', 'othersubdomain'].",
                    "info",
                )
            ],
        )
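
    # Note on the attr_org_membership tests above: the SAML attribute named by
    # "attr_org_membership" is expected to list the subdomains the user is
    # entitled to; for the root domain, an entry matching one of
    # settings.ROOT_SUBDOMAIN_ALIASES (e.g. "www") is accepted as well.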

    def test_social_auth_custom_profile_field_sync(self) -> None:
        birthday_field = CustomProfileField.objects.get(
            realm=self.user_profile.realm, name="Birthday"
        )
        old_birthday_field_value = CustomProfileFieldValue.objects.get(
            user_profile=self.user_profile, field=birthday_field
        ).value

        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["extra_attrs"] = ["mobilePhone"]

        sync_custom_attrs_dict = {
            "zulip": {
                "saml": {
                    "phone_number": "mobilePhone",
                }
            }
        }

        with self.settings(
            SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict,
            SOCIAL_AUTH_SYNC_CUSTOM_ATTRS_DICT=sync_custom_attrs_dict,
        ):
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                extra_attributes=dict(mobilePhone=["123412341234"], birthday=["2021-01-01"]),
            )
            data = load_subdomain_token(result)
            self.assertEqual(data["email"], self.email)
            self.assertEqual(data["full_name"], self.name)
            self.assertEqual(data["subdomain"], "zulip")
            self.assertEqual(result.status_code, 302)

        phone_field = CustomProfileField.objects.get(
            realm=self.user_profile.realm, name="Phone number"
        )
        phone_field_value = CustomProfileFieldValue.objects.get(
            user_profile=self.user_profile, field=phone_field
        ).value
        self.assertEqual(phone_field_value, "123412341234")

        # Verify the Birthday field doesn't get synced - because it isn't configured for syncing.
        new_birthday_field_value = CustomProfileFieldValue.objects.get(
            user_profile=self.user_profile, field=birthday_field
        ).value
        self.assertEqual(new_birthday_field_value, old_birthday_field_value)

    def test_social_auth_custom_profile_field_sync_custom_field_not_existing(self) -> None:
        sync_custom_attrs_dict = {
            "zulip": {
                "saml": {
                    "title": "title",
                    "phone_number": "mobilePhone",
                }
            }
        }
        self.assertFalse(
            CustomProfileField.objects.filter(
                realm=self.user_profile.realm, name__iexact="title"
            ).exists()
        )

        idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
        idps_dict["test_idp"]["extra_attrs"] = ["mobilePhone", "title"]

        with self.settings(
            SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict,
            SOCIAL_AUTH_SYNC_CUSTOM_ATTRS_DICT=sync_custom_attrs_dict,
        ):
            account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
            with self.assertLogs(self.logger_string, level="WARNING") as m:
                result = self.social_auth_test(
                    account_data_dict,
                    subdomain="zulip",
                    extra_attributes=dict(mobilePhone=["123412341234"], birthday=["2021-01-01"]),
                )
            data = load_subdomain_token(result)
            self.assertEqual(data["email"], self.email)
            self.assertEqual(data["full_name"], self.name)
            self.assertEqual(data["subdomain"], "zulip")
            self.assertEqual(result.status_code, 302)

            self.assertEqual(
                m.output,
                [
                    self.logger_output(
                        "Exception while syncing custom profile fields for "
                        + f"user {self.user_profile.id}: Custom profile field with name title not found.",
                        "warning",
                    )
                ],
            )


class AppleAuthMixin:
    BACKEND_CLASS = AppleAuthBackend
    CLIENT_KEY_SETTING = "SOCIAL_AUTH_APPLE_KEY"
    AUTHORIZATION_URL = "https://appleid.apple.com/auth/authorize"
    ACCESS_TOKEN_URL = "https://appleid.apple.com/auth/token"
    AUTH_FINISH_URL = "/complete/apple/"

    def generate_id_token(
        self, account_data_dict: Dict[str, str], audience: Optional[str] = None
    ) -> str:
        payload = dict(email=account_data_dict["email"])

        # This setup is important because python-social-auth decodes `id_token`
        # with `SOCIAL_AUTH_APPLE_CLIENT` as the `audience`
        assert settings.SOCIAL_AUTH_APPLE_CLIENT is not None
        payload["aud"] = settings.SOCIAL_AUTH_APPLE_CLIENT

        if audience is not None:
            payload["aud"] = audience

        headers = {"kid": "SOMEKID"}
        private_key = APPLE_ID_TOKEN_GENERATION_KEY

        id_token = jwt.encode(payload, private_key, algorithm="RS256", headers=headers)

        return id_token
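
    # For illustration, the token built above is roughly equivalent to
    # (the email value here is just an example):
    #   jwt.encode(
    #       {"email": "hamlet@zulip.com", "aud": settings.SOCIAL_AUTH_APPLE_CLIENT},
    #       APPLE_ID_TOKEN_GENERATION_KEY,
    #       algorithm="RS256",
    #       headers={"kid": "SOMEKID"},
    #   )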

    def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
        name_parts = name.split(" ")
        first_name = name_parts[0]
        last_name = ""
        if len(name_parts) > 0:
            last_name = name_parts[-1]
        name_dict = {"firstName": first_name, "lastName": last_name}
        return dict(email=email, name=name_dict, email_verified=True)


class AppleIdAuthBackendTest(AppleAuthMixin, SocialAuthBase):
    LOGIN_URL = "/accounts/login/social/apple"
    SIGNUP_URL = "/accounts/register/social/apple"

    # This URL isn't used in the Apple auth flow, so we just set a
    # dummy value to keep SocialAuthBase common code happy.
    USER_INFO_URL = "/invalid-unused-url"

    def social_auth_test_finish(
        self,
        result: "TestHttpResponse",
        account_data_dict: Dict[str, str],
        expect_choose_email_screen: bool,
        headers: Any,
        **extra_data: Any,
    ) -> "TestHttpResponse":
        parsed_url = urllib.parse.urlparse(result["Location"])
        state = urllib.parse.parse_qs(parsed_url.query)["state"]
        user_param = json.dumps(account_data_dict)
        self.client.session.flush()
        result = self.client_post(
            self.AUTH_FINISH_URL, dict(state=state, user=user_param), **headers
        )
        return result

    def register_extra_endpoints(
        self,
        requests_mock: responses.RequestsMock,
        account_data_dict: Dict[str, str],
        **extra_data: Any,
    ) -> None:
        # This is the URL of an endpoint on Apple servers that returns
        # the public keys to be used for verifying the signature
        # on the JWT id_token.
        requests_mock.add(
            requests_mock.GET,
            self.BACKEND_CLASS.JWK_URL,
            status=200,
            json=json.loads(EXAMPLE_JWK),
        )

    def generate_access_token_url_payload(self, account_data_dict: Dict[str, str]) -> str:
        # The ACCESS_TOKEN_URL endpoint works a bit differently than in standard OAuth2,
        # and here, similarly to OIDC, id_token is also returned in the response.
        # In Apple auth, all the user information is carried in the id_token.
        return json.dumps(
            {
                "access_token": "foobar",
                "expires_in": time.time() + 60 * 5,
                "id_token": self.generate_id_token(account_data_dict),
                "token_type": "bearer",
            }
        )

    def test_apple_auth_enabled(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.AppleAuthBackend",)):
            self.assertTrue(apple_auth_enabled())

    def test_auth_registration_with_no_name_sent_from_apple(self) -> None:
        """
        Apple doesn't send the name in consecutive attempts if user registration
        fails the first time. This test verifies that the social pipeline is able
        to handle the case of the backend not providing this information.
        """
        email = "newuser@zulip.com"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name="")
        result = self.social_auth_test(
            account_data_dict, expect_choose_email_screen=True, subdomain=subdomain, is_signup=True
        )
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            "",
            "Full Name",
            skip_registration_form=False,
            expect_full_name_prepopulated=False,
        )

    def test_id_token_verification_failure(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with self.assertLogs(self.logger_string, level="INFO") as m:
            with mock.patch("jwt.decode", side_effect=PyJWTError):
                result = self.social_auth_test(
                    account_data_dict,
                    expect_choose_email_screen=True,
                    subdomain="zulip",
                    is_signup=True,
                )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "AuthFailed: Authentication failed: Token validation failed by ", "info"
                ),
            ],
        )

    def test_validate_state(self) -> None:
        with self.assertLogs(self.logger_string, level="INFO") as m:

            # (1) check if auth fails if no state value is sent.
            result = self.client_post("/complete/apple/")
            self.assertEqual(result.status_code, 302)
            self.assertIn("login", result["Location"])

            # (2) Check if auth fails when a state sent has no valid data stored in Redis.
            fake_state = "fa42e4ccdb630f0070c1daab70ad198d8786d4b639cd7a1b4db4d5a13c623060"
            result = self.client_post("/complete/apple/", {"state": fake_state})
            self.assertEqual(result.status_code, 302)
            self.assertIn("login", result["Location"])
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Sign in with Apple failed: missing state parameter.", "info"
                ),  # (1)
                self.logger_output("Missing needed parameter state", "warning"),
                self.logger_output("Sign in with Apple failed: bad state token.", "info"),  # (2)
                self.logger_output("Wrong state parameter given.", "warning"),
            ],
        )


class AppleAuthBackendNativeFlowTest(AppleAuthMixin, SocialAuthBase):
    SIGNUP_URL = "/complete/apple/"
    LOGIN_URL = "/complete/apple/"

    def prepare_login_url_and_headers(
        self,
        subdomain: str,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        is_signup: bool = False,
        next: str = "",
        multiuse_object_key: str = "",
        alternative_start_url: Optional[str] = None,
        id_token: Optional[str] = None,
        account_data_dict: Mapping[str, str] = {},
        *,
        user_agent: Optional[str] = None,
    ) -> Tuple[str, Dict[str, Any]]:
        url, headers = super().prepare_login_url_and_headers(
            subdomain,
            mobile_flow_otp,
            desktop_flow_otp,
            is_signup,
            next,
            multiuse_object_key,
            alternative_start_url=alternative_start_url,
            user_agent=user_agent,
        )

        params = {"native_flow": "true"}

        if id_token is not None:
            params["id_token"] = id_token

        if is_signup:
            params["is_signup"] = "1"

        if subdomain:
            params["subdomain"] = subdomain

        params["user"] = json.dumps(account_data_dict)

        url += f"&{urlencode(params)}"
        return url, headers

    def social_auth_test(
        self,
        account_data_dict: Dict[str, str],
        *,
        subdomain: str,
        mobile_flow_otp: Optional[str] = None,
        desktop_flow_otp: Optional[str] = None,
        is_signup: bool = False,
        next: str = "",
        multiuse_object_key: str = "",
        alternative_start_url: Optional[str] = None,
        skip_id_token: bool = False,
        user_agent: Optional[str] = None,
        **extra_data: Any,
    ) -> "TestHttpResponse":
        """In Apple's native authentication flow, the client app authenticates
        with Apple and receives the JWT id_token, before contacting
        the Zulip server. The app sends an appropriate request with
        it to /complete/apple/ to get logged in. See the backend
        class for details.

        As a result, we need a custom social_auth_test function that
        effectively just does the second half of the flow (i.e. the
        part after the redirect from this third-party authentication
        provider) with a properly generated id_token.
        """

        if not skip_id_token:
            id_token: Optional[str] = self.generate_id_token(
                account_data_dict, settings.SOCIAL_AUTH_APPLE_APP_ID
            )
        else:
            id_token = None

        url, headers = self.prepare_login_url_and_headers(
            subdomain,
            mobile_flow_otp,
            desktop_flow_otp,
            is_signup,
            next,
            multiuse_object_key,
            alternative_start_url=self.AUTH_FINISH_URL,
            user_agent=user_agent,
            id_token=id_token,
            account_data_dict=account_data_dict,
        )

        with self.apple_jwk_url_mock():
            result = self.client_get(url, **headers)

        return result
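
    # The URL assembled by prepare_login_url_and_headers above ends up looking
    # roughly like (values illustrative):
    #   /complete/apple/?native_flow=true&id_token=<jwt>&subdomain=zulip&user=<json>
    # which mirrors the request the mobile app makes after authenticating with Apple.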

    @contextmanager
    def apple_jwk_url_mock(self) -> Iterator[None]:
        with responses.RequestsMock(assert_all_requests_are_fired=False) as requests_mock:
            # The server fetches public keys for validating the id_token
            # from Apple servers. We need to mock that URL to return our key,
            # created for these tests.
            requests_mock.add(
                requests_mock.GET,
                self.BACKEND_CLASS.JWK_URL,
                status=200,
                json=json.loads(EXAMPLE_JWK),
            )
            yield

    def test_no_id_token_sent(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        result = self.social_auth_test(
            account_data_dict,
            expect_choose_email_screen=False,
            subdomain="zulip",
            next="/user_uploads/image",
            skip_id_token=True,
        )
        self.assert_json_error(result, "Missing id_token parameter")

    def test_social_auth_session_fields_cleared_correctly(self) -> None:
        mobile_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)

        def initiate_auth(mobile_flow_otp: Optional[str] = None) -> None:
            url, headers = self.prepare_login_url_and_headers(
                subdomain="zulip",
                id_token="invalid",
                mobile_flow_otp=mobile_flow_otp,
                account_data_dict=account_data_dict,
            )
            result = self.client_get(url, **headers)
            self.assertEqual(result.status_code, 302)

        with self.assertLogs(self.logger_string, level="INFO") as info_log:
            # Start Apple auth with mobile_flow_otp param. It should get saved into the session
            # on SOCIAL_AUTH_SUBDOMAIN.
            initiate_auth(mobile_flow_otp)

        self.assertEqual(self.client.session["mobile_flow_otp"], mobile_flow_otp)
        self.assertEqual(
            info_log.output,
            [
                self.logger_output(
                    "/complete/apple/: Authentication failed: Token validation failed by Not enough segments",
                    "info",
                )
            ],
        )

        with self.assertLogs(self.logger_string, level="INFO") as info_log:
            # Make a request without mobile_flow_otp param and verify the field doesn't persist
            # in the session from the previous request.
            initiate_auth()

        self.assertEqual(self.client.session.get("mobile_flow_otp"), None)
        self.assertEqual(
            info_log.output,
            [
                self.logger_output(
                    "/complete/apple/: Authentication failed: Token validation failed by Not enough segments",
                    "info",
                )
            ],
        )

    def test_id_token_with_invalid_aud_sent(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        url, headers = self.prepare_login_url_and_headers(
            subdomain="zulip",
            alternative_start_url=self.AUTH_FINISH_URL,
            id_token=self.generate_id_token(account_data_dict, audience="com.different.app"),
            account_data_dict=account_data_dict,
        )

        with self.apple_jwk_url_mock(), self.assertLogs(self.logger_string, level="INFO") as m:
            self.client_get(url, **headers)

        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "/complete/apple/: Authentication failed: Token validation failed by Invalid audience",
                    "info",
                )
            ],
        )

    def test_social_auth_desktop_success(self) -> None:
        """
        The desktop app doesn't currently use the native flow, and the desktop flow
        in its current form happens in the browser, so only the web flow is viable there.
        """

    def test_social_auth_no_key(self) -> None:
        """
        The basic validation of server configuration is handled on the
        /login/social/apple/ endpoint, which isn't even a part of the native flow.
        """


class GenericOpenIdConnectTest(SocialAuthBase):
    BACKEND_CLASS = GenericOpenIdConnectBackend
    CLIENT_KEY_SETTING = "SOCIAL_AUTH_TESTOIDC_KEY"
    CLIENT_SECRET_SETTING = "SOCIAL_AUTH_TESTOIDC_SECRET"
    LOGIN_URL = "/accounts/login/social/oidc"
    SIGNUP_URL = "/accounts/register/social/oidc"

    BASE_OIDC_URL = "https://example.com/api/openid"
    AUTHORIZATION_URL = f"{BASE_OIDC_URL}/authorize"
    ACCESS_TOKEN_URL = f"{BASE_OIDC_URL}/token"
    JWKS_URL = f"{BASE_OIDC_URL}/jwks"
    USER_INFO_URL = f"{BASE_OIDC_URL}/userinfo"
    AUTH_FINISH_URL = "/complete/oidc/"

    def social_auth_test(
        self,
        *args: Any,
        **kwargs: Any,
    ) -> "TestHttpResponse":
        # Example payload of the discovery endpoint (with appropriate values filled
        # in to match our test setup).
        # All the attributes below are REQUIRED per OIDC specification:
        # https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata
        # or at least required for the `code` flow with userinfo - that this implementation uses.
        # Other flows are not supported right now.
        idp_discovery_endpoint_payload_dict = {
            "issuer": self.BASE_OIDC_URL,
            "authorization_endpoint": self.AUTHORIZATION_URL,
            "token_endpoint": self.ACCESS_TOKEN_URL,
            "userinfo_endpoint": self.USER_INFO_URL,
            "response_types_supported": [
                "code",
                "id_token",
                "id_token token",
                "code token",
                "code id_token",
                "code id_token token",
            ],
            "jwks_uri": self.JWKS_URL,
            "id_token_signing_alg_values_supported": ["HS256", "RS256"],
            "subject_types_supported": ["public"],
        }

        # We need to run the social_auth_test procedure with a mock response set up for the
        # OIDC discovery endpoint as that's the first thing requested by the server when a user
        # starts trying to authenticate.
        with responses.RequestsMock(assert_all_requests_are_fired=False) as requests_mock:
            requests_mock.add(
                requests_mock.GET,
                f"{self.BASE_OIDC_URL}/.well-known/openid-configuration",
                status=200,
                body=json.dumps(idp_discovery_endpoint_payload_dict),
            )
            result = super().social_auth_test(*args, **kwargs)

        return result

    def social_auth_test_finish(self, *args: Any, **kwargs: Any) -> "TestHttpResponse":
        # Trying to generate a (access_token, id_token) pair here in tests that would
        # successfully pass validation by validate_and_return_id_token is impractical
        # and unnecessary (see python-social-auth implementation of the method for
        # how the validation works).
        # We can simply mock the method to make it succeed and return an empty dict, because
        # the return value is not used for anything.
        with mock.patch.object(
            GenericOpenIdConnectBackend, "validate_and_return_id_token", return_value={}
        ):
            return super().social_auth_test_finish(*args, **kwargs)

    def register_extra_endpoints(
        self,
        requests_mock: responses.RequestsMock,
        account_data_dict: Dict[str, str],
        **extra_data: Any,
    ) -> None:
        requests_mock.add(
            requests_mock.GET,
            self.JWKS_URL,
            status=200,
            json=json.loads(EXAMPLE_JWK),
        )

    def generate_access_token_url_payload(self, account_data_dict: Dict[str, str]) -> str:
        return json.dumps(
            {
                "access_token": "foobar",
                "expires_in": time.time() + 60 * 5,
                "id_token": "abcd1234",
                "token_type": "bearer",
            }
        )
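
    # The "abcd1234" id_token above is a dummy value: social_auth_test_finish
    # mocks validate_and_return_id_token, so the token is never actually decoded
    # in these tests.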

    def get_account_data_dict(self, email: str, name: Optional[str]) -> Dict[str, Any]:
        if name is not None:
            name_parts = name.split(" ")
            given_name = name_parts[0]
            family_name = name_parts[1]
        else:
            given_name = None
            family_name = None

        return dict(
            email=email,
            name=name,
            nickname="somenickname",
            given_name=given_name,
            family_name=family_name,
        )

    @override_settings(TERMS_OF_SERVICE_VERSION=None)
    def test_social_auth_registration_auto_signup(self) -> None:
        """
        The analogue of the auto_signup test for SAML.
        """
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=name)

        oidc_setting_dict = copy.deepcopy(settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS)
        idp_settings_dict = list(oidc_setting_dict.values())[0]
        idp_settings_dict["auto_signup"] = True
        with mock.patch.object(GenericOpenIdConnectBackend, "settings_dict", new=idp_settings_dict):
            result = self.social_auth_test(
                account_data_dict,
                expect_choose_email_screen=True,
                subdomain=subdomain,
                is_signup=False,
            )
            self.stage_two_of_registration(
                result,
                realm,
                subdomain,
                email,
                name,
                name,
                self.BACKEND_CLASS.full_name_validated,
                expect_confirm_registration_page=False,
            )

    def test_auth_registration_with_no_name_provided(self) -> None:
        """
        The OIDC IdP may not send the name information. The
        signup flow should proceed normally, without pre-filling the name in the
        registration form.
        """
        email = "newuser@zulip.com"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=None)
        result = self.social_auth_test(account_data_dict, subdomain=subdomain, is_signup=True)
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            "",
            "Full Name",
            skip_registration_form=False,
            expect_full_name_prepopulated=False,
        )

    def test_social_auth_no_key(self) -> None:
        """
        Requires overriding because client key/secret are configured
        in a different way than default for social auth backends.
        """
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)

        mock_oidc_setting_dict = copy.deepcopy(settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS)
        idp_config_dict = list(mock_oidc_setting_dict.values())[0]
        del idp_config_dict["client_id"]
        with self.settings(SOCIAL_AUTH_OIDC_ENABLED_IDPS=mock_oidc_setting_dict):
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", next="/user_uploads/image"
            )
            self.assert_in_success_response(["Configuration error", "OpenID Connect"], result)

    def test_too_many_idps(self) -> None:
        """
        Only one IdP is supported for now.
        """
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)

        mock_oidc_setting_dict = copy.deepcopy(settings.SOCIAL_AUTH_OIDC_ENABLED_IDPS)
        idp_config_dict = list(mock_oidc_setting_dict.values())[0]
        mock_oidc_setting_dict["secondprovider"] = idp_config_dict
        with self.settings(SOCIAL_AUTH_OIDC_ENABLED_IDPS=mock_oidc_setting_dict):
            result = self.social_auth_test(
                account_data_dict, subdomain="zulip", next="/user_uploads/image"
            )
            self.assert_in_success_response(["Configuration error", "OpenID Connect"], result)

    def test_config_error_development(self) -> None:
        """
        This test is redundant for now, as test_social_auth_no_key already
        tests this basic case, since this backend doesn't yet have more
        comprehensive config_error pages.
        """
        return

    def test_config_error_production(self) -> None:
        """
        This test is redundant for now, as test_social_auth_no_key already
        tests this basic case, since this backend doesn't yet have more
        comprehensive config_error pages.
        """
        return


class GitHubAuthBackendTest(SocialAuthBase):
    BACKEND_CLASS = GitHubAuthBackend
    CLIENT_KEY_SETTING = "SOCIAL_AUTH_GITHUB_KEY"
    CLIENT_SECRET_SETTING = "SOCIAL_AUTH_GITHUB_SECRET"
    LOGIN_URL = "/accounts/login/social/github"
    SIGNUP_URL = "/accounts/register/social/github"
    AUTHORIZATION_URL = "https://github.com/login/oauth/authorize"
    ACCESS_TOKEN_URL = "https://github.com/login/oauth/access_token"
    USER_INFO_URL = "https://api.github.com/user"
    AUTH_FINISH_URL = "/complete/github/"
    email_data: List[Dict[str, Any]] = []

    def social_auth_test_finish(
        self,
        result: "TestHttpResponse",
        account_data_dict: Dict[str, str],
        expect_choose_email_screen: bool,
        headers: Any,
        expect_noreply_email_allowed: bool = False,
        **extra_data: Any,
    ) -> "TestHttpResponse":
        parsed_url = urllib.parse.urlparse(result["Location"])
        csrf_state = urllib.parse.parse_qs(parsed_url.query)["state"]
        result = self.client_get(self.AUTH_FINISH_URL, dict(state=csrf_state), **headers)

        if expect_choose_email_screen:
            # As GitHub authenticates multiple email addresses,
            # we'll have an additional screen where the user selects
            # which email address to log in using (this screen is a
            # "partial" state of the python-social-auth pipeline).
            #
            # TODO: Generalize this testing code for use with other
            # authentication backends when we add a new authentication
            # backend that requires the "choose email" screen.
            self.assert_in_success_response(["Select account"], result)
            # Verify that all the emails returned by GitHub auth
            # are in the "choose email" screen.
            all_emails_verified = True
            for email_data_dict in self.email_data:
                email = email_data_dict["email"]
                if email.endswith("@users.noreply.github.com") and not expect_noreply_email_allowed:
                    self.assert_not_in_success_response([email], result)
                elif email_data_dict.get("verified"):
                    self.assert_in_success_response([email], result)
                else:
                    # We may change this if we provide a way to see
                    # the list of emails the user had.
                    self.assert_not_in_success_response([email], result)
                    all_emails_verified = False

            if all_emails_verified:
                self.assert_not_in_success_response(["also has unverified email"], result)
            else:
                self.assert_in_success_response(["also has unverified email"], result)
            result = self.client_get(
                self.AUTH_FINISH_URL,
                dict(state=csrf_state, email=account_data_dict["email"]),
                **headers,
            )

        return result
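
    # In the flow above, the second client_get simulates the user picking an
    # address on the "Select account" screen; passing email= resumes the
    # partial python-social-auth pipeline with that choice.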

    def register_extra_endpoints(
        self,
        requests_mock: responses.RequestsMock,
        account_data_dict: Dict[str, str],
        **extra_data: Any,
    ) -> None:
        # Keeping a verified email before the primary email makes sure
        # get_verified_emails puts the primary email at the start of the
        # email list returned as social_associate_user_helper assumes the
        # first email as the primary email.
        email_data = [
            dict(email="notprimary@example.com", verified=True),
            dict(email=account_data_dict["email"], verified=True, primary=True),
            dict(email="ignored@example.com", verified=False),
        ]
        email_data = extra_data.get("email_data", email_data)

        requests_mock.add(
            requests_mock.GET,
            "https://api.github.com/user/emails",
            status=200,
            body=json.dumps(email_data),
        )

        self.email_data = email_data

    def get_account_data_dict(
        self, email: str, name: str, user_avatar_url: str = ""
    ) -> Dict[str, Any]:
        return dict(email=email, name=name, user_avatar_url=user_avatar_url)

    def test_social_auth_email_not_verified(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        email_data = [
            dict(email=account_data_dict["email"], verified=False, primary=True),
        ]
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, level="WARNING") as m:
            result = self.social_auth_test(
                account_data_dict, subdomain=subdomain, email_data=email_data
            )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Social auth ({}) failed because user has no verified emails".format("GitHub"),
                    "warning",
                )
            ],
        )

    @override_settings(SOCIAL_AUTH_GITHUB_TEAM_ID="51246")
    def test_social_auth_github_team_not_member_failed(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with mock.patch(
            "social_core.backends.github.GithubTeamOAuth2.user_data",
            side_effect=AuthFailed("Not found"),
        ), self.assertLogs(self.logger_string, level="INFO") as mock_info:
            result = self.social_auth_test(account_data_dict, subdomain=subdomain)
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            mock_info.output,
            [
                self.logger_output(
                    "GitHub user is not member of required team",
                    "info",
                )
            ],
        )

    @override_settings(SOCIAL_AUTH_GITHUB_TEAM_ID="51247")
    def test_social_auth_github_team_member_success(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with mock.patch(
            "social_core.backends.github.GithubTeamOAuth2.user_data", return_value=account_data_dict
        ):
            result = self.social_auth_test(
                account_data_dict, expect_choose_email_screen=False, subdomain="zulip"
            )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")

    @override_settings(SOCIAL_AUTH_GITHUB_ORG_NAME="Zulip")
    def test_social_auth_github_organization_not_member_failed(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with mock.patch(
            "social_core.backends.github.GithubOrganizationOAuth2.user_data",
            side_effect=AuthFailed("Not found"),
        ), self.assertLogs(self.logger_string, level="INFO") as mock_info:
            result = self.social_auth_test(account_data_dict, subdomain=subdomain)
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            mock_info.output,
            [
                self.logger_output(
                    "GitHub user is not member of required organization",
                    "info",
                )
            ],
        )

    @override_settings(SOCIAL_AUTH_GITHUB_ORG_NAME="Zulip")
    def test_social_auth_github_organization_member_success(self) -> None:
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        with mock.patch(
            "social_core.backends.github.GithubOrganizationOAuth2.user_data",
            return_value=account_data_dict,
        ):
            result = self.social_auth_test(
                account_data_dict, expect_choose_email_screen=False, subdomain="zulip"
            )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")

    def test_github_auth_enabled(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.GitHubAuthBackend",)):
            self.assertTrue(github_auth_enabled())

    def test_github_oauth2_success_non_primary(self) -> None:
        account_data_dict = self.get_account_data_dict(
            email="nonprimary@zulip.com", name="Non Primary"
        )
        email_data = [
            dict(email=account_data_dict["email"], verified=True),
            dict(email="hamlet@zulip.com", verified=True, primary=True),
            dict(email="aaron@zulip.com", verified=True),
            dict(email="ignored@example.com", verified=False),
        ]
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            email_data=email_data,
            expect_choose_email_screen=True,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], "nonprimary@zulip.com")
        self.assertEqual(data["full_name"], "Non Primary")
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_success_single_email(self) -> None:
        # If the user has a single email associated with their GitHub account,
        # the choose email screen should not be shown and the first email
        # should be used for the user's signup/login.
        account_data_dict = self.get_account_data_dict(email="not-hamlet@zulip.com", name=self.name)
        email_data = [
            dict(email="hamlet@zulip.com", verified=True, primary=True),
        ]
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            email_data=email_data,
            expect_choose_email_screen=False,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], self.example_email("hamlet"))
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_login_only_one_account_exists(self) -> None:
        # In a login flow, if only one of the user's verified emails
        # is associated with an existing account, the user should be
        # just logged in (skipping the "choose email" screen). We
        # only want that screen if the user were instead trying to
        # register a new account, which they're not.
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        email_data = [
            dict(email=account_data_dict["email"], verified=True),
            dict(email="notprimary@zulip.com", verified=True),
            dict(email="verifiedemail@zulip.com", verified=True),
        ]
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            email_data=email_data,
            expect_choose_email_screen=False,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], account_data_dict["email"])
        self.assertEqual(data["full_name"], self.name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_login_multiple_accounts_exist(self) -> None:
        # In the login flow, if multiple of the user's verified emails
        # are associated with existing accounts, we expect the choose
        # email screen to select which account to use.
        hamlet = self.example_user("hamlet")
        account_data_dict = self.get_account_data_dict(email="hamlet@zulip.com", name="Hamlet")
        email_data = [
            dict(email=account_data_dict["email"], verified=True),
            dict(email="hamlet@zulip.com", verified=True, primary=True),
            dict(email="aaron@zulip.com", verified=True),
            dict(email="ignored@example.com", verified=False),
        ]
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            email_data=email_data,
            expect_choose_email_screen=True,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], "hamlet@zulip.com")
        self.assertEqual(data["full_name"], hamlet.full_name)
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_login_no_account_exists(self) -> None:
        # In the login flow, if the user has multiple verified emails,
        # none of which are associated with an existing account, the
        # choose email screen should be shown (which will lead to a
        # "continue to registration" choice).
        account_data_dict = self.get_account_data_dict(
            email="not-hamlet@zulip.com", name="Not Hamlet"
        )
        email_data = [
            dict(email=account_data_dict["email"], verified=True),
            dict(email="notprimary@zulip.com", verified=True),
            dict(email="verifiedemail@zulip.com", verified=True),
        ]
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            email_data=email_data,
            expect_choose_email_screen=True,
        )
        email = account_data_dict["email"]
        name = account_data_dict["name"]
        subdomain = "zulip"
        realm = get_realm("zulip")
        self.stage_two_of_registration(
            result,
            realm,
            subdomain,
            email,
            name,
            name,
            expect_confirm_registration_page=True,
            skip_registration_form=False,
        )

    def test_github_oauth2_signup_choose_existing_account(self) -> None:
        # In the sign up flow, if the user has chosen an email of an
        # existing account, the user will be logged in.
        account_data_dict = self.get_account_data_dict(email=self.email, name=self.name)
        email_data = [
            dict(email=account_data_dict["email"], verified=True),
            dict(email="notprimary@zulip.com", verified=True),
            dict(email="verifiedemail@zulip.com", verified=True),
        ]
        result = self.social_auth_test(
            account_data_dict,
            email_data=email_data,
            is_signup=True,
            subdomain="zulip",
            expect_choose_email_screen=True,
            next="/user_uploads/image",
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], account_data_dict["email"])
        self.assertEqual(data["full_name"], account_data_dict["name"])
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(data["redirect_to"], "/user_uploads/image")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_signup_choose_new_email_to_register(self) -> None:
        # In the sign up flow, if the user has multiple verified
        # emails, we show the "choose email" screen, even if the user
        # has another verified email with an existing account,
        # allowing the user to register a second account associated
        # with the second email.
        email = "newuser@zulip.com"
        name = "Full Name"
        subdomain = "zulip"
        realm = get_realm("zulip")
        account_data_dict = self.get_account_data_dict(email=email, name=name)
        email_data = [
            dict(email="hamlet@zulip.com", verified=True),
            dict(email=email, verified=True),
            dict(email="verifiedemail@zulip.com", verified=True),
        ]
        result = self.social_auth_test(
            account_data_dict,
            email_data=email_data,
            expect_choose_email_screen=True,
            subdomain=subdomain,
            is_signup=True,
        )
        self.stage_two_of_registration(
            result, realm, subdomain, email, name, name, self.BACKEND_CLASS.full_name_validated
        )

    def test_github_oauth2_email_no_reply_dot_github_dot_com(self) -> None:
        # As emails ending with `noreply.github.com` are excluded from
        # verified_emails unless an account with that email already exists,
        # choosing it as an email should raise an "email not associated" warning.
        noreply_email = "hamlet@users.noreply.github.com"
        account_data_dict = self.get_account_data_dict(email=noreply_email, name=self.name)
        email_data = [
            dict(email="notprimary@zulip.com", verified=True),
            dict(email="hamlet@zulip.com", verified=True, primary=True),
            dict(email="aaron@zulip.com", verified=True),
            dict(email=account_data_dict["email"], verified=True),
        ]
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, level="WARNING") as m:
            result = self.social_auth_test(
                account_data_dict,
                subdomain=subdomain,
                expect_choose_email_screen=True,
                email_data=email_data,
            )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Social auth (GitHub) failed because user has no verified"
                    " emails associated with the account",
                    "warning",
                )
            ],
        )

        # Now we create the user account with the noreply email and verify that it's
        # possible to sign in to it.
        realm = get_realm("zulip")
        do_create_user(
            noreply_email, "password", realm, account_data_dict["name"], acting_user=None
        )
        result = self.social_auth_test(
            account_data_dict,
            subdomain="zulip",
            expect_choose_email_screen=True,
            expect_noreply_email_allowed=True,
            email_data=email_data,
        )
        data = load_subdomain_token(result)
        self.assertEqual(data["email"], account_data_dict["email"])
        self.assertEqual(data["full_name"], account_data_dict["name"])
        self.assertEqual(data["subdomain"], "zulip")
        self.assertEqual(result.status_code, 302)
        parsed_url = urllib.parse.urlparse(result["Location"])
        uri = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
        self.assertTrue(uri.startswith("http://zulip.testserver/accounts/login/subdomain/"))

    def test_github_oauth2_email_not_associated(self) -> None:
        account_data_dict = self.get_account_data_dict(
            email="not-associated@zulip.com", name=self.name
        )
        email_data = [
            dict(email="nonprimary@zulip.com", verified=True),
            dict(email="hamlet@zulip.com", verified=True, primary=True),
            dict(email="aaron@zulip.com", verified=True),
        ]
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, level="WARNING") as m:
            result = self.social_auth_test(
                account_data_dict,
                subdomain=subdomain,
                expect_choose_email_screen=True,
                email_data=email_data,
            )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Social auth (GitHub) failed because user has no verified"
                    " emails associated with the account",
                    "warning",
                )
            ],
        )

    def test_github_unverified_email_with_existing_account(self) -> None:
        # Check that a user is denied login if they manage to submit, via the
        # `email` GET parameter, an unverified email that belongs to an
        # existing account in the organization.
        subdomain = "zulip"
        realm = get_realm(subdomain)
        account_data_dict = dict(email="hamlet@zulip.com", name=self.name)
        email_data = [
            dict(email="iago@zulip.com", verified=True),
            dict(email="hamlet@zulip.com", verified=False),
            dict(email="aaron@zulip.com", verified=True, primary=True),
        ]
        with self.assertLogs(self.logger_string, level="WARNING") as m:
            result = self.social_auth_test(
                account_data_dict,
                subdomain=subdomain,
                expect_choose_email_screen=True,
                email_data=email_data,
            )
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Social auth ({}) failed because user has no verified emails associated with the account".format(
                        "GitHub"
                    ),
                    "warning",
                )
            ],
        )


class GitLabAuthBackendTest(SocialAuthBase):
    BACKEND_CLASS = GitLabAuthBackend
    CLIENT_KEY_SETTING = "SOCIAL_AUTH_GITLAB_KEY"
    CLIENT_SECRET_SETTING = "SOCIAL_AUTH_GITLAB_SECRET"
    LOGIN_URL = "/accounts/login/social/gitlab"
    SIGNUP_URL = "/accounts/register/social/gitlab"
    AUTHORIZATION_URL = "https://gitlab.com/oauth/authorize"
    ACCESS_TOKEN_URL = "https://gitlab.com/oauth/token"
    USER_INFO_URL = "https://gitlab.com/api/v4/user"
    AUTH_FINISH_URL = "/complete/gitlab/"

    def test_gitlab_auth_enabled(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.GitLabAuthBackend",)):
            self.assertTrue(gitlab_auth_enabled())

    def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
        return dict(email=email, name=name, email_verified=True)


class GoogleAuthBackendTest(SocialAuthBase):
    BACKEND_CLASS = GoogleAuthBackend
    CLIENT_KEY_SETTING = "SOCIAL_AUTH_GOOGLE_KEY"
    CLIENT_SECRET_SETTING = "SOCIAL_AUTH_GOOGLE_SECRET"
    LOGIN_URL = "/accounts/login/social/google"
    SIGNUP_URL = "/accounts/register/social/google"
    AUTHORIZATION_URL = "https://accounts.google.com/o/oauth2/auth"
    ACCESS_TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
    USER_INFO_URL = "https://www.googleapis.com/oauth2/v3/userinfo"
    AUTH_FINISH_URL = "/complete/google/"

    def get_account_data_dict(self, email: str, name: str) -> Dict[str, Any]:
        return dict(email=email, name=name, email_verified=True)

    def test_social_auth_email_not_verified(self) -> None:
        account_data_dict = dict(email=self.email, name=self.name)
        subdomain = "zulip"
        realm = get_realm(subdomain)
        with self.assertLogs(self.logger_string, level="WARNING") as m:
            result = self.social_auth_test(account_data_dict, subdomain=subdomain)
        self.assertEqual(result.status_code, 302)
        self.assertEqual(result["Location"], realm.uri + "/login/")
        self.assertEqual(
            m.output,
            [
                self.logger_output(
                    "Social auth ({}) failed because user has no verified emails".format("Google"),
                    "warning",
                )
            ],
        )

    def test_social_auth_mobile_realm_uri(self) -> None:
        mobile_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=self.email, name="Full Name")

        with self.settings(
            REALM_MOBILE_REMAP_URIS={"http://zulip.testserver": "http://zulip-mobile.testserver"}
        ):
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                expect_choose_email_screen=True,
                alternative_start_url="/accounts/login/google/",
                mobile_flow_otp=mobile_flow_otp,
            )

        self.assertEqual(result.status_code, 302)
        redirect_url = result["Location"]
        parsed_url = urllib.parse.urlparse(redirect_url)
        query_params = urllib.parse.parse_qs(parsed_url.query)
        self.assertEqual(parsed_url.scheme, "zulip")
        self.assertEqual(query_params["realm"], ["http://zulip-mobile.testserver"])
        self.assertEqual(query_params["email"], [self.example_email("hamlet")])
        encrypted_api_key = query_params["otp_encrypted_api_key"][0]
        hamlet_api_keys = get_all_api_keys(self.example_user("hamlet"))
        self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), hamlet_api_keys)

    def test_social_auth_mobile_success_legacy_url(self) -> None:
        mobile_flow_otp = "1234abcd" * 8
        account_data_dict = self.get_account_data_dict(email=self.email, name="Full Name")
        self.assert_length(mail.outbox, 0)
        self.user_profile.date_joined = timezone_now() - datetime.timedelta(
            seconds=JUST_CREATED_THRESHOLD + 1
        )
        self.user_profile.save()

        with self.settings(SEND_LOGIN_EMAILS=True):
            # Verify that the right thing happens with an invalid-format OTP
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                alternative_start_url="/accounts/login/google/",
                mobile_flow_otp="1234",
            )
            self.assert_json_error(result, "Invalid OTP")
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                alternative_start_url="/accounts/login/google/",
                mobile_flow_otp="invalido" * 8,
            )
            self.assert_json_error(result, "Invalid OTP")

            # Now do it correctly
            result = self.social_auth_test(
                account_data_dict,
                subdomain="zulip",
                expect_choose_email_screen=True,
                alternative_start_url="/accounts/login/google/",
                mobile_flow_otp=mobile_flow_otp,
            )
        self.assertEqual(result.status_code, 302)
        redirect_url = result["Location"]
        parsed_url = urllib.parse.urlparse(redirect_url)
        query_params = urllib.parse.parse_qs(parsed_url.query)
        self.assertEqual(parsed_url.scheme, "zulip")
        self.assertEqual(query_params["realm"], ["http://zulip.testserver"])
        self.assertEqual(query_params["email"], [self.example_email("hamlet")])
        encrypted_api_key = query_params["otp_encrypted_api_key"][0]
        hamlet_api_keys = get_all_api_keys(self.example_user("hamlet"))
        self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), hamlet_api_keys)
        self.assert_length(mail.outbox, 1)
        self.assertIn("Zulip on Android", mail.outbox[0].body)
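
    # Illustrative sketch (not exercised by the tests above): the mobile flow
    # redirects to a zulip:// URL whose query string carries "realm", "email",
    # and "otp_encrypted_api_key", which the assertions above unpack by hand. A
    # helper like this one, assumed here purely for illustration, does the same:
    def example_parse_mobile_redirect(self, redirect_url: str) -> Dict[str, str]:
        parsed = urllib.parse.urlparse(redirect_url)
        assert parsed.scheme == "zulip"
        query_params = urllib.parse.parse_qs(parsed.query)
        # parse_qs returns a list of values per key; each parameter appears once here.
        return {key: values[0] for key, values in query_params.items()}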

    def test_google_auth_enabled(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.GoogleAuthBackend",)):
            self.assertTrue(google_auth_enabled())

    def get_log_into_subdomain(
        self,
        data: ExternalAuthDataDict,
        *,
        subdomain: str = "zulip",
        force_token: Optional[str] = None,
    ) -> "TestHttpResponse":
        if force_token is None:
            token = ExternalAuthResult(data_dict=data).store_data()
        else:
            token = force_token
        url_path = reverse(log_into_subdomain, args=[token])
        return self.client_get(url_path, subdomain=subdomain)
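
    # The helper above stores the ExternalAuthDataDict via
    # ExternalAuthResult(data_dict=data).store_data(), which returns a short-lived
    # login token, and then hits /accounts/login/subdomain/<token> on the target
    # subdomain. The tests below exercise the consumer side of that token flow
    # (valid, malformed, and unknown/expired tokens).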

    def test_redirect_to_next_url_for_log_into_subdomain(self) -> None:
        def test_redirect_to_next_url(next: str = "") -> "TestHttpResponse":
            data: ExternalAuthDataDict = {
                "full_name": "Hamlet",
                "email": self.example_email("hamlet"),
                "subdomain": "zulip",
                "is_signup": False,
                "redirect_to": next,
            }
            user_profile = self.example_user("hamlet")
            with mock.patch("zerver.views.auth.authenticate", return_value=user_profile):
                with mock.patch("zerver.views.auth.do_login"):
                    result = self.get_log_into_subdomain(data)
            return result

        res = test_redirect_to_next_url()
        self.assertEqual(res.status_code, 302)
        self.assertEqual(res["Location"], "http://zulip.testserver")
        res = test_redirect_to_next_url("/user_uploads/path_to_image")
        self.assertEqual(res.status_code, 302)
        self.assertEqual(res["Location"], "http://zulip.testserver/user_uploads/path_to_image")

        res = test_redirect_to_next_url("/#narrow/stream/7-test-here")
        self.assertEqual(res.status_code, 302)
        self.assertEqual(res["Location"], "http://zulip.testserver/#narrow/stream/7-test-here")

    def test_log_into_subdomain_when_token_is_malformed(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "Full Name",
            "email": self.example_email("hamlet"),
            "subdomain": "zulip",
            "is_signup": False,
            "redirect_to": "",
        }
        with self.assertLogs(level="WARNING") as m:
            result = self.get_log_into_subdomain(data, force_token="nonsense")
        self.assertEqual(
            m.output,
            ["WARNING:root:log_into_subdomain: Malformed token given: {}".format("nonsense")],
        )
        self.assertEqual(result.status_code, 400)

    def test_log_into_subdomain_when_token_not_found(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "Full Name",
            "email": self.example_email("hamlet"),
            "subdomain": "zulip",
            "is_signup": False,
            "redirect_to": "",
        }
        with self.assertLogs(level="WARNING") as m:
            token = secrets.token_hex(ExternalAuthResult.LOGIN_TOKEN_LENGTH // 2)
            result = self.get_log_into_subdomain(data, force_token=token)
        self.assertEqual(result.status_code, 400)
        self.assert_in_response("Invalid or expired login session.", result)
        self.assertEqual(
            m.output, [f"WARNING:root:log_into_subdomain: Invalid token given: {token}"]
        )

    def test_prevent_duplicate_signups(self) -> None:
        existing_user = self.example_user("hamlet")
        existing_user.delivery_email = "existing@zulip.com"
        existing_user.email = "whatever@zulip.com"
        existing_user.save()

        data: ExternalAuthDataDict = {
            "full_name": "Full Name",
            "email": "existing@zulip.com",
            "subdomain": "zulip",
            "is_signup": True,
            "redirect_to": "",
        }
        result = self.get_log_into_subdomain(data)

        # Should simply get logged into the existing account:
        self.assertEqual(result.status_code, 302)
        self.assert_logged_in_user_id(existing_user.id)

    def test_log_into_subdomain_when_is_signup_is_true_and_new_user(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "New User Name",
            "email": "new@zulip.com",
            "subdomain": "zulip",
            "is_signup": True,
            "redirect_to": "",
        }
        result = self.get_log_into_subdomain(data)
        self.assertEqual(result.status_code, 302)
        confirmation = Confirmation.objects.all().first()
        assert confirmation is not None
        confirmation_key = confirmation.confirmation_key
        self.assertIn("do_confirm/" + confirmation_key, result["Location"])
        result = self.client_get(result["Location"])
        self.assert_in_response('action="/accounts/register/"', result)
        confirmation_data = {
            "from_confirmation": "1",
            "full_name": data["full_name"],
            "key": confirmation_key,
        }
        result = self.client_post("/accounts/register/", confirmation_data, subdomain="zulip")
        self.assert_in_response("We just need you to do one last thing", result)

        # Verify that the user is asked for name but not password
        self.assert_not_in_success_response(["id_password"], result)
        self.assert_in_success_response(["id_full_name"], result)

    def test_log_into_subdomain_when_is_signup_is_false_and_new_user(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "New User Name",
            "email": "new@zulip.com",
            "subdomain": "zulip",
            "is_signup": False,
            "redirect_to": "",
        }
        result = self.get_log_into_subdomain(data)
        self.assertEqual(result.status_code, 200)
        self.assert_in_response("No account found for", result)
        self.assert_in_response("new@zulip.com.", result)
        self.assert_in_response('action="http://zulip.testserver/accounts/do_confirm/', result)

        url = re.findall(
            'action="(http://zulip.testserver/accounts/do_confirm[^"]*)"',
            result.content.decode(),
        )[0]
        confirmation = Confirmation.objects.all().first()
        assert confirmation is not None
        confirmation_key = confirmation.confirmation_key
        self.assertIn("do_confirm/" + confirmation_key, url)
        result = self.client_get(url)
        self.assert_in_response('action="/accounts/register/"', result)
        confirmation_data = {
            "from_confirmation": "1",
            "full_name": data["full_name"],
            "key": confirmation_key,
        }
        result = self.client_post("/accounts/register/", confirmation_data, subdomain="zulip")
        self.assert_in_response("We just need you to do one last thing", result)

        # Verify that the user is asked for name but not password
        self.assert_not_in_success_response(["id_password"], result)
        self.assert_in_success_response(["id_full_name"], result)

    def test_log_into_subdomain_when_using_invite_link(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "New User Name",
            "email": "new@zulip.com",
            "subdomain": "zulip",
            "is_signup": True,
            "redirect_to": "",
        }

        realm = get_realm("zulip")
        realm.invite_required = True
        realm.save()

        stream_names = ["new_stream_1", "new_stream_2"]
        streams = []
        for stream_name in set(stream_names):
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams.append(stream)

        # Without the invite link, we can't create an account due to invite_required
        result = self.get_log_into_subdomain(data)
        self.assertEqual(result.status_code, 200)
        self.assert_in_success_response(["Sign up for Zulip"], result)

        # Now confirm an invitation link works
        referrer = self.example_user("hamlet")
        multiuse_obj = MultiuseInvite.objects.create(realm=realm, referred_by=referrer)
        multiuse_obj.streams.set(streams)
        validity_in_minutes = 2 * 24 * 60
        create_confirmation_link(
            multiuse_obj, Confirmation.MULTIUSE_INVITE, validity_in_minutes=validity_in_minutes
        )
        multiuse_confirmation = Confirmation.objects.all().last()
        assert multiuse_confirmation is not None
        multiuse_object_key = multiuse_confirmation.confirmation_key

        data["multiuse_object_key"] = multiuse_object_key
        result = self.get_log_into_subdomain(data)
        self.assertEqual(result.status_code, 302)

        confirmation = Confirmation.objects.all().last()
        assert confirmation is not None
        confirmation_key = confirmation.confirmation_key
        self.assertIn("do_confirm/" + confirmation_key, result["Location"])
        result = self.client_get(result["Location"])
        self.assert_in_response('action="/accounts/register/"', result)
        data2 = {"from_confirmation": "1", "full_name": data["full_name"], "key": confirmation_key}
        result = self.client_post("/accounts/register/", data2, subdomain="zulip")
        self.assert_in_response("We just need you to do one last thing", result)

        # Verify that the user is asked for name but not password
        self.assert_not_in_success_response(["id_password"], result)
        self.assert_in_success_response(["id_full_name"], result)

        # Click confirm registration button.
        result = self.client_post(
            "/accounts/register/",
            {"full_name": "New User Name", "key": confirmation_key, "terms": True},
        )
        self.assertEqual(result.status_code, 302)
        new_user = get_user_by_delivery_email("new@zulip.com", realm)
        new_streams = self.get_streams(new_user)
        self.assertEqual(sorted(new_streams), stream_names)

    def test_log_into_subdomain_when_email_is_none(self) -> None:
        data: ExternalAuthDataDict = {
            "subdomain": "zulip",
            "is_signup": False,
            "redirect_to": "",
        }

        with self.assertLogs(level="WARNING") as m:
            token = secrets.token_hex(ExternalAuthResult.LOGIN_TOKEN_LENGTH // 2)
            result = self.get_log_into_subdomain(data, force_token=token)
        self.assertEqual(result.status_code, 400)
        self.assertEqual(
            m.output, [f"WARNING:root:log_into_subdomain: Invalid token given: {token}"]
        )

    def test_user_cannot_log_into_wrong_subdomain(self) -> None:
        data: ExternalAuthDataDict = {
            "full_name": "Full Name",
            "email": self.example_email("hamlet"),
            "subdomain": "zephyr",
        }
        result = self.get_log_into_subdomain(data)
        self.assert_json_error(result, "Invalid subdomain")
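
# Note the contrast between the two fetch_api_key flavors covered next:
# /json/fetch_api_key re-verifies the password of the already-logged-in session
# user, while /api/v1/fetch_api_key (further below) authenticates from scratch
# with a username and password.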


class JSONFetchAPIKeyTest(ZulipTestCase):
    def test_success(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)
        result = self.client_post(
            "/json/fetch_api_key", dict(password=initial_password(user.delivery_email))
        )
        self.assert_json_success(result)

    def test_not_loggedin(self) -> None:
        user = self.example_user("hamlet")
        result = self.client_post(
            "/json/fetch_api_key", dict(password=initial_password(user.delivery_email))
        )
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

    def test_wrong_password(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)
        result = self.client_post("/json/fetch_api_key", dict(password="wrong"))
        self.assert_json_error(result, "Password is incorrect.", 400)

    def test_invalid_subdomain(self) -> None:
        username = "hamlet"
        user = self.example_user(username)
        self.login_user(user)
        with mock.patch("zerver.views.auth.get_realm_from_request", return_value=None):
            result = self.client_post(
                "/json/fetch_api_key",
                dict(username=username, password=initial_password(user.delivery_email)),
            )
        self.assert_json_error(result, "Invalid subdomain", 400)
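

# A hedged, illustrative sketch of driving the same endpoint FetchAPIKeyTest
# exercises from an ordinary HTTP client; the server URL and credentials are
# placeholders, and this helper is not used anywhere in the test suite.
def example_fetch_api_key_via_http(server: str, username: str, password: str) -> str:
    import requests  # local import; only needed for this illustrative helper

    response = requests.post(
        f"{server}/api/v1/fetch_api_key",
        data=dict(username=username, password=password),
    )
    response.raise_for_status()
    # On success the endpoint returns JSON containing the account's API key.
    return response.json()["api_key"]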


class FetchAPIKeyTest(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.email = self.user_profile.delivery_email

    def test_success(self) -> None:
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username=self.email, password=initial_password(self.email)),
        )
        self.assert_json_success(result)

    def test_invalid_email(self) -> None:
        result = self.client_post(
            "/api/v1/fetch_api_key", dict(username="hamlet", password=initial_password(self.email))
        )
        self.assert_json_error(result, "Enter a valid email address.", 400)

    def test_wrong_password(self) -> None:
        result = self.client_post(
            "/api/v1/fetch_api_key", dict(username=self.email, password="wrong")
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

    def test_invalid_subdomain(self) -> None:
        with mock.patch("zerver.views.auth.get_realm_from_request", return_value=None):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username="hamlet", password=initial_password(self.email)),
            )
        self.assert_json_error(result, "Invalid subdomain", 404)

    def test_password_auth_disabled(self) -> None:
        with mock.patch("zproject.backends.password_auth_enabled", return_value=False):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username=self.email, password=initial_password(self.email)),
            )
        self.assert_json_error_contains(
            result, "Password authentication is disabled in this organization", 401
        )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_ldap_auth_email_auth_disabled_success(self) -> None:
        self.init_default_ldap_database()
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username=self.example_email("hamlet"), password=self.ldap_password("hamlet")),
            )
        self.assert_json_success(result)

    @override_settings(
        AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
        LDAP_APPEND_DOMAIN="zulip.com",
        AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "org_membership": "department"},
    )
    def test_ldap_auth_email_auth_organization_restriction(self) -> None:
        self.init_default_ldap_database()
        # We test two combinations here:
        # The first user has no (department) attribute set.
        # The second user has one set, but to a different value.
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

        self.change_ldap_user_attr("hamlet", "department", "testWrongRealm")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

        self.change_ldap_user_attr("hamlet", "department", "zulip")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_success(result)
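
    # In the next test, AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL maps a realm
    # subdomain to a list of attribute dicts: the attributes inside one dict must
    # all match (AND), while the dicts in the list are alternatives (OR). That
    # reading follows from the combinations exercised below (both test1 and test2
    # are needed to satisfy the first dict, while test1=test2 alone satisfies the
    # second).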

    @override_settings(
        AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
        LDAP_APPEND_DOMAIN="zulip.com",
        AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "org_membership": "department"},
        AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL={
            "zulip": [{"test1": "test", "test2": "testing"}, {"test1": "test2"}],
            "anotherRealm": [{"test2": "test2"}],
        },
    )
    def test_ldap_auth_email_auth_advanced_organization_restriction(self) -> None:
        self.init_default_ldap_database()

        # The first user has no attribute set
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

        self.change_ldap_user_attr("hamlet", "test2", "testing")
        # Check with only one of the required attributes set
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

        self.change_ldap_user_attr("hamlet", "test1", "test")
        # Set org_membership so that django_auth_ldap doesn't warn when synchronizing
        self.change_ldap_user_attr("hamlet", "department", "wrongDepartment")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_success(result)
        self.remove_ldap_user_attr("hamlet", "test2")
        self.remove_ldap_user_attr("hamlet", "test1")

        # Using the OR value
        self.change_ldap_user_attr("hamlet", "test1", "test2")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_success(result)

        # Testing without org_membership
        with override_settings(AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn"}):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username="hamlet", password=self.ldap_password("hamlet")),
            )
            self.assert_json_success(result)

        # Setting test1 to a wrong value
        self.change_ldap_user_attr("hamlet", "test1", "invalid")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_error(result, "Your username or password is incorrect", 401)

        # Override access with `org_membership`
        self.change_ldap_user_attr("hamlet", "department", "zulip")
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username="hamlet", password=self.ldap_password("hamlet")),
        )
        self.assert_json_success(result)
        self.remove_ldap_user_attr("hamlet", "department")

        # Test wrong configuration
        with override_settings(
            AUTH_LDAP_ADVANCED_REALM_ACCESS_CONTROL={"not_zulip": [{"department": "zulip"}]}
        ):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username="hamlet", password=self.ldap_password("hamlet")),
            )
            self.assert_json_error(result, "Your username or password is incorrect", 401)

    def test_inactive_user(self) -> None:
        do_deactivate_user(self.user_profile, acting_user=None)
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username=self.email, password=initial_password(self.email)),
        )
        self.assert_json_error_contains(result, "Account is deactivated", 401)

    def test_deactivated_realm(self) -> None:
        do_deactivate_realm(self.user_profile.realm, acting_user=None)
        result = self.client_post(
            "/api/v1/fetch_api_key",
            dict(username=self.email, password=initial_password(self.email)),
        )
        self.assert_json_error_contains(result, "This organization has been deactivated", 401)

    def test_old_weak_password_after_hasher_change(self) -> None:
        user_profile = self.example_user("hamlet")
        password = "a_password_of_22_chars"

        with self.settings(PASSWORD_HASHERS=("django.contrib.auth.hashers.SHA1PasswordHasher",)):
            user_profile.set_password(password)
            user_profile.save()

        with self.settings(
            PASSWORD_HASHERS=(
                "django.contrib.auth.hashers.MD5PasswordHasher",
                "django.contrib.auth.hashers.SHA1PasswordHasher",
            ),
            PASSWORD_MIN_LENGTH=30,
        ), self.assertLogs("zulip.auth.email", level="INFO"):
            result = self.client_post(
                "/api/v1/fetch_api_key",
                dict(username=self.email, password=password),
            )
            self.assert_json_error(
                result, "Your password has been disabled and needs to be reset", 401
            )


class DevFetchAPIKeyTest(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user("hamlet")
        self.email = self.user_profile.delivery_email

    def test_success(self) -> None:
        result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=self.email))
        data = self.assert_json_success(result)
        self.assertEqual(data["email"], self.email)
        user_api_keys = get_all_api_keys(self.user_profile)
        self.assertIn(data["api_key"], user_api_keys)

    def test_invalid_email(self) -> None:
        email = "hamlet"
        result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=email))
        self.assert_json_error_contains(result, "Enter a valid email address.", 400)

    def test_unregistered_user(self) -> None:
        email = "foo@zulip.com"
        result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=email))
        self.assert_json_error_contains(result, "Your username or password is incorrect", 401)

    def test_inactive_user(self) -> None:
        do_deactivate_user(self.user_profile, acting_user=None)
        result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=self.email))
        self.assert_json_error_contains(result, "Account is deactivated", 401)

    def test_deactivated_realm(self) -> None:
        do_deactivate_realm(self.user_profile.realm, acting_user=None)
        result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=self.email))
        self.assert_json_error_contains(result, "This organization has been deactivated", 401)

    def test_dev_auth_disabled(self) -> None:
        with mock.patch("zerver.views.development.dev_login.dev_auth_enabled", return_value=False):
            result = self.client_post("/api/v1/dev_fetch_api_key", dict(username=self.email))
            self.assert_json_error_contains(result, "DevAuthBackend not enabled.", 400)

    def test_invalid_subdomain(self) -> None:
        with mock.patch(
            "zerver.views.development.dev_login.get_realm_from_request", return_value=None
        ):
            result = self.client_post(
                "/api/v1/dev_fetch_api_key",
                dict(username=self.email, password=initial_password(self.email)),
            )
        self.assert_json_error_contains(result, "Invalid subdomain", 404)
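
# Both development-only endpoints covered next (/api/v1/dev_fetch_api_key and
# /api/v1/dev_list_users) are rejected when DevAuthBackend is disabled, and
# /api/v1/dev_list_users is additionally rejected when the server runs in
# production mode, as the tests below assert.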


class DevGetEmailsTest(ZulipTestCase):
    def test_success(self) -> None:
        result = self.client_get("/api/v1/dev_list_users")
        self.assert_json_success(result)
        self.assert_in_response("direct_admins", result)
        self.assert_in_response("direct_users", result)

    def test_dev_auth_disabled(self) -> None:
        with mock.patch("zerver.views.development.dev_login.dev_auth_enabled", return_value=False):
            result = self.client_get("/api/v1/dev_list_users")
            self.assert_json_error_contains(result, "DevAuthBackend not enabled.", 400)

        with override_settings(PRODUCTION=True):
            result = self.client_get("/api/v1/dev_list_users")
            self.assert_json_error_contains(result, "Endpoint not available in production.", 400)


class ExternalMethodDictsTests(ZulipTestCase):
    def get_configured_saml_backend_idp_names(self) -> Iterable[str]:
        return settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.keys()

    def test_get_external_method_dicts_correctly_sorted(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.GitHubAuthBackend",
                "zproject.backends.GoogleAuthBackend",
                "zproject.backends.ZulipRemoteUserBackend",
                "zproject.backends.SAMLAuthBackend",
                "zproject.backends.AzureADAuthBackend",
            ),
        ):
            external_auth_methods = get_external_method_dicts()
            external_auth_backends: List[Type[ExternalAuthMethod]] = [
                ZulipRemoteUserBackend,
                GitHubAuthBackend,
                AzureADAuthBackend,
                GoogleAuthBackend,
            ]
            # The first backends in the list should be the SAML ones:
            self.assertIn("saml:", external_auth_methods[0]["name"])
            self.assertEqual(
                [social_backend["name"] for social_backend in external_auth_methods[1:]],
                [
                    social_backend.name
                    for social_backend in sorted(
                        external_auth_backends,
                        key=lambda x: x.sort_order,
                        reverse=True,
                    )
                ],
            )
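
    # Each entry returned by get_external_method_dicts() carries at least a
    # "name" field ("github", "google", or "saml:<idp name>" for SAML IdPs), and
    # the login/register pages render one button per entry with an id of the
    # form "<page>_auth_button_<name>", which the next test checks.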

    def test_get_external_method_buttons(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.GitHubAuthBackend",
                "zproject.backends.GoogleAuthBackend",
                "zproject.backends.SAMLAuthBackend",
            ),
        ):
            saml_idp_names = self.get_configured_saml_backend_idp_names()
            expected_button_id_strings = [
                'id="{}_auth_button_github"',
                'id="{}_auth_button_google"',
            ]
            for name in saml_idp_names:
                expected_button_id_strings.append(f'id="{{}}_auth_button_saml:{name}"')

            result = self.client_get("/login/")
            self.assert_in_success_response(
                [string.format("login") for string in expected_button_id_strings], result
            )

            result = self.client_get("/register/")
            self.assert_in_success_response(
                [string.format("register") for string in expected_button_id_strings], result
            )
def test_get_external_method_dicts_multiple_saml_idps(self) -> None:
|
|
|
|
idps_dict = copy.deepcopy(settings.SOCIAL_AUTH_SAML_ENABLED_IDPS)
|
|
|
|
# Create another IdP config, by copying the original one and changing some details:
|
2021-02-12 08:20:45 +01:00
|
|
|
idps_dict["test_idp2"] = copy.deepcopy(idps_dict["test_idp"])
|
|
|
|
idps_dict["test_idp2"]["url"] = "https://idp2.example.com/idp/profile/SAML2/Redirect/SSO"
|
|
|
|
idps_dict["test_idp2"]["display_name"] = "Second Test IdP"
|
|
|
|
idps_dict["test_idp2"]["limit_to_subdomains"] = ["zephyr"]
|
2020-04-10 16:30:02 +02:00
|
|
|
with self.settings(
|
|
|
|
SOCIAL_AUTH_SAML_ENABLED_IDPS=idps_dict,
|
2021-02-12 08:19:30 +01:00
|
|
|
AUTHENTICATION_BACKENDS=(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zproject.backends.EmailAuthBackend",
|
|
|
|
"zproject.backends.GitHubAuthBackend",
|
|
|
|
"zproject.backends.SAMLAuthBackend",
|
2021-02-12 08:19:30 +01:00
|
|
|
),
|
2020-04-10 16:30:02 +02:00
|
|
|
):
|
|
|
|
# Calling get_external_method_dicts without a realm returns all methods configured on the server:
|
|
|
|
external_auth_methods = get_external_method_dicts()
|
2020-10-23 02:43:28 +02:00
|
|
|
# 1 IdP enabled for all realms + a dict for GitHub auth
|
2020-04-18 15:47:41 +02:00
|
|
|
self.assert_length(external_auth_methods, 2)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
[external_auth_methods[0]["name"], external_auth_methods[1]["name"]],
|
|
|
|
["saml:test_idp", "github"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-04-10 16:30:02 +02:00
|
|
|
|
|
|
|
external_auth_methods = get_external_method_dicts(get_realm("zulip"))
|
2020-10-23 02:43:28 +02:00
|
|
|
# Only test_idp enabled for the zulip realm, + GitHub auth.
|
2020-04-10 16:30:02 +02:00
|
|
|
self.assert_length(external_auth_methods, 2)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
[external_auth_methods[0]["name"], external_auth_methods[1]["name"]],
|
|
|
|
["saml:test_idp", "github"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-04-10 16:30:02 +02:00
|
|
|
|
|
|
|
external_auth_methods = get_external_method_dicts(get_realm("zephyr"))
|
2020-10-23 02:43:28 +02:00
|
|
|
# Both idps enabled for the zephyr realm, + GitHub auth.
|
2020-04-10 16:30:02 +02:00
|
|
|
self.assert_length(external_auth_methods, 3)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
{external_auth_methods[0]["name"], external_auth_methods[1]["name"]},
|
|
|
|
{"saml:test_idp", "saml:test_idp2"},
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
|
2020-04-10 16:30:02 +02:00
|
|
|
|
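# Illustrative sketch (an assumption, not upstream code): the tests above call
# get_external_method_dicts() with and without a realm to get the server-wide and
# realm-filtered lists of external authentication methods. This hypothetical helper
# just extracts the method names from that output.
def summarize_external_method_names(realm: Any = None) -> List[str]:
    # Each entry is a dict with at least "name", "display_name", "login_url", and
    # "signup_url"; see FetchAuthBackends.test_get_server_settings below.
    return [method["name"] for method in get_external_method_dicts(realm)]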
2016-08-23 02:08:42 +02:00
|
|
|
class FetchAuthBackends(ZulipTestCase):
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_get_server_settings(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_result(
|
2022-06-08 04:52:09 +02:00
|
|
|
result: "TestHttpResponse", extra_fields: Sequence[Tuple[str, Validator[object]]] = []
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> None:
|
2018-12-19 01:35:13 +01:00
|
|
|
authentication_methods_list = [
|
2021-02-12 08:20:45 +01:00
|
|
|
("password", check_bool),
|
2018-12-19 01:35:13 +01:00
|
|
|
]
|
|
|
|
for backend_name_with_case in AUTH_BACKEND_NAME_MAP:
|
|
|
|
authentication_methods_list.append((backend_name_with_case.lower(), check_bool))
|
2019-12-08 23:11:25 +01:00
|
|
|
external_auth_methods = get_external_method_dicts()
|
2018-12-19 01:35:13 +01:00
|
|
|
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2021-02-12 08:19:30 +01:00
|
|
|
checker = check_dict_only(
|
|
|
|
[
|
2021-02-12 08:20:45 +01:00
|
|
|
("authentication_methods", check_dict_only(authentication_methods_list)),
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
2021-02-12 08:20:45 +01:00
|
|
|
"external_authentication_methods",
|
2021-02-12 08:19:30 +01:00
|
|
|
check_list(
|
|
|
|
check_dict_only(
|
|
|
|
[
|
2021-02-12 08:20:45 +01:00
|
|
|
("display_icon", check_none_or(check_string)),
|
|
|
|
("display_name", check_string),
|
|
|
|
("login_url", check_string),
|
|
|
|
("name", check_string),
|
|
|
|
("signup_url", check_string),
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
|
|
|
),
|
|
|
|
length=len(external_auth_methods),
|
|
|
|
),
|
|
|
|
),
|
2021-02-12 08:20:45 +01:00
|
|
|
("email_auth_enabled", check_bool),
|
|
|
|
("is_incompatible", check_bool),
|
|
|
|
("require_email_format_usernames", check_bool),
|
|
|
|
("realm_uri", check_string),
|
|
|
|
("zulip_version", check_string),
|
2021-07-30 12:25:53 +02:00
|
|
|
("zulip_merge_base", check_string),
|
2021-02-12 08:20:45 +01:00
|
|
|
("zulip_feature_level", check_int),
|
|
|
|
("push_notifications_enabled", check_bool),
|
2022-02-20 21:40:59 +01:00
|
|
|
("realm_web_public_access_enabled", check_bool),
|
2021-02-12 08:20:45 +01:00
|
|
|
("msg", check_string),
|
|
|
|
("result", check_string),
|
2021-02-12 08:19:30 +01:00
|
|
|
*extra_fields,
|
|
|
|
]
|
|
|
|
)
|
2022-06-07 01:37:01 +02:00
|
|
|
checker("data", response_dict)
|
2018-02-12 23:12:47 +01:00
|
|
|
|
2018-12-06 02:49:34 +01:00
|
|
|
result = self.client_get("/api/v1/server_settings", subdomain="", HTTP_USER_AGENT="")
|
2018-02-12 23:12:47 +01:00
|
|
|
check_result(result)
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict["external_authentication_methods"], get_external_method_dicts()
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-08-25 23:59:49 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
|
|
|
"/api/v1/server_settings", subdomain="", HTTP_USER_AGENT="ZulipInvalid"
|
|
|
|
)
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertTrue(response_dict["is_incompatible"])
|
2018-12-06 02:49:34 +01:00
|
|
|
|
2017-10-03 01:31:20 +02:00
|
|
|
with self.settings(ROOT_DOMAIN_LANDING_PAGE=False):
|
2018-12-06 02:49:34 +01:00
|
|
|
result = self.client_get("/api/v1/server_settings", subdomain="", HTTP_USER_AGENT="")
|
2018-02-12 23:12:47 +01:00
|
|
|
check_result(result)
|
|
|
|
|
|
|
|
with self.settings(ROOT_DOMAIN_LANDING_PAGE=False):
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
|
|
|
"/api/v1/server_settings", subdomain="zulip", HTTP_USER_AGENT=""
|
|
|
|
)
|
|
|
|
check_result(
|
|
|
|
result,
|
|
|
|
[
|
2021-02-12 08:20:45 +01:00
|
|
|
("realm_name", check_string),
|
|
|
|
("realm_description", check_string),
|
|
|
|
("realm_icon", check_string),
|
2021-02-12 08:19:30 +01:00
|
|
|
],
|
|
|
|
)
|
2017-05-04 01:13:56 +02:00
|
|
|
|
2019-11-01 05:12:11 +01:00
|
|
|
# Verify invalid subdomain
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get("/api/v1/server_settings", subdomain="invalid")
|
2019-11-01 05:12:11 +01:00
|
|
|
self.assert_json_error_contains(result, "Invalid subdomain", 400)
|
2018-12-19 01:35:13 +01:00
|
|
|
|
2019-11-01 05:12:11 +01:00
|
|
|
with self.settings(ROOT_DOMAIN_LANDING_PAGE=True):
|
|
|
|
# With ROOT_DOMAIN_LANDING_PAGE, homepage fails
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get("/api/v1/server_settings", subdomain="")
|
2019-11-01 05:12:11 +01:00
|
|
|
self.assert_json_error_contains(result, "Subdomain required", 400)
|
2017-03-10 06:29:09 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
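# Illustrative sketch (an assumption, not upstream code): a client consuming the
# /api/v1/server_settings payload validated above could look up a particular external
# authentication method by its "name" field. The helper name is hypothetical.
def find_external_auth_method(
    server_settings: Dict[str, Any], name: str
) -> Optional[Dict[str, Any]]:
    for method in server_settings.get("external_authentication_methods", []):
        if method["name"] == name:
            return method
    return None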
2017-07-13 13:42:57 +02:00
|
|
|
class TestTwoFactor(ZulipTestCase):
|
|
|
|
def test_direct_dev_login_with_2fa(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
email = self.example_email("hamlet")
|
|
|
|
user_profile = self.example_user("hamlet")
|
2017-07-13 13:42:57 +02:00
|
|
|
with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
2017-07-13 13:42:57 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2017-07-13 13:42:57 +02:00
|
|
|
# The user is logged in, but no OTP device exists yet.
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertNotIn("otp_device_id", self.client.session.keys())
|
2017-07-13 13:42:57 +02:00
|
|
|
|
|
|
|
self.create_default_device(user_profile)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
2017-07-13 13:42:57 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2017-07-13 13:42:57 +02:00
|
|
|
# The user logs in again, now that an OTP device exists.
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIn("otp_device_id", self.client.session.keys())
|
2017-07-13 13:42:57 +02:00
|
|
|
|
2022-07-05 22:14:19 +02:00
|
|
|
@mock.patch("two_factor.plugins.phonenumber.models.totp")
|
2020-04-19 03:48:37 +02:00
|
|
|
def test_two_factor_login_with_ldap(self, mock_totp: mock.MagicMock) -> None:
|
2017-07-13 13:42:57 +02:00
|
|
|
token = 123456
|
2021-02-12 08:20:45 +01:00
|
|
|
email = self.example_email("hamlet")
|
|
|
|
password = self.ldap_password("hamlet")
|
2017-07-13 13:42:57 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-07-13 13:42:57 +02:00
|
|
|
user_profile.set_password(password)
|
|
|
|
user_profile.save()
|
|
|
|
self.create_default_device(user_profile)
|
|
|
|
|
2020-04-19 03:48:37 +02:00
|
|
|
def totp(*args: Any, **kwargs: Any) -> int:
|
2017-07-13 13:42:57 +02:00
|
|
|
return token
|
|
|
|
|
|
|
|
mock_totp.side_effect = totp
|
|
|
|
|
2022-02-07 20:41:10 +01:00
|
|
|
# Set up LDAP
|
2019-10-16 18:10:40 +02:00
|
|
|
self.init_default_ldap_database()
|
2021-02-12 08:20:45 +01:00
|
|
|
ldap_user_attr_map = {"full_name": "cn"}
|
2017-07-13 13:42:57 +02:00
|
|
|
with self.settings(
|
2021-02-12 08:20:45 +01:00
|
|
|
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
|
|
|
|
TWO_FACTOR_CALL_GATEWAY="two_factor.gateways.fake.Fake",
|
|
|
|
TWO_FACTOR_SMS_GATEWAY="two_factor.gateways.fake.Fake",
|
2021-02-12 08:19:30 +01:00
|
|
|
TWO_FACTOR_AUTHENTICATION_ENABLED=True,
|
|
|
|
POPULATE_PROFILE_VIA_LDAP=True,
|
2021-02-12 08:20:45 +01:00
|
|
|
LDAP_APPEND_DOMAIN="zulip.com",
|
2021-02-12 08:19:30 +01:00
|
|
|
AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map,
|
2019-10-16 18:10:40 +02:00
|
|
|
):
|
2021-02-12 08:19:30 +01:00
|
|
|
first_step_data = {
|
|
|
|
"username": email,
|
|
|
|
"password": password,
|
|
|
|
"two_factor_login_view-current_step": "auth",
|
|
|
|
}
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs("two_factor.gateways.fake", "INFO") as info_log:
|
2020-07-26 01:28:29 +02:00
|
|
|
result = self.client_post("/accounts/login/", first_step_data)
|
2017-07-13 13:42:57 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
info_log.output,
|
|
|
|
['INFO:two_factor.gateways.fake:Fake SMS to +12125550100: "Your token is: 123456"'],
|
|
|
|
)
|
2017-07-13 13:42:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
second_step_data = {
|
|
|
|
"token-otp_token": str(token),
|
|
|
|
"two_factor_login_view-current_step": "token",
|
|
|
|
}
|
2017-07-13 13:42:57 +02:00
|
|
|
result = self.client_post("/accounts/login/", second_step_data)
|
|
|
|
self.assertEqual(result.status_code, 302)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(result["Location"], "http://zulip.testserver")
|
2017-07-13 13:42:57 +02:00
|
|
|
|
|
|
|
# Going to the login page should redirect to `realm.uri` if the user is
|
|
|
|
# already logged in.
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/accounts/login/")
|
2017-07-13 13:42:57 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(result["Location"], "http://zulip.testserver")
|
2017-07-13 13:42:57 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
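# Illustrative sketch (an assumption): the two-factor login flow exercised above posts
# two separate forms to /accounts/login/ -- an "auth" step carrying the credentials and
# a "token" step carrying the one-time code. This hypothetical helper only documents
# those payload shapes as used by the test.
def two_factor_form_payloads(
    username: str, password: str, token: int
) -> Tuple[Dict[str, str], Dict[str, str]]:
    first_step = {
        "username": username,
        "password": password,
        "two_factor_login_view-current_step": "auth",
    }
    second_step = {
        "token-otp_token": str(token),
        "two_factor_login_view-current_step": "token",
    }
    return first_step, second_step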
2016-10-24 08:35:16 +02:00
|
|
|
class TestDevAuthBackend(ZulipTestCase):
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
2016-10-24 08:35:16 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 08:35:16 +02:00
|
|
|
|
2020-10-07 07:10:02 +02:00
|
|
|
def test_spectator(self) -> None:
|
|
|
|
data = {"prefers_web_public_view": "Anonymous login"}
|
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
|
|
|
self.assertEqual(result.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(result["Location"], "http://zulip.testserver/")
|
2020-10-07 07:10:02 +02:00
|
|
|
|
2017-07-13 13:42:57 +02:00
|
|
|
def test_login_success_with_2fa(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-07-13 13:42:57 +02:00
|
|
|
self.create_default_device(user_profile)
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
2017-07-13 13:42:57 +02:00
|
|
|
with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
2017-07-13 13:42:57 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(result["Location"], "http://zulip.testserver/")
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIn("otp_device_id", list(self.client.session.keys()))
|
2017-07-13 13:42:57 +02:00
|
|
|
|
2018-03-12 12:25:50 +01:00
|
|
|
def test_redirect_to_next_url(self) -> None:
|
2022-06-08 04:52:09 +02:00
|
|
|
def do_local_login(formaction: str) -> "TestHttpResponse":
|
2021-02-12 08:20:45 +01:00
|
|
|
user_email = self.example_email("hamlet")
|
|
|
|
data = {"direct_email": user_email}
|
2018-03-12 12:25:50 +01:00
|
|
|
return self.client_post(formaction, data)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
res = do_local_login("/accounts/login/local/")
|
2018-03-12 12:25:50 +01:00
|
|
|
self.assertEqual(res.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(res["Location"], "http://zulip.testserver/")
|
2018-03-12 12:25:50 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
res = do_local_login("/accounts/login/local/?next=/user_uploads/path_to_image")
|
2018-03-12 12:25:50 +01:00
|
|
|
self.assertEqual(res.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(res["Location"], "http://zulip.testserver/user_uploads/path_to_image")
|
2018-03-12 12:25:50 +01:00
|
|
|
|
2018-03-12 14:08:12 +01:00
|
|
|
# In local email-based authentication we never make the browser send the hash
|
|
|
|
# to the backend. Rather, we depend on the browser's behavior of persisting
|
|
|
|
# hash anchors between redirect requests. See the Stack Overflow conversation below:
|
|
|
|
# https://stackoverflow.com/questions/5283395/url-hash-is-persisting-between-redirects
|
2021-02-12 08:20:45 +01:00
|
|
|
res = do_local_login("/accounts/login/local/?next=#narrow/stream/7-test-here")
|
2018-03-12 14:08:12 +01:00
|
|
|
self.assertEqual(res.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(res["Location"], "http://zulip.testserver")
|
2018-03-12 14:08:12 +01:00
|
|
|
|
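# Illustrative note (an assumption based on the comment above): the fragment part of
# "?next=#narrow/stream/7-test-here" never reaches the server, so the Location header
# can only contain the bare realm URL. For example:
#
#     urllib.parse.urlsplit("/accounts/login/local/?next=#narrow/stream/7-test-here").fragment
#     # -> "narrow/stream/7-test-here" (kept by the browser, not sent in the request)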
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_with_subdomain(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
2017-10-03 01:31:20 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/accounts/login/local/", data)
|
2017-03-25 20:44:14 +01:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2017-03-25 20:44:14 +01:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_choose_realm(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/devlogin/", subdomain="zulip")
|
2019-05-21 23:37:21 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2017-09-15 22:03:49 +02:00
|
|
|
self.assert_in_success_response(["Click on a user to log in to Zulip Dev!"], result)
|
|
|
|
self.assert_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/devlogin/", subdomain="")
|
2019-05-21 23:37:21 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2017-08-15 00:13:58 +02:00
|
|
|
self.assert_in_success_response(["Click on a user to log in!"], result)
|
|
|
|
self.assert_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
|
|
|
self.assert_in_success_response(["starnine@mit.edu", "espuser@mit.edu"], result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/devlogin/", {"new_realm": "all_realms"}, subdomain="zephyr")
|
2019-05-21 23:37:21 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
|
|
|
self.assert_in_success_response(["starnine@mit.edu", "espuser@mit.edu"], result)
|
|
|
|
self.assert_in_success_response(["Click on a user to log in!"], result)
|
|
|
|
self.assert_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"new_realm": "zephyr"}
|
|
|
|
result = self.client_post("/devlogin/", data, subdomain="zulip")
|
2017-09-15 22:03:49 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual(result["Location"], "http://zephyr.testserver")
|
2019-05-21 23:37:21 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/devlogin/", subdomain="zephyr")
|
2019-05-21 23:37:21 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2017-08-15 00:13:58 +02:00
|
|
|
self.assert_in_success_response(["starnine@mit.edu", "espuser@mit.edu"], result)
|
|
|
|
self.assert_in_success_response(["Click on a user to log in to MIT!"], result)
|
2017-09-15 22:03:49 +02:00
|
|
|
self.assert_not_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
2017-08-15 00:13:58 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_choose_realm_with_subdomains_enabled(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
with mock.patch("zerver.views.auth.is_subdomain_root_or_alias", return_value=False):
|
2021-02-12 08:19:30 +01:00
|
|
|
with mock.patch(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zerver.views.auth.get_realm_from_request", return_value=get_realm("zulip")
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2017-10-03 01:31:20 +02:00
|
|
|
result = self.client_get("http://zulip.testserver/devlogin/")
|
|
|
|
self.assert_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
|
|
|
self.assert_not_in_success_response(["starnine@mit.edu", "espuser@mit.edu"], result)
|
|
|
|
self.assert_in_success_response(["Click on a user to log in to Zulip Dev!"], result)
|
2017-08-15 00:13:58 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
with mock.patch(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zerver.views.auth.get_realm_from_request", return_value=get_realm("zephyr")
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
|
|
|
result = self.client_post(
|
2021-02-12 08:20:45 +01:00
|
|
|
"http://zulip.testserver/devlogin/", {"new_realm": "zephyr"}
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-10-03 01:31:20 +02:00
|
|
|
self.assertEqual(result["Location"], "http://zephyr.testserver")
|
2017-08-15 00:13:58 +02:00
|
|
|
|
2017-10-03 01:31:20 +02:00
|
|
|
result = self.client_get("http://zephyr.testserver/devlogin/")
|
|
|
|
self.assert_not_in_success_response(["iago@zulip.com", "hamlet@zulip.com"], result)
|
|
|
|
self.assert_in_success_response(["starnine@mit.edu", "espuser@mit.edu"], result)
|
|
|
|
self.assert_in_success_response(["Click on a user to log in to MIT!"], result)
|
2017-08-15 00:13:58 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure(self) -> None:
|
2017-05-25 01:40:26 +02:00
|
|
|
email = self.example_email("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"direct_email": email}
|
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",)):
|
|
|
|
response = self.client_post("/accounts/login/local/", data)
|
2020-06-17 14:25:25 +02:00
|
|
|
self.assert_in_success_response(["Configuration error", "DevAuthBackend"], response)
|
2016-10-24 08:35:16 +02:00
|
|
|
|
2020-02-15 19:16:16 +01:00
|
|
|
def test_dev_direct_production_config_error(self) -> None:
|
|
|
|
result = self.client_get("/config-error/dev")
|
|
|
|
self.assertEqual(result.status_code, 200)
|
|
|
|
self.assert_in_success_response(["DevAuthBackend"], result)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_nonexistent_user(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
email = "nonexisting@zulip.com"
|
|
|
|
data = {"direct_email": email}
|
2020-02-04 15:29:34 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
response = self.client_post("/accounts/login/local/", data)
|
2020-06-17 14:25:25 +02:00
|
|
|
self.assert_in_success_response(["Configuration error", "DevAuthBackend"], response)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
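# Illustrative sketch (an assumption): the DevAuthBackend tests above all drive the
# development-only /accounts/login/local/ endpoint with one of two form payloads -- a
# "direct_email" for logging in as a specific user, or "prefers_web_public_view" for
# the anonymous/spectator flow. The helper name is hypothetical.
def dev_login_payload(email: Optional[str] = None, spectator: bool = False) -> Dict[str, str]:
    if spectator:
        return {"prefers_web_public_view": "Anonymous login"}
    assert email is not None
    return {"direct_email": email}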
2020-02-17 16:18:09 +01:00
|
|
|
class TestZulipRemoteUserBackend(DesktopFlowTestingLib, ZulipTestCase):
|
2020-06-01 14:24:21 +02:00
|
|
|
def test_start_remote_user_sso(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
|
|
|
"/accounts/login/start/sso/", {"param1": "value1", "params": "value2"}
|
|
|
|
)
|
2020-06-01 14:24:21 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
|
|
|
|
2022-05-29 21:12:13 +02:00
|
|
|
url = result["Location"]
|
2020-06-01 14:24:21 +02:00
|
|
|
parsed_url = urllib.parse.urlparse(url)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(parsed_url.path, "/accounts/login/sso/")
|
|
|
|
self.assertEqual(parsed_url.query, "param1=value1¶ms=value2")
|
2020-06-01 14:24:21 +02:00
|
|
|
|
|
|
|
def test_start_remote_user_sso_with_desktop_app(self) -> None:
|
2022-06-14 22:34:47 +02:00
|
|
|
result = self.client_get(
|
|
|
|
"/accounts/login/start/sso/", {}, HTTP_USER_AGENT="ZulipElectron/5.0.0"
|
|
|
|
)
|
2020-06-01 14:24:21 +02:00
|
|
|
self.verify_desktop_flow_app_page(result)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email)
|
2016-10-24 09:09:31 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success_with_sso_append_domain(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
username = "hamlet"
|
|
|
|
user_profile = self.example_user("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
with self.settings(
|
2021-02-12 08:20:45 +01:00
|
|
|
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",),
|
|
|
|
SSO_APPEND_DOMAIN="zulip.com",
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=username)
|
2016-10-24 09:09:31 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2020-08-05 16:40:41 +02:00
|
|
|
def test_login_case_insensitive(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-08-05 16:40:41 +02:00
|
|
|
email_upper = user_profile.delivery_email.upper()
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email_upper)
|
2020-08-05 16:40:41 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure(self) -> None:
|
2017-05-25 01:40:26 +02:00
|
|
|
email = self.example_email("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assert_in_success_response(
|
|
|
|
["Configuration error", "Authentication via the REMOTE_USER header is"], result
|
|
|
|
)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_nonexisting_user(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
email = "nonexisting@zulip.com"
|
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email)
|
2017-04-20 08:25:15 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2017-07-17 17:31:05 +02:00
|
|
|
self.assert_in_response("No account found for", result)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_invalid_email(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
email = "hamlet"
|
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email)
|
2017-04-07 08:21:29 +02:00
|
|
|
self.assert_json_error_contains(result, "Enter a valid email address.", 400)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_missing_field(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
result = self.client_get("/accounts/login/sso/")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assert_in_success_response(
|
|
|
|
["Configuration error", "The REMOTE_USER header is not set."], result
|
|
|
|
)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_wrong_subdomain(self) -> None:
|
2017-05-25 01:40:26 +02:00
|
|
|
email = self.example_email("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value="acme"):
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"http://testserver:9080/accounts/login/sso/", REMOTE_USER=email
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-10-24 09:09:31 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-04-23 00:12:52 +02:00
|
|
|
self.assert_in_response("You need an invitation to join this organization.", result)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_empty_subdomain(self) -> None:
|
2017-05-25 01:40:26 +02:00
|
|
|
email = self.example_email("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)):
|
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value=""):
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"http://testserver:9080/accounts/login/sso/", REMOTE_USER=email
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-10-24 09:09:31 +02:00
|
|
|
self.assertEqual(result.status_code, 200)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-04-23 00:12:52 +02:00
|
|
|
self.assert_in_response("You need an invitation to join this organization.", result)
|
2016-10-24 09:09:31 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success_under_subdomains(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:20:45 +01:00
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value="zulip"):
|
2016-10-24 09:09:31 +02:00
|
|
|
with self.settings(
|
2021-02-12 08:20:45 +01:00
|
|
|
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/accounts/login/sso/", REMOTE_USER=email)
|
2016-10-24 09:09:31 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2018-02-06 23:29:57 +01:00
|
|
|
@override_settings(SEND_LOGIN_EMAILS=True)
|
2021-02-12 08:20:45 +01:00
|
|
|
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",))
|
2018-12-10 19:33:52 +01:00
|
|
|
def test_login_mobile_flow_otp_success_email(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2018-08-10 00:58:44 +02:00
|
|
|
user_profile.date_joined = timezone_now() - datetime.timedelta(seconds=61)
|
|
|
|
user_profile.save()
|
2021-02-12 08:20:45 +01:00
|
|
|
mobile_flow_otp = "1234abcd" * 8
|
2018-02-06 23:29:57 +01:00
|
|
|
|
2018-08-10 00:58:44 +02:00
|
|
|
# Verify that the right thing happens with an invalid-format OTP
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp="1234"),
|
|
|
|
REMOTE_USER=email,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-02-06 23:29:57 +01:00
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp="invalido" * 8),
|
|
|
|
REMOTE_USER=email,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-02-06 23:29:57 +01:00
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp=mobile_flow_otp),
|
|
|
|
REMOTE_USER=email,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2018-02-06 23:29:57 +01:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = result["Location"]
|
2018-02-06 23:29:57 +01:00
|
|
|
parsed_url = urllib.parse.urlparse(redirect_url)
|
|
|
|
query_params = urllib.parse.parse_qs(parsed_url.query)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(parsed_url.scheme, "zulip")
|
|
|
|
self.assertEqual(query_params["realm"], ["http://zulip.testserver"])
|
2018-12-10 19:33:52 +01:00
|
|
|
self.assertEqual(query_params["email"], [self.example_email("hamlet")])
|
|
|
|
encrypted_api_key = query_params["otp_encrypted_api_key"][0]
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet_api_keys = get_all_api_keys(self.example_user("hamlet"))
|
2018-12-10 19:33:52 +01:00
|
|
|
self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), hamlet_api_keys)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(mail.outbox, 1)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIn("Zulip on Android", mail.outbox[0].body)
|
2018-12-10 19:33:52 +01:00
|
|
|
|
|
|
|
@override_settings(SEND_LOGIN_EMAILS=True)
|
|
|
|
@override_settings(SSO_APPEND_DOMAIN="zulip.com")
|
2021-02-12 08:20:45 +01:00
|
|
|
@override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",))
|
2018-12-10 19:33:52 +01:00
|
|
|
def test_login_mobile_flow_otp_success_username(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2022-07-27 23:33:49 +02:00
|
|
|
remote_user = Address(addr_spec=email).username
|
2018-12-10 19:33:52 +01:00
|
|
|
user_profile.date_joined = timezone_now() - datetime.timedelta(seconds=61)
|
|
|
|
user_profile.save()
|
2021-02-12 08:20:45 +01:00
|
|
|
mobile_flow_otp = "1234abcd" * 8
|
2018-12-10 19:33:52 +01:00
|
|
|
|
|
|
|
# Verify that the right thing happens with an invalid-format OTP
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp="1234"),
|
|
|
|
REMOTE_USER=remote_user,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-12-10 19:33:52 +01:00
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp="invalido" * 8),
|
|
|
|
REMOTE_USER=remote_user,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2018-12-10 19:33:52 +01:00
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/",
|
2021-02-12 08:19:30 +01:00
|
|
|
dict(mobile_flow_otp=mobile_flow_otp),
|
|
|
|
REMOTE_USER=remote_user,
|
|
|
|
HTTP_USER_AGENT="ZulipAndroid",
|
|
|
|
)
|
2018-12-10 19:33:52 +01:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = result["Location"]
|
2018-12-10 19:33:52 +01:00
|
|
|
parsed_url = urllib.parse.urlparse(redirect_url)
|
|
|
|
query_params = urllib.parse.parse_qs(parsed_url.query)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(parsed_url.scheme, "zulip")
|
|
|
|
self.assertEqual(query_params["realm"], ["http://zulip.testserver"])
|
2018-02-06 23:29:57 +01:00
|
|
|
self.assertEqual(query_params["email"], [self.example_email("hamlet")])
|
|
|
|
encrypted_api_key = query_params["otp_encrypted_api_key"][0]
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet_api_keys = get_all_api_keys(self.example_user("hamlet"))
|
2018-08-01 10:53:40 +02:00
|
|
|
self.assertIn(otp_decrypt_api_key(encrypted_api_key, mobile_flow_otp), hamlet_api_keys)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(mail.outbox, 1)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIn("Zulip on Android", mail.outbox[0].body)
|
2018-02-06 23:29:57 +01:00
|
|
|
|
2020-02-01 17:00:56 +01:00
|
|
|
@override_settings(SEND_LOGIN_EMAILS=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
@override_settings(
|
|
|
|
AUTHENTICATION_BACKENDS=(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zproject.backends.ZulipRemoteUserBackend",
|
|
|
|
"zproject.backends.ZulipDummyBackend",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
def test_login_desktop_flow_otp_success_email(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2020-02-01 17:00:56 +01:00
|
|
|
user_profile.date_joined = timezone_now() - datetime.timedelta(seconds=61)
|
|
|
|
user_profile.save()
|
2021-02-12 08:20:45 +01:00
|
|
|
desktop_flow_otp = "1234abcd" * 8
|
2020-02-01 17:00:56 +01:00
|
|
|
|
|
|
|
# Verify that the right thing happens with an invalid-format OTP
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp="1234"), REMOTE_USER=email
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
self.assert_logged_in_user_id(None)
|
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp="invalido" * 8), REMOTE_USER=email
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
self.assert_logged_in_user_id(None)
|
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp=desktop_flow_otp), REMOTE_USER=email
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-17 16:18:09 +01:00
|
|
|
self.verify_desktop_flow_end_page(result, email, desktop_flow_otp)
|
2020-02-01 17:00:56 +01:00
|
|
|
|
|
|
|
@override_settings(SEND_LOGIN_EMAILS=True)
|
|
|
|
@override_settings(SSO_APPEND_DOMAIN="zulip.com")
|
2021-02-12 08:19:30 +01:00
|
|
|
@override_settings(
|
|
|
|
AUTHENTICATION_BACKENDS=(
|
2021-02-12 08:20:45 +01:00
|
|
|
"zproject.backends.ZulipRemoteUserBackend",
|
|
|
|
"zproject.backends.ZulipDummyBackend",
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
def test_login_desktop_flow_otp_success_username(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2022-07-27 23:33:49 +02:00
|
|
|
remote_user = Address(addr_spec=email).username
|
2020-02-01 17:00:56 +01:00
|
|
|
user_profile.date_joined = timezone_now() - datetime.timedelta(seconds=61)
|
|
|
|
user_profile.save()
|
2021-02-12 08:20:45 +01:00
|
|
|
desktop_flow_otp = "1234abcd" * 8
|
2020-02-01 17:00:56 +01:00
|
|
|
|
|
|
|
# Verify that the right thing happens with an invalid-format OTP
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp="1234"), REMOTE_USER=remote_user
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
self.assert_logged_in_user_id(None)
|
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp="invalido" * 8), REMOTE_USER=remote_user
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-01 17:00:56 +01:00
|
|
|
self.assert_logged_in_user_id(None)
|
|
|
|
self.assert_json_error_contains(result, "Invalid OTP", 400)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_get(
|
2021-02-12 08:20:45 +01:00
|
|
|
"/accounts/login/sso/", dict(desktop_flow_otp=desktop_flow_otp), REMOTE_USER=remote_user
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-17 16:18:09 +01:00
|
|
|
self.verify_desktop_flow_end_page(result, email, desktop_flow_otp)
|
2020-02-01 17:00:56 +01:00
|
|
|
|
2018-02-24 22:38:48 +01:00
|
|
|
def test_redirect_to(self) -> None:
|
2018-04-22 23:14:27 +02:00
|
|
|
"""This test verifies the behavior of the redirect_to logic in
|
|
|
|
login_or_register_remote_user."""
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-06-08 04:52:09 +02:00
|
|
|
def test_with_redirect_to_param_set_as_next(next: str = "") -> "TestHttpResponse":
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = user_profile.delivery_email
|
2021-02-12 08:19:30 +01:00
|
|
|
with self.settings(
|
2021-02-12 08:20:45 +01:00
|
|
|
AUTHENTICATION_BACKENDS=("zproject.backends.ZulipRemoteUserBackend",)
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2020-09-13 00:11:30 +02:00
|
|
|
result = self.client_get("/accounts/login/sso/", {"next": next}, REMOTE_USER=email)
|
2018-02-24 22:38:48 +01:00
|
|
|
return result
|
|
|
|
|
|
|
|
res = test_with_redirect_to_param_set_as_next()
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual("http://zulip.testserver", res["Location"])
|
2021-02-12 08:20:45 +01:00
|
|
|
res = test_with_redirect_to_param_set_as_next("/user_uploads/image_path")
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual("http://zulip.testserver/user_uploads/image_path", res["Location"])
|
2018-02-24 22:38:48 +01:00
|
|
|
|
2018-04-22 23:14:27 +02:00
|
|
|
# Third-party domains are rejected and just send you to the root domain.
|
2021-02-12 08:20:45 +01:00
|
|
|
res = test_with_redirect_to_param_set_as_next("https://rogue.zulip-like.server/login")
|
2022-05-29 21:12:13 +02:00
|
|
|
self.assertEqual("http://zulip.testserver", res["Location"])
|
2018-04-22 23:14:27 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
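# Illustrative sketch (an assumption): the mobile-flow tests above end with a redirect
# to a zulip:// URL whose query string carries the user's API key, encrypted against the
# one-time pad the client supplied as mobile_flow_otp. A client holding that pad
# recovers the key the same way the assertions do. The helper name is hypothetical.
def api_key_from_mobile_redirect(redirect_url: str, otp: str) -> str:
    parsed_url = urllib.parse.urlparse(redirect_url)
    query_params = urllib.parse.parse_qs(parsed_url.query)
    return otp_decrypt_api_key(query_params["otp_encrypted_api_key"][0], otp)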
2016-10-24 11:38:38 +02:00
|
|
|
class TestJWTLogin(ZulipTestCase):
|
|
|
|
"""
|
|
|
|
JWT uses ZulipDummyBackend.
|
|
|
|
"""
|
2016-11-29 07:22:02 +01:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "hamlet", "realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
2017-05-25 01:40:26 +02:00
|
|
|
email = self.example_email("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
realm = get_realm("zulip")
|
|
|
|
key = settings.JWT_AUTH_KEYS["zulip"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["zulip"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2020-03-12 14:17:25 +01:00
|
|
|
user_profile = get_user_by_delivery_email(email, realm)
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_user_is_missing(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
key = settings.JWT_AUTH_KEYS["zulip"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["zulip"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assert_json_error_contains(
|
|
|
|
result, "No user specified in JSON web token claims", 400
|
|
|
|
)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_realm_is_missing(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "hamlet"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
key = settings.JWT_AUTH_KEYS["zulip"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["zulip"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assert_json_error_contains(
|
|
|
|
result, "No organization specified in JSON web token claims", 400
|
|
|
|
)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_key_does_not_exist(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": "not relevant"}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "Auth key for this subdomain not found.", 400)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_key_is_missing(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
result = self.client_post("/accounts/login/jwt/")
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "No JSON web token passed in request", 400)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_bad_token_is_passed(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
result = self.client_post("/accounts/login/jwt/")
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "No JSON web token passed in request", 400)
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": "bad token"}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "Bad JSON web token", 400)
|
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_when_user_does_not_exist(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "nonexisting", "realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
key = settings.JWT_AUTH_KEYS["zulip"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["zulip"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2017-06-04 11:36:52 +02:00
|
|
|
self.assertEqual(result.status_code, 200)  # This should ideally not be 200.
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_wrong_subdomain(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "hamlet", "realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"acme": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value="acme"):
|
|
|
|
key = settings.JWT_AUTH_KEYS["acme"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["acme"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
|
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "Wrong subdomain", 400)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_failure_due_to_empty_subdomain(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "hamlet", "realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value=""):
|
|
|
|
key = settings.JWT_AUTH_KEYS[""]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS[""]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
|
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assert_json_error_contains(result, "Wrong subdomain", 400)
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(None)
|
2016-10-24 11:38:38 +02:00
|
|
|
|
2017-11-17 10:47:43 +01:00
|
|
|
def test_login_success_under_subdomains(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
payload = {"user": "hamlet", "realm": "zulip.com"}
|
|
|
|
with self.settings(JWT_AUTH_KEYS={"zulip": {"key": "key", "algorithms": ["HS256"]}}):
|
|
|
|
with mock.patch("zerver.views.auth.get_subdomain", return_value="zulip"):
|
|
|
|
key = settings.JWT_AUTH_KEYS["zulip"]["key"]
|
|
|
|
[algorithm] = settings.JWT_AUTH_KEYS["zulip"]["algorithms"]
|
2021-07-06 08:06:18 +02:00
|
|
|
web_token = jwt.encode(payload, key, algorithm)
|
2021-02-12 08:20:45 +01:00
|
|
|
|
|
|
|
data = {"json_web_token": web_token}
|
|
|
|
result = self.client_post("/accounts/login/jwt/", data)
|
2016-10-24 11:38:38 +02:00
|
|
|
self.assertEqual(result.status_code, 302)
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2019-05-26 22:12:46 +02:00
|
|
|
self.assert_logged_in_user_id(user_profile.id)
|
2016-10-24 14:41:45 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
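# Illustrative sketch (an assumption): the JWT login tests above all mint their tokens
# the same way -- a payload with "user" and "realm" claims, signed with the key and
# algorithm configured for the subdomain in JWT_AUTH_KEYS. The helper name is
# hypothetical and assumes such an entry exists for the given subdomain.
def make_jwt_login_token(subdomain: str, user: str, realm: str) -> str:
    key = settings.JWT_AUTH_KEYS[subdomain]["key"]
    [algorithm] = settings.JWT_AUTH_KEYS[subdomain]["algorithms"]
    return jwt.encode({"user": user, "realm": realm}, key, algorithm)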
2019-11-01 23:26:49 +01:00
|
|
|
class DjangoToLDAPUsernameTests(ZulipTestCase):
|
|
|
|
def setUp(self) -> None:
|
|
|
|
self.init_default_ldap_database()
|
|
|
|
self.backend = ZulipLDAPAuthBackend()
|
|
|
|
|
|
|
|
def test_django_to_ldap_username_with_append_domain(self) -> None:
|
|
|
|
with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
|
|
|
|
self.assertEqual(self.backend.django_to_ldap_username("hamlet"), "hamlet")
|
|
|
|
self.assertEqual(self.backend.django_to_ldap_username("hamlet@zulip.com"), "hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
with self.assertRaisesRegex(
|
2022-11-17 09:30:48 +01:00
|
|
|
OutsideLDAPDomainError,
|
2021-02-12 08:20:45 +01:00
|
|
|
"Email hamlet@example.com does not match LDAP domain zulip.com.",
|
2021-02-12 08:19:30 +01:00
|
|
|
):
|
2019-11-01 23:26:49 +01:00
|
|
|
self.backend.django_to_ldap_username("hamlet@example.com")
|
|
|
|
|
|
|
|
self.mock_ldap.directory['uid="hamlet@test",ou=users,dc=zulip,dc=com'] = {
|
|
|
|
"cn": ["King Hamlet"],
|
2022-07-27 23:33:49 +02:00
|
|
|
"uid": ["hamlet@test"],
|
2019-11-01 23:26:49 +01:00
|
|
|
}
|
|
|
|
username = self.backend.django_to_ldap_username('"hamlet@test"@zulip.com')
|
2022-07-27 23:33:49 +02:00
|
|
|
self.assertEqual(username, "hamlet@test")
|
2019-11-01 23:26:49 +01:00
|
|
|
|
|
|
|
self.mock_ldap.directory['uid="hamlet@test"@zulip,ou=users,dc=zulip,dc=com'] = {
|
|
|
|
"cn": ["King Hamlet"],
|
|
|
|
"uid": ['"hamlet@test"@zulip'],
|
|
|
|
}
|
|
|
|
username = self.backend.django_to_ldap_username('"hamlet@test"@zulip')
|
|
|
|
self.assertEqual(username, '"hamlet@test"@zulip')
|
|
|
|
|
|
|
|
def test_django_to_ldap_username_with_email_search(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
self.backend.django_to_ldap_username("hamlet"), self.ldap_username("hamlet")
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
self.backend.django_to_ldap_username("hamlet@zulip.com"), self.ldap_username("hamlet")
|
|
|
|
)
|
2019-11-01 23:26:49 +01:00
|
|
|
# If there are no matches through the email search, an exception is raised:
|
2022-11-17 09:30:48 +01:00
|
|
|
with self.assertRaises(NoMatchingLDAPUserError):
|
2019-11-01 23:26:49 +01:00
|
|
|
self.backend.django_to_ldap_username("no_such_email@example.com")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
self.backend.django_to_ldap_username("aaron@zulip.com"), self.ldap_username("aaron")
|
|
|
|
)
|
2019-11-01 23:26:49 +01:00
|
|
|
|
2020-07-07 12:57:28 +02:00
|
|
|
with self.assertLogs(level="WARNING") as m:
|
2022-11-17 09:30:48 +01:00
|
|
|
with self.assertRaises(NoMatchingLDAPUserError):
|
2019-11-01 23:26:49 +01:00
|
|
|
self.backend.django_to_ldap_username("shared_email@zulip.com")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
m.output,
|
|
|
|
[
|
|
|
|
"WARNING:root:Multiple users with email {} found in LDAP.".format(
|
|
|
|
"shared_email@zulip.com"
|
|
|
|
)
|
|
|
|
],
|
|
|
|
)
|
2019-11-01 23:26:49 +01:00
|
|
|
|
|
|
|
# Test the weird case of a user whose uid is an email and whose actual "mail"
|
|
|
|
# attribute is a different email address:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.mock_ldap.directory["uid=some_user@organization_a.com,ou=users,dc=zulip,dc=com"] = {
|
2019-11-01 23:26:49 +01:00
|
|
|
"cn": ["Some User"],
|
2021-02-12 08:20:45 +01:00
|
|
|
"uid": ["some_user@organization_a.com"],
|
2020-04-10 05:23:40 +02:00
            "mail": ["some_user@contactaddress.com"],
        }
        self.assertEqual(
            self.backend.django_to_ldap_username("some_user@contactaddress.com"),
            "some_user@organization_a.com",
        )
        self.assertEqual(
            self.backend.django_to_ldap_username("some_user@organization_a.com"),
            "some_user@organization_a.com",
        )

        # Configure email search for emails in the uid attribute:
        with self.settings(
            AUTH_LDAP_REVERSE_EMAIL_SEARCH=LDAPSearch(
                "ou=users,dc=zulip,dc=com", ldap.SCOPE_ONELEVEL, "(uid=%(email)s)"
            )
        ):
            self.assertEqual(
                self.backend.django_to_ldap_username("newuser_email_as_uid@zulip.com"),
                "newuser_email_as_uid@zulip.com",
            )

            self.mock_ldap.directory['uid="hamlet@test"@zulip.com",ou=users,dc=zulip,dc=com'] = {
                "cn": ["King Hamlet"],
                "uid": ['"hamlet@test"@zulip.com'],
            }
            username = self.backend.django_to_ldap_username('"hamlet@test"@zulip.com')
            self.assertEqual(username, '"hamlet@test"@zulip.com')

    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "zproject.backends.EmailAuthBackend",
            "zproject.backends.ZulipLDAPAuthBackend",
        )
    )
    def test_authenticate_to_ldap_via_email(self) -> None:
        """
        With AUTH_LDAP_REVERSE_EMAIL_SEARCH configured, django_to_ldap_username
        should be able to translate an email to LDAP username,
        and thus it should be possible to authenticate through user_profile.delivery_email.
        """
        realm = get_realm("zulip")
        user_profile = self.example_user("hamlet")
        password = "testpassword"
        user_profile.set_password(password)
        user_profile.save()

        with self.settings(LDAP_EMAIL_ATTR="mail"):
            self.assertEqual(
                authenticate(
                    request=mock.MagicMock(),
                    username=user_profile.delivery_email,
                    password=self.ldap_password("hamlet"),
                    realm=realm,
                ),
                user_profile,
            )
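
    # Illustrative sketch (not part of the test above, and the base DN and filter are
    # assumptions that depend on the directory layout): a deployment relying on
    # email-based login with LDAP_EMAIL_ATTR would typically pair it with a reverse
    # email search so Zulip can map a delivery email back to its LDAP entry, e.g.:
    #
    #     AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch(
    #         "ou=users,dc=zulip,dc=com", ldap.SCOPE_ONELEVEL, "(mail=%(email)s)"
    #     )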
@override_settings(LDAP_EMAIL_ATTR="mail", LDAP_DEACTIVATE_NON_MATCHING_USERS=True)
|
2019-11-01 23:26:49 +01:00
|
|
|
def test_sync_user_from_ldap_with_email_attr(self) -> None:
|
|
|
|
"""In LDAP configurations with LDAP_EMAIL_ATTR configured and
|
|
|
|
LDAP_DEACTIVATE_NON_MATCHING_USERS set, a possible failure
|
|
|
|
mode if django_to_ldap_username isn't configured correctly is
|
|
|
|
all LDAP users having their accounts deactivated. Before the
|
|
|
|
introduction of AUTH_LDAP_REVERSE_EMAIL_SEARCH, this would happen
|
|
|
|
even in valid LDAP configurations using LDAP_EMAIL_ATTR.
|
|
|
|
|
|
|
|
This test confirms that such a failure mode doesn't happen with
|
|
|
|
a valid LDAP configuration.
|
|
|
|
"""
|
|
|
|
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
with self.settings():
|
|
|
|
sync_user_from_ldap(user_profile, mock.Mock())
|
|
|
|
# Syncing didn't deactivate the user:
|
|
|
|
self.assertTrue(user_profile.is_active)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-12 17:15:14 +01:00
|
|
|
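
    # Sketch of the configuration involved in the failure mode described above (an
    # assumption about a typical deployment, not settings used by this test):
    #
    #     LDAP_EMAIL_ATTR = "mail"
    #     LDAP_DEACTIVATE_NON_MATCHING_USERS = True
    #
    # Without a working AUTH_LDAP_REVERSE_EMAIL_SEARCH, every email lookup fails, so
    # every user looks "non-matching" and would get deactivated on sync.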


class ZulipLDAPTestCase(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()

        self.init_default_ldap_database()

        user_profile = self.example_user("hamlet")
        self.setup_subdomain(user_profile)
        self.backend = ZulipLDAPAuthBackend()

        # Internally `_realm` and `_prereg_user` attributes are automatically set
        # by the `authenticate()` method. But for testing the `get_or_build_user()`
        # method separately, we need to set them manually.
        self.backend._realm = get_realm("zulip")
        self.backend._prereg_user = None

    def setup_subdomain(self, user_profile: UserProfile) -> None:
        realm = user_profile.realm
        realm.string_id = "zulip"
        realm.save()


class TestLDAP(ZulipLDAPTestCase):
    def test_generate_dev_ldap_dir(self) -> None:
        ldap_dir = generate_dev_ldap_dir("A", 10)
        self.assert_length(ldap_dir, 10)
        regex = re.compile(
            r"(uid\=)+[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+(\,ou\=users\,dc\=zulip\,dc\=com)"
        )
        common_attrs = ["cn", "userPassword", "phoneNumber", "birthDate"]
        for key, value in ldap_dir.items():
            self.assertTrue(regex.match(key))
            self.assertCountEqual(
                list(value.keys()), [*common_attrs, "uid", "thumbnailPhoto", "userAccountControl"]
            )

        ldap_dir = generate_dev_ldap_dir("b", 9)
        self.assert_length(ldap_dir, 9)
        regex = re.compile(r"(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)")
        for key, value in ldap_dir.items():
            self.assertTrue(regex.match(key))
            self.assertCountEqual(list(value.keys()), [*common_attrs, "uid", "jpegPhoto"])

        ldap_dir = generate_dev_ldap_dir("c", 8)
        self.assert_length(ldap_dir, 8)
        regex = re.compile(r"(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)")
        for key, value in ldap_dir.items():
            self.assertTrue(regex.match(key))
            self.assertCountEqual(list(value.keys()), [*common_attrs, "uid", "email"])

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_dev_ldap_fail_login(self) -> None:
        # Tests that login with a substring of the password fails. We had a bug in
        # the dev LDAP environment that allowed login via password substrings.
        self.mock_ldap.directory = generate_dev_ldap_dir("B", 8)
        with self.settings(
            AUTH_LDAP_USER_SEARCH=LDAPSearch(
                "ou=users,dc=zulip,dc=com", ldap.SCOPE_ONELEVEL, "(uid=%(user)s)"
            ),
            AUTH_LDAP_REVERSE_EMAIL_SEARCH=LDAPSearch(
                "ou=users,dc=zulip,dc=com", ldap.SCOPE_ONELEVEL, "(email=%(email)s)"
            ),
            LDAP_APPEND_DOMAIN="zulip.com",
        ):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username="ldapuser1",
                password="dapu",
                realm=get_realm("zulip"),
            )

            assert user_profile is None

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=self.ldap_password("hamlet"),
                realm=get_realm("zulip"),
            )

            assert user_profile is not None
            self.assertEqual(user_profile.delivery_email, self.example_email("hamlet"))

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success_with_username(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username="hamlet",
                password=self.ldap_password("hamlet"),
                realm=get_realm("zulip"),
            )

            assert user_profile is not None
            self.assertEqual(user_profile, self.example_user("hamlet"))

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success_with_email_attr(self) -> None:
        with self.settings(LDAP_EMAIL_ATTR="mail"):
            username = self.ldap_username("aaron")
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=username,
                password=self.ldap_password(username),
                realm=get_realm("zulip"),
            )

            assert user_profile is not None
            self.assertEqual(user_profile, self.example_user("aaron"))

    @override_settings(
        AUTHENTICATION_BACKENDS=(
            "zproject.backends.EmailAuthBackend",
            "zproject.backends.ZulipLDAPAuthBackend",
        )
    )
    def test_email_and_ldap_backends_together(self) -> None:
        with self.settings(
            LDAP_EMAIL_ATTR="mail",
            AUTH_LDAP_REVERSE_EMAIL_SEARCH=LDAPSearch(
                "ou=users,dc=zulip,dc=com", ldap.SCOPE_ONELEVEL, "(mail=%(email)s)"
            ),
            AUTH_LDAP_USERNAME_ATTR="uid",
        ):
            realm = get_realm("zulip")
            self.assertEqual(email_belongs_to_ldap(realm, self.example_email("aaron")), True)
            username = self.ldap_username("aaron")
            user_profile = ZulipLDAPAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=username,
                password=self.ldap_password(username),
                realm=realm,
            )
            self.assertEqual(user_profile, self.example_user("aaron"))

            othello = self.example_user("othello")
            password = "testpassword"
            othello.set_password(password)
            othello.save()

            self.assertEqual(email_belongs_to_ldap(realm, othello.delivery_email), False)
            user_profile = EmailAuthBackend().authenticate(
                request=mock.MagicMock(),
                username=othello.delivery_email,
                password=password,
                realm=realm,
            )
            self.assertEqual(user_profile, othello)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_failure_due_to_wrong_password(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            user = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password="wrong",
                realm=get_realm("zulip"),
            )
            self.assertIs(user, None)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_failure_due_to_nonexistent_user(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"), self.assertLogs(
            "zulip.ldap", level="DEBUG"
        ) as log_debug:
            user = self.backend.authenticate(
                request=mock.MagicMock(),
                username="nonexistent@zulip.com",
                password="doesnt_matter",
                realm=get_realm("zulip"),
            )
            self.assertEqual(
                log_debug.output,
                [
                    "DEBUG:zulip.ldap:ZulipLDAPAuthBackend: No LDAP user matching django_to_ldap_username result: nonexistent. Input username: nonexistent@zulip.com"
                ],
            )
            self.assertIs(user, None)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_ldap_permissions(self) -> None:
        backend = self.backend
        self.assertFalse(backend.has_perm(None, None))
        self.assertFalse(backend.has_module_perms(None, None))
        self.assertTrue(backend.get_all_permissions(None, None) == set())
        self.assertTrue(backend.get_group_permissions(None, None) == set())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_user_email_from_ldapuser_with_append_domain(self) -> None:
        backend = self.backend
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            username = backend.user_email_from_ldapuser(
                "this_argument_is_ignored", _LDAPUser(self.backend, username="hamlet@test")
            )
            self.assertEqual(username, '"hamlet@test"@zulip.com')

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_user_exists(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Full Name"], "sn": ["Short Name"]}

        backend = self.backend
        email = self.example_email("hamlet")
        user_profile, created = backend.get_or_build_user(str(email), _LDAPUser())
        self.assertFalse(created)
        self.assertEqual(user_profile.delivery_email, email)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_user_does_not_exist(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Full Name"]}

        ldap_user_attr_map = {"full_name": "fn"}

        with self.settings(AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map):
            backend = self.backend
            email = "newuser@zulip.com"
            user_profile, created = backend.get_or_build_user(email, _LDAPUser())
            self.assertTrue(created)
            self.assertEqual(user_profile.delivery_email, email)
            self.assertEqual(user_profile.full_name, "Full Name")

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_user_has_invalid_name(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["<invalid name>"]}

        ldap_user_attr_map = {"full_name": "fn"}

        with self.settings(AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map):
            backend = self.backend
            email = "nonexisting@zulip.com"
            with self.assertRaisesRegex(Exception, "Invalid characters in name!"):
                backend.get_or_build_user(email, _LDAPUser())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_realm_is_deactivated(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Full Name"]}

        ldap_user_attr_map = {"full_name": "fn"}

        with self.settings(AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map):
            backend = self.backend
            email = "nonexisting@zulip.com"
            do_deactivate_realm(backend._realm, acting_user=None)
            with self.assertRaisesRegex(Exception, "Realm has been deactivated"):
                backend.get_or_build_user(email, _LDAPUser())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_ldap_has_no_email_attr(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Full Name"], "sn": ["Short Name"]}

        nonexisting_attr = "email"
        with self.settings(LDAP_EMAIL_ATTR=nonexisting_attr):
            backend = self.backend
            email = "nonexisting@zulip.com"
            with self.assertRaisesRegex(
                Exception, "LDAP user doesn't have the needed email attribute"
            ):
                backend.get_or_build_user(email, _LDAPUser())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_email(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Test User"]}

        ldap_user_attr_map = {"full_name": "fn"}

        with self.settings(AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map):
            realm = self.backend._realm
            realm.emails_restricted_to_domains = False
            realm.disallow_disposable_email_addresses = True
            realm.save()

            email = "spam@mailnator.com"
            with self.assertRaisesRegex(ZulipLDAPError, "Email validation failed."):
                self.backend.get_or_build_user(email, _LDAPUser())

            realm.emails_restricted_to_domains = True
            realm.save(update_fields=["emails_restricted_to_domains"])

            email = "spam+spam@mailnator.com"
            with self.assertRaisesRegex(ZulipLDAPError, "Email validation failed."):
                self.backend.get_or_build_user(email, _LDAPUser())

            email = "spam@acme.com"
            with self.assertRaisesRegex(
                ZulipLDAPError, "This email domain isn't allowed in this organization."
            ):
                self.backend.get_or_build_user(email, _LDAPUser())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_get_or_build_user_when_ldap_has_no_full_name_mapping(self) -> None:
        class _LDAPUser:
            attrs = {"fn": ["Full Name"], "sn": ["Short Name"]}

        with self.settings(AUTH_LDAP_USER_ATTR_MAP={}):
            backend = self.backend
            email = "nonexisting@zulip.com"
            with self.assertRaisesRegex(Exception, "Missing required mapping for user's full name"):
                backend.get_or_build_user(email, _LDAPUser())

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_failure_when_domain_does_not_match(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="acme.com"), self.assertLogs(
            "zulip.ldap", "DEBUG"
        ) as debug_log:
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=self.ldap_password("hamlet"),
                realm=get_realm("zulip"),
            )
            self.assertIs(user_profile, None)
            self.assertEqual(
                debug_log.output,
                [
                    "DEBUG:zulip.ldap:ZulipLDAPAuthBackend: Email hamlet@zulip.com does not match LDAP domain acme.com."
                ],
            )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success_with_different_subdomain(self) -> None:
        ldap_user_attr_map = {"full_name": "cn"}

        do_create_realm(string_id="acme", name="acme")
        with self.settings(
            LDAP_APPEND_DOMAIN="zulip.com", AUTH_LDAP_USER_ATTR_MAP=ldap_user_attr_map
        ):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=self.ldap_password("hamlet"),
                realm=get_realm("acme"),
            )
            self.assertEqual(user_profile.delivery_email, self.example_email("hamlet"))

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success_with_valid_subdomain(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=self.ldap_password("hamlet"),
                realm=get_realm("zulip"),
            )
            assert user_profile is not None
            self.assertEqual(user_profile.delivery_email, self.example_email("hamlet"))

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_failure_due_to_deactivated_user(self) -> None:
        user_profile = self.example_user("hamlet")
        do_deactivate_user(user_profile, acting_user=None)
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username=self.example_email("hamlet"),
                password=self.ldap_password("hamlet"),
                realm=get_realm("zulip"),
            )
            self.assertIs(user_profile, None)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    @override_settings(
        AUTH_LDAP_USER_ATTR_MAP={
            "full_name": "cn",
            "avatar": "jpegPhoto",
        }
    )
    def test_login_success_when_user_does_not_exist_with_valid_subdomain(self) -> None:
        RealmDomain.objects.create(realm=self.backend._realm, domain="acme.com")
        with self.settings(LDAP_APPEND_DOMAIN="acme.com"):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username="newuser@acme.com",
                password=self.ldap_password("newuser"),
                realm=get_realm("zulip"),
            )
            assert user_profile is not None
            self.assertEqual(user_profile.delivery_email, "newuser@acme.com")
            self.assertEqual(user_profile.full_name, "New LDAP fullname")
            self.assertEqual(user_profile.realm.string_id, "zulip")

            # Verify avatar gets created
            self.assertEqual(user_profile.avatar_source, UserProfile.AVATAR_FROM_USER)
            url = avatar_url(user_profile)
            assert url is not None
            response = self.client_get(url)
            self.assertEqual(response.status_code, 200)
            with open(
                os.path.join(
                    settings.DEPLOY_ROOT,
                    "static/images/test-images/avatars/example_profile_picture.png",
                ),
                "rb",
            ) as f:
                example_avatar = f.read()
            self.assert_streaming_content(
                response, resize_avatar(example_avatar, DEFAULT_AVATAR_SIZE)
            )

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_login_success_when_user_does_not_exist_with_split_full_name_mapping(self) -> None:
        with self.settings(
            LDAP_APPEND_DOMAIN="zulip.com",
            AUTH_LDAP_USER_ATTR_MAP={"first_name": "sn", "last_name": "cn"},
        ):
            user_profile = self.backend.authenticate(
                request=mock.MagicMock(),
                username="newuser_splitname@zulip.com",
                password=self.ldap_password("newuser_splitname"),
                realm=get_realm("zulip"),
            )
            assert user_profile is not None
            self.assertEqual(user_profile.delivery_email, "newuser_splitname@zulip.com")
            self.assertEqual(user_profile.full_name, "First Last")
            self.assertEqual(user_profile.realm.string_id, "zulip")


class TestZulipLDAPUserPopulator(ZulipLDAPTestCase):
    def test_authenticate(self) -> None:
        backend = ZulipLDAPUserPopulator()
        result = backend.authenticate(
            username=self.example_email("hamlet"),
            password=self.ldap_password("hamlet"),
            realm=get_realm("zulip"),
        )
        self.assertIs(result, None)

    def perform_ldap_sync(self, user_profile: UserProfile) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com"):
            result = sync_user_from_ldap(user_profile, mock.Mock())
            self.assertTrue(result)

    @mock.patch("zproject.backends.do_deactivate_user")
    def test_ldaperror_doesnt_deactivate_user(self, mock_deactivate: mock.MagicMock) -> None:
        """
        This is a test for a bug where failure to connect to LDAP in sync_user_from_ldap
        (e.g. due to invalid credentials) would cause the user to be deactivated if
        LDAP_DEACTIVATE_NON_MATCHING_USERS was True.
        Details: https://github.com/zulip/zulip/issues/13130
        """
        with self.settings(
            LDAP_DEACTIVATE_NON_MATCHING_USERS=True,
            LDAP_APPEND_DOMAIN="zulip.com",
            AUTH_LDAP_BIND_PASSWORD="wrongpass",
        ):
            with self.assertRaises(ldap.INVALID_CREDENTIALS):
                sync_user_from_ldap(self.example_user("hamlet"), mock.Mock())
            mock_deactivate.assert_not_called()

            # Make sure other types of LDAPError won't cause deactivation either:
            with mock.patch.object(_LDAPUser, "_get_or_create_user", side_effect=ldap.LDAPError):
                with self.assertRaises(PopulateUserLDAPError):
                    sync_user_from_ldap(self.example_user("hamlet"), mock.Mock())
            mock_deactivate.assert_not_called()

    @override_settings(LDAP_EMAIL_ATTR="mail")
    def test_populate_user_returns_none(self) -> None:
        with mock.patch.object(ZulipLDAPUser, "populate_user", return_value=None):
            with self.assertRaises(PopulateUserLDAPError):
                sync_user_from_ldap(self.example_user("hamlet"), mock.Mock())

    def test_update_full_name(self) -> None:
        self.change_ldap_user_attr("hamlet", "cn", "New Name")

        self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.full_name, "New Name")

    def test_update_with_hidden_emails(self) -> None:
        hamlet = self.example_user("hamlet")
        realm = get_realm("zulip")
        do_set_realm_property(
            realm,
            "email_address_visibility",
            Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS,
            acting_user=None,
        )
        hamlet.refresh_from_db()

        self.change_ldap_user_attr("hamlet", "cn", "New Name")
        self.perform_ldap_sync(hamlet)

        hamlet.refresh_from_db()
        self.assertEqual(hamlet.full_name, "New Name")

    def test_update_split_full_name(self) -> None:
        self.change_ldap_user_attr("hamlet", "cn", "Name")
        self.change_ldap_user_attr("hamlet", "sn", "Full")

        with self.settings(AUTH_LDAP_USER_ATTR_MAP={"first_name": "sn", "last_name": "cn"}):
            self.perform_ldap_sync(self.example_user("hamlet"))
            hamlet = self.example_user("hamlet")
            self.assertEqual(hamlet.full_name, "Full Name")

    def test_same_full_name(self) -> None:
        with mock.patch("zerver.actions.user_settings.do_change_full_name") as fn:
            self.perform_ldap_sync(self.example_user("hamlet"))
            fn.assert_not_called()

    def test_too_short_name(self) -> None:
        self.change_ldap_user_attr("hamlet", "cn", "a")

        with self.assertRaises(ZulipLDAPError), self.assertLogs(
            "django_auth_ldap", "WARNING"
        ) as warn_log:
            self.perform_ldap_sync(self.example_user("hamlet"))
        self.assertEqual(
            warn_log.output,
            ["WARNING:django_auth_ldap:Name too short! while authenticating hamlet"],
        )

    def test_deactivate_user_with_useraccountcontrol_attr(self) -> None:
        self.change_ldap_user_attr("hamlet", "userAccountControl", "2")

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "userAccountControl": "userAccountControl"}
        ), self.assertLogs("zulip.ldap") as info_logs:
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet = self.example_user("hamlet")
        self.assertFalse(hamlet.is_active)
        self.assertEqual(
            info_logs.output,
            [
                "INFO:zulip.ldap:Deactivating user hamlet@zulip.com because they are disabled in LDAP."
            ],
        )

    def test_deactivate_reactivate_user_with_deactivated_attr(self) -> None:
        self.change_ldap_user_attr("hamlet", "someCustomAttr", "TRUE")

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "deactivated": "someCustomAttr"}
        ), self.assertLogs("zulip.ldap") as info_logs:
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet = self.example_user("hamlet")
        self.assertFalse(hamlet.is_active)
        self.assertEqual(
            info_logs.output,
            [
                "INFO:zulip.ldap:Deactivating user hamlet@zulip.com because they are disabled in LDAP."
            ],
        )

        self.change_ldap_user_attr("hamlet", "someCustomAttr", "FALSE")
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "deactivated": "someCustomAttr"}
        ), self.assertLogs("zulip.ldap") as info_logs:
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet.refresh_from_db()
        self.assertTrue(hamlet.is_active)
        self.assertEqual(
            info_logs.output,
            [
                "INFO:zulip.ldap:Reactivating user hamlet@zulip.com because they are not disabled in LDAP."
            ],
        )

        self.change_ldap_user_attr("hamlet", "someCustomAttr", "YESSS")
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "deactivated": "someCustomAttr"}
        ), self.assertLogs("django_auth_ldap") as ldap_logs, self.assertRaises(AssertionError):
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet.refresh_from_db()
        self.assertTrue(hamlet.is_active)
        self.assertEqual(
            ldap_logs.output,
            [
                "WARNING:django_auth_ldap:Invalid value 'YESSS' in the LDAP attribute mapped to deactivated while authenticating hamlet"
            ],
        )

    @mock.patch("zproject.backends.ZulipLDAPAuthBackendBase.sync_full_name_from_ldap")
    def test_dont_sync_disabled_ldap_user(self, fake_sync: mock.MagicMock) -> None:
        self.change_ldap_user_attr("hamlet", "userAccountControl", "2")

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "userAccountControl": "userAccountControl"}
        ), self.assertLogs("zulip.ldap") as info_logs:
            self.perform_ldap_sync(self.example_user("hamlet"))
        fake_sync.assert_not_called()
        self.assertEqual(
            info_logs.output,
            [
                "INFO:zulip.ldap:Deactivating user hamlet@zulip.com because they are disabled in LDAP."
            ],
        )

    def test_reactivate_user(self) -> None:
        do_deactivate_user(self.example_user("hamlet"), acting_user=None)

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "userAccountControl": "userAccountControl"}
        ), self.assertLogs("zulip.ldap") as info_logs:
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet = self.example_user("hamlet")
        self.assertTrue(hamlet.is_active)
        self.assertEqual(
            info_logs.output,
            [
                "INFO:zulip.ldap:Reactivating user hamlet@zulip.com because they are not disabled in LDAP."
            ],
        )

    def test_user_in_multiple_realms(self) -> None:
        test_realm = do_create_realm("test", "test", emails_restricted_to_domains=False)
        hamlet = self.example_user("hamlet")
        email = hamlet.delivery_email
        hamlet2 = do_create_user(email, None, test_realm, hamlet.full_name, acting_user=None)

        self.change_ldap_user_attr("hamlet", "cn", "Second Hamlet")
        expected_call_args = [hamlet2, "Second Hamlet", None]
        with self.settings(AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn"}):
            with mock.patch("zerver.actions.user_settings.do_change_full_name") as f:
                self.perform_ldap_sync(hamlet2)
            f.assert_called_once_with(*expected_call_args)

            # Get the updated model and make sure the full name is changed correctly:
            hamlet2 = get_user_by_delivery_email(email, test_realm)
            self.assertEqual(hamlet2.full_name, "Second Hamlet")
            # Now get the original hamlet and make sure his name is still unchanged:
            hamlet = self.example_user("hamlet")
            self.assertEqual(hamlet.full_name, "King Hamlet")

    def test_user_not_found_in_ldap(self) -> None:
        with self.settings(
            LDAP_DEACTIVATE_NON_MATCHING_USERS=False, LDAP_APPEND_DOMAIN="zulip.com"
        ):
            othello = self.example_user("othello")  # othello isn't in our test directory
            mock_logger = mock.MagicMock()
            result = sync_user_from_ldap(othello, mock_logger)
            mock_logger.warning.assert_called_once_with(
                "Did not find %s in LDAP.", othello.delivery_email
            )
            self.assertFalse(result)

            do_deactivate_user(othello, acting_user=None)
            mock_logger = mock.MagicMock()
            result = sync_user_from_ldap(othello, mock_logger)
            # In this case the logger shouldn't be used.
            self.assertEqual(mock_logger.method_calls, [])
            self.assertFalse(result)

    def test_update_user_avatar(self) -> None:
        # Hamlet has jpegPhoto set in our test directory by default.
        with mock.patch("zerver.lib.upload.upload_avatar_image") as fn, self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "avatar": "jpegPhoto"}
        ):
            self.perform_ldap_sync(self.example_user("hamlet"))
            fn.assert_called_once()
            hamlet = self.example_user("hamlet")
            self.assertEqual(hamlet.avatar_source, UserProfile.AVATAR_FROM_USER)

            # Verify that the next time we do an LDAP sync, we don't
            # end up updating this user's avatar again if the LDAP
            # data hasn't changed.
            self.perform_ldap_sync(self.example_user("hamlet"))
            fn.assert_called_once()

        # Now verify that if we do change the jpegPhoto image, we
        # will upload a new avatar.
        self.change_ldap_user_attr(
            "hamlet", "jpegPhoto", static_path("images/logo/zulip-icon-512x512.png"), binary=True
        )
        with mock.patch("zerver.lib.upload.upload_avatar_image") as fn, self.settings(
            AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "avatar": "jpegPhoto"}
        ):
            self.perform_ldap_sync(self.example_user("hamlet"))
            fn.assert_called_once()
            hamlet = self.example_user("hamlet")
            self.assertEqual(hamlet.avatar_source, UserProfile.AVATAR_FROM_USER)

    @use_s3_backend
    def test_update_user_avatar_for_s3(self) -> None:
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
        test_image_data = read_test_image_file("img.png")
        self.change_ldap_user_attr("hamlet", "jpegPhoto", test_image_data)
        with self.settings(AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "avatar": "jpegPhoto"}):
            self.perform_ldap_sync(self.example_user("hamlet"))

        hamlet = self.example_user("hamlet")
        path_id = user_avatar_path(hamlet)
        original_image_path_id = path_id + ".original"
        medium_path_id = path_id + "-medium.png"

        original_image_key = bucket.Object(original_image_path_id)
        medium_image_key = bucket.Object(medium_path_id)

        image_data = original_image_key.get()["Body"].read()
        self.assertEqual(image_data, test_image_data)

        test_medium_image_data = resize_avatar(test_image_data, MEDIUM_AVATAR_SIZE)
        medium_image_data = medium_image_key.get()["Body"].read()
        self.assertEqual(medium_image_data, test_medium_image_data)

        # Try to use invalid data as the image:
        self.change_ldap_user_attr("hamlet", "jpegPhoto", b"00" + test_image_data)
        with self.settings(AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn", "avatar": "jpegPhoto"}):
            with self.assertLogs(level="WARNING") as m:
                self.perform_ldap_sync(self.example_user("hamlet"))
            self.assertEqual(
                m.output,
                [
                    "WARNING:root:Could not parse {} field for user {}".format(
                        "jpegPhoto", hamlet.id
                    )
                ],
            )

    def test_deactivate_non_matching_users(self) -> None:
        with self.settings(LDAP_APPEND_DOMAIN="zulip.com", LDAP_DEACTIVATE_NON_MATCHING_USERS=True):
            # othello isn't in our test directory
            result = sync_user_from_ldap(self.example_user("othello"), mock.Mock())

            self.assertTrue(result)
            othello = self.example_user("othello")
            self.assertFalse(othello.is_active)

    def test_update_custom_profile_field(self) -> None:
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__phone_number": "homePhone",
                "custom_profile_field__birthday": "birthDate",
            }
        ):
            self.perform_ldap_sync(self.example_user("hamlet"))
        hamlet = self.example_user("hamlet")
        test_data = [
            {
                "field_name": "Phone number",
                "expected_value": "123456789",
            },
            {
                "field_name": "Birthday",
                "expected_value": "1900-09-08",
            },
        ]
        for test_case in test_data:
            field = CustomProfileField.objects.get(realm=hamlet.realm, name=test_case["field_name"])
            field_value = CustomProfileFieldValue.objects.get(
                user_profile=hamlet, field=field
            ).value
            self.assertEqual(field_value, test_case["expected_value"])
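
    # Note on the convention exercised above (a descriptive sketch, not code used by
    # the tests): AUTH_LDAP_USER_ATTR_MAP keys of the form
    # "custom_profile_field__<field name>" sync the mapped LDAP attribute into the
    # realm's custom profile field of that name, e.g.:
    #
    #     AUTH_LDAP_USER_ATTR_MAP = {
    #         "full_name": "cn",
    #         "custom_profile_field__phone_number": "homePhone",
    #     }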

    def test_update_non_existent_profile_field(self) -> None:
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__non_existent": "homePhone",
            }
        ):
            with self.assertRaisesRegex(
                ZulipLDAPError, "Custom profile field with name non_existent not found"
            ), self.assertLogs("django_auth_ldap", "WARNING") as warn_log:
                self.perform_ldap_sync(self.example_user("hamlet"))
            self.assertEqual(
                warn_log.output,
                [
                    "WARNING:django_auth_ldap:Custom profile field with name non_existent not found. while authenticating hamlet"
                ],
            )

    def test_update_custom_profile_field_invalid_data(self) -> None:
        self.change_ldap_user_attr("hamlet", "birthDate", "9999")

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__birthday": "birthDate",
            }
        ):
            with self.assertRaisesRegex(
                ZulipLDAPError, "Invalid data for birthday field"
            ), self.assertLogs("django_auth_ldap", "WARNING") as warn_log:
                self.perform_ldap_sync(self.example_user("hamlet"))
            self.assertEqual(
                warn_log.output,
                [
                    "WARNING:django_auth_ldap:Invalid data for birthday field: Birthday is not a date while authenticating hamlet"
                ],
            )

    def test_update_custom_profile_field_no_mapping(self) -> None:
        hamlet = self.example_user("hamlet")
        no_op_field = CustomProfileField.objects.get(realm=hamlet.realm, name="Phone number")
        expected_value = CustomProfileFieldValue.objects.get(
            user_profile=hamlet, field=no_op_field
        ).value

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__birthday": "birthDate",
            }
        ):
            self.perform_ldap_sync(self.example_user("hamlet"))

        actual_value = CustomProfileFieldValue.objects.get(
            user_profile=hamlet, field=no_op_field
        ).value
        self.assertEqual(actual_value, expected_value)

    def test_update_custom_profile_field_no_update(self) -> None:
        hamlet = self.example_user("hamlet")
        phone_number_field = CustomProfileField.objects.get(realm=hamlet.realm, name="Phone number")
        birthday_field = CustomProfileField.objects.get(realm=hamlet.realm, name="Birthday")
        phone_number_field_value = CustomProfileFieldValue.objects.get(
            user_profile=hamlet, field=phone_number_field
        )
        phone_number_field_value.value = "123456789"
        phone_number_field_value.save(update_fields=["value"])
        expected_call_args = [
            hamlet,
            [
                {
                    "id": birthday_field.id,
                    "value": "1900-09-08",
                },
            ],
        ]
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__birthday": "birthDate",
                "custom_profile_field__phone_number": "homePhone",
            }
        ):
            with mock.patch("zproject.backends.do_update_user_custom_profile_data_if_changed") as f:
                self.perform_ldap_sync(self.example_user("hamlet"))
            f.assert_called_once_with(*expected_call_args)

    def test_update_custom_profile_field_not_present_in_ldap(self) -> None:
        hamlet = self.example_user("hamlet")
        no_op_field = CustomProfileField.objects.get(realm=hamlet.realm, name="Birthday")
        expected_value = CustomProfileFieldValue.objects.get(
            user_profile=hamlet, field=no_op_field
        ).value

        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "custom_profile_field__birthday": "nonExistentAttr",
            }
        ), self.assertLogs("django_auth_ldap", "WARNING") as warn_log:
            self.perform_ldap_sync(self.example_user("hamlet"))

        actual_value = CustomProfileFieldValue.objects.get(
            user_profile=hamlet, field=no_op_field
        ).value
        self.assertEqual(actual_value, expected_value)
        self.assertEqual(
            warn_log.output,
            [
                "WARNING:django_auth_ldap:uid=hamlet,ou=users,dc=zulip,dc=com does not have a value for the attribute nonExistentAttr"
            ],
        )


class TestQueryLDAP(ZulipLDAPTestCase):
    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",))
    def test_ldap_not_configured(self) -> None:
        values = query_ldap(self.example_email("hamlet"))
        self.assertEqual(values, ["LDAP backend not configured on this server."])

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_user_not_present(self) -> None:
        # othello doesn't have an entry in our test directory
        values = query_ldap(self.example_email("othello"))
        self.assert_length(values, 1)
        self.assertIn("No such user found", values[0])

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_normal_query(self) -> None:
        with self.settings(
            AUTH_LDAP_USER_ATTR_MAP={
                "full_name": "cn",
                "avatar": "jpegPhoto",
                "custom_profile_field__birthday": "birthDate",
                "custom_profile_field__phone_number": "nonExistentAttr",
            }
        ):
            values = query_ldap(self.example_email("hamlet"))
            self.assert_length(values, 4)
            self.assertIn("full_name: King Hamlet", values)
            self.assertIn("avatar: (An avatar image file)", values)
            self.assertIn("custom_profile_field__birthday: 1900-09-08", values)
            self.assertIn("custom_profile_field__phone_number: LDAP field not present", values)

    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_query_email_attr(self) -> None:
        with self.settings(AUTH_LDAP_USER_ATTR_MAP={"full_name": "cn"}, LDAP_EMAIL_ATTR="mail"):
            # This will look up the user by email in our test directory and
            # should successfully find hamlet's LDAP entry.
            values = query_ldap(self.example_email("hamlet"))
            self.assert_length(values, 2)
            self.assertIn("full_name: King Hamlet", values)
            self.assertIn("email: hamlet@zulip.com", values)


class TestZulipAuthMixin(ZulipTestCase):
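    # ZulipAuthMixin.get_user should return None, rather than raising, when
    # given a user ID that does not exist.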
    def test_get_user(self) -> None:
        backend = ZulipAuthMixin()
        result = backend.get_user(11111)
        self.assertIs(result, None)


class TestPasswordAuthEnabled(ZulipTestCase):
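    # password_auth_enabled should report True when the LDAP backend (a
    # password-based authentication method) is the enabled backend.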
    def test_password_auth_enabled_for_ldap(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",)):
            realm = Realm.objects.get(string_id="zulip")
            self.assertTrue(password_auth_enabled(realm))


class TestRequireEmailFormatUsernames(ZulipTestCase):
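    # require_email_format_usernames should be False whenever the LDAP backend
    # is configured with LDAP_APPEND_DOMAIN or LDAP_EMAIL_ATTR (so usernames
    # need not be email addresses), even when EmailAuthBackend is also enabled,
    # and True when only EmailAuthBackend is enabled.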
    def test_require_email_format_usernames_for_ldap_with_append_domain(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
            LDAP_APPEND_DOMAIN="zulip.com",
        ):
            realm = Realm.objects.get(string_id="zulip")
            self.assertFalse(require_email_format_usernames(realm))

    def test_require_email_format_usernames_for_ldap_with_email_attr(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",),
            LDAP_EMAIL_ATTR="email",
        ):
            realm = Realm.objects.get(string_id="zulip")
            self.assertFalse(require_email_format_usernames(realm))

    def test_require_email_format_usernames_for_email_only(self) -> None:
        with self.settings(AUTHENTICATION_BACKENDS=("zproject.backends.EmailAuthBackend",)):
            realm = Realm.objects.get(string_id="zulip")
            self.assertTrue(require_email_format_usernames(realm))

    def test_require_email_format_usernames_for_email_and_ldap_with_email_attr(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.ZulipLDAPAuthBackend",
            ),
            LDAP_EMAIL_ATTR="email",
        ):
            realm = Realm.objects.get(string_id="zulip")
            self.assertFalse(require_email_format_usernames(realm))

    def test_require_email_format_usernames_for_email_and_ldap_with_append_email(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.ZulipLDAPAuthBackend",
            ),
            LDAP_APPEND_DOMAIN="zulip.com",
        ):
            realm = Realm.objects.get(string_id="zulip")
            self.assertFalse(require_email_format_usernames(realm))


class TestMaybeSendToRegistration(ZulipTestCase):
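    # maybe_send_to_registration handles the case where an external
    # authentication method succeeds for an email with no existing account: it
    # should create a PreregistrationUser and redirect to the do_confirm
    # registration flow, carrying over stream subscriptions from any existing
    # PreregistrationUser for that email.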
    def test_sso_only_when_preregistration_user_does_not_exist(self) -> None:
        request = HostRequestMock(host=Realm.host_for_subdomain("zulip"))

        # Creating a mock Django form in order to keep the test simple.
        # This form will be returned by the create_homepage_form function
        # and will always be valid so that the code that we want to test
        # actually runs.
        class Form:
            def is_valid(self) -> bool:
                return True

        with mock.patch("zerver.views.auth.HomepageForm", return_value=Form()):
            self.assertEqual(PreregistrationUser.objects.all().count(), 0)
            result = maybe_send_to_registration(
                request, self.example_email("hamlet"), is_signup=True
            )
            self.assertEqual(result.status_code, 302)
            confirmation = Confirmation.objects.all().first()
            assert confirmation is not None
            confirmation_key = confirmation.confirmation_key
            self.assertIn("do_confirm/" + confirmation_key, result["Location"])
            self.assertEqual(PreregistrationUser.objects.all().count(), 1)

        response = self.client_get(result["Location"])
        self.assert_in_response('action="/accounts/register/"', response)
        self.assert_in_response(f'value="{confirmation_key}" name="key"', response)

    def test_sso_only_when_preregistration_user_exists(self) -> None:
        request = HostRequestMock(host=Realm.host_for_subdomain("zulip"))

        realm = get_realm("zulip")

        # Creating a mock Django form in order to keep the test simple.
        # This form will be returned by the create_homepage_form function
        # and will always be valid so that the code that we want to test
        # actually runs.
        class Form:
            def is_valid(self) -> bool:
                return True

        email = self.example_email("hamlet")
        user = PreregistrationUser(email=email, realm=realm)
        streams = Stream.objects.filter(realm=realm)
        user.save()
        user.streams.set(streams)

        create_confirmation_link(user, Confirmation.USER_REGISTRATION)

        with mock.patch("zerver.views.auth.HomepageForm", return_value=Form()):
            self.assertEqual(PreregistrationUser.objects.all().count(), 1)
            result = maybe_send_to_registration(request, email, is_signup=True)
            self.assertEqual(result.status_code, 302)
            confirmation = Confirmation.objects.all().last()
            assert confirmation is not None
            confirmation_key = confirmation.confirmation_key
            self.assertIn("do_confirm/" + confirmation_key, result["Location"])
            prereg_users = list(PreregistrationUser.objects.all())
            self.assert_length(prereg_users, 2)
            self.assertEqual(
                list(prereg_users[0].streams.all().order_by("id")),
                list(prereg_users[1].streams.all().order_by("id")),
            )


class TestAdminSetBackends(ZulipTestCase):
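    # Tests for changing a realm's enabled authentication methods via the
    # /json/realm endpoint: only organization owners may change them, at least
    # one method must stay enabled, and only backends supported by the server
    # are updated.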
    def test_change_enabled_backends(self) -> None:
        # Log in as admin
        self.login("iago")
        result = self.client_patch(
            "/json/realm",
            {"authentication_methods": orjson.dumps({"Email": False, "Dev": True}).decode()},
        )
        self.assert_json_error(result, "Must be an organization owner")

        self.login("desdemona")
        result = self.client_patch(
            "/json/realm",
            {"authentication_methods": orjson.dumps({"Email": False, "Dev": True}).decode()},
        )
        self.assert_json_success(result)
        realm = get_realm("zulip")
        self.assertFalse(password_auth_enabled(realm))
        self.assertTrue(dev_auth_enabled(realm))

    def test_disable_all_backends(self) -> None:
        # Log in as admin
        self.login("desdemona")
        result = self.client_patch(
            "/json/realm",
            {"authentication_methods": orjson.dumps({"Email": False, "Dev": False}).decode()},
        )
        self.assert_json_error(result, "At least one authentication method must be enabled.")
        realm = get_realm("zulip")
        self.assertTrue(password_auth_enabled(realm))
        self.assertTrue(dev_auth_enabled(realm))

    def test_supported_backends_only_updated(self) -> None:
        # Log in as admin
        self.login("desdemona")
        # Set some supported and unsupported backends
        result = self.client_patch(
            "/json/realm",
            {
                "authentication_methods": orjson.dumps(
                    {"Email": False, "Dev": True, "GitHub": False}
                ).decode()
            },
        )
        self.assert_json_success(result)
        realm = get_realm("zulip")
        # Check that unsupported backend is not enabled
        self.assertFalse(github_auth_enabled(realm))
        self.assertTrue(dev_auth_enabled(realm))
        self.assertFalse(password_auth_enabled(realm))


class EmailValidatorTestCase(ZulipTestCase):
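    # Tests for the helpers that validate email addresses during login and
    # invitations: validate_login_email, validate_email_is_valid, and
    # get_existing_user_errors.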
    def test_valid_email(self) -> None:
        validate_login_email(self.example_email("hamlet"))

    def test_invalid_email(self) -> None:
        with self.assertRaises(JsonableError):
            validate_login_email("hamlet")

    def test_validate_email(self) -> None:
        inviter = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")

        realm = inviter.realm
        do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
        inviter.realm.refresh_from_db()
        error = validate_email_is_valid(
            "fred+5555@zulip.com",
            get_realm_email_validator(realm),
        )
        assert error is not None
        self.assertIn("containing + are not allowed", error)

        cordelia_email = cordelia.delivery_email
        errors = get_existing_user_errors(realm, {cordelia_email})
        error, is_deactivated = errors[cordelia_email]
        self.assertEqual(False, is_deactivated)
        self.assertEqual(error, "Already has an account.")

        change_user_is_active(cordelia, False)

        errors = get_existing_user_errors(realm, {cordelia_email})
        error, is_deactivated = errors[cordelia_email]
        self.assertEqual(True, is_deactivated)
        self.assertEqual(error, "Account has been deactivated.")

        errors = get_existing_user_errors(realm, {"fred-is-fine@zulip.com"})
        self.assertEqual(errors, {})


class LDAPBackendTest(ZulipTestCase):
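    # If the LDAP backend cannot determine a realm during authentication
    # (REALM_IS_NONE_ERROR), the login attempt should render a configuration
    # error page rather than crash.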
    @override_settings(AUTHENTICATION_BACKENDS=("zproject.backends.ZulipLDAPAuthBackend",))
    def test_non_existing_realm(self) -> None:
        self.init_default_ldap_database()
        user = self.example_user("hamlet")

        data = dict(
            username=user.delivery_email,
            password=initial_password(user.delivery_email),
        )
        error_type = ZulipLDAPAuthBackend.REALM_IS_NONE_ERROR
        error = ZulipLDAPConfigurationError("Realm is None", error_type)
        with mock.patch(
            "zproject.backends.ZulipLDAPAuthBackend.get_or_build_user", side_effect=error
        ), mock.patch("django_auth_ldap.backend._LDAPUser._authenticate_user_dn"), self.assertLogs(
            "django_auth_ldap", "WARNING"
        ) as warn_log:
            response = self.client_post("/login/", data)
        self.assert_in_success_response(
            ["Configuration error", "You are trying to log in using LDAP without creating an"],
            response,
        )
        self.assertEqual(
            warn_log.output,
            ["WARNING:django_auth_ldap:('Realm is None', 1) while authenticating hamlet"],
        )


# Don't load the base class as a test: https://bugs.python.org/issue17519.
del SocialAuthBase