import logging
import secrets
import urllib
from functools import wraps
from typing import TYPE_CHECKING, Any, Callable, Dict, List, Mapping, Optional, Tuple, cast
from urllib.parse import urlencode

import jwt
import orjson
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.views import LoginView as DjangoLoginView
from django.contrib.auth.views import PasswordResetView as DjangoPasswordResetView
from django.contrib.auth.views import logout_then_login as django_logout_then_login
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.forms import Form
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.shortcuts import redirect, render
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.urls import reverse
from django.utils.http import url_has_allowed_host_and_scheme
from django.utils.translation import gettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_safe
from social_django.utils import load_backend, load_strategy
from two_factor.forms import BackupTokenForm
from two_factor.views import LoginView as BaseTwoFactorLoginView
from typing_extensions import Concatenate, ParamSpec, TypeAlias

from confirmation.models import (
    Confirmation,
    ConfirmationKeyError,
    create_confirmation_link,
    get_object_from_key,
    render_confirmation_key_error,
)
from version import API_FEATURE_LEVEL, ZULIP_MERGE_BASE, ZULIP_VERSION
from zerver.context_processors import get_realm_from_request, login_context, zulip_default_context
from zerver.decorator import do_login, log_view_func, process_client, require_post
from zerver.forms import (
    DEACTIVATED_ACCOUNT_ERROR,
    AuthenticationTokenForm,
    HomepageForm,
    OurAuthenticationForm,
    ZulipPasswordResetForm,
)
from zerver.lib.exceptions import (
    AuthenticationFailedError,
    InvalidSubdomainError,
    JsonableError,
    PasswordAuthDisabledError,
    PasswordResetRequiredError,
    RateLimitedError,
    RealmDeactivatedError,
    UserDeactivatedError,
)
from zerver.lib.mobile_auth_otp import otp_encrypt_api_key
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.pysa import mark_sanitized
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_success
from zerver.lib.sessions import set_expirable_session_var
from zerver.lib.subdomains import get_subdomain, is_subdomain_root_or_alias
from zerver.lib.url_encoding import append_url_query_string
from zerver.lib.user_agent import parse_user_agent
from zerver.lib.users import get_api_key, get_users_for_api, is_2fa_verified
from zerver.lib.utils import has_api_key_format
from zerver.lib.validator import check_bool, validate_login_email
from zerver.models import (
    MultiuseInvite,
    PreregistrationRealm,
    PreregistrationUser,
    Realm,
    UserProfile,
    filter_to_valid_prereg_users,
    get_realm,
    remote_user_to_email,
)
from zerver.signals import email_on_new_login
from zerver.views.errors import config_error
from zproject.backends import (
    AUTH_BACKEND_NAME_MAP,
    AppleAuthBackend,
    ExternalAuthDataDict,
    ExternalAuthResult,
    GenericOpenIdConnectBackend,
    SAMLAuthBackend,
    SAMLSPInitiatedLogout,
    ZulipLDAPAuthBackend,
    ZulipLDAPConfigurationError,
    ZulipRemoteUserBackend,
    auth_enabled_helper,
    dev_auth_enabled,
    ldap_auth_enabled,
    password_auth_enabled,
    saml_auth_enabled,
    validate_otp_params,
)

if TYPE_CHECKING:
    from django.http.request import _ImmutableQueryDict

ParamT = ParamSpec("ParamT")

ExtraContext: TypeAlias = Optional[Dict[str, Any]]

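# Session state stashed for the mobile/desktop auth flows (e.g. the OTP keys
# stored in maybe_send_to_registration below) is kept for an hour.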
EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS = 3600


def get_safe_redirect_to(url: str, redirect_host: str) -> str:
    is_url_safe = url_has_allowed_host_and_scheme(url=url, allowed_hosts=None)
    if is_url_safe:
        # Mark as safe to prevent Pysa from surfacing false positives for
        # open redirects. In this branch, we have already checked that the URL
        # points to the specified 'redirect_host', or is relative.
        return urllib.parse.urljoin(redirect_host, mark_sanitized(url))
    else:
        return redirect_host


def create_preregistration_user(
    email: str,
    realm: Optional[Realm],
    password_required: bool = True,
    full_name: Optional[str] = None,
    full_name_validated: bool = False,
    multiuse_invite: Optional[MultiuseInvite] = None,
) -> PreregistrationUser:
    return PreregistrationUser.objects.create(
        email=email,
        password_required=password_required,
        realm=realm,
        full_name=full_name,
        full_name_validated=full_name_validated,
        multiuse_invite=multiuse_invite,
    )


def create_preregistration_realm(
    email: str,
    name: str,
    string_id: str,
    org_type: int,
    default_language: str,
) -> PreregistrationRealm:
    return PreregistrationRealm.objects.create(
        email=email,
        name=name,
        string_id=string_id,
        org_type=org_type,
        default_language=default_language,
    )


def maybe_send_to_registration(
    request: HttpRequest,
    email: str,
    full_name: str = "",
    mobile_flow_otp: Optional[str] = None,
    desktop_flow_otp: Optional[str] = None,
    is_signup: bool = False,
    multiuse_object_key: str = "",
    full_name_validated: bool = False,
    params_to_store_in_authenticated_session: Optional[Dict[str, str]] = None,
) -> HttpResponse:
    """Given a successful authentication for an email address (i.e. we've
    confirmed the user controls the email address) that does not
    currently have a Zulip account in the target realm, send them to
    the registration flow or the "continue to registration" flow,
    depending on is_signup, whether the email address can join the
    organization (checked in HomepageForm), and similar details.
    """

    # In the desktop and mobile registration flows, the sign up
    # happens in the browser so the user can use their
    # already-logged-in social accounts. Then at the end, with the
    # user account created, we pass the appropriate data to the app
    # via e.g. a `zulip://` redirect. We store the OTP keys for the
    # mobile/desktop flow in the session with 1-hour expiry, because
    # we want this configuration of having a successful authentication
    # result in being logged into the app to persist if the user makes
    # mistakes while trying to authenticate (E.g. clicks the wrong
    # Google account, hits back, etc.) during a given browser session,
    # rather than just logging into the web app in the target browser.
    #
    # We can't use our usual pre-account-creation state storage
    # approach of putting something in PreregistrationUser, because
    # that would apply to future registration attempts on other
    # devices, e.g. just creating an account on the web on their laptop.
    assert not (mobile_flow_otp and desktop_flow_otp)
    if mobile_flow_otp:
        set_expirable_session_var(
            request.session,
            "registration_mobile_flow_otp",
            mobile_flow_otp,
            expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
        )
    elif desktop_flow_otp:
        set_expirable_session_var(
            request.session,
            "registration_desktop_flow_otp",
            desktop_flow_otp,
            expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
        )
        if params_to_store_in_authenticated_session:
            set_expirable_session_var(
                request.session,
                "registration_desktop_flow_params_to_store_in_authenticated_session",
                orjson.dumps(params_to_store_in_authenticated_session).decode(),
                expiry_seconds=EXPIRABLE_SESSION_VAR_DEFAULT_EXPIRY_SECS,
            )

    try:
        # TODO: This should use get_realm_from_request, but a bunch of tests
        # rely on mocking get_subdomain here, so they'll need to be tweaked first.
        realm: Optional[Realm] = get_realm(get_subdomain(request))
    except Realm.DoesNotExist:
        realm = None

    multiuse_obj: Optional[MultiuseInvite] = None
    from_multiuse_invite = False
    if multiuse_object_key:
        from_multiuse_invite = True
        try:
            confirmation_obj = get_object_from_key(
                multiuse_object_key, [Confirmation.MULTIUSE_INVITE], mark_object_used=False
            )
        except ConfirmationKeyError as exception:
            return render_confirmation_key_error(request, exception)

        assert isinstance(confirmation_obj, MultiuseInvite)
        multiuse_obj = confirmation_obj
        if realm != multiuse_obj.realm:
            return render(request, "confirmation/link_does_not_exist.html", status=404)

        invited_as = multiuse_obj.invited_as
    else:
        invited_as = PreregistrationUser.INVITE_AS["MEMBER"]

    form = HomepageForm(
        {"email": email},
        realm=realm,
        from_multiuse_invite=from_multiuse_invite,
        invited_as=invited_as,
    )
    if form.is_valid():
        # If the email address is allowed to sign up for an account in
        # this organization, construct a PreregistrationUser and
        # Confirmation objects, and then send the user to account
        # creation or confirm-continue-registration depending on
        # is_signup.
        try:
            # If there's an existing, valid PreregistrationUser for this
            # user, we want to fetch it since some values from it will be used
            # as defaults for creating the signed up user.
            existing_prereg_user = filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(email__iexact=email, realm=realm)
            ).latest("invited_at")
        except PreregistrationUser.DoesNotExist:
            existing_prereg_user = None

        # full_name data passed here as argument should take precedence
        # over the defaults with which the existing PreregistrationUser that we've just fetched
        # was created.
        prereg_user = create_preregistration_user(
            email,
            realm,
            password_required=False,
            full_name=full_name,
            full_name_validated=full_name_validated,
            multiuse_invite=multiuse_obj,
        )

        streams_to_subscribe = None
        if multiuse_obj is not None:
            # If the user came here explicitly via a multiuse invite link, then
            # we use the defaults implied by the invite.
            streams_to_subscribe = list(multiuse_obj.streams.all())
        elif existing_prereg_user:
            # Otherwise, the user is doing this signup not via any invite link,
            # but we can use the pre-existing PreregistrationUser for these values
            # since it tells how they were intended to be, when the user was invited.
            streams_to_subscribe = list(existing_prereg_user.streams.all())
            invited_as = existing_prereg_user.invited_as

        if streams_to_subscribe:
            prereg_user.streams.set(streams_to_subscribe)
        prereg_user.invited_as = invited_as
        prereg_user.multiuse_invite = multiuse_obj
        prereg_user.save()

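        # Send the user on to finish creating the account: directly to the
        # confirmation link for explicit signups, otherwise via the
        # "continue to registration" page rendered below.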
        confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
        if is_signup:
            return redirect(confirmation_link)

        context = {"email": email, "continue_link": confirmation_link, "full_name": full_name}
        return render(request, "zerver/confirm_continue_registration.html", context=context)

    # This email address is not allowed to join this organization, so
    # just send the user back to the registration page.
    url = reverse("register")
    context = login_context(request)
    extra_context: Mapping[str, Any] = {
        "form": form,
        "current_url": lambda: url,
        "from_multiuse_invite": from_multiuse_invite,
        "multiuse_object_key": multiuse_object_key,
        "mobile_flow_otp": mobile_flow_otp,
        "desktop_flow_otp": desktop_flow_otp,
    }
    context.update(extra_context)
    return render(request, "zerver/accounts_home.html", context=context)


def register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
    # We have verified the user controls an email address, but
    # there's no associated Zulip user account. Consider sending
    # the request to registration.
    kwargs: Dict[str, Any] = dict(result.data_dict)
    # These are the kwargs taken by maybe_send_to_registration; remove anything
    # else from the dict, since it doesn't accept other arguments.
    kwargs_to_pass = [
        "email",
        "full_name",
        "mobile_flow_otp",
        "desktop_flow_otp",
        "is_signup",
        "multiuse_object_key",
        "full_name_validated",
        "params_to_store_in_authenticated_session",
    ]
    for key in dict(kwargs):
        if key not in kwargs_to_pass:
            kwargs.pop(key, None)

    return maybe_send_to_registration(request, **kwargs)


def login_or_register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
    """Given a successful authentication showing the user controls the given
    email address (email) and potentially a UserProfile
    object (if the user already has a Zulip account), redirect the
    browser to the appropriate place:

    * The logged-in app if the user already has a Zulip account and is
      trying to log in, potentially to an initial narrow or page that had been
      saved in the `redirect_to` parameter.
    * The registration form if is_signup was set (i.e. the user is
      trying to create a Zulip account).
    * A special `confirm_continue_registration.html` "do you want to
      register or try another account" page if the user doesn't have a
      Zulip account but is_signup is False (i.e. the user tried to log in
      and then did social authentication selecting an email address that does
      not have a Zulip account in this organization).
    * A zulip:// URL to send control back to the mobile or desktop apps if they
      are doing authentication using the mobile_flow_otp or desktop_flow_otp flow.
    """

    params_to_store_in_authenticated_session = result.data_dict.get(
        "params_to_store_in_authenticated_session", {}
    )
    mobile_flow_otp = result.data_dict.get("mobile_flow_otp")
    desktop_flow_otp = result.data_dict.get("desktop_flow_otp")
    if not mobile_flow_otp and not desktop_flow_otp:
        # We don't want to store anything in the browser session if we're doing
        # mobile or desktop flows, since that's just an intermediary step and the
        # browser session is not to be used any further. Storing extra data in
        # it just risks bugs or leaking the data.
        for key, value in params_to_store_in_authenticated_session.items():
            request.session[key] = value

    user_profile = result.user_profile
    if user_profile is None or user_profile.is_mirror_dummy:
        return register_remote_user(request, result)
    # Otherwise, the user has successfully authenticated to an
    # account, and we need to do the right thing depending on whether
    # they're using the mobile OTP flow or want a browser session.
    is_realm_creation = result.data_dict.get("is_realm_creation")
    if mobile_flow_otp is not None:
        return finish_mobile_flow(request, user_profile, mobile_flow_otp)
    elif desktop_flow_otp is not None:
        return finish_desktop_flow(
            request, user_profile, desktop_flow_otp, params_to_store_in_authenticated_session
        )

    do_login(request, user_profile)

    redirect_to = result.data_dict.get("redirect_to", "")
    if is_realm_creation is not None and settings.BILLING_ENABLED:
        from corporate.lib.stripe import is_free_trial_offer_enabled

        if is_free_trial_offer_enabled():
            redirect_to = reverse("upgrade_page")

    redirect_to = get_safe_redirect_to(redirect_to, user_profile.realm.uri)
    return HttpResponseRedirect(redirect_to)


def finish_desktop_flow(
    request: HttpRequest,
    user_profile: UserProfile,
    otp: str,
    params_to_store_in_authenticated_session: Optional[Dict[str, str]] = None,
) -> HttpResponse:
    """
    The desktop otp flow returns to the app (through the clipboard)
    a token that allows obtaining (through log_into_subdomain) a logged in session
    for the user account we authenticated in this flow.
    The token can only be used once and within ExternalAuthResult.LOGIN_KEY_EXPIRATION_SECONDS
    of being created, as nothing more powerful is needed for the desktop flow
    and this ensures the key can only be used for completing this authentication attempt.
    """
    data_dict = None
    if params_to_store_in_authenticated_session:
        data_dict = ExternalAuthDataDict(
            params_to_store_in_authenticated_session=params_to_store_in_authenticated_session
        )

    result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)

    token = result.store_data()
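    # Encrypt the one-time login token with the OTP the desktop app provided,
    # using AES-GCM with a fresh 12-byte IV prepended to the ciphertext so the
    # app can decrypt it.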
    key = bytes.fromhex(otp)
    iv = secrets.token_bytes(12)
    desktop_data = (iv + AESGCM(key).encrypt(iv, token.encode(), b"")).hex()
    context = {
        "desktop_data": desktop_data,
        "browser_url": reverse("login_page", kwargs={"template_name": "zerver/login.html"}),
        "realm_icon_url": realm_icon_url(user_profile.realm),
    }
    return TemplateResponse(request, "zerver/desktop_redirect.html", context=context)


def finish_mobile_flow(request: HttpRequest, user_profile: UserProfile, otp: str) -> HttpResponse:
    # For the mobile OAuth flow, we send the API key and other
    # necessary details in a redirect to a zulip:// URI scheme.
    api_key = get_api_key(user_profile)
    response = create_response_for_otp_flow(
        api_key, otp, user_profile, encrypted_key_field_name="otp_encrypted_api_key"
    )

    # Since we are returning an API key instead of going through
    # the Django login() function (which creates a browser
    # session, etc.), the "new login" signal handler (which
    # triggers an email notification for new logins) will not run
    # automatically. So we call it manually here.
    #
    # Arguably, sending a fake 'user_logged_in' signal would be a better approach:
    # user_logged_in.send(sender=type(user_profile), request=request, user=user_profile)
    email_on_new_login(sender=type(user_profile), request=request, user=user_profile)

    # Mark this request as having a logged-in user for our server logs.
    process_client(request, user_profile)
    RequestNotes.get_notes(request).requester_for_logs = user_profile.format_requester_for_logs()

    return response


def create_response_for_otp_flow(
    key: str, otp: str, user_profile: UserProfile, encrypted_key_field_name: str
) -> HttpResponse:
    realm_uri = user_profile.realm.uri

    # Check if the mobile URI is overridden in settings; if so, replace it.
    # This block should only apply to the mobile flow, so if we add others, this
    # needs to be conditional.
    if realm_uri in settings.REALM_MOBILE_REMAP_URIS:
        realm_uri = settings.REALM_MOBILE_REMAP_URIS[realm_uri]

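    # The API key is encrypted with the one-time pad supplied by the app
    # (see zerver.lib.mobile_auth_otp) and passed back via a zulip:// URL.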
    params = {
        encrypted_key_field_name: otp_encrypt_api_key(key, otp),
        "email": user_profile.delivery_email,
        "user_id": user_profile.id,
        "realm": realm_uri,
    }
    # We can't use HttpResponseRedirect, since it only allows HTTP(S) URLs
    response = HttpResponse(status=302)
    response["Location"] = append_url_query_string("zulip://login", urllib.parse.urlencode(params))

    return response


@log_view_func
@has_request_variables
def remote_user_sso(
    request: HttpRequest,
    mobile_flow_otp: Optional[str] = REQ(default=None),
    desktop_flow_otp: Optional[str] = REQ(default=None),
    next: str = REQ(default="/"),
) -> HttpResponse:
    subdomain = get_subdomain(request)
    try:
        realm: Optional[Realm] = get_realm(subdomain)
    except Realm.DoesNotExist:
        realm = None

    if not auth_enabled_helper([ZulipRemoteUserBackend.auth_backend_name], realm):
        return config_error(request, "remote_user_backend_disabled")

    try:
        remote_user = request.META["REMOTE_USER"]
    except KeyError:
        return config_error(request, "remote_user_header_missing")

    # Django invokes authenticate methods by matching arguments, and this
    # authentication flow will not invoke LDAP authentication because of
    # that Django behavior, so there is no need to check whether the LDAP
    # backend is enabled.
    validate_login_email(remote_user_to_email(remote_user))

    # Here we support the mobile and desktop flow for REMOTE_USER_BACKEND; we
    # validate the data format and then pass it through to
    # login_or_register_remote_user if appropriate.
    validate_otp_params(mobile_flow_otp, desktop_flow_otp)

    if realm is None:
        user_profile = None
    else:
        user_profile = authenticate(remote_user=remote_user, realm=realm)
    if user_profile is not None:
        assert isinstance(user_profile, UserProfile)

    email = remote_user_to_email(remote_user)
    data_dict = ExternalAuthDataDict(
        email=email,
        mobile_flow_otp=mobile_flow_otp,
        desktop_flow_otp=desktop_flow_otp,
        redirect_to=next,
    )
    if realm:
        data_dict["subdomain"] = realm.subdomain
    else:
        data_dict["subdomain"] = ""  # realm creation happens on root subdomain
    result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
    return login_or_register_remote_user(request, result)


@has_request_variables
def get_email_and_realm_from_jwt_authentication_request(
    request: HttpRequest, json_web_token: str
) -> Tuple[str, Realm]:
    realm = get_realm_from_request(request)
    if realm is None:
        raise InvalidSubdomainError

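    # JWT_AUTH_KEYS is keyed by realm subdomain; each entry supplies the
    # verification key and the accepted signature algorithms for that realm.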
    try:
        key = settings.JWT_AUTH_KEYS[realm.subdomain]["key"]
        algorithms = settings.JWT_AUTH_KEYS[realm.subdomain]["algorithms"]
    except KeyError:
        raise JsonableError(_("JWT authentication is not enabled for this organization"))

    if not json_web_token:
        raise JsonableError(_("No JSON web token passed in request"))

    try:
        options = {"verify_signature": True}
        payload = jwt.decode(json_web_token, key, algorithms=algorithms, options=options)
    except jwt.InvalidTokenError:
        raise JsonableError(_("Bad JSON web token"))

    remote_email = payload.get("email", None)
    if remote_email is None:
        raise JsonableError(_("No email specified in JSON web token claims"))

    return remote_email, realm


@csrf_exempt
@require_post
@log_view_func
@has_request_variables
def remote_user_jwt(request: HttpRequest, token: str = REQ(default="")) -> HttpResponse:
    email, realm = get_email_and_realm_from_jwt_authentication_request(request, token)

    user_profile = authenticate(username=email, realm=realm, use_dummy_backend=True)
    if user_profile is None:
        result = ExternalAuthResult(
            data_dict={"email": email, "full_name": "", "subdomain": realm.subdomain}
        )
    else:
        assert isinstance(user_profile, UserProfile)
        result = ExternalAuthResult(user_profile=user_profile)

    return login_or_register_remote_user(request, result)


@has_request_variables
def oauth_redirect_to_root(
    request: HttpRequest,
    url: str,
    sso_type: str,
    is_signup: bool = False,
    extra_url_params: Mapping[str, str] = {},
    next: Optional[str] = REQ(default=None),
    multiuse_object_key: str = REQ(default=""),
    mobile_flow_otp: Optional[str] = REQ(default=None),
    desktop_flow_otp: Optional[str] = REQ(default=None),
) -> HttpResponse:
    main_site_url = settings.ROOT_DOMAIN_URI + url
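    # When a dedicated subdomain is configured for social authentication,
    # social logins are sent there rather than to the root domain.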
    if settings.SOCIAL_AUTH_SUBDOMAIN is not None and sso_type == "social":
        main_site_url = (
            settings.EXTERNAL_URI_SCHEME
            + settings.SOCIAL_AUTH_SUBDOMAIN
            + "."
            + settings.EXTERNAL_HOST
        ) + url

    params = {
        "subdomain": get_subdomain(request),
        "is_signup": "1" if is_signup else "0",
    }

    params["multiuse_object_key"] = multiuse_object_key

    # mobile_flow_otp is a one-time pad provided by the app that we
    # can use to encrypt the API key when passing back to the app.
    validate_otp_params(mobile_flow_otp, desktop_flow_otp)
    if mobile_flow_otp is not None:
        params["mobile_flow_otp"] = mobile_flow_otp
    if desktop_flow_otp is not None:
        params["desktop_flow_otp"] = desktop_flow_otp

    if next:
        params["next"] = next

    params = {**params, **extra_url_params}

    return redirect(append_url_query_string(main_site_url, urllib.parse.urlencode(params)))


def handle_desktop_flow(
    func: Callable[Concatenate[HttpRequest, ParamT], HttpResponse]
) -> Callable[Concatenate[HttpRequest, ParamT], HttpResponse]:
    @wraps(func)
    def wrapper(
        request: HttpRequest, /, *args: ParamT.args, **kwargs: ParamT.kwargs
    ) -> HttpResponse:
        user_agent = parse_user_agent(request.headers.get("User-Agent", "Missing User-Agent"))
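        # Requests from the Zulip desktop app (which identifies itself via this
        # User-Agent) get the desktop login page instead of the wrapped view.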
        if user_agent["name"] == "ZulipElectron":
            return render(request, "zerver/desktop_login.html")

        return func(request, *args, **kwargs)

    return wrapper


@handle_desktop_flow
def start_remote_user_sso(request: HttpRequest) -> HttpResponse:
    """
    The purpose of this endpoint is to provide an initial step in the flow
    where we can handle the special behavior for the desktop app.
    /accounts/login/sso may have Apache intercepting requests to it
    to do authentication, so we need this additional endpoint.
    """
    query = request.META["QUERY_STRING"]
    return redirect(append_url_query_string(reverse(remote_user_sso), query))


@handle_desktop_flow
def start_social_login(
    request: HttpRequest,
    backend: str,
    extra_arg: Optional[str] = None,
) -> HttpResponse:
    backend_url = reverse("social:begin", args=[backend])
    extra_url_params: Dict[str, str] = {}
    if backend == "saml":
        if not SAMLAuthBackend.check_config():
            return config_error(request, "saml")

        # This backend requires the name of the IdP (from the list of configured ones)
        # to be passed as the parameter.
        if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
            logging.info(
                "Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
            )
            return config_error(request, "saml")
        extra_url_params = {"idp": extra_arg}

    if backend == "apple" and not AppleAuthBackend.check_config():
        return config_error(request, "apple")
    if backend == "oidc" and not GenericOpenIdConnectBackend.check_config():
        return config_error(request, "oidc")

    # TODO: Add AzureAD also.
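    # These OAuth backends are usable only if both the client key and secret
    # are configured.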
    if backend in ["github", "google", "gitlab"]:
        key_setting = "SOCIAL_AUTH_" + backend.upper() + "_KEY"
        secret_setting = "SOCIAL_AUTH_" + backend.upper() + "_SECRET"
        if not (getattr(settings, key_setting) and getattr(settings, secret_setting)):
            return config_error(request, backend)

    return oauth_redirect_to_root(request, backend_url, "social", extra_url_params=extra_url_params)


@handle_desktop_flow
def start_social_signup(
    request: HttpRequest,
    backend: str,
    extra_arg: Optional[str] = None,
) -> HttpResponse:
    backend_url = reverse("social:begin", args=[backend])
    extra_url_params: Dict[str, str] = {}
    if backend == "saml":
        if not SAMLAuthBackend.check_config():
            return config_error(request, "saml")

        if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
            logging.info(
                "Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
            )
            return config_error(request, "saml")
        extra_url_params = {"idp": extra_arg}
    return oauth_redirect_to_root(
        request, backend_url, "social", is_signup=True, extra_url_params=extra_url_params
    )
|
|
|
|
|


_subdomain_token_salt = "zerver.views.auth.log_into_subdomain"


@log_view_func
def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
    """Given a valid authentication token (generated by
    redirect_and_log_into_subdomain called on auth.zulip.example.com),
    call login_or_register_remote_user, passing all the authentication
    result data that has been stored in Redis, associated with this token.
    """
    # The tokens are intended to have the same format as API keys.
    if not has_api_key_format(token):
        logging.warning("log_into_subdomain: Malformed token given: %s", token)
        return HttpResponse(status=400)

    try:
        result = ExternalAuthResult(login_token=token)
    except ExternalAuthResult.InvalidTokenError:
        logging.warning("log_into_subdomain: Invalid token given: %s", token)
        return render(request, "zerver/log_into_subdomain_token_invalid.html", status=400)

    subdomain = get_subdomain(request)
    if result.data_dict["subdomain"] != subdomain:
        raise JsonableError(_("Invalid subdomain"))

    return login_or_register_remote_user(request, result)


def redirect_and_log_into_subdomain(result: ExternalAuthResult) -> HttpResponse:
    token = result.store_data()
    realm = get_realm(result.data_dict["subdomain"])
    subdomain_login_uri = realm.uri + reverse(log_into_subdomain, args=[token])
    return redirect(subdomain_login_uri)
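
# A minimal sketch of the round trip between the two functions above; the
# data_dict values are assumed for illustration only:
#
#     # On the auth subdomain (see redirect_and_log_into_subdomain):
#     token = result.store_data()        # result is an ExternalAuthResult
#     #   -> browser is redirected to realm.uri + reverse(log_into_subdomain, args=[token])
#     # On the realm's own subdomain (see log_into_subdomain):
#     result = ExternalAuthResult(login_token=token)
#     #   -> the stored authentication data is loaded back and the user is logged in.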


def redirect_to_misconfigured_ldap_notice(request: HttpRequest, error_type: int) -> HttpResponse:
    if error_type == ZulipLDAPAuthBackend.REALM_IS_NONE_ERROR:
        return config_error(request, "ldap")
    else:
        raise AssertionError("Invalid error type")


def show_deactivation_notice(request: HttpRequest) -> HttpResponse:
    realm = get_realm_from_request(request)
    if realm and realm.deactivated:
        context = {"deactivated_domain_name": realm.name}
        if realm.deactivated_redirect is not None:
            context["deactivated_redirect"] = realm.deactivated_redirect
        return render(request, "zerver/deactivated.html", context=context)

    return HttpResponseRedirect(reverse("login_page"))


def redirect_to_deactivation_notice() -> HttpResponse:
    return HttpResponseRedirect(reverse(show_deactivation_notice))


def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> None:
    for key in ("email", "already_registered"):
        if key in request.GET:
            context[key] = request.GET[key]

    deactivated_email = request.GET.get("is_deactivated")
    if deactivated_email is None:
        return
    try:
        validate_email(deactivated_email)
        context["deactivated_account_error"] = DEACTIVATED_ACCOUNT_ERROR.format(
            username=deactivated_email
        )
    except ValidationError:
        logging.info("Invalid email in is_deactivated param to login page: %s", deactivated_email)


class TwoFactorLoginView(BaseTwoFactorLoginView):
    extra_context: ExtraContext = None
    form_list = (
        ("auth", OurAuthenticationForm),
        ("token", AuthenticationTokenForm),
        ("backup", BackupTokenForm),
    )

    def __init__(self, extra_context: ExtraContext = None, *args: Any, **kwargs: Any) -> None:
        self.extra_context = extra_context
        super().__init__(*args, **kwargs)

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        context = super().get_context_data(**kwargs)
        if self.extra_context is not None:
            context.update(self.extra_context)
        update_login_page_context(self.request, context)

        realm = get_realm_from_request(self.request)
        redirect_to = realm.uri if realm else "/"
        context["next"] = self.request.POST.get(
            "next",
            self.request.GET.get("next", redirect_to),
        )
        return context

    def done(self, form_list: List[Form], **kwargs: Any) -> HttpResponse:
        """
        Log in the user and redirect to the desired page.

        We need to override this function so that we can redirect to
        realm.uri instead of '/'.
        """
        realm_uri = self.get_user().realm.uri
        # This mock.patch business is an unpleasant hack that we'd
        # ideally like to remove by instead patching the upstream
        # module to support better configurability of the
        # LOGIN_REDIRECT_URL setting. But until then, it works. We
        # import mock.patch here because mock has an expensive import
        # process involving pbr -> pkgresources (which is really slow).
        from unittest.mock import patch

        with patch.object(settings, "LOGIN_REDIRECT_URL", realm_uri):
            return super().done(form_list, **kwargs)


@has_request_variables
def login_page(
    request: HttpRequest,
    /,
    next: str = REQ(default="/"),
    **kwargs: Any,
) -> HttpResponse:
    if get_subdomain(request) == settings.SOCIAL_AUTH_SUBDOMAIN:
        return social_auth_subdomain_login_page(request)

    # To support previewing the Zulip login pages, we have a special option
    # that disables the default behavior of redirecting logged-in users to the
    # logged-in app.
    is_preview = "preview" in request.GET
    if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
        if request.user.is_authenticated and is_2fa_verified(request.user):
            redirect_to = get_safe_redirect_to(next, request.user.realm.uri)
            return HttpResponseRedirect(redirect_to)
    elif request.user.is_authenticated and not is_preview:
        redirect_to = get_safe_redirect_to(next, request.user.realm.uri)
        return HttpResponseRedirect(redirect_to)
    if is_subdomain_root_or_alias(request) and settings.ROOT_DOMAIN_LANDING_PAGE:
        redirect_url = reverse("realm_redirect")
        if request.GET:
            redirect_url = append_url_query_string(redirect_url, request.GET.urlencode())
        return HttpResponseRedirect(redirect_url)

    realm = get_realm_from_request(request)
    if realm and realm.deactivated:
        return redirect_to_deactivation_notice()

    extra_context = kwargs.pop("extra_context", {})
    extra_context["next"] = next
    if dev_auth_enabled() and kwargs.get("template_name") == "zerver/development/dev_login.html":
        from zerver.views.development.dev_login import add_dev_login_context

        if "new_realm" in request.POST:
            try:
                realm = get_realm(request.POST["new_realm"])
            except Realm.DoesNotExist:
                realm = None

        add_dev_login_context(realm, extra_context)
        if realm and "new_realm" in request.POST:
            # If we're switching realms, redirect to that realm, but
            # only if it actually exists.
            return HttpResponseRedirect(realm.uri)

    if "username" in request.POST:
        extra_context["email"] = request.POST["username"]
    extra_context.update(login_context(request))

    if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
        return start_two_factor_auth(request, extra_context=extra_context, **kwargs)

    try:
        template_response = DjangoLoginView.as_view(
            authentication_form=OurAuthenticationForm, extra_context=extra_context, **kwargs
        )(request)
    except ZulipLDAPConfigurationError as e:
        assert len(e.args) > 1
        return redirect_to_misconfigured_ldap_notice(request, e.args[1])

    if isinstance(template_response, SimpleTemplateResponse):
        # Only those responses that are rendered using a template have
        # a context_data attribute. This attribute doesn't exist otherwise. It is
        # added in the SimpleTemplateResponse class, which is a derived class of
        # HttpResponse. See django.template.response.SimpleTemplateResponse,
        # https://github.com/django/django/blob/2.0/django/template/response.py#L19
        assert template_response.context_data is not None
        update_login_page_context(request, template_response.context_data)

    assert isinstance(template_response, HttpResponse)
    return template_response


def social_auth_subdomain_login_page(request: HttpRequest) -> HttpResponse:
    origin_subdomain = request.session.get("subdomain")
    if origin_subdomain is not None:
        try:
            origin_realm = get_realm(origin_subdomain)
            return HttpResponseRedirect(origin_realm.uri)
        except Realm.DoesNotExist:
            pass

    return render(request, "zerver/auth_subdomain.html", status=400)


def start_two_factor_auth(
    request: HttpRequest, extra_context: ExtraContext = None, **kwargs: Any
) -> HttpResponse:
    two_fa_form_field = "two_factor_login_view-current_step"
    if two_fa_form_field not in request.POST:
        # Here we inject the 2FA step in the request context if it's missing, to
        # force the user to go to the first step of the 2FA authentication process.
        # This seems a bit hackish but simplifies things from a testing point of
        # view. I don't think this can result in anything bad because all the
        # authentication logic runs after the auth step.
        #
        # If we don't do this, we will have to modify a lot of auth tests to
        # insert this variable in the request.
        new_query_dict = request.POST.copy()
        new_query_dict[two_fa_form_field] = "auth"
        new_query_dict._mutable = False
        request.POST = cast("_ImmutableQueryDict", new_query_dict)

    """
    This is how Django implements as_view(), so extra_context will be passed
    to the __init__ method of TwoFactorLoginView.

    def as_view(cls, **initkwargs):
        def view(request, *args, **kwargs):
            self = cls(**initkwargs)
            ...

        return view
    """
    two_fa_view = TwoFactorLoginView.as_view(extra_context=extra_context, **kwargs)
    return two_fa_view(request, **kwargs)
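
# Note: the "auth" value injected above must match the first step name in
# TwoFactorLoginView.form_list ("auth", "token", "backup"), so a request that
# arrives without a wizard step always starts at the username/password form.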


def process_api_key_fetch_authenticate_result(
    request: HttpRequest, user_profile: UserProfile
) -> str:
    assert user_profile.is_authenticated

    # Maybe sending the 'user_logged_in' signal is the better approach:
    #   user_logged_in.send(sender=type(user_profile), request=request, user=user_profile)
    # Not doing this only because over here we don't add the user information
    # in the session. If the signal receiver assumes that we do, then that
    # would cause problems.
    email_on_new_login(sender=type(user_profile), request=request, user=user_profile)

    # Mark this request as having a logged-in user for our server logs.
    assert isinstance(user_profile, UserProfile)
    process_client(request, user_profile)
    RequestNotes.get_notes(request).requester_for_logs = user_profile.format_requester_for_logs()

    api_key = get_api_key(user_profile)
    return api_key


def get_api_key_fetch_authenticate_failure(return_data: Dict[str, bool]) -> JsonableError:
    if return_data.get("inactive_user"):
        return UserDeactivatedError()
    if return_data.get("inactive_realm"):
        return RealmDeactivatedError()
    if return_data.get("password_auth_disabled"):
        return PasswordAuthDisabledError()
    if return_data.get("password_reset_needed"):
        return PasswordResetRequiredError()
    if return_data.get("invalid_subdomain"):
        raise InvalidSubdomainError

    return AuthenticationFailedError()


@csrf_exempt
@require_post
@has_request_variables
def jwt_fetch_api_key(
    request: HttpRequest,
    include_profile: bool = REQ(default=False, json_validator=check_bool),
    token: str = REQ(default=""),
) -> HttpResponse:
    remote_email, realm = get_email_and_realm_from_jwt_authentication_request(request, token)

    return_data: Dict[str, bool] = {}

    user_profile = authenticate(
        username=remote_email, realm=realm, return_data=return_data, use_dummy_backend=True
    )
    if user_profile is None:
        raise get_api_key_fetch_authenticate_failure(return_data)

    assert isinstance(user_profile, UserProfile)

    api_key = process_api_key_fetch_authenticate_result(request, user_profile)

    result: Dict[str, Any] = {
        "api_key": api_key,
        "email": user_profile.delivery_email,
    }

    if include_profile:
        members = get_users_for_api(
            realm,
            user_profile,
            target_user=user_profile,
            client_gravatar=False,
            user_avatar_url_field_optional=False,
            include_custom_profile_fields=False,
        )
        result["user"] = members[user_profile.id]

    return json_success(request, data=result)
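
# A minimal sketch of how a caller might mint the `token` argument, assuming
# PyJWT and a signing secret shared with this server; the claim layout shown is
# an assumption, and the authoritative contract is whatever
# get_email_and_realm_from_jwt_authentication_request expects:
#
#     import jwt
#     token = jwt.encode({"email": "iago@example.com"}, shared_secret, algorithm="HS256")
#
# The signed token is then sent as the `token` POST field to this view.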


@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(
    request: HttpRequest, username: str = REQ(), password: str = REQ()
) -> HttpResponse:
    return_data: Dict[str, bool] = {}

    realm = get_realm_from_request(request)
    if realm is None:
        raise InvalidSubdomainError

    if not ldap_auth_enabled(realm=realm):
        # In case we don't authenticate against LDAP, check for a valid
        # email. LDAP backend can authenticate against a non-email.
        validate_login_email(username)
    user_profile = authenticate(
        request=request, username=username, password=password, realm=realm, return_data=return_data
    )
    if user_profile is None:
        raise get_api_key_fetch_authenticate_failure(return_data)

    assert isinstance(user_profile, UserProfile)

    api_key = process_api_key_fetch_authenticate_result(request, user_profile)

    return json_success(
        request,
        data={"api_key": api_key, "email": user_profile.delivery_email, "user_id": user_profile.id},
    )
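
# A minimal usage sketch, assuming the usual /api/v1/fetch_api_key URL route
# for this view (the route itself is defined elsewhere, so treat the path as
# an assumption):
#
#     curl -sSX POST https://zulip.example.com/api/v1/fetch_api_key \
#         --data-urlencode username=iago@example.com \
#         --data-urlencode password=secret
#
# On success, the JSON payload contains "api_key", "email", and "user_id",
# as assembled above.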


def get_auth_backends_data(request: HttpRequest) -> Dict[str, Any]:
    """Returns which authentication methods are enabled on the server"""
    subdomain = get_subdomain(request)
    try:
        realm = Realm.objects.get(string_id=subdomain)
    except Realm.DoesNotExist:
        # If not the root subdomain, this is an error.
        if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            raise JsonableError(_("Invalid subdomain"))
        # With the root subdomain, whether this is an error depends on
        # ROOT_DOMAIN_LANDING_PAGE (which indicates whether there are
        # some realms without subdomains on this server).
        if settings.ROOT_DOMAIN_LANDING_PAGE:
            raise JsonableError(_("Subdomain required"))
        else:
            realm = None
    result = {
        "password": password_auth_enabled(realm),
    }
    for auth_backend_name in AUTH_BACKEND_NAME_MAP:
        key = auth_backend_name.lower()
        result[key] = auth_enabled_helper([auth_backend_name], realm)
    return result
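
# The returned dict maps "password" plus each lowercased backend name from
# AUTH_BACKEND_NAME_MAP to a boolean. A sketch of a possible result; the
# specific backend names are illustrative assumptions:
#
#     {"password": True, "dev": False, "email": True, "github": True, "ldap": False, ...}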


def check_server_incompatibility(request: HttpRequest) -> bool:
    user_agent = parse_user_agent(request.headers.get("User-Agent", "Missing User-Agent"))
    return user_agent["name"] == "ZulipInvalid"


@require_safe
@csrf_exempt
def api_get_server_settings(request: HttpRequest) -> HttpResponse:
    # Log which client is making this request.
    process_client(request)
    result = dict(
        authentication_methods=get_auth_backends_data(request),
        zulip_version=ZULIP_VERSION,
        zulip_merge_base=ZULIP_MERGE_BASE,
        zulip_feature_level=API_FEATURE_LEVEL,
        push_notifications_enabled=push_notifications_enabled(),
        is_incompatible=check_server_incompatibility(request),
    )
    context = zulip_default_context(request)
    context.update(login_context(request))
    # IMPORTANT NOTE:
    # realm_name, realm_icon, etc. are not guaranteed to appear in the response.
    # * If they do, that means the server URL has only one realm on it.
    # * If they don't, the server has multiple realms, and it's not clear which
    #   is the requested realm, so we can't send back these data.
    for settings_item in [
        "email_auth_enabled",
        "require_email_format_usernames",
        "realm_uri",
        "realm_name",
        "realm_icon",
        "realm_description",
        "realm_web_public_access_enabled",
        "external_authentication_methods",
    ]:
        if context[settings_item] is not None:
            result[settings_item] = context[settings_item]
    return json_success(request, data=result)


@has_request_variables
def json_fetch_api_key(
    request: HttpRequest, user_profile: UserProfile, password: str = REQ(default="")
) -> HttpResponse:
    realm = get_realm_from_request(request)
    if realm is None:
        raise JsonableError(_("Invalid subdomain"))
    if password_auth_enabled(user_profile.realm) and not authenticate(
        request=request, username=user_profile.delivery_email, password=password, realm=realm
    ):
        raise JsonableError(_("Password is incorrect."))

    api_key = get_api_key(user_profile)
    return json_success(request, data={"api_key": api_key, "email": user_profile.delivery_email})


logout_then_login = require_post(django_logout_then_login)


def should_do_saml_sp_initiated_logout(request: HttpRequest) -> bool:
    realm = RequestNotes.get_notes(request).realm
    assert realm is not None

    if not request.user.is_authenticated:
        return False

    if not saml_auth_enabled(realm):
        return False

    idp_name = SAMLSPInitiatedLogout.get_logged_in_user_idp(request)
    if idp_name is None:
        # This session wasn't authenticated via SAML, so proceed with the normal logout process.
        return False

    return settings.SOCIAL_AUTH_SAML_ENABLED_IDPS[idp_name].get(
        "sp_initiated_logout_enabled", False
    )
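
# SP-initiated logout only happens when the user's IdP opts in via its entry in
# SOCIAL_AUTH_SAML_ENABLED_IDPS. A minimal sketch of such an entry; every key
# other than sp_initiated_logout_enabled is an assumed placeholder:
#
#     SOCIAL_AUTH_SAML_ENABLED_IDPS = {
#         "example_idp": {
#             # ... usual IdP configuration ...
#             "sp_initiated_logout_enabled": True,
#         },
#     }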


@require_post
def logout_view(request: HttpRequest, /, **kwargs: Any) -> HttpResponse:
    if not should_do_saml_sp_initiated_logout(request):
        return logout_then_login(request, **kwargs)

    # This will first redirect to the IdP with a LogoutRequest; if it succeeds on the IdP side,
    # the user will be redirected to our SAMLResponse-handling endpoint with a success LogoutResponse,
    # where we will finally terminate their session.
    result = SAMLSPInitiatedLogout.slo_request_to_idp(request, return_to=None)

    return result


def password_reset(request: HttpRequest) -> HttpResponse:
    if is_subdomain_root_or_alias(request) and settings.ROOT_DOMAIN_LANDING_PAGE:
        redirect_url = append_url_query_string(
            reverse("realm_redirect"), urlencode({"next": reverse("password_reset")})
        )
        return HttpResponseRedirect(redirect_url)

    try:
        response = DjangoPasswordResetView.as_view(
            template_name="zerver/reset.html",
            form_class=ZulipPasswordResetForm,
            success_url="/accounts/password/reset/done/",
        )(request)
    except RateLimitedError as e:
        assert e.secs_to_freedom is not None
        return render(
            request,
            "zerver/rate_limit_exceeded.html",
            context={"retry_after": int(e.secs_to_freedom)},
            status=429,
        )
    assert isinstance(response, HttpResponse)
    return response


@csrf_exempt
def saml_sp_metadata(request: HttpRequest) -> HttpResponse:  # nocoverage
    """
    This is the view function for generating our SP metadata
    for SAML authentication. It's meant for helping check the correctness
    of the configuration when setting up SAML, or for obtaining the XML metadata
    if the IdP requires it.
    Taken from https://python-social-auth.readthedocs.io/en/latest/backends/saml.html
    """
    if not saml_auth_enabled():
        return config_error(request, "saml")

    complete_url = reverse("social:complete", args=("saml",))
    saml_backend = load_backend(load_strategy(request), "saml", complete_url)
    metadata, errors = saml_backend.generate_metadata_xml()
    if not errors:
        return HttpResponse(content=metadata, content_type="text/xml")

    return HttpResponseServerError(content=", ".join(errors))