import logging
import secrets
import urllib
from functools import wraps
from typing import Any, Dict, List, Mapping, Optional, cast

import jwt
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.views import LoginView as DjangoLoginView
from django.contrib.auth.views import PasswordResetView as DjangoPasswordResetView
from django.contrib.auth.views import logout_then_login as django_logout_then_login
from django.forms import Form
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, HttpResponseServerError
from django.shortcuts import redirect, render
from django.template.response import SimpleTemplateResponse
from django.urls import reverse
from django.utils.http import is_safe_url
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_safe
from social_django.utils import load_backend, load_strategy
from two_factor.forms import BackupTokenForm
from two_factor.views import LoginView as BaseTwoFactorLoginView

from confirmation.models import Confirmation, create_confirmation_link
from version import API_FEATURE_LEVEL, ZULIP_VERSION
from zerver.context_processors import get_realm_from_request, login_context, zulip_default_context
from zerver.decorator import do_login, log_view_func, process_client, require_post
from zerver.forms import (
    DEACTIVATED_ACCOUNT_ERROR,
    AuthenticationTokenForm,
    HomepageForm,
    OurAuthenticationForm,
    ZulipPasswordResetForm,
)
from zerver.lib.mobile_auth_otp import otp_encrypt_api_key
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.pysa import mark_sanitized
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.request import REQ, JsonableError, has_request_variables
from zerver.lib.response import json_error, json_success
from zerver.lib.sessions import set_expirable_session_var
from zerver.lib.subdomains import get_subdomain, is_subdomain_root_or_alias
from zerver.lib.types import ViewFuncT
from zerver.lib.url_encoding import add_query_to_redirect_url
from zerver.lib.user_agent import parse_user_agent
from zerver.lib.users import get_api_key
from zerver.lib.utils import has_api_key_format
from zerver.lib.validator import validate_login_email
from zerver.models import (
    PreregistrationUser,
    Realm,
    UserProfile,
    filter_to_valid_prereg_users,
    get_realm,
    remote_user_to_email,
)
from zerver.signals import email_on_new_login
from zproject.backends import (
    AUTH_BACKEND_NAME_MAP,
    AppleAuthBackend,
    ExternalAuthDataDict,
    ExternalAuthResult,
    SAMLAuthBackend,
    ZulipLDAPAuthBackend,
    ZulipLDAPConfigurationError,
    ZulipRemoteUserBackend,
    auth_enabled_helper,
    dev_auth_enabled,
    ldap_auth_enabled,
    password_auth_enabled,
    saml_auth_enabled,
    validate_otp_params,
)

ExtraContext = Optional[Dict[str, Any]]


def get_safe_redirect_to(url: str, redirect_host: str) -> str:
    is_url_safe = is_safe_url(url=url, allowed_hosts=None)
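    # is_safe_url() with allowed_hosts=None rejects URLs pointing at any
    # explicit external host, which is what protects against open redirects here.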
|
2018-03-12 12:25:50 +01:00
|
|
|
if is_url_safe:
|
2019-12-20 00:00:45 +01:00
|
|
|
# Mark as safe to prevent Pysa from surfacing false positives for
|
|
|
|
# open redirects. In this branch, we have already checked that the URL
|
|
|
|
# points to the specified 'redirect_host', or is relative.
|
|
|
|
return urllib.parse.urljoin(redirect_host, mark_sanitized(url))
|
2018-03-12 12:25:50 +01:00
|
|
|
else:
|
|
|
|
return redirect_host


def create_preregistration_user(
    email: str,
    request: HttpRequest,
    realm_creation: bool = False,
    password_required: bool = True,
    full_name: Optional[str] = None,
    full_name_validated: bool = False,
) -> HttpResponse:
    realm = None
    if not realm_creation:
        try:
            realm = get_realm(get_subdomain(request))
        except Realm.DoesNotExist:
            pass
    return PreregistrationUser.objects.create(
        email=email,
        realm_creation=realm_creation,
        password_required=password_required,
        realm=realm,
        full_name=full_name,
        full_name_validated=full_name_validated,
    )


def maybe_send_to_registration(
    request: HttpRequest,
    email: str,
    full_name: str = "",
    mobile_flow_otp: Optional[str] = None,
    desktop_flow_otp: Optional[str] = None,
    is_signup: bool = False,
    password_required: bool = True,
    multiuse_object_key: str = "",
    full_name_validated: bool = False,
) -> HttpResponse:
    """Given a successful authentication for an email address (i.e. we've
    confirmed the user controls the email address) that does not
    currently have a Zulip account in the target realm, send them to
    the registration flow or the "continue to registration" flow,
    depending on is_signup, whether the email address can join the
    organization (checked in HomepageForm), and similar details.
    """

    # In the desktop and mobile registration flows, the sign up
    # happens in the browser so the user can use their
    # already-logged-in social accounts. Then at the end, with the
    # user account created, we pass the appropriate data to the app
    # via e.g. a `zulip://` redirect. We store the OTP keys for the
    # mobile/desktop flow in the session with 1-hour expiry, because
    # we want this configuration of having a successful authentication
    # result in being logged into the app to persist if the user makes
    # mistakes while trying to authenticate (E.g. clicks the wrong
    # Google account, hits back, etc.) during a given browser session,
    # rather than just logging into the webapp in the target browser.
    #
    # We can't use our usual pre-account-creation state storage
    # approach of putting something in PreregistrationUser, because
    # that would apply to future registration attempts on other
    # devices, e.g. just creating an account on the web on their laptop.
    assert not (mobile_flow_otp and desktop_flow_otp)
    if mobile_flow_otp:
        set_expirable_session_var(
            request.session, "registration_mobile_flow_otp", mobile_flow_otp, expiry_seconds=3600
        )
    elif desktop_flow_otp:
        set_expirable_session_var(
            request.session, "registration_desktop_flow_otp", desktop_flow_otp, expiry_seconds=3600
        )

    if multiuse_object_key:
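        # The Confirmation behind a multiuse invite link points at an invite
        # object whose realm, streams, and invited_as role get applied to the
        # PreregistrationUser created/updated below.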
        from_multiuse_invite = True
        multiuse_obj = Confirmation.objects.get(confirmation_key=multiuse_object_key).content_object
        realm = multiuse_obj.realm
        invited_as = multiuse_obj.invited_as
    else:
        from_multiuse_invite = False
        multiuse_obj = None
        try:
            realm = get_realm(get_subdomain(request))
        except Realm.DoesNotExist:
            realm = None
        invited_as = PreregistrationUser.INVITE_AS["MEMBER"]

    form = HomepageForm({"email": email}, realm=realm, from_multiuse_invite=from_multiuse_invite)
    if form.is_valid():
        # If the email address is allowed to sign up for an account in
        # this organization, construct a PreregistrationUser and
        # Confirmation objects, and then send the user to account
        # creation or confirm-continue-registration depending on
        # is_signup.
        try:
            prereg_user = filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(email__iexact=email, realm=realm)
            ).latest("invited_at")

            # The password_required and full_name data passed here as arguments should take
            # precedence over the defaults with which the existing PreregistrationUser that
            # we've just fetched was created.
            prereg_user.password_required = password_required
            update_fields = ["password_required"]
            if full_name:
                prereg_user.full_name = full_name
                prereg_user.full_name_validated = full_name_validated
                update_fields.extend(["full_name", "full_name_validated"])
            prereg_user.save(update_fields=update_fields)
        except PreregistrationUser.DoesNotExist:
            prereg_user = create_preregistration_user(
                email,
                request,
                password_required=password_required,
                full_name=full_name,
                full_name_validated=full_name_validated,
            )

        if multiuse_obj is not None:
            request.session.modified = True
            streams_to_subscribe = list(multiuse_obj.streams.all())
            prereg_user.streams.set(streams_to_subscribe)
            prereg_user.invited_as = invited_as
            prereg_user.save()

        confirmation_link = create_confirmation_link(prereg_user, Confirmation.USER_REGISTRATION)
        if is_signup:
            return redirect(confirmation_link)

        context = {"email": email, "continue_link": confirmation_link, "full_name": full_name}
        return render(request, "zerver/confirm_continue_registration.html", context=context)
|
2019-03-10 02:43:29 +01:00
|
|
|
|
|
|
|
# This email address it not allowed to join this organization, so
|
|
|
|
# just send the user back to the registration page.
|
2021-02-12 08:20:45 +01:00
|
|
|
url = reverse("register")
|
2019-03-20 13:13:44 +01:00
|
|
|
context = login_context(request)
    extra_context: Mapping[str, Any] = {
        "form": form,
        "current_url": lambda: url,
        "from_multiuse_invite": from_multiuse_invite,
        "multiuse_object_key": multiuse_object_key,
        "mobile_flow_otp": mobile_flow_otp,
        "desktop_flow_otp": desktop_flow_otp,
    }
    context.update(extra_context)
    return render(request, "zerver/accounts_home.html", context=context)
|
2016-10-12 04:50:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-02-23 18:58:08 +01:00
|
|
|
def register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
|
2020-01-18 13:47:08 +01:00
|
|
|
# We have verified the user controls an email address, but
|
|
|
|
# there's no associated Zulip user account. Consider sending
|
|
|
|
# the request to registration.
|
2020-06-23 00:39:19 +02:00
|
|
|
kwargs: Dict[str, Any] = dict(result.data_dict)
|
2020-02-23 18:58:08 +01:00
|
|
|
# maybe_send_to_registration doesn't take these arguments, so delete them.
|
2021-02-12 08:20:45 +01:00
|
|
|
kwargs.pop("subdomain", None)
|
|
|
|
kwargs.pop("redirect_to", None)
|
|
|
|
kwargs.pop("is_realm_creation", None)
|
2020-02-23 18:58:08 +01:00
|
|
|
|
|
|
|
kwargs["password_required"] = False
|
|
|
|
return maybe_send_to_registration(request, **kwargs)


def login_or_register_remote_user(request: HttpRequest, result: ExternalAuthResult) -> HttpResponse:
    """Given a successful authentication showing the user controls the given
    email address (email) and potentially a UserProfile
    object (if the user already has a Zulip account), redirect the
    browser to the appropriate place:

    * The logged-in app if the user already has a Zulip account and is
      trying to log in, potentially to an initial narrow or page that had been
      saved in the `redirect_to` parameter.
    * The registration form if is_signup was set (i.e. the user is
      trying to create a Zulip account).
    * A special `confirm_continue_registration.html` "do you want to
      register or try another account" page if the user doesn't have a
      Zulip account but is_signup is False (i.e. the user tried to log in
      and then did social authentication selecting an email address that does
      not have a Zulip account in this organization).
    * A zulip:// URL to send control back to the mobile or desktop apps if they
      are doing authentication using the mobile_flow_otp or desktop_flow_otp flow.
    """
    user_profile = result.user_profile
    if user_profile is None or user_profile.is_mirror_dummy:
        return register_remote_user(request, result)
    # Otherwise, the user has successfully authenticated to an
    # account, and we need to do the right thing depending on whether
    # they're using the mobile OTP flow or want a browser session.
    is_realm_creation = result.data_dict.get("is_realm_creation")
    mobile_flow_otp = result.data_dict.get("mobile_flow_otp")
    desktop_flow_otp = result.data_dict.get("desktop_flow_otp")
    if mobile_flow_otp is not None:
        return finish_mobile_flow(request, user_profile, mobile_flow_otp)
    elif desktop_flow_otp is not None:
        return finish_desktop_flow(request, user_profile, desktop_flow_otp)

    do_login(request, user_profile)

    redirect_to = result.data_dict.get("redirect_to", "")
    if is_realm_creation is not None and settings.FREE_TRIAL_DAYS not in [None, 0]:
        redirect_to = "{}?onboarding=true".format(reverse("initial_upgrade"))

    redirect_to = get_safe_redirect_to(redirect_to, user_profile.realm.uri)
    return HttpResponseRedirect(redirect_to)


def finish_desktop_flow(request: HttpRequest, user_profile: UserProfile, otp: str) -> HttpResponse:
    """
    The desktop otp flow returns to the app (through the clipboard)
    a token that allows obtaining (through log_into_subdomain) a logged in session
    for the user account we authenticated in this flow.
    The token can only be used once and within ExternalAuthResult.LOGIN_KEY_EXPIRATION_SECONDS
    of being created, as nothing more powerful is needed for the desktop flow
    and this ensures the key can only be used for completing this authentication attempt.
    """
    result = ExternalAuthResult(user_profile=user_profile)
    token = result.store_data()
    key = bytes.fromhex(otp)
    iv = secrets.token_bytes(12)
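    # The otp provided by the desktop app is a hex-encoded AES key; we encrypt
    # the login token under it with AES-GCM using a fresh 12-byte nonce, and
    # prepend the nonce so the app can decrypt the payload.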
    desktop_data = (iv + AESGCM(key).encrypt(iv, token.encode(), b"")).hex()
    context = {
        "desktop_data": desktop_data,
        "browser_url": reverse("login_page", kwargs={"template_name": "zerver/login.html"}),
        "realm_icon_url": realm_icon_url(user_profile.realm),
    }
    return render(request, "zerver/desktop_redirect.html", context=context)


def finish_mobile_flow(request: HttpRequest, user_profile: UserProfile, otp: str) -> HttpResponse:
    # For the mobile OAuth flow, we send the API key and other
    # necessary details in a redirect to a zulip:// URI scheme.
    api_key = get_api_key(user_profile)
    response = create_response_for_otp_flow(
        api_key, otp, user_profile, encrypted_key_field_name="otp_encrypted_api_key"
    )

    # Since we are returning an API key instead of going through
    # the Django login() function (which creates a browser
    # session, etc.), the "new login" signal handler (which
    # triggers an email notification for new logins) will not run
    # automatically. So we call it manually here.
    #
    # Arguably, sending a fake 'user_logged_in' signal would be a better approach:
    # user_logged_in.send(sender=user_profile.__class__, request=request, user=user_profile)
    email_on_new_login(sender=user_profile.__class__, request=request, user=user_profile)

    # Mark this request as having a logged-in user for our server logs.
    process_client(request, user_profile)
    request._requestor_for_logs = user_profile.format_requestor_for_logs()

    return response


def create_response_for_otp_flow(
    key: str, otp: str, user_profile: UserProfile, encrypted_key_field_name: str
) -> HttpResponse:
    realm_uri = user_profile.realm.uri

    # Check if the mobile URI is overridden in settings; if so, replace it.
    # This block should only apply to the mobile flow, so if we add others, this
    # needs to be conditional.
    if realm_uri in settings.REALM_MOBILE_REMAP_URIS:
        realm_uri = settings.REALM_MOBILE_REMAP_URIS[realm_uri]
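
    # The API key is encrypted with the one-time pad (the otp the app supplied
    # when starting the flow), so only the requesting app instance can recover it.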
    params = {
        encrypted_key_field_name: otp_encrypt_api_key(key, otp),
        "email": user_profile.delivery_email,
        "realm": realm_uri,
    }
    # We can't use HttpResponseRedirect, since it only allows HTTP(S) URLs
    response = HttpResponse(status=302)
    response["Location"] = add_query_to_redirect_url(
        "zulip://login", urllib.parse.urlencode(params)
    )

    return response


@log_view_func
@has_request_variables
def remote_user_sso(
    request: HttpRequest,
    mobile_flow_otp: Optional[str] = REQ(default=None),
    desktop_flow_otp: Optional[str] = REQ(default=None),
    next: str = REQ(default="/"),
) -> HttpResponse:
    subdomain = get_subdomain(request)
    try:
        realm: Optional[Realm] = get_realm(subdomain)
    except Realm.DoesNotExist:
        realm = None

    if not auth_enabled_helper([ZulipRemoteUserBackend.auth_backend_name], realm):
        return config_error(request, "remote_user_backend_disabled")

    try:
        remote_user = request.META["REMOTE_USER"]
    except KeyError:
        return config_error(request, "remote_user_header_missing")

    # Django invokes authenticate methods by matching arguments, so this
    # authentication flow will not invoke LDAP authentication; there is no
    # need to check whether the LDAP backend is enabled.
    validate_login_email(remote_user_to_email(remote_user))

    # Here we support the mobile and desktop flow for REMOTE_USER_BACKEND; we
    # validate the data format and then pass it through to
    # login_or_register_remote_user if appropriate.
    validate_otp_params(mobile_flow_otp, desktop_flow_otp)

    if realm is None:
        user_profile = None
    else:
        user_profile = authenticate(remote_user=remote_user, realm=realm)

    email = remote_user_to_email(remote_user)
    data_dict = ExternalAuthDataDict(
        email=email,
        mobile_flow_otp=mobile_flow_otp,
        desktop_flow_otp=desktop_flow_otp,
        redirect_to=next,
    )
    if realm:
        data_dict["subdomain"] = realm.subdomain
    else:
        data_dict["subdomain"] = ""  # realm creation happens on root subdomain
    result = ExternalAuthResult(user_profile=user_profile, data_dict=data_dict)
    return login_or_register_remote_user(request, result)


@csrf_exempt
@log_view_func
def remote_user_jwt(request: HttpRequest) -> HttpResponse:
    subdomain = get_subdomain(request)
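    # settings.JWT_AUTH_KEYS maps a realm subdomain to the shared secret ("key")
    # and the accepted signing algorithms used to verify tokens for that realm.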
    try:
        key = settings.JWT_AUTH_KEYS[subdomain]["key"]
        algorithms = settings.JWT_AUTH_KEYS[subdomain]["algorithms"]
    except KeyError:
        raise JsonableError(_("Auth key for this subdomain not found."))

    try:
        json_web_token = request.POST["json_web_token"]
        options = {"verify_signature": True}
        payload = jwt.decode(json_web_token, key, algorithms=algorithms, options=options)
    except KeyError:
        raise JsonableError(_("No JSON web token passed in request"))
    except jwt.InvalidTokenError:
        raise JsonableError(_("Bad JSON web token"))
    remote_user = payload.get("user", None)
    if remote_user is None:
        raise JsonableError(_("No user specified in JSON web token claims"))
    email_domain = payload.get("realm", None)
    if email_domain is None:
        raise JsonableError(_("No organization specified in JSON web token claims"))

    email = f"{remote_user}@{email_domain}"

    try:
        realm = get_realm(subdomain)
    except Realm.DoesNotExist:
        raise JsonableError(_("Wrong subdomain"))

    user_profile = authenticate(username=email, realm=realm, use_dummy_backend=True)
    if user_profile is None:
        result = ExternalAuthResult(
            data_dict={"email": email, "full_name": remote_user, "subdomain": realm.subdomain}
        )
    else:
        result = ExternalAuthResult(user_profile=user_profile)

    return login_or_register_remote_user(request, result)


@has_request_variables
def oauth_redirect_to_root(
    request: HttpRequest,
    url: str,
    sso_type: str,
    is_signup: bool = False,
    extra_url_params: Dict[str, str] = {},
    next: Optional[str] = REQ(default=None),
) -> HttpResponse:
    main_site_uri = settings.ROOT_DOMAIN_URI + url
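    # When SOCIAL_AUTH_SUBDOMAIN is configured, the social auth flow runs on that
    # dedicated subdomain (e.g. auth.<EXTERNAL_HOST>) rather than on the root domain.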
|
2021-02-12 08:20:45 +01:00
|
|
|
if settings.SOCIAL_AUTH_SUBDOMAIN is not None and sso_type == "social":
|
2021-02-12 08:19:30 +01:00
|
|
|
main_site_uri = (
|
|
|
|
settings.EXTERNAL_URI_SCHEME
|
|
|
|
+ settings.SOCIAL_AUTH_SUBDOMAIN
|
|
|
|
+ "."
|
|
|
|
+ settings.EXTERNAL_HOST
|
|
|
|
) + url

    params = {
        "subdomain": get_subdomain(request),
        "is_signup": "1" if is_signup else "0",
    }

    params["multiuse_object_key"] = request.GET.get("multiuse_object_key", "")

    # mobile_flow_otp is a one-time pad provided by the app that we
    # can use to encrypt the API key when passing back to the app.
    mobile_flow_otp = request.GET.get("mobile_flow_otp")
    desktop_flow_otp = request.GET.get("desktop_flow_otp")

    validate_otp_params(mobile_flow_otp, desktop_flow_otp)
    if mobile_flow_otp is not None:
        params["mobile_flow_otp"] = mobile_flow_otp
    if desktop_flow_otp is not None:
        params["desktop_flow_otp"] = desktop_flow_otp

    if next:
        params["next"] = next

    params = {**params, **extra_url_params}

    return redirect(add_query_to_redirect_url(main_site_uri, urllib.parse.urlencode(params)))


def handle_desktop_flow(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        user_agent = parse_user_agent(request.META.get("HTTP_USER_AGENT", "Missing User-Agent"))
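        # The Zulip desktop app identifies itself as "ZulipElectron"; show it
        # the dedicated desktop login page instead of running the normal view.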
        if user_agent["name"] == "ZulipElectron":
            return render(request, "zerver/desktop_login.html")

        return func(request, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


@handle_desktop_flow
def start_remote_user_sso(request: HttpRequest) -> HttpResponse:
    """
    The purpose of this endpoint is to provide an initial step in the flow
    on which we can handle the special behavior for the desktop app.
    /accounts/login/sso may have Apache intercepting requests to it
    to do authentication, so we need this additional endpoint.
    """
    query = request.META["QUERY_STRING"]
    return redirect(add_query_to_redirect_url(reverse(remote_user_sso), query))


@handle_desktop_flow
def start_social_login(
    request: HttpRequest,
    backend: str,
    extra_arg: Optional[str] = None,
) -> HttpResponse:
    backend_url = reverse("social:begin", args=[backend])
    extra_url_params: Dict[str, str] = {}
    if backend == "saml":
        if not SAMLAuthBackend.check_config():
            return config_error(request, "saml")

        # This backend requires the name of the IdP (from the list of configured ones)
        # to be passed as the parameter.
        if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
            logging.info(
                "Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
            )
            return config_error(request, "saml")
        extra_url_params = {"idp": extra_arg}

    if backend == "apple" and not AppleAuthBackend.check_config():
        return config_error(request, "apple")

    # TODO: Add AzureAD also.
    if backend in ["github", "google", "gitlab"]:
        key_setting = "SOCIAL_AUTH_" + backend.upper() + "_KEY"
        secret_setting = "SOCIAL_AUTH_" + backend.upper() + "_SECRET"
        if not (getattr(settings, key_setting) and getattr(settings, secret_setting)):
            return config_error(request, backend)

    return oauth_redirect_to_root(request, backend_url, "social", extra_url_params=extra_url_params)


@handle_desktop_flow
def start_social_signup(
    request: HttpRequest,
    backend: str,
    extra_arg: Optional[str] = None,
) -> HttpResponse:
    backend_url = reverse("social:begin", args=[backend])
    extra_url_params: Dict[str, str] = {}
    if backend == "saml":
        if not SAMLAuthBackend.check_config():
            return config_error(request, "saml")

        if not extra_arg or extra_arg not in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS:
            logging.info(
                "Attempted to initiate SAML authentication with wrong idp argument: %s", extra_arg
            )
            return config_error(request, "saml")
        extra_url_params = {"idp": extra_arg}
    return oauth_redirect_to_root(
        request, backend_url, "social", is_signup=True, extra_url_params=extra_url_params
    )


_subdomain_token_salt = "zerver.views.auth.log_into_subdomain"


@log_view_func
def log_into_subdomain(request: HttpRequest, token: str) -> HttpResponse:
    """Given a valid authentication token (generated by
    redirect_and_log_into_subdomain called on auth.zulip.example.com),
    call login_or_register_remote_user, passing all the authentication
    result data that has been stored in Redis, associated with this token.
    """
    # The tokens are intended to have the same format as API keys.
    if not has_api_key_format(token):
        logging.warning("log_into_subdomain: Malformed token given: %s", token)
        return HttpResponse(status=400)

    try:
        result = ExternalAuthResult(login_token=token)
    except ExternalAuthResult.InvalidTokenError:
        logging.warning("log_into_subdomain: Invalid token given: %s", token)
        return render(request, "zerver/log_into_subdomain_token_invalid.html", status=400)

    subdomain = get_subdomain(request)
    if result.data_dict["subdomain"] != subdomain:
        raise JsonableError(_("Invalid subdomain"))

    return login_or_register_remote_user(request, result)


def redirect_and_log_into_subdomain(result: ExternalAuthResult) -> HttpResponse:
    token = result.store_data()
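    # store_data() stashes the authentication result (in Redis, as described in
    # log_into_subdomain above) under a short-lived token, which we embed in the
    # URL we redirect the browser to on the realm's own subdomain.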
    realm = get_realm(result.data_dict["subdomain"])
    subdomain_login_uri = realm.uri + reverse(log_into_subdomain, args=[token])
    return redirect(subdomain_login_uri)


def get_dev_users(realm: Optional[Realm] = None, extra_users_count: int = 10) -> List[UserProfile]:
    # Development environments usually have only a few users, but
    # it still makes sense to limit how many extra users we render to
    # support performance testing with DevAuthBackend.
    if realm is not None:
        users_query = UserProfile.objects.select_related().filter(
            is_bot=False, is_active=True, realm=realm
        )
    else:
        users_query = UserProfile.objects.select_related().filter(is_bot=False, is_active=True)

    shakespearian_users = users_query.exclude(email__startswith="extrauser").order_by("email")
    extra_users = users_query.filter(email__startswith="extrauser").order_by("email")
    # Limit the number of extra users we offer by default
    extra_users = extra_users[0:extra_users_count]
    users = list(shakespearian_users) + list(extra_users)
    return users


def redirect_to_misconfigured_ldap_notice(request: HttpRequest, error_type: int) -> HttpResponse:
    if error_type == ZulipLDAPAuthBackend.REALM_IS_NONE_ERROR:
        return config_error(request, "ldap")
    else:
        raise AssertionError("Invalid error type")


def show_deactivation_notice(request: HttpRequest) -> HttpResponse:
    realm = get_realm_from_request(request)
    if realm and realm.deactivated:
        context = {"deactivated_domain_name": realm.name}
        if realm.deactivated_redirect is not None:
            context["deactivated_redirect"] = realm.deactivated_redirect
        return render(request, "zerver/deactivated.html", context=context)

    return HttpResponseRedirect(reverse("login_page"))


def redirect_to_deactivation_notice() -> HttpResponse:
    return HttpResponseRedirect(reverse(show_deactivation_notice))


def add_dev_login_context(realm: Optional[Realm], context: Dict[str, Any]) -> None:
    users = get_dev_users(realm)
    context["current_realm"] = realm
    context["all_realms"] = Realm.objects.all()

    def sort(lst: List[UserProfile]) -> List[UserProfile]:
        return sorted(lst, key=lambda u: u.delivery_email)

    context["direct_owners"] = sort([u for u in users if u.is_realm_owner])
    context["direct_admins"] = sort([u for u in users if u.is_realm_admin and not u.is_realm_owner])
    context["guest_users"] = sort([u for u in users if u.is_guest])
    context["direct_users"] = sort([u for u in users if not (u.is_realm_admin or u.is_guest)])


def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> None:
    for key in ("email", "already_registered", "is_deactivated"):
        try:
            context[key] = request.GET[key]
        except KeyError:
            pass

    context["deactivated_account_error"] = DEACTIVATED_ACCOUNT_ERROR
|
2017-12-20 07:26:29 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
class TwoFactorLoginView(BaseTwoFactorLoginView):
    extra_context: ExtraContext = None
    form_list = (
        ("auth", OurAuthenticationForm),
        ("token", AuthenticationTokenForm),
        ("backup", BackupTokenForm),
    )

    def __init__(self, extra_context: ExtraContext = None, *args: Any, **kwargs: Any) -> None:
        self.extra_context = extra_context
        super().__init__(*args, **kwargs)

    def get_context_data(self, **kwargs: Any) -> Dict[str, Any]:
        context = super().get_context_data(**kwargs)
        if self.extra_context is not None:
            context.update(self.extra_context)
        update_login_page_context(self.request, context)

        realm = get_realm_from_request(self.request)
        redirect_to = realm.uri if realm else "/"
        context["next"] = self.request.POST.get(
            "next",
            self.request.GET.get("next", redirect_to),
        )
        return context

    def done(self, form_list: List[Form], **kwargs: Any) -> HttpResponse:
        """
        Log in the user and redirect to the desired page.

        We need to override this function so that we can redirect to
        realm.uri instead of '/'.
        """
        realm_uri = self.get_user().realm.uri
        # This mock.patch business is an unpleasant hack that we'd
        # ideally like to remove by instead patching the upstream
        # module to support better configurability of the
        # LOGIN_REDIRECT_URL setting. But until then, it works. We
        # import mock.patch here because mock has an expensive import
        # process involving pbr -> pkg_resources (which is really slow).
        from unittest.mock import patch

        with patch.object(settings, "LOGIN_REDIRECT_URL", realm_uri):
            return super().done(form_list, **kwargs)


@has_request_variables
def login_page(
    request: HttpRequest,
    next: str = REQ(default="/"),
    **kwargs: Any,
) -> HttpResponse:
    # To support previewing the Zulip login pages, we have a special option
    # that disables the default behavior of redirecting logged-in users to the
    # logged-in app.
    is_preview = "preview" in request.GET
    if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
        if request.user and request.user.is_verified():
            return HttpResponseRedirect(request.user.realm.uri)
    elif request.user.is_authenticated and not is_preview:
        return HttpResponseRedirect(request.user.realm.uri)
    if is_subdomain_root_or_alias(request) and settings.ROOT_DOMAIN_LANDING_PAGE:
        redirect_url = reverse("realm_redirect")
        if request.GET:
            redirect_url = add_query_to_redirect_url(redirect_url, request.GET.urlencode())
        return HttpResponseRedirect(redirect_url)

    realm = get_realm_from_request(request)
    if realm and realm.deactivated:
        return redirect_to_deactivation_notice()

    extra_context = kwargs.pop("extra_context", {})
    extra_context["next"] = next
    if dev_auth_enabled() and kwargs.get("template_name") == "zerver/dev_login.html":
        if "new_realm" in request.POST:
            try:
                realm = get_realm(request.POST["new_realm"])
            except Realm.DoesNotExist:
                realm = None

        add_dev_login_context(realm, extra_context)
        if realm and "new_realm" in request.POST:
            # If we're switching realms, redirect to that realm, but
            # only if it actually exists.
            return HttpResponseRedirect(realm.uri)

        if "username" in request.POST:
            extra_context["email"] = request.POST["username"]
    extra_context.update(login_context(request))

    if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
        return start_two_factor_auth(request, extra_context=extra_context, **kwargs)

    try:
        template_response = DjangoLoginView.as_view(
            authentication_form=OurAuthenticationForm, extra_context=extra_context, **kwargs
        )(request)
    except ZulipLDAPConfigurationError as e:
        assert len(e.args) > 1
        return redirect_to_misconfigured_ldap_notice(request, e.args[1])

    if isinstance(template_response, SimpleTemplateResponse):
        # Only responses that are rendered using a template have a
        # context_data attribute; it doesn't exist otherwise. It is
        # added in the SimpleTemplateResponse class, which is a derived class of
        # HttpResponse. See django.template.response.SimpleTemplateResponse,
        # https://github.com/django/django/blob/master/django/template/response.py#L19.
        update_login_page_context(request, template_response.context_data)

    return template_response


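# Note on login_page above (illustrative; assumes the conventional "/login/"
# route for this view, which is configured elsewhere): appending the "preview"
# query parameter, e.g. "/login/?preview=true", lets an already-logged-in user
# view the login page instead of being redirected into the app.

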
def start_two_factor_auth(
    request: HttpRequest, extra_context: ExtraContext = None, **kwargs: Any
) -> HttpResponse:
    two_fa_form_field = "two_factor_login_view-current_step"
    if two_fa_form_field not in request.POST:
        # Here we inject the 2FA step into the request context if it's missing, to
        # force the user to go to the first step of the 2FA authentication process.
        # This seems a bit hackish but simplifies things from a testing point of
        # view. I don't think this can result in anything bad because all the
        # authentication logic runs after the auth step.
        #
        # If we don't do this, we will have to modify a lot of auth tests to
        # insert this variable in the request.
        request.POST = request.POST.copy()
        request.POST.update({two_fa_form_field: "auth"})

    """
    This is how Django implements as_view(), so extra_context will be passed
    to the __init__ method of TwoFactorLoginView.

    def as_view(cls, **initkwargs):
        def view(request, *args, **kwargs):
            self = cls(**initkwargs)
            ...

        return view
    """
    two_fa_view = TwoFactorLoginView.as_view(extra_context=extra_context, **kwargs)
    return two_fa_view(request, **kwargs)


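# Sketch of the resulting wizard interaction (illustrative only; the step names
# come from TwoFactorLoginView.form_list above, while the "/login/" path and the
# step-name field prefixes follow django-formtools/django-two-factor-auth
# conventions and are assumptions, not something this module defines):
#
#     POST /login/
#         two_factor_login_view-current_step=auth
#         auth-username=user@example.com
#         auth-password=...
#
# Once the "auth" step succeeds, the wizard advances to the "token" (OTP) step,
# and optionally the "backup" step for backup tokens.

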
@csrf_exempt
@has_request_variables
def dev_direct_login(
    request: HttpRequest,
    next: str = REQ(default="/"),
) -> HttpResponse:
    # This function allows logging in without a password and should only be called
    # in development environments. It may be called if the DevAuthBackend is included
    # in settings.AUTHENTICATION_BACKENDS.
    if (not dev_auth_enabled()) or settings.PRODUCTION:
        # This check is probably not required, since authenticate would fail without
        # an enabled DevAuthBackend.
        return config_error(request, "dev")
    email = request.POST["direct_email"]
    subdomain = get_subdomain(request)
    realm = get_realm(subdomain)
    user_profile = authenticate(dev_auth_username=email, realm=realm)
    if user_profile is None:
        return config_error(request, "dev")
    do_login(request, user_profile)

    redirect_to = get_safe_redirect_to(next, user_profile.realm.uri)
    return HttpResponseRedirect(redirect_to)


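# Usage sketch for dev_direct_login above (development environments only; the
# exact URL this view is mounted at is defined in the URL routes, so the path
# shown is an assumption): the dev login page posts a form containing a
# "direct_email" field, roughly:
#
#     curl -sS -X POST http://localhost:9991/accounts/login/local/ \
#         -d direct_email=iago@zulip.com
#
# On success the browser is redirected to the user's realm (or to `next`).

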
def check_dev_auth_backend() -> None:
    if settings.PRODUCTION:
        raise JsonableError(_("Endpoint not available in production."))
    if not dev_auth_enabled():
        raise JsonableError(_("DevAuthBackend not enabled."))


@csrf_exempt
@require_post
@has_request_variables
def api_dev_fetch_api_key(request: HttpRequest, username: str = REQ()) -> HttpResponse:
    """This function allows logging in without a password on the Zulip
    mobile apps when connecting to a Zulip development environment. It
    requires DevAuthBackend to be included in settings.AUTHENTICATION_BACKENDS.
    """
    check_dev_auth_backend()

    # Django invokes authenticate methods by matching arguments, and this
    # authentication flow will not invoke LDAP authentication because of
    # that behavior, so there is no need to check whether the LDAP backend
    # is enabled.
    validate_login_email(username)
    realm = get_realm_from_request(request)
    if realm is None:
        return json_error(_("Invalid subdomain"))
    return_data: Dict[str, bool] = {}
    user_profile = authenticate(dev_auth_username=username, realm=realm, return_data=return_data)
    if return_data.get("inactive_realm"):
        return json_error(
            _("This organization has been deactivated."),
            data={"reason": "realm deactivated"},
            status=403,
        )
    if return_data.get("inactive_user"):
        return json_error(
            _("Your account has been disabled."), data={"reason": "user disable"}, status=403
        )
    if user_profile is None:
        return json_error(
            _("This user is not registered."), data={"reason": "unregistered"}, status=403
        )
    do_login(request, user_profile)
    api_key = get_api_key(user_profile)
    return json_success({"api_key": api_key, "email": user_profile.delivery_email})


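# Usage sketch for api_dev_fetch_api_key above (development environments only;
# the path follows Zulip's documented /api/v1/dev_fetch_api_key endpoint, whose
# routing is defined elsewhere, so treat it as an assumption; the username is
# illustrative):
#
#     curl -sS -X POST https://localhost:9991/api/v1/dev_fetch_api_key \
#         -d username=iago@zulip.com
#
# A successful response carries the "api_key" and "email" fields returned by
# json_success above.

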
@csrf_exempt
def api_dev_list_users(request: HttpRequest) -> HttpResponse:
    check_dev_auth_backend()

    users = get_dev_users()
    return json_success(
        dict(
            direct_admins=[
                dict(email=u.delivery_email, realm_uri=u.realm.uri)
                for u in users
                if u.is_realm_admin
            ],
            direct_users=[
                dict(email=u.delivery_email, realm_uri=u.realm.uri)
                for u in users
                if not u.is_realm_admin
            ],
        )
    )


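# Response sketch for api_dev_list_users above (the emails and realm_uri values
# are illustrative, not fixed):
#
#     {
#         "result": "success",
#         "direct_admins": [{"email": "iago@zulip.com", "realm_uri": "http://localhost:9991"}],
#         "direct_users": [{"email": "hamlet@zulip.com", "realm_uri": "http://localhost:9991"}],
#     }
#
# The "result" key is added by json_success; the rest mirrors the dict built above.

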
@csrf_exempt
@require_post
@has_request_variables
def api_fetch_api_key(
    request: HttpRequest, username: str = REQ(), password: str = REQ()
) -> HttpResponse:
    return_data: Dict[str, bool] = {}

    realm = get_realm_from_request(request)
    if realm is None:
        return json_error(_("Invalid subdomain"))

    if not ldap_auth_enabled(realm=realm):
        # In case we don't authenticate against LDAP, check for a valid
        # email. The LDAP backend can authenticate against a non-email username.
        validate_login_email(username)
    user_profile = authenticate(
        request=request, username=username, password=password, realm=realm, return_data=return_data
    )
    if return_data.get("inactive_user"):
        return json_error(
            _("Your account has been disabled."), data={"reason": "user disable"}, status=403
        )
    if return_data.get("inactive_realm"):
        return json_error(
            _("This organization has been deactivated."),
            data={"reason": "realm deactivated"},
            status=403,
        )
    if return_data.get("password_auth_disabled"):
        return json_error(
            _("Password auth is disabled in your team."),
            data={"reason": "password auth disabled"},
            status=403,
        )
    if user_profile is None:
        return json_error(
            _("Your username or password is incorrect."),
            data={"reason": "incorrect_creds"},
            status=403,
        )

    # Maybe sending the 'user_logged_in' signal would be the better approach:
    #   user_logged_in.send(sender=user_profile.__class__, request=request, user=user_profile)
    # We're not doing this only because we don't add the user information to the
    # session here; if the signal receiver assumes that we do, that would cause
    # problems.
    email_on_new_login(sender=user_profile.__class__, request=request, user=user_profile)

    # Mark this request as having a logged-in user for our server logs.
    process_client(request, user_profile)
    request._requestor_for_logs = user_profile.format_requestor_for_logs()

    api_key = get_api_key(user_profile)
    return json_success({"api_key": api_key, "email": user_profile.delivery_email})


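# Usage sketch for api_fetch_api_key above, i.e. Zulip's documented
# /api/v1/fetch_api_key endpoint (the URL routing lives elsewhere, so the path
# is an assumption here; the credentials are illustrative):
#
#     curl -sS -X POST https://zulip.example.com/api/v1/fetch_api_key \
#         -d username=user@example.com -d password='secret'
#
# A successful response carries "api_key" and "email"; the various 403 error
# responses above include a machine-readable "reason" field.

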
def get_auth_backends_data(request: HttpRequest) -> Dict[str, Any]:
    """Returns which authentication methods are enabled on the server"""
    subdomain = get_subdomain(request)
    try:
        realm = Realm.objects.get(string_id=subdomain)
    except Realm.DoesNotExist:
        # If not the root subdomain, this is an error
        if subdomain != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            raise JsonableError(_("Invalid subdomain"))
        # With the root subdomain, it's an error or not depending on
        # whether ROOT_DOMAIN_LANDING_PAGE (which indicates whether
        # there are some realms without subdomains on this server)
        # is set.
        if settings.ROOT_DOMAIN_LANDING_PAGE:
            raise JsonableError(_("Subdomain required"))
        else:
            realm = None
    result = {
        "password": password_auth_enabled(realm),
    }
    for auth_backend_name in AUTH_BACKEND_NAME_MAP:
        key = auth_backend_name.lower()
        result[key] = auth_enabled_helper([auth_backend_name], realm)
    return result


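# Shape sketch of get_auth_backends_data's return value: "password" plus one
# lowercased key per entry in AUTH_BACKEND_NAME_MAP, which is defined elsewhere
# in this module, so the exact key set shown here is illustrative:
#
#     {"password": True, "dev": True, "email": True, "github": False, ...}

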
def check_server_incompatibility(request: HttpRequest) -> bool:
    user_agent = parse_user_agent(request.META.get("HTTP_USER_AGENT", "Missing User-Agent"))
    return user_agent["name"] == "ZulipInvalid"


@require_safe
@csrf_exempt
def api_get_server_settings(request: HttpRequest) -> HttpResponse:
    # Log which client is making this request.
    process_client(request, request.user, skip_update_user_activity=True)
    result = dict(
        authentication_methods=get_auth_backends_data(request),
        zulip_version=ZULIP_VERSION,
        zulip_feature_level=API_FEATURE_LEVEL,
        push_notifications_enabled=push_notifications_enabled(),
        is_incompatible=check_server_incompatibility(request),
    )
    context = zulip_default_context(request)
    context.update(login_context(request))
    # IMPORTANT NOTE:
    # realm_name, realm_icon, etc. are not guaranteed to appear in the response.
    # * If they do, that means the server URL has only one realm on it
    # * If they don't, the server has multiple realms, and it's not clear which is
    #   the requested realm, so we can't send back these data.
    for settings_item in [
        "email_auth_enabled",
        "require_email_format_usernames",
        "realm_uri",
        "realm_name",
        "realm_icon",
        "realm_description",
        "external_authentication_methods",
    ]:
        if context[settings_item] is not None:
            result[settings_item] = context[settings_item]
    return json_success(result)


|
|
|
|
2016-10-12 04:50:38 +02:00
|
|
|
@has_request_variables
|
2021-02-12 08:19:30 +01:00
|
|
|
def json_fetch_api_key(
|
2021-02-12 08:20:45 +01:00
|
|
|
request: HttpRequest, user_profile: UserProfile, password: str = REQ(default="")
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> HttpResponse:
|
2020-12-12 20:21:06 +01:00
|
|
|
realm = get_realm_from_request(request)
|
|
|
|
if realm is None:
|
|
|
|
return json_error(_("Invalid subdomain"))
|
2016-10-12 04:50:38 +02:00
|
|
|
if password_auth_enabled(user_profile.realm):
|
2021-02-12 08:19:30 +01:00
|
|
|
if not authenticate(
|
|
|
|
request=request, username=user_profile.delivery_email, password=password, realm=realm
|
|
|
|
):
|
2016-10-12 04:50:38 +02:00
|
|
|
return json_error(_("Your username or password is incorrect."))
|
2018-08-01 10:53:40 +02:00
|
|
|
|
|
|
|
api_key = get_api_key(user_profile)
|
2020-06-24 10:46:23 +02:00
|
|
|
return json_success({"api_key": api_key, "email": user_profile.delivery_email})
|
2016-10-12 04:50:38 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
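# json_fetch_api_key above serves the already-logged-in web app (it requires a
# session-authenticated user_profile) rather than API clients. A sketch of how
# the web client might call it, assuming the conventional /json/ URL prefix for
# such endpoints (an assumption, since the routing is defined elsewhere):
#
#     POST /json/fetch_api_key
#         password=<current password, re-entered if password auth is enabled>

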
@require_post
def logout_then_login(request: HttpRequest, **kwargs: Any) -> HttpResponse:
    return django_logout_then_login(request, kwargs)


def password_reset(request: HttpRequest) -> HttpResponse:
    view_func = DjangoPasswordResetView.as_view(
        template_name="zerver/reset.html",
        form_class=ZulipPasswordResetForm,
        success_url="/accounts/password/reset/done/",
    )
    return view_func(request)


@csrf_exempt
def saml_sp_metadata(request: HttpRequest, **kwargs: Any) -> HttpResponse:  # nocoverage
    """
    This is the view function for generating our SP metadata
    for SAML authentication. It's meant for helping check the correctness
    of the configuration when setting up SAML, or for obtaining the XML metadata
    if the IdP requires it.
    Taken from https://python-social-auth.readthedocs.io/en/latest/backends/saml.html
    """
    if not saml_auth_enabled():
        return config_error(request, "saml")

    complete_url = reverse("social:complete", args=("saml",))
    saml_backend = load_backend(load_strategy(request), "saml", complete_url)
    metadata, errors = saml_backend.generate_metadata_xml()
    if not errors:
        return HttpResponse(content=metadata, content_type="text/xml")

    return HttpResponseServerError(content=", ".join(errors))


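# Usage sketch for saml_sp_metadata above (the URL this view is mounted at is
# configured in the URL routes, not here, so the path shown is an assumption):
#
#     curl -sS https://zulip.example.com/saml/metadata.xml
#
# A correctly configured server returns the SP metadata XML; configuration
# problems surface as a 500 whose body joins the errors, per the code above.

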
def config_error(request: HttpRequest, error_category_name: str) -> HttpResponse:
    contexts = {
        "apple": {"social_backend_name": "apple", "has_markdown_file": True},
        "google": {"social_backend_name": "google", "has_markdown_file": True},
        "github": {"social_backend_name": "github", "has_markdown_file": True},
        "gitlab": {"social_backend_name": "gitlab", "has_markdown_file": True},
        "ldap": {"error_name": "ldap_error_realm_is_none"},
        "dev": {"error_name": "dev_not_supported_error"},
        "saml": {"social_backend_name": "saml"},
        "smtp": {"error_name": "smtp_error"},
        "remote_user_backend_disabled": {"error_name": "remoteuser_error_backend_disabled"},
        "remote_user_header_missing": {"error_name": "remoteuser_error_remote_user_header_missing"},
    }

    return render(request, "zerver/config_error.html", contexts[error_category_name])
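# Usage sketch for config_error above (the keys shown exist in the contexts
# dict; the call pattern mirrors how other views in this module use it):
#
#     return config_error(request, "github")   # social auth misconfiguration
#     return config_error(request, "dev")      # DevAuthBackend not available
#
# Each key selects the template context for zerver/config_error.html; passing a
# key not present in `contexts` would raise a KeyError.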