import base64
import datetime
import logging
import urllib
from functools import wraps
from io import BytesIO
from typing import Callable, Dict, Optional, Sequence, Set, Tuple, TypeVar, Union, cast, overload

import django_otp
import orjson
from circuitbreaker import CircuitBreakerError, circuit
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth import login as django_login
from django.contrib.auth.decorators import user_passes_test as django_user_passes_test
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.views import redirect_to_login
from django.http import HttpRequest, HttpResponse, HttpResponseRedirect, QueryDict
from django.http.multipartparser import MultiPartParser
from django.shortcuts import resolve_url
from django.template.response import SimpleTemplateResponse, TemplateResponse
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.views.decorators.csrf import csrf_exempt
from django_otp import user_has_device
from two_factor.utils import default_device

from zerver.lib.cache import cache_with_key
from zerver.lib.exceptions import (
    AccessDeniedError,
    AnomalousWebhookPayload,
    ErrorCode,
    InvalidAPIKeyError,
    InvalidAPIKeyFormatError,
    InvalidJSONError,
    JsonableError,
    OrganizationAdministratorRequired,
    OrganizationMemberRequired,
    OrganizationOwnerRequired,
    RateLimited,
    RealmDeactivatedError,
    RemoteServerDeactivatedError,
    UnsupportedWebhookEventType,
    UserDeactivatedError,
    WebhookError,
)
from zerver.lib.queue import queue_json_publish
from zerver.lib.rate_limiter import RateLimitedIPAddr, RateLimitedUser
from zerver.lib.request import REQ, RequestNotes, has_request_variables
from zerver.lib.response import json_method_not_allowed, json_success, json_unauthorized
from zerver.lib.subdomains import get_subdomain, user_matches_subdomain
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.types import ViewFuncT
from zerver.lib.utils import has_api_key_format, statsd
from zerver.models import Realm, UserProfile, get_client, get_user_profile_by_api_key

if settings.ZILENCER_ENABLED:
    from zilencer.models import (
        RateLimitedRemoteZulipServer,
        RemoteZulipServer,
        get_remote_server_by_uuid,
    )

rate_limiter_logger = logging.getLogger("zerver.lib.rate_limiter")

webhook_logger = logging.getLogger("zulip.zerver.webhooks")
webhook_unsupported_events_logger = logging.getLogger("zulip.zerver.webhooks.unsupported")
webhook_anomalous_payloads_logger = logging.getLogger("zulip.zerver.webhooks.anomalous")

FuncT = TypeVar("FuncT", bound=Callable[..., object])


def cachify(method: FuncT) -> FuncT:
    dct: Dict[Tuple[object, ...], object] = {}

    def cache_wrapper(*args: object) -> object:
        tup = tuple(args)
        if tup in dct:
            return dct[tup]
        result = method(*args)
        dct[tup] = result
        return result

    return cast(FuncT, cache_wrapper)  # https://github.com/python/mypy/issues/1927
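
# Example (hypothetical function name): `cachify` memoizes a callable on its
# positional arguments for the lifetime of the process, similar to an unbounded
# functools.lru_cache:
#
#     @cachify
#     def expensive_lookup(key: str) -> int:
#         ...  # runs once per distinct `key`; later calls return the cached value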


def update_user_activity(
    request: HttpRequest, user_profile: UserProfile, query: Optional[str]
) -> None:
    # update_active_status also pushes to RabbitMQ, and it seems
    # redundant to log that here as well.
    if request.META["PATH_INFO"] == "/json/users/me/presence":
        return

    request_notes = RequestNotes.get_notes(request)
    if query is not None:
        pass
    elif request_notes.query is not None:
        query = request_notes.query
    else:
        query = request.META["PATH_INFO"]

    assert request_notes.client is not None
    event = {
        "query": query,
        "user_profile_id": user_profile.id,
        "time": datetime_to_timestamp(timezone_now()),
        "client_id": request_notes.client.id,
    }
    queue_json_publish("user_activity", event, lambda event: None)
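
# For illustration only (field values invented): each published "user_activity"
# event looks like
#
#     {"query": "get_events", "user_profile_id": 42, "time": 1636000000, "client_id": 7}
#
# and is consumed by the worker listening on the "user_activity" queue.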


# Based on django.views.decorators.http.require_http_methods
def require_post(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        if request.method != "POST":
            err_method = request.method
            logging.warning(
                "Method Not Allowed (%s): %s",
                err_method,
                request.path,
                extra={"status_code": 405, "request": request},
            )
            if RequestNotes.get_notes(request).error_format == "JSON":
                return json_method_not_allowed(["POST"])
            else:
                return TemplateResponse(
                    request, "404.html", context={"status_code": 405}, status=405
                )
        return func(request, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927
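
# Example (hypothetical view): any non-POST request to a decorated view gets a
# 405 response, rendered as JSON or HTML depending on the request's error format:
#
#     @require_post
#     def accept_invitation(request: HttpRequest) -> HttpResponse:
#         ...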


def require_realm_owner(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if not user_profile.is_realm_owner:
            raise OrganizationOwnerRequired()
        return func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


def require_realm_admin(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if not user_profile.is_realm_admin:
            raise OrganizationAdministratorRequired()
        return func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927
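
# Example (hypothetical view): these role decorators wrap views that already
# receive a UserProfile from an authentication decorator, and convert an
# insufficient role into a JSON error, e.g.
#
#     @require_realm_admin
#     def update_realm_settings(
#         request: HttpRequest, user_profile: UserProfile
#     ) -> HttpResponse:
#         ...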


def require_organization_member(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if user_profile.role > UserProfile.ROLE_MEMBER:
            raise OrganizationMemberRequired()
        return func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


def require_billing_access(func: ViewFuncT) -> ViewFuncT:
    @wraps(func)
    def wrapper(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if not user_profile.has_billing_access:
            raise JsonableError(_("Must be a billing administrator or an organization owner"))
        return func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, wrapper)  # https://github.com/python/mypy/issues/1927


def process_client(
    request: HttpRequest,
    user: Union[UserProfile, AnonymousUser],
    *,
    is_browser_view: bool = False,
    client_name: Optional[str] = None,
    skip_update_user_activity: bool = False,
    query: Optional[str] = None,
) -> None:
    request_notes = RequestNotes.get_notes(request)
    if client_name is None:
        client_name = request_notes.client_name

    assert client_name is not None

    # We could check for a browser's name being "Mozilla", but
    # e.g. Opera and MobileSafari don't set that, and it seems
    # more robust to just key off whether it was a browser view
    if is_browser_view and not client_name.startswith("Zulip"):
        # Avoid changing the client string for browsers, but let
        # the Zulip desktop apps be themselves.
        client_name = "website"

    request_notes.client = get_client(client_name)
    if not skip_update_user_activity and user.is_authenticated:
        update_user_activity(request, user, query)
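
# For example, with is_browser_view=True a browser-derived client name such as
# "Mozilla" is normalized to the "website" client, while a desktop-app name like
# "ZulipElectron" (which starts with "Zulip") is kept as-is.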


class InvalidZulipServerError(JsonableError):
    code = ErrorCode.INVALID_ZULIP_SERVER
    data_fields = ["role"]

    def __init__(self, role: str) -> None:
        self.role: str = role

    @staticmethod
    def msg_format() -> str:
        return "Zulip server auth failure: {role} is not registered -- did you run `manage.py register_server`?"


class InvalidZulipServerKeyError(InvalidZulipServerError):
    @staticmethod
    def msg_format() -> str:
        return "Zulip server auth failure: key does not match role {role}"


def validate_api_key(
    request: HttpRequest,
    role: Optional[str],
    api_key: str,
    allow_webhook_access: bool = False,
    client_name: Optional[str] = None,
) -> Union[UserProfile, "RemoteZulipServer"]:
    # Remove whitespace to protect users from trivial errors.
    api_key = api_key.strip()
    if role is not None:
        role = role.strip()

    # If `role` doesn't look like an email, it might be a uuid.
    if settings.ZILENCER_ENABLED and role is not None and "@" not in role:
        try:
            remote_server = get_remote_server_by_uuid(role)
        except RemoteZulipServer.DoesNotExist:
            raise InvalidZulipServerError(role)
        if api_key != remote_server.api_key:
            raise InvalidZulipServerKeyError(role)

        if remote_server.deactivated:
            raise RemoteServerDeactivatedError()

        if get_subdomain(request) != Realm.SUBDOMAIN_FOR_ROOT_DOMAIN:
            raise JsonableError(_("Invalid subdomain for push notifications bouncer"))
        request.user = remote_server
        remote_server.rate_limits = ""
        # Skip updating UserActivity, since remote_server isn't actually a UserProfile object.
        process_client(request, remote_server, skip_update_user_activity=True)
        return remote_server

    user_profile = access_user_by_api_key(request, api_key, email=role)
    if user_profile.is_incoming_webhook and not allow_webhook_access:
        raise JsonableError(_("This API is not available to incoming webhook bots."))

    request.user = user_profile
    process_client(request, user_profile, client_name=client_name)

    return user_profile


def validate_account_and_subdomain(request: HttpRequest, user_profile: UserProfile) -> None:
    if user_profile.realm.deactivated:
        raise RealmDeactivatedError()
    if not user_profile.is_active:
        raise UserDeactivatedError()

    # Either the subdomain matches, or we're accessing Tornado from
    # and to localhost (aka spoofing a request as the user).
    if not user_matches_subdomain(get_subdomain(request), user_profile) and not (
        settings.RUNNING_INSIDE_TORNADO
        and request.META["SERVER_NAME"] == "127.0.0.1"
        and request.META["REMOTE_ADDR"] == "127.0.0.1"
    ):
        logging.warning(
            "User %s (%s) attempted to access API on wrong subdomain (%s)",
            user_profile.delivery_email,
            user_profile.realm.subdomain,
            get_subdomain(request),
        )
        raise JsonableError(_("Account is not associated with this subdomain"))


def access_user_by_api_key(
    request: HttpRequest, api_key: str, email: Optional[str] = None
) -> UserProfile:
    if not has_api_key_format(api_key):
        raise InvalidAPIKeyFormatError()

    try:
        user_profile = get_user_profile_by_api_key(api_key)
    except UserProfile.DoesNotExist:
        raise InvalidAPIKeyError()
    if email is not None and email.lower() != user_profile.delivery_email.lower():
        # This covers the case that the API key is correct, but for a
        # different user. We may end up wanting to relax this
        # constraint or give a different error message in the future.
        raise InvalidAPIKeyError()

    validate_account_and_subdomain(request, user_profile)

    return user_profile


def log_unsupported_webhook_event(summary: str) -> None:
    # This helper is primarily used by some of our more complicated
    # webhook integrations (e.g. GitHub) that need to log an unsupported
    # event based on attributes nested deep within a complicated JSON
    # payload. In such cases, the error message we want to log may not
    # really fit what a regular UnsupportedWebhookEventType exception
    # represents.
    webhook_unsupported_events_logger.exception(summary, stack_info=True)


def log_exception_to_webhook_logger(err: Exception) -> None:
    if isinstance(err, AnomalousWebhookPayload):
        webhook_anomalous_payloads_logger.exception(str(err), stack_info=True)
    elif isinstance(err, UnsupportedWebhookEventType):
        webhook_unsupported_events_logger.exception(str(err), stack_info=True)
    else:
        webhook_logger.exception(str(err), stack_info=True)


def full_webhook_client_name(raw_client_name: Optional[str] = None) -> Optional[str]:
    if raw_client_name is None:
        return None
    return f"Zulip{raw_client_name}Webhook"


# Use this for webhook views that don't get an email passed in.
def webhook_view(
    webhook_client_name: str,
    notify_bot_owner_on_invalid_json: bool = True,
    all_event_types: Optional[Sequence[str]] = None,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
    # Unfortunately, callback protocols are insufficient for this:
    # https://mypy.readthedocs.io/en/stable/protocols.html#callback-protocols
    # Variadic generics are necessary: https://github.com/python/typing/issues/193
    def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
        @csrf_exempt
        @has_request_variables
        @wraps(view_func)
        def _wrapped_func_arguments(
            request: HttpRequest, api_key: str = REQ(), *args: object, **kwargs: object
        ) -> HttpResponse:
            user_profile = validate_api_key(
                request,
                None,
                api_key,
                allow_webhook_access=True,
                client_name=full_webhook_client_name(webhook_client_name),
            )

            if settings.RATE_LIMITING:
                rate_limit_user(request, user_profile, domain="api_by_user")
            try:
                return view_func(request, user_profile, *args, **kwargs)
            except Exception as err:
                if isinstance(err, InvalidJSONError) and notify_bot_owner_on_invalid_json:
                    # NOTE: importing this at the top of file leads to a
                    # cyclic import; correct fix is probably to move
                    # notify_bot_owner_about_invalid_json to a smaller file.
                    from zerver.lib.webhooks.common import notify_bot_owner_about_invalid_json

                    notify_bot_owner_about_invalid_json(user_profile, webhook_client_name)
                elif isinstance(err, JsonableError) and not isinstance(err, WebhookError):
                    pass
                else:
                    if isinstance(err, WebhookError):
                        err.webhook_name = webhook_client_name
                    log_exception_to_webhook_logger(err)
                raise err

        _wrapped_func_arguments._all_event_types = all_event_types
        return _wrapped_func_arguments

    return _wrapped_view_func
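
# Example (hypothetical integration): an incoming-webhook view is typically
# declared roughly as
#
#     @webhook_view("HelloWorld")
#     @has_request_variables
#     def api_helloworld_webhook(
#         request: HttpRequest, user_profile: UserProfile, payload: str = REQ("payload")
#     ) -> HttpResponse:
#         ...
#
# so the bot's API key is validated, the client is recorded as
# "ZulipHelloWorldWebhook", and unexpected errors are routed to the webhook
# loggers defined above.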


def zulip_redirect_to_login(
    request: HttpRequest,
    login_url: Optional[str] = None,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
) -> HttpResponseRedirect:
    path = request.build_absolute_uri()
    resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
    # If the login URL is the same scheme and net location then just
    # use the path as the "next" url.
    login_scheme, login_netloc = urllib.parse.urlparse(resolved_login_url)[:2]
    current_scheme, current_netloc = urllib.parse.urlparse(path)[:2]
    if (not login_scheme or login_scheme == current_scheme) and (
        not login_netloc or login_netloc == current_netloc
    ):
        path = request.get_full_path()

    if path == "/":
        # Don't add ?next=/, to keep our URLs clean
        return HttpResponseRedirect(resolved_login_url)
    return redirect_to_login(path, resolved_login_url, redirect_field_name)


# From Django 2.2, modified to pass the request rather than just the
# user into test_func; this is useful so that we can revalidate the
# subdomain matches the user's realm. It is likely that we could make
# the subdomain validation happen elsewhere and switch to using the
# stock Django version.
def user_passes_test(
    test_func: Callable[[HttpRequest], bool],
    login_url: Optional[str] = None,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
) -> Callable[[ViewFuncT], ViewFuncT]:
    """
    Decorator for views that checks that the user passes the given test,
    redirecting to the log-in page if necessary. The test should be a callable
    that takes the HttpRequest object and returns True if the user passes.
    """

    def decorator(view_func: ViewFuncT) -> ViewFuncT:
        @wraps(view_func)
        def _wrapped_view(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
            if test_func(request):
                return view_func(request, *args, **kwargs)
            return zulip_redirect_to_login(request, login_url, redirect_field_name)

        return cast(ViewFuncT, _wrapped_view)  # https://github.com/python/mypy/issues/1927

    return decorator
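
# Example: zulip_login_required (below) composes this with the
# logged_in_and_active predicate, roughly
#
#     @user_passes_test(logged_in_and_active)
#     def my_page(request: HttpRequest) -> HttpResponse:  # hypothetical view
#         ...
#
# so anonymous, deactivated, or wrong-subdomain users are redirected to the
# login page instead of reaching the view.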


def logged_in_and_active(request: HttpRequest) -> bool:
    if not request.user.is_authenticated:
        return False
    if not request.user.is_active:
        return False
    if request.user.realm.deactivated:
        return False
    return user_matches_subdomain(get_subdomain(request), request.user)


def do_two_factor_login(request: HttpRequest, user_profile: UserProfile) -> None:
    device = default_device(user_profile)
    if device:
        django_otp.login(request, device)


def do_login(request: HttpRequest, user_profile: UserProfile) -> None:
    """Creates a session, logging in the user, using the Django method,
    and also adds helpful data needed by our server logs.
    """
    django_login(request, user_profile)
    RequestNotes.get_notes(request).requestor_for_logs = user_profile.format_requestor_for_logs()
    process_client(request, user_profile, is_browser_view=True)
    if settings.TWO_FACTOR_AUTHENTICATION_ENABLED:
        # Log in with two factor authentication as well.
        do_two_factor_login(request, user_profile)


def log_view_func(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        RequestNotes.get_notes(request).query = view_func.__name__
        return view_func(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def add_logging_data(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        process_client(request, request.user, is_browser_view=True, query=view_func.__name__)
        return rate_limit()(view_func)(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def human_users_only(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        assert request.user.is_authenticated
        if request.user.is_bot:
            raise JsonableError(_("This endpoint does not accept bot requests."))
        return view_func(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


@overload
def zulip_login_required(
    function: ViewFuncT,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
    login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> ViewFuncT:
    ...


@overload
def zulip_login_required(
    function: None,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
    login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[[ViewFuncT], ViewFuncT]:
    ...


# Based on Django 1.8's @login_required
def zulip_login_required(
    function: Optional[ViewFuncT] = None,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
    login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Union[Callable[[ViewFuncT], ViewFuncT], ViewFuncT]:
    actual_decorator = lambda function: user_passes_test(
        logged_in_and_active,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )(
        zulip_otp_required(
            redirect_field_name=redirect_field_name,
            login_url=login_url,
        )(add_logging_data(function))
    )

    if function:
        return actual_decorator(function)
    return actual_decorator  # nocoverage # We don't use this without a function
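
# Example (hypothetical view): the usual way to protect a browser-facing page:
#
#     @zulip_login_required
#     def home(request: HttpRequest) -> HttpResponse:
#         ...
#
# This checks logged_in_and_active, enforces two-factor authentication when
# enabled, and records client/logging data before the view body runs.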


def web_public_view(
    view_func: ViewFuncT,
    redirect_field_name: str = REDIRECT_FIELD_NAME,
    login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Union[Callable[[ViewFuncT], ViewFuncT], ViewFuncT]:
    """
    This wrapper adds client info for unauthenticated users but
    forces authenticated users to go through 2fa.
    """
    actual_decorator = lambda view_func: zulip_otp_required(
        redirect_field_name=redirect_field_name, login_url=login_url
    )(add_logging_data(view_func))

    return actual_decorator(view_func)


def require_server_admin(view_func: ViewFuncT) -> ViewFuncT:
    @zulip_login_required
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        if not request.user.is_staff:
            return HttpResponseRedirect(settings.HOME_NOT_LOGGED_IN)

        return add_logging_data(view_func)(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def require_server_admin_api(view_func: ViewFuncT) -> ViewFuncT:
    @zulip_login_required
    @wraps(view_func)
    def _wrapped_view_func(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if not user_profile.is_staff:
            raise JsonableError(_("Must be a server administrator"))
        return view_func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def require_non_guest_user(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if user_profile.is_guest:
            raise JsonableError(_("Not allowed for guest users"))
        return view_func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def require_member_or_admin(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if user_profile.is_guest:
            raise JsonableError(_("Not allowed for guest users"))
        if user_profile.is_bot:
            raise JsonableError(_("This endpoint does not accept bot requests."))
        return view_func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def require_user_group_edit_permission(view_func: ViewFuncT) -> ViewFuncT:
    @require_member_or_admin
    @wraps(view_func)
    def _wrapped_view_func(
        request: HttpRequest, user_profile: UserProfile, *args: object, **kwargs: object
    ) -> HttpResponse:
        if not user_profile.can_edit_user_groups():
            raise JsonableError(_("Insufficient permission"))
        return view_func(request, user_profile, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


# This API endpoint is used only for the mobile apps. It is part of a
# workaround for the fact that React Native doesn't support setting
# HTTP basic authentication headers.
def authenticated_uploads_api_view(
    skip_rate_limiting: bool = False,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
    def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
        @csrf_exempt
        @has_request_variables
        @wraps(view_func)
        def _wrapped_func_arguments(
            request: HttpRequest, api_key: str = REQ(), *args: object, **kwargs: object
        ) -> HttpResponse:
            user_profile = validate_api_key(request, None, api_key, False)
            if not skip_rate_limiting:
                limited_func = rate_limit()(view_func)
            else:
                limited_func = view_func
            return limited_func(request, user_profile, *args, **kwargs)

        return _wrapped_func_arguments

    return _wrapped_view_func


# A more REST-y authentication decorator, using, in particular, HTTP basic
# authentication.
#
# If webhook_client_name is specified, the request is a webhook view
# with that string as the basis for the client string.
def authenticated_rest_api_view(
    *,
    webhook_client_name: Optional[str] = None,
    allow_webhook_access: bool = False,
    skip_rate_limiting: bool = False,
) -> Callable[[Callable[..., HttpResponse]], Callable[..., HttpResponse]]:
    if webhook_client_name is not None:
        allow_webhook_access = True

    def _wrapped_view_func(view_func: Callable[..., HttpResponse]) -> Callable[..., HttpResponse]:
        @csrf_exempt
        @wraps(view_func)
        def _wrapped_func_arguments(
            request: HttpRequest, *args: object, **kwargs: object
        ) -> HttpResponse:
            # First try block attempts to get the credentials we need to do authentication
            try:
                # Grab the base64-encoded authentication string, decode it, and split it into
                # the email and API key
                auth_type, credentials = request.META["HTTP_AUTHORIZATION"].split()
                # case insensitive per RFC 1945
                if auth_type.lower() != "basic":
                    raise JsonableError(_("This endpoint requires HTTP basic authentication."))
                role, api_key = base64.b64decode(credentials).decode().split(":")
            except ValueError:
                return json_unauthorized(_("Invalid authorization header for basic auth"))
            except KeyError:
                return json_unauthorized(_("Missing authorization header for basic auth"))

            # Now we try to do authentication or die
            try:
                # profile is a Union[UserProfile, RemoteZulipServer]
                profile = validate_api_key(
                    request,
                    role,
                    api_key,
                    allow_webhook_access=allow_webhook_access,
                    client_name=full_webhook_client_name(webhook_client_name),
                )
            except JsonableError as e:
                return json_unauthorized(e.msg)
            try:
                if not skip_rate_limiting:
                    # Apply rate limiting
                    target_view_func = rate_limit()(view_func)
                else:
                    target_view_func = view_func
                return target_view_func(request, profile, *args, **kwargs)
            except Exception as err:
                if not webhook_client_name:
                    raise err
                if isinstance(err, JsonableError) and not isinstance(
                    err, WebhookError
                ):  # nocoverage
                    raise err

                if isinstance(err, WebhookError):
                    err.webhook_name = webhook_client_name
                log_exception_to_webhook_logger(err)
                raise err

        return _wrapped_func_arguments

    return _wrapped_view_func
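
# For illustration (credentials invented): clients authenticate to these views
# with HTTP basic auth, where the username is a bot/user email or a remote
# server UUID and the password is the API key:
#
#     Authorization: Basic base64("hamlet-bot@example.com:<api_key>")
#
# The decorator decodes that header, validates it via validate_api_key, and
# applies rate limiting before calling the wrapped view.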


def process_as_post(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        # Adapted from django/http/__init__.py.
        # So by default Django doesn't populate request.POST for anything besides
        # POST requests. We want this dict populated for PATCH/PUT, so we have to
        # do it ourselves.
        #
        # This will not be required in the future, a bug will be filed against
        # Django upstream.

        if not request.POST:
            # Only take action if POST is empty.
            if request.META.get("CONTENT_TYPE", "").startswith("multipart"):
                # Note that request._files is just the private attribute that backs the
                # FILES property, so we are essentially setting request.FILES here. (In
                # Django 1.5 FILES was still a read-only property.)
                request.POST, request._files = MultiPartParser(
                    request.META,
                    BytesIO(request.body),
                    request.upload_handlers,
                    request.encoding,
                ).parse()
            else:
                request.POST = QueryDict(request.body, encoding=request.encoding)

        return view_func(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927
|
2013-03-21 20:18:44 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-24 02:10:50 +02:00
|
|
|
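# Illustrative sketch (hypothetical view and field name, not part of this
# module's API): applying process_as_post so that a PATCH request's
# urlencoded body shows up in request.POST just like a POST body would.
@process_as_post
def example_update_setting(request: HttpRequest) -> HttpResponse:
    # Thanks to the decorator, this works for PATCH/PUT bodies, not just POST.
    new_value = request.POST.get("value")
    assert new_value is None or isinstance(new_value, str)
    return json_success(request)

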
def authenticate_log_and_execute_json(
    request: HttpRequest,
    view_func: ViewFuncT,
    *args: object,
    skip_rate_limiting: bool = False,
    allow_unauthenticated: bool = False,
    **kwargs: object,
) -> HttpResponse:
    if not skip_rate_limiting:
        limited_view_func = rate_limit()(view_func)
    else:
        limited_view_func = view_func

    if not request.user.is_authenticated:
        if not allow_unauthenticated:
            return json_unauthorized()

        process_client(
            request,
            request.user,
            is_browser_view=True,
            skip_update_user_activity=True,
            query=view_func.__name__,
        )
        return limited_view_func(request, request.user, *args, **kwargs)

    user_profile = request.user
    validate_account_and_subdomain(request, user_profile)

    if user_profile.is_incoming_webhook:
        raise JsonableError(_("Webhook bots can only access webhooks"))

    process_client(request, user_profile, is_browser_view=True, query=view_func.__name__)
    return limited_view_func(request, user_profile, *args, **kwargs)


# Checks if the user is logged in.  If not, returns an error (the
# @login_required behavior of redirecting to a login page doesn't make
# sense for JSON views).
def authenticated_json_view(
    view_func: Callable[..., HttpResponse],
    skip_rate_limiting: bool = False,
    allow_unauthenticated: bool = False,
) -> Callable[..., HttpResponse]:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        return authenticate_log_and_execute_json(
            request,
            view_func,
            *args,
            skip_rate_limiting=skip_rate_limiting,
            allow_unauthenticated=allow_unauthenticated,
            **kwargs,
        )

    return _wrapped_view_func


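# Illustrative sketch (hypothetical view, not part of this module): wiring a
# JSON endpoint through authenticated_json_view.  The decorated view receives
# the authenticated UserProfile as its second positional argument.
@authenticated_json_view
def example_json_ping(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
    # Unauthenticated requests never reach this body; they get
    # json_unauthorized() from authenticate_log_and_execute_json instead.
    return json_success(request)

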
def is_local_addr(addr: str) -> bool:
    return addr in ("127.0.0.1", "::1")


# These views are used by the main Django server to notify the Tornado server
# of events.  We protect them from the outside world by checking a shared
# secret, and also the originating IP (for now).
def authenticate_notify(request: HttpRequest) -> bool:
    return (
        is_local_addr(request.META["REMOTE_ADDR"])
        and request.POST.get("secret") == settings.SHARED_SECRET
    )


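# Illustrative sketch (test-style, assuming Django's RequestFactory, which
# defaults REMOTE_ADDR to 127.0.0.1, and assuming SHARED_SECRET is a string):
# demonstrating the two conditions authenticate_notify checks.
def example_build_notify_request() -> bool:
    from django.test import RequestFactory  # test-only helper

    request = RequestFactory().post("/notify", {"secret": settings.SHARED_SECRET})
    # True only because the request is "from" a local address and carries
    # the correct shared secret in its POST data.
    return authenticate_notify(request)

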
def client_is_exempt_from_rate_limiting(request: HttpRequest) -> bool:
    # Don't rate-limit requests from Django that come from our own servers,
    # and don't rate-limit dev instances.
    client = RequestNotes.get_notes(request).client
    return (client is not None and client.name.lower() == "internal") and (
        is_local_addr(request.META["REMOTE_ADDR"]) or settings.DEBUG_RATE_LIMITING
    )


def internal_notify_view(
    is_tornado_view: bool,
) -> Callable[[ViewFuncT], Callable[..., HttpResponse]]:
    # The typing here could be improved by using the extended Callable types:
    # https://mypy.readthedocs.io/en/stable/additional_features.html#extended-callable-types
    """Used for situations where something running on the Zulip server
    needs to make a request to the (other) Django/Tornado processes running on
    the server."""

    def _wrapped_view_func(view_func: ViewFuncT) -> Callable[..., HttpResponse]:
        @csrf_exempt
        @require_post
        @wraps(view_func)
        def _wrapped_func_arguments(
            request: HttpRequest, *args: object, **kwargs: object
        ) -> HttpResponse:
            if not authenticate_notify(request):
                raise AccessDeniedError()
            request_notes = RequestNotes.get_notes(request)
            is_tornado_request = request_notes.tornado_handler is not None
            # These next 2 are not security checks; they are internal
            # assertions to help us find bugs.
            if is_tornado_view and not is_tornado_request:
                raise RuntimeError("Tornado notify view called with no Tornado handler")
            if not is_tornado_view and is_tornado_request:
                raise RuntimeError("Django notify view called with Tornado handler")
            request_notes.requestor_for_logs = "internal"
            return view_func(request, *args, **kwargs)

        return _wrapped_func_arguments

    return _wrapped_view_func


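# Illustrative sketch (hypothetical view name): a Tornado-side notify endpoint
# guarded by internal_notify_view.  Only POST requests from a local address
# that carry the shared secret reach the body.
@internal_notify_view(True)
def example_notify_event(request: HttpRequest) -> HttpResponse:
    # A real notify view would forward the event payload to clients here.
    return json_success(request)

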
def to_utc_datetime(timestamp: str) -> datetime.datetime:
    return timestamp_to_datetime(float(timestamp))


def statsd_increment(counter: str, val: int = 1) -> Callable[[FuncT], FuncT]:
    """Increments a statsd counter on completion of the
    decorated function.

    Pass the name of the counter to this decorator-returning function."""

    def wrapper(func: FuncT) -> FuncT:
        @wraps(func)
        def wrapped_func(*args: object, **kwargs: object) -> object:
            ret = func(*args, **kwargs)
            statsd.incr(counter, val)
            return ret

        return cast(FuncT, wrapped_func)  # https://github.com/python/mypy/issues/1927

    return wrapper


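# Illustrative sketch (hypothetical function and counter name): counting
# successful completions with statsd_increment.
@statsd_increment("example_emails_sent")
def example_send_email() -> None:
    # The counter is only incremented after the body returns without raising,
    # because statsd.incr runs after func() completes.
    ...

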
def rate_limit_user(request: HttpRequest, user: UserProfile, domain: str) -> None:
    """Rate-limits the given user for the given domain.  Raises a RateLimited
    exception if the user has exceeded the rate limit; otherwise returns,
    after annotating the request with rate limit information."""

    RateLimitedUser(user, domain=domain).rate_limit_request(request)


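# Illustrative sketch (hypothetical call site): callers normally let
# RateLimited propagate so it is rendered as an API error upstream, but they
# can catch it explicitly if they need custom handling.
def example_check_user_limit(request: HttpRequest, user: UserProfile) -> bool:
    try:
        rate_limit_user(request, user, domain="api_by_user")
        return True
    except RateLimited:
        # The user has exceeded their quota for this domain.
        return False

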
@cache_with_key(lambda: "tor_ip_addresses:", timeout=60 * 60)
@circuit(failure_threshold=2, recovery_timeout=60 * 10)
def get_tor_ips() -> Set[str]:
    if not settings.RATE_LIMIT_TOR_TOGETHER:
        return set()

    # The cron job in /etc/cron.d/fetch-tor-exit-nodes fetches this file
    # hourly; we cache its contents in memcached to avoid going to disk on
    # every unauthenticated request.  On read failures, we circuit-break,
    # so two failures in a row cause a 10-minute backoff.

    with open(settings.TOR_EXIT_NODE_FILE_PATH, "rb") as f:
        exit_node_list = orjson.loads(f.read())

    # This should always be non-empty; if it's empty, assume something
    # went wrong with writing and treat it as a non-existent file.
    # Circuit-breaking will ensure that we back off on re-reading the
    # file.
    if len(exit_node_list) == 0:
        raise OSError("File is empty")

    return set(exit_node_list)


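# Illustrative sketch (assumption: the exit-node file is a JSON array of IP
# address strings, which is what get_tor_ips() reads back with orjson.loads
# and set()).  This is roughly what the hourly cron job would need to write;
# the function and parameter names are hypothetical.
def example_write_tor_exit_nodes(path: str, exit_node_ips: Set[str]) -> None:
    with open(path, "wb") as f:
        # Sorting gives a deterministic file; orjson.dumps returns bytes.
        f.write(orjson.dumps(sorted(exit_node_ips)))

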
def rate_limit_ip(request: HttpRequest, ip_addr: str, domain: str) -> None:
    RateLimitedIPAddr(ip_addr, domain=domain).rate_limit_request(request)


def rate_limit_request_by_ip(request: HttpRequest, domain: str) -> None:
    # REMOTE_ADDR is set by SetRemoteAddrFromRealIpHeader in conjunction
    # with the nginx configuration to guarantee that this is *the* correct
    # IP address to use - without worrying we'll grab the IP of a proxy.
    ip_addr = request.META["REMOTE_ADDR"]
    assert ip_addr

    try:
        # We lump all TOR exit nodes into one bucket; this prevents
        # abuse from TOR, while still allowing some access to these
        # endpoints for legitimate users.  Checking for local addresses
        # first is mostly a shortcut for ease of testing, so we don't
        # have to mock the TOR endpoint in every test.
        if is_local_addr(ip_addr):
            pass
        elif ip_addr in get_tor_ips():
            ip_addr = "tor-exit-node"
    except (OSError, CircuitBreakerError) as err:
        # In the event that we can't get an updated list of TOR exit
        # nodes, assume the IP is _not_ one, and leave it unchanged.
        # We log a warning so that this endpoint being taken out of
        # service doesn't silently remove this functionality.
        rate_limiter_logger.warning("Failed to fetch TOR exit node list: %s", err)
    rate_limit_ip(request, ip_addr, domain=domain)


def rate_limit_remote_server(
    request: HttpRequest, remote_server: "RemoteZulipServer", domain: str
) -> None:
    try:
        RateLimitedRemoteZulipServer(remote_server, domain=domain).rate_limit_request(request)
    except RateLimited as e:
        rate_limiter_logger.warning(
            "Remote server %s exceeded rate limits on domain %s", remote_server, domain
        )
        raise e


def rate_limit() -> Callable[[ViewFuncT], ViewFuncT]:
    """Rate-limits a view.  Returns a decorator."""

    def wrapper(func: ViewFuncT) -> ViewFuncT:
        @wraps(func)
        def wrapped_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:

            # It is really tempting to not even wrap our original function
            # when settings.RATE_LIMITING is False, but it would make
            # for awkward unit testing in some situations.
            if not settings.RATE_LIMITING:
                return func(request, *args, **kwargs)

            if client_is_exempt_from_rate_limiting(request):
                return func(request, *args, **kwargs)

            user = request.user

            if isinstance(user, AnonymousUser):
                rate_limit_request_by_ip(request, domain="api_by_ip")
                return func(request, *args, **kwargs)
            elif settings.ZILENCER_ENABLED and isinstance(user, RemoteZulipServer):
                rate_limit_remote_server(request, user, domain="api_by_remote_server")
            else:
                assert isinstance(user, UserProfile)
                rate_limit_user(request, user, domain="api_by_user")

            return func(request, *args, **kwargs)

        return cast(ViewFuncT, wrapped_func)  # https://github.com/python/mypy/issues/1927

    return wrapper


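# Illustrative sketch (hypothetical view): applying rate_limit() directly.
# In practice the other decorators in this module call rate_limit() for you,
# but a standalone view could opt in like this.
@rate_limit()
def example_rate_limited_view(request: HttpRequest) -> HttpResponse:
    # Anonymous requests are bucketed by IP ("api_by_ip"); authenticated
    # requests are bucketed per user ("api_by_user").
    return json_success(request)

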
def return_success_on_head_request(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        if request.method == "HEAD":
            return json_success(request)
        return view_func(request, *args, **kwargs)

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927


def zulip_otp_required(
    redirect_field_name: str = "next",
    login_url: str = settings.HOME_NOT_LOGGED_IN,
) -> Callable[[ViewFuncT], ViewFuncT]:
    """
    The reason we need to create this function is that the stock
    otp_required decorator doesn't play well with tests.  We cannot
    enable/disable the if_configured parameter during tests, since the
    decorator retains its value due to closure.

    Similar to :func:`~django.contrib.auth.decorators.login_required`, but
    requires the user to be :term:`verified`.  By default, this redirects users
    to :setting:`OTP_LOGIN_URL`.
    """

    def test(user: UserProfile) -> bool:
        """
        :if_configured: If ``True``, an authenticated user with no confirmed
        OTP devices will be allowed.  Also, non-authenticated users will be
        allowed as spectator users.  Default is ``False``.  If ``False``,
        2FA is not enforced at all.
        """
        if_configured = settings.TWO_FACTOR_AUTHENTICATION_ENABLED
        if not if_configured:
            return True

        # User has completed 2FA verification.
        if user.is_verified():
            return True

        # This request is unauthenticated (logged-out) access; 2FA is
        # not required or possible.
        #
        # TODO: Add a test for 2FA-enabled with web-public views.
        if not user.is_authenticated:  # nocoverage
            return True

        # If the user doesn't have 2FA set up, we can't enforce 2FA.
        if not user_has_device(user):
            return True

        # User has configured 2FA and is not verified, so the user
        # fails the test (and we should redirect to the 2FA view).
        return False

    decorator = django_user_passes_test(
        test, login_url=login_url, redirect_field_name=redirect_field_name
    )

    return decorator


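# Illustrative sketch (hypothetical view): protecting a Django view so that a
# user who has configured 2FA must be verified before the body runs.
@zulip_otp_required()
def example_sensitive_settings_view(request: HttpRequest) -> HttpResponse:
    # Users with an OTP device who are not yet verified are redirected to
    # login_url instead of reaching this point.
    return json_success(request)

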
def add_google_analytics_context(context: Dict[str, object]) -> None:
    if settings.GOOGLE_ANALYTICS_ID is not None:  # nocoverage
        page_params = context.setdefault("page_params", {})
        assert isinstance(page_params, dict)
        page_params["google_analytics_id"] = settings.GOOGLE_ANALYTICS_ID


def add_google_analytics(view_func: ViewFuncT) -> ViewFuncT:
    @wraps(view_func)
    def _wrapped_view_func(request: HttpRequest, *args: object, **kwargs: object) -> HttpResponse:
        response = view_func(request, *args, **kwargs)
        if isinstance(response, SimpleTemplateResponse):
            if response.context_data is None:
                response.context_data = {}
            add_google_analytics_context(response.context_data)
        elif response.status_code == 200:  # nocoverage
            raise TypeError("add_google_analytics requires a TemplateResponse")
        return response

    return cast(ViewFuncT, _wrapped_view_func)  # https://github.com/python/mypy/issues/1927
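

# Illustrative sketch (hypothetical view and template path): a landing-page
# view returning a TemplateResponse, so add_google_analytics can inject the
# analytics ID into its context before rendering.
@add_google_analytics
def example_landing_page(request: HttpRequest) -> HttpResponse:
    return TemplateResponse(request, "example/landing.html", {"page_params": {}})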