2017-11-16 19:51:44 +01:00
|
|
|
# System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html
|
2020-06-11 00:54:34 +02:00
|
|
|
import logging
|
|
|
|
import subprocess
|
2020-08-31 09:22:40 +02:00
|
|
|
from typing import Any, Dict, Mapping, Optional, Union
|
2020-06-27 03:05:47 +02:00
|
|
|
from urllib.parse import SplitResult
|
2015-11-23 17:29:37 +01:00
|
|
|
|
|
|
|
from django.conf import settings
|
2020-08-31 09:22:40 +02:00
|
|
|
from django.contrib.auth.models import AnonymousUser
|
2016-06-04 21:47:59 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2018-04-11 05:50:08 +02:00
|
|
|
from django.views.decorators.csrf import csrf_exempt
|
|
|
|
from django.views.decorators.http import require_POST
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2020-08-31 09:22:40 +02:00
|
|
|
from zerver.context_processors import get_valid_realm_from_request
|
2020-05-07 13:19:54 +02:00
|
|
|
from zerver.decorator import human_users_only
|
2020-06-25 15:00:33 +02:00
|
|
|
from zerver.lib.markdown import privacy_clean_markdown
|
2015-11-23 17:29:37 +01:00
|
|
|
from zerver.lib.queue import queue_json_publish
|
2021-08-21 19:24:20 +02:00
|
|
|
from zerver.lib.request import REQ, RequestNotes, has_request_variables
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.response import json_success
|
2019-07-17 02:29:08 +02:00
|
|
|
from zerver.lib.storage import static_path
|
2015-11-23 17:29:37 +01:00
|
|
|
from zerver.lib.unminify import SourceMap
|
|
|
|
from zerver.lib.utils import statsd, statsd_key
|
2020-05-07 13:19:54 +02:00
|
|
|
from zerver.lib.validator import check_bool, check_dict, to_non_negative_int
|
2016-06-04 21:47:59 +02:00
|
|
|
from zerver.models import UserProfile
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# Cached source-map reader, built lazily on first use.
js_source_map: Optional[SourceMap] = None


def get_js_source_map() -> Optional[SourceMap]:
    """Return the shared SourceMap used to decode JavaScript backtraces.

    The map is built once from the webpack bundle directory and cached in
    the module-level ``js_source_map``.  In development and test runs no
    map is loaded, so ``None`` is returned there.
    """
    global js_source_map
    if not js_source_map and not (settings.DEVELOPMENT or settings.TEST_SUITE):
        js_source_map = SourceMap([static_path("webpack-bundles")])
    return js_source_map
|
2015-11-23 17:29:37 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-28 00:16:13 +02:00
|
|
|
@human_users_only
@has_request_variables
def report_send_times(
    request: HttpRequest,
    user_profile: UserProfile,
    time: int = REQ(converter=to_non_negative_int),
    received: int = REQ(converter=to_non_negative_int, default=-1),
    displayed: int = REQ(converter=to_non_negative_int, default=-1),
    locally_echoed: bool = REQ(json_validator=check_bool, default=False),
    rendered_content_disparity: bool = REQ(json_validator=check_bool, default=False),
) -> HttpResponse:
    """Record client-reported end-to-end message latency timings.

    A default of -1 for ``received``/``displayed`` signals that the client
    could not measure that stage; such values appear as "(unknown)" in the
    request log and are not reported to statsd.
    """
    received_str = str(received) if received > 0 else "(unknown)"
    displayed_str = str(displayed) if displayed > 0 else "(unknown)"

    # Attach a compact summary to this request's server log line.
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = (
        f"[{time}ms/{received_str}ms/{displayed_str}ms"
        f"/echo:{locally_echoed}/diff:{rendered_content_disparity}]"
    )

    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing(f"endtoend.send_time.{base_key}", time)
    if received > 0:
        statsd.timing(f"endtoend.receive_time.{base_key}", received)
    if displayed > 0:
        statsd.timing(f"endtoend.displayed_time.{base_key}", displayed)
    if locally_echoed:
        statsd.incr("locally_echoed")
    if rendered_content_disparity:
        statsd.incr("render_disparity")
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 17:29:37 +01:00
|
|
|
@has_request_variables
def report_narrow_times(
    request: HttpRequest,
    user_profile: Union[UserProfile, AnonymousUser],
    initial_core: int = REQ(converter=to_non_negative_int),
    initial_free: int = REQ(converter=to_non_negative_int),
    network: int = REQ(converter=to_non_negative_int),
) -> HttpResponse:
    """Record client-reported timings for rendering a narrow (ms)."""
    # Attach a compact summary to this request's server log line.
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = f"[{initial_core}ms/{initial_free}ms/{network}ms]"

    realm = get_valid_realm_from_request(request)
    base_key = statsd_key(realm.string_id, clean_periods=True)
    for metric, value in (
        ("initial_core", initial_core),
        ("initial_free", initial_free),
        ("network", network),
    ):
        statsd.timing(f"narrow.{metric}.{base_key}", value)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 17:29:37 +01:00
|
|
|
@has_request_variables
def report_unnarrow_times(
    request: HttpRequest,
    user_profile: Union[UserProfile, AnonymousUser],
    initial_core: int = REQ(converter=to_non_negative_int),
    initial_free: int = REQ(converter=to_non_negative_int),
) -> HttpResponse:
    """Record client-reported timings for leaving a narrow (ms)."""
    # Attach a compact summary to this request's server log line.
    log_data = RequestNotes.get_notes(request).log_data
    assert log_data is not None
    log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]"

    realm = get_valid_realm_from_request(request)
    base_key = statsd_key(realm.string_id, clean_periods=True)
    for metric, value in (
        ("initial_core", initial_core),
        ("initial_free", initial_free),
    ):
        statsd.timing(f"unnarrow.{metric}.{base_key}", value)
    return json_success()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2015-11-23 17:29:37 +01:00
|
|
|
@has_request_variables
def report_error(
    request: HttpRequest,
    user_profile: UserProfile,
    message: str = REQ(),
    stacktrace: str = REQ(),
    ui_message: bool = REQ(json_validator=check_bool),
    user_agent: str = REQ(),
    href: str = REQ(),
    log: str = REQ(),
    more_info: Mapping[str, Any] = REQ(json_validator=check_dict([]), default={}),
) -> HttpResponse:
    """Accepts an error report and stores in a queue for processing. The
    actual error reports are later handled by do_report_error"""
    if not settings.BROWSER_ERROR_REPORTING:
        return json_success()
    # Copy before mutating: REQ's default={} is a shared object, and we
    # rewrite the draft_content key below.
    more_info = dict(more_info)

    # Translate minified JS frames back to original source locations,
    # when a source map is available (production only).
    js_source_map = get_js_source_map()
    if js_source_map:
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    try:
        version: Optional[str] = subprocess.check_output(
            ["git", "show", "-s", "--oneline"],
            universal_newlines=True,
        )
    except (FileNotFoundError, subprocess.CalledProcessError):
        # Not a Git checkout, or git is unavailable; report without a version.
        version = None

    # Get the IP address of the request
    remote_ip = request.META["REMOTE_ADDR"]

    # For the privacy of our users, we remove any actual text content
    # in draft_content (from drafts rendering exceptions). See the
    # comment on privacy_clean_markdown for more details.
    if more_info.get("draft_content"):
        more_info["draft_content"] = privacy_clean_markdown(more_info["draft_content"])

    if user_profile.is_authenticated:
        email = user_profile.delivery_email
        full_name = user_profile.full_name
    else:
        # Placeholder identity for reports from logged-out sessions.
        email = "unauthenticated@example.com"
        full_name = "Anonymous User"

    queue_json_publish(
        "error_reports",
        dict(
            type="browser",
            report=dict(
                # SplitResult extracts just the hostname, dropping any port.
                host=SplitResult("", request.get_host(), "", "", "").hostname,
                ip_address=remote_ip,
                user_email=email,
                user_full_name=full_name,
                user_visible=ui_message,
                server_path=settings.DEPLOY_ROOT,
                version=version,
                user_agent=user_agent,
                href=href,
                message=message,
                stacktrace=stacktrace,
                log=log,
                more_info=more_info,
            ),
        ),
    )

    return json_success()
|
2018-04-11 05:50:08 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-04-11 05:50:08 +02:00
|
|
|
@csrf_exempt
@require_POST
@has_request_variables
def report_csp_violations(
    request: HttpRequest, csp_report: Dict[str, Any] = REQ(argument_type="body")
) -> HttpResponse:
    """Log a Content-Security-Policy violation report posted by a browser."""
    fields = (
        "document-uri",
        "blocked-uri",
        "original-policy",
        "violated-directive",
        "effective-directive",
        "disposition",
        "referrer",
        "status-code",
        "script-sample",
    )
    # Keys missing from the report are logged as empty strings.
    values = [csp_report.get(field, "") for field in fields]
    logging.warning(
        "CSP violation in document('%s'). "
        "blocked URI('%s'), original policy('%s'), "
        "violated directive('%s'), effective directive('%s'), "
        "disposition('%s'), referrer('%s'), "
        "status code('%s'), script sample('%s')",
        *values,
    )
    return json_success()
|