# System documented in https://zulip.readthedocs.io/en/latest/subsystems/logging.html
from typing import Any, Dict, Optional, Union
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from zerver.decorator import human_users_only, \
    to_non_negative_int
from zerver.lib.bugdown import privacy_clean_markdown
from zerver.lib.request import has_request_variables, REQ
from zerver.lib.response import json_success, json_error
from zerver.lib.queue import queue_json_publish
from zerver.lib.unminify import SourceMap
from zerver.lib.utils import statsd, statsd_key
from zerver.lib.validator import check_bool, check_dict
from zerver.models import UserProfile
import subprocess
import os
import logging
import ujson

js_source_map = None  # type: Optional[SourceMap]

# Read the source map information for decoding JavaScript backtraces.
def get_js_source_map() -> Optional[SourceMap]:
    global js_source_map
    if not js_source_map and not (settings.DEVELOPMENT or settings.TEST_SUITE):
        js_source_map = SourceMap([
            os.path.join(settings.DEPLOY_ROOT, 'prod-static/source-map'),
            os.path.join(settings.STATIC_ROOT, 'webpack-bundles')
        ])
    return js_source_map
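# Note (descriptive, not from the original file): in production the SourceMap
# is built lazily from the deployed source-map directories above and cached in
# the js_source_map global; under DEVELOPMENT/TEST_SUITE it stays None, so
# report_error() below passes browser stacktraces through unannotated.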

@human_users_only
@has_request_variables
def report_send_times(request: HttpRequest, user_profile: UserProfile,
                      time: int=REQ(converter=to_non_negative_int),
                      received: int=REQ(converter=to_non_negative_int, default=-1),
                      displayed: int=REQ(converter=to_non_negative_int, default=-1),
                      locally_echoed: bool=REQ(validator=check_bool, default=False),
                      rendered_content_disparity: bool=REQ(validator=check_bool,
                                                           default=False)) -> HttpResponse:
    received_str = "(unknown)"
    if received > 0:
        received_str = str(received)
    displayed_str = "(unknown)"
    if displayed > 0:
        displayed_str = str(displayed)

    request._log_data["extra"] = "[%sms/%sms/%sms/echo:%s/diff:%s]" \
        % (time, received_str, displayed_str, locally_echoed, rendered_content_disparity)

    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("endtoend.send_time.%s" % (base_key,), time)
    if received > 0:
        statsd.timing("endtoend.receive_time.%s" % (base_key,), received)
    if displayed > 0:
        statsd.timing("endtoend.displayed_time.%s" % (base_key,), displayed)
    if locally_echoed:
        statsd.incr('locally_echoed')
    if rendered_content_disparity:
        statsd.incr('render_disparity')
    return json_success()
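# Illustrative sketch (route and values assumed; the actual URL wiring lives in
# zproject/urls.py, not in this module): the web client would POST
# form-encoded timings in milliseconds, e.g.
#
#     POST /json/report/send_times
#     time=1200&received=1350&displayed=1500&locally_echoed=false
#
# and the view above records them as statsd timings such as
# "endtoend.send_time.<realm_key>", where <realm_key> is the realm's string_id
# as cleaned by statsd_key().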

@human_users_only
@has_request_variables
def report_narrow_times(request: HttpRequest, user_profile: UserProfile,
                        initial_core: int=REQ(converter=to_non_negative_int),
                        initial_free: int=REQ(converter=to_non_negative_int),
                        network: int=REQ(converter=to_non_negative_int)) -> HttpResponse:
    request._log_data["extra"] = "[%sms/%sms/%sms]" % (initial_core, initial_free, network)
    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("narrow.initial_core.%s" % (base_key,), initial_core)
    statsd.timing("narrow.initial_free.%s" % (base_key,), initial_free)
    statsd.timing("narrow.network.%s" % (base_key,), network)
    return json_success()

@human_users_only
@has_request_variables
def report_unnarrow_times(request: HttpRequest, user_profile: UserProfile,
                          initial_core: int=REQ(converter=to_non_negative_int),
                          initial_free: int=REQ(converter=to_non_negative_int)) -> HttpResponse:
    request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free)
    base_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("unnarrow.initial_core.%s" % (base_key,), initial_core)
    statsd.timing("unnarrow.initial_free.%s" % (base_key,), initial_free)
    return json_success()
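# Like report_send_times above, these two views only forward client-measured
# narrowing/unnarrowing timings (in milliseconds, per the "[%sms/...]" log
# format) into per-realm statsd metrics under "narrow.*" and "unnarrow.*".
# The precise client-side meaning of initial_core vs. initial_free is defined
# by the web app that reports them, not by this module.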

@has_request_variables
def report_error(request: HttpRequest, user_profile: UserProfile, message: str=REQ(),
                 stacktrace: str=REQ(), ui_message: bool=REQ(validator=check_bool),
                 user_agent: str=REQ(), href: str=REQ(), log: str=REQ(),
                 more_info: Optional[Dict[str, Any]]=REQ(validator=check_dict([]), default=None)
                 ) -> HttpResponse:
    """Accepts an error report and stores it in a queue for processing.  The
    actual error reports are later handled by do_report_error, via the
    error_reports queue worker."""
    if not settings.BROWSER_ERROR_REPORTING:
        return json_success()
    if more_info is None:
        more_info = {}

    js_source_map = get_js_source_map()
    if js_source_map:
        stacktrace = js_source_map.annotate_stacktrace(stacktrace)

    try:
        version = subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"],
                                          universal_newlines=True)  # type: Optional[str]
    except Exception:
        version = None

    # Get the IP address of the request
    remote_ip = request.META.get('HTTP_X_REAL_IP')
    if remote_ip is None:
        remote_ip = request.META['REMOTE_ADDR']

    # For the privacy of our users, we remove any actual text content
    # in draft_content (from drafts rendering exceptions).  See the
    # comment on privacy_clean_markdown for more details.
    if more_info.get('draft_content'):
        more_info['draft_content'] = privacy_clean_markdown(more_info['draft_content'])

    if user_profile.is_authenticated:
        email = user_profile.delivery_email
        full_name = user_profile.full_name
    else:
        email = "unauthenticated@example.com"
        full_name = "Anonymous User"

    queue_json_publish('error_reports', dict(
        type = "browser",
        report = dict(
            host = request.get_host().split(":")[0],
            ip_address = remote_ip,
            user_email = email,
            user_full_name = full_name,
            user_visible = ui_message,
            server_path = settings.DEPLOY_ROOT,
            version = version,
            user_agent = user_agent,
            href = href,
            message = message,
            stacktrace = stacktrace,
            log = log,
            more_info = more_info,
        )
    ))

    return json_success()
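# Notes on report_error (descriptive, not from the original file):
# * `version` above is the `git log --oneline` line for the currently deployed
#   commit, e.g. "1a2b3c4 Fix rendering bug" (illustrative), or None when the
#   server is not running from a git checkout.
# * The queued "error_reports" event is consumed asynchronously by the
#   error_reports queue worker, so this request returns immediately; the
#   heavier work (formatting the report and notifying server administrators)
#   happens out of band.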

@csrf_exempt
@require_POST
@has_request_variables
def report_csp_violations(request: HttpRequest,
                          csp_report: Dict[str, Any]=REQ(argument_type='body')) -> HttpResponse:
    def get_attr(csp_report_attr: str) -> str:
        return csp_report.get(csp_report_attr, '')

    logging.warning("CSP Violation in Document('%s'). "
                    "Blocked URI('%s'), Original Policy('%s'), "
                    "Violated Directive('%s'), Effective Directive('%s'), "
                    "Disposition('%s'), Referrer('%s'), "
                    "Status Code('%s'), Script Sample('%s')" % (get_attr('document-uri'),
                                                                get_attr('blocked-uri'),
                                                                get_attr('original-policy'),
                                                                get_attr('violated-directive'),
                                                                get_attr('effective-directive'),
                                                                get_attr('disposition'),
                                                                get_attr('referrer'),
                                                                get_attr('status-code'),
                                                                get_attr('script-sample')))

    return json_success()
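# Illustrative CSP report fields (values made up; the exact JSON framing a
# browser uses, e.g. a top-level "csp-report" wrapper, depends on the CSP
# reporting spec version and browser):
#
#     {
#         "document-uri": "https://chat.example.com/",
#         "blocked-uri": "https://evil.example.com/x.js",
#         "violated-directive": "script-src",
#         "effective-directive": "script-src",
#         "original-policy": "script-src 'self'; report-uri <endpoint>",
#         "disposition": "enforce",
#         "referrer": "",
#         "status-code": 200,
#         "script-sample": ""
#     }
#
# get_attr() above reads exactly these keys from the parsed request body
# (falling back to '') and logs them in a single warning line.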