2017-09-24 18:43:30 +02:00
|
|
|
# System documented in https://zulip.readthedocs.io/en/latest/logging.html
|
|
|
|
|
2017-07-10 06:10:34 +02:00
|
|
|
from typing import Any, Dict, Optional, Text
|
2015-11-23 17:29:37 +01:00
|
|
|
|
|
|
|
from django.conf import settings
|
2016-06-04 21:47:59 +02:00
|
|
|
from django.http import HttpRequest, HttpResponse
|
2015-11-23 17:29:37 +01:00
|
|
|
|
2017-11-04 00:59:22 +01:00
|
|
|
from zerver.decorator import human_users_only, \
|
2017-10-28 00:16:13 +02:00
|
|
|
to_non_negative_int
|
2017-10-12 02:40:42 +02:00
|
|
|
from zerver.lib.bugdown import privacy_clean_markdown
|
2017-10-28 00:07:31 +02:00
|
|
|
from zerver.lib.request import has_request_variables, REQ
|
2015-11-23 17:29:37 +01:00
|
|
|
from zerver.lib.response import json_success
|
|
|
|
from zerver.lib.queue import queue_json_publish
|
|
|
|
from zerver.lib.unminify import SourceMap
|
|
|
|
from zerver.lib.utils import statsd, statsd_key
|
|
|
|
from zerver.lib.validator import check_bool, check_dict
|
2016-06-04 21:47:59 +02:00
|
|
|
from zerver.models import UserProfile
|
|
|
|
|
2015-11-23 17:29:37 +01:00
|
|
|
import subprocess
|
|
|
|
import os
|
|
|
|
|
|
|
|
# Lazily-built SourceMap used to decode minified JavaScript backtraces.
js_source_map = None

# Read the source map information for decoding JavaScript backtraces.
def get_js_source_map() -> Optional[SourceMap]:
    """Return the cached SourceMap for annotating JS stacktraces,
    constructing it on first use.

    Returns None in development and test environments, where assets are
    served unminified and no source map is needed.
    """
    global js_source_map
    if js_source_map:
        return js_source_map
    if not (settings.DEVELOPMENT or settings.TEST_SUITE):
        search_dirs = [
            os.path.join(settings.DEPLOY_ROOT, 'prod-static/source-map'),
            os.path.join(settings.STATIC_ROOT, 'webpack-bundles'),
        ]
        js_source_map = SourceMap(search_dirs)
    return js_source_map
|
2015-11-23 17:29:37 +01:00
|
|
|
|
2017-10-28 00:16:13 +02:00
|
|
|
@human_users_only
@has_request_variables
def report_send_times(request, user_profile,
                      time=REQ(converter=to_non_negative_int),
                      received=REQ(converter=to_non_negative_int, default="(unknown)"),
                      displayed=REQ(converter=to_non_negative_int, default="(unknown)"),
                      locally_echoed=REQ(validator=check_bool, default=False),
                      rendered_content_disparity=REQ(validator=check_bool, default=False)):
    # type: (HttpRequest, UserProfile, int, int, int, bool, bool) -> HttpResponse
    """Record client-reported end-to-end message timings in statsd.

    `received` and `displayed` keep the string sentinel "(unknown)" when
    the client omitted them; the converter only runs on supplied values.
    """
    # Summarize the report in the request's log line.
    log_extra = "[%sms/%sms/%sms/echo:%s/diff:%s]" \
        % (time, received, displayed, locally_echoed, rendered_content_disparity)
    request._log_data["extra"] = log_extra

    realm_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("endtoend.send_time.%s" % (realm_key,), time)
    # The receive/display timings are optional; skip the sentinel.
    if received != "(unknown)":
        statsd.timing("endtoend.receive_time.%s" % (realm_key,), received)
    if displayed != "(unknown)":
        statsd.timing("endtoend.displayed_time.%s" % (realm_key,), displayed)

    # Count the boolean quality signals reported by the client.
    for flag, counter_name in ((locally_echoed, 'locally_echoed'),
                               (rendered_content_disparity, 'render_disparity')):
        if flag:
            statsd.incr(counter_name)

    return json_success()
|
|
|
|
|
2017-10-28 00:16:13 +02:00
|
|
|
@human_users_only
@has_request_variables
def report_narrow_times(request, user_profile,
                        initial_core=REQ(converter=to_non_negative_int),
                        initial_free=REQ(converter=to_non_negative_int),
                        network=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int, int, int) -> HttpResponse
    """Record client-reported narrowing (message filtering) timings in statsd."""
    # Summarize the report in the request's log line.
    request._log_data["extra"] = "[%sms/%sms/%sms]" % (initial_core, initial_free, network)

    realm_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("narrow.initial_core.%s" % (realm_key,), initial_core)
    statsd.timing("narrow.initial_free.%s" % (realm_key,), initial_free)
    statsd.timing("narrow.network.%s" % (realm_key,), network)

    return json_success()
|
|
|
|
|
2017-10-28 00:16:13 +02:00
|
|
|
@human_users_only
@has_request_variables
def report_unnarrow_times(request, user_profile,
                          initial_core=REQ(converter=to_non_negative_int),
                          initial_free=REQ(converter=to_non_negative_int)):
    # type: (HttpRequest, UserProfile, int, int) -> HttpResponse
    """Record client-reported un-narrowing timings in statsd."""
    # Summarize the report in the request's log line.
    request._log_data["extra"] = "[%sms/%sms]" % (initial_core, initial_free)

    realm_key = statsd_key(user_profile.realm.string_id, clean_periods=True)
    statsd.timing("unnarrow.initial_core.%s" % (realm_key,), initial_core)
    statsd.timing("unnarrow.initial_free.%s" % (realm_key,), initial_free)

    return json_success()
|
|
|
|
|
2017-10-28 00:16:13 +02:00
|
|
|
@human_users_only
@has_request_variables
def report_error(request, user_profile, message=REQ(), stacktrace=REQ(),
                 ui_message=REQ(validator=check_bool), user_agent=REQ(),
                 href=REQ(), log=REQ(),
                 more_info=REQ(validator=check_dict([]), default=None)):
    # type: (HttpRequest, UserProfile, Text, Text, bool, Text, Text, Text, Optional[Dict[str, Any]]) -> HttpResponse
    """Accepts an error report and stores in a queue for processing. The
    actual error reports are later handled by do_report_error (below)"""
    if not settings.BROWSER_ERROR_REPORTING:
        # Browser error reporting is disabled; accept and discard the report.
        return json_success()
    more_info = {} if more_info is None else more_info

    # De-minify the JavaScript stacktrace when a source map is available.
    source_map = get_js_source_map()
    if source_map:
        stacktrace = source_map.annotate_stacktrace(stacktrace)

    # Best-effort: record the currently deployed git commit, if any.
    version = None  # type: Optional[Text]
    try:
        version = subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"],
                                          universal_newlines=True)
    except Exception:
        pass

    # Get the IP address of the request: prefer the proxy-provided
    # header, falling back to the connection's peer address.
    user_ip = request.META.get('HTTP_X_REAL_IP')
    if user_ip is None:
        user_ip = request.META['REMOTE_ADDR']

    # For the privacy of our users, we remove any actual text content
    # in draft_content (from drafts rendering exceptions). See the
    # comment on privacy_clean_markdown for more details.
    if more_info.get('draft_content'):
        more_info['draft_content'] = privacy_clean_markdown(more_info['draft_content'])

    report = {
        'host': request.get_host().split(":")[0],
        'ip_address': user_ip,
        'user_email': user_profile.email,
        'user_full_name': user_profile.full_name,
        'user_visible': ui_message,
        'server_path': settings.DEPLOY_ROOT,
        'version': version,
        'user_agent': user_agent,
        'href': href,
        'message': message,
        'stacktrace': stacktrace,
        'log': log,
        'more_info': more_info,
    }
    queue_json_publish('error_reports',
                       {'type': 'browser', 'report': report},
                       lambda x: None, call_consume_in_tests=True)

    return json_success()
|