request: Move log_data from HttpRequest to ZulipRequestNotes.

PIG208 2021-07-09 16:06:04 +08:00 committed by Tim Abbott
parent 75cde8ebcf
commit da6e5ddcae
9 changed files with 89 additions and 34 deletions
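The refactor replaces the ad-hoc `request._log_data` attribute with a `log_data` field on a per-request `ZulipRequestNotes` object, looked up through `get_request_notes(request)` and stored in `request_notes_map`. Based only on the imports and call sites visible in this diff, here is a minimal sketch of what zerver/lib/request.py plausibly provides; the dataclass form, the WeakKeyDictionary storage, and the lazy initialization are assumptions, not taken from this commit:

# Sketch only: assumed shape of the notes plumbing used throughout this diff.
from dataclasses import dataclass
from typing import Any, MutableMapping, Optional
from weakref import WeakKeyDictionary

from django.http import HttpRequest


@dataclass
class ZulipRequestNotes:
    # Optional so that request paths which never log stay uninitialized;
    # consumers assert `log_data is not None` before use, as seen below.
    log_data: Optional[MutableMapping[str, Any]] = None


# Maps each live HttpRequest to its notes; a WeakKeyDictionary (an assumption
# here) lets the entry disappear when the request object is garbage-collected.
request_notes_map: MutableMapping[HttpRequest, ZulipRequestNotes] = WeakKeyDictionary()


def get_request_notes(request: HttpRequest) -> ZulipRequestNotes:
    # Lazily create the notes object on first access for a given request.
    if request not in request_notes_map:
        request_notes_map[request] = ZulipRequestNotes()
    return request_notes_map[request]

Call sites below then read `log_data = get_request_notes(request).log_data` and `assert log_data is not None`, since it is the LogRequests middleware, not the dataclass default, that populates the dict.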

View File

@@ -43,6 +43,7 @@ from zerver.lib.avatar import avatar_url
 from zerver.lib.cache import get_cache_backend
 from zerver.lib.db import Params, ParamsT, Query, TimeTrackingCursor
 from zerver.lib.integrations import WEBHOOK_INTEGRATIONS
+from zerver.lib.request import ZulipRequestNotes, request_notes_map
 from zerver.lib.upload import LocalUploadBackend, S3UploadBackend
 from zerver.models import (
     Client,
@@ -320,6 +321,10 @@ class HostRequestMock(HttpRequest):
         self.content_type = ""
         self.client_name = ""
+        request_notes_map[self] = ZulipRequestNotes(
+            log_data={},
+        )

     @property
     def body(self) -> bytes:
         return super().body
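Why the test mock pre-seeds `log_data={}` rather than leaving the default `None`: tests that call views directly never run the LogRequests middleware that normally initializes it, so the views' `assert log_data is not None` checks would otherwise fail. A hypothetical snippet (the "[42]" value is illustrative):

request = HostRequestMock()
log_data = get_request_notes(request).log_data
assert log_data is not None  # seeded to {} by HostRequestMock above
log_data["extra"] = "[42]"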

View File

@@ -38,7 +38,7 @@ from zerver.lib.exceptions import ErrorCode, JsonableError, MissingAuthenticationError
 from zerver.lib.html_to_text import get_content_description
 from zerver.lib.markdown import get_markdown_requests, get_markdown_time
 from zerver.lib.rate_limiter import RateLimitResult
-from zerver.lib.request import set_request, unset_request
+from zerver.lib.request import get_request_notes, set_request, unset_request
 from zerver.lib.response import json_response, json_response_from_error, json_unauthorized
 from zerver.lib.subdomains import get_subdomain
 from zerver.lib.types import ViewFuncT
@@ -61,7 +61,9 @@ def record_request_stop_data(log_data: MutableMapping[str, Any]) -> None:
 def async_request_timer_stop(request: HttpRequest) -> None:
-    record_request_stop_data(request._log_data)
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    record_request_stop_data(log_data)


 def record_request_restart_data(log_data: MutableMapping[str, Any]) -> None:
@@ -75,11 +77,13 @@ def record_request_restart_data(log_data: MutableMapping[str, Any]) -> None:
 def async_request_timer_restart(request: HttpRequest) -> None:
-    if "time_restarted" in request._log_data:
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    if "time_restarted" in log_data:
         # Don't destroy data when being called from
         # finish_current_handler
         return
-    record_request_restart_data(request._log_data)
+    record_request_restart_data(log_data)


 def record_request_start_data(log_data: MutableMapping[str, Any]) -> None:
@@ -337,22 +341,28 @@ class LogRequests(MiddlewareMixin):
     # method here too
     def process_request(self, request: HttpRequest) -> None:
         maybe_tracemalloc_listen()
-        if hasattr(request, "_log_data"):
+        request_notes = get_request_notes(request)
+        if request_notes.log_data is not None:
             # Sanity check to ensure this is being called from the
             # Tornado code path that returns responses asynchronously.
             assert getattr(request, "saved_response", False)
-            # Avoid re-initializing request._log_data if it's already there.
+            # Avoid re-initializing request_notes.log_data if it's already there.
             return
         request.client_name, request.client_version = parse_client(request)
-        request._log_data = {}
-        record_request_start_data(request._log_data)
+        request_notes.log_data = {}
+        record_request_start_data(request_notes.log_data)

     def process_view(
-        self, request: HttpRequest, view_func: ViewFuncT, args: List[str], kwargs: Dict[str, Any]
+        self,
+        request: HttpRequest,
+        view_func: ViewFuncT,
+        args: List[str],
+        kwargs: Dict[str, Any],
     ) -> None:
+        request_notes = get_request_notes(request)
         if hasattr(request, "saved_response"):
             # The below logging adjustments are unnecessary (because
             # we've already imported everything) and incorrect
@@ -364,10 +374,13 @@ class LogRequests(MiddlewareMixin):
         # time (i.e. the time between receiving the request and
         # figuring out which view function to call, which is primarily
         # importing modules on the first start)
-        request._log_data["startup_time_delta"] = time.time() - request._log_data["time_started"]
+        assert request_notes.log_data is not None
+        request_notes.log_data["startup_time_delta"] = (
+            time.time() - request_notes.log_data["time_started"]
+        )
         # And then completely reset our tracking to only cover work
         # done as part of this request
-        record_request_start_data(request._log_data)
+        record_request_start_data(request_notes.log_data)

     def process_response(
         self, request: HttpRequest, response: StreamingHttpResponse
@@ -400,8 +413,10 @@ class LogRequests(MiddlewareMixin):
             content = response.content
             content_iter = None

+        request_notes = get_request_notes(request)
+        assert request_notes.log_data is not None
         write_log_line(
-            request._log_data,
+            request_notes.log_data,
             request.path,
             request.method,
             remote_ip,

View File

@@ -44,8 +44,9 @@ def finish_handler(
 ) -> None:
     err_msg = f"Got error finishing handler for queue {event_queue_id}"
     try:
-        # We import async_request_timer_restart during runtime
-        # to avoid cyclic dependency with zerver.lib.request
+        # We do the import during runtime to avoid cyclic dependency
+        # with zerver.lib.request
+        from zerver.lib.request import get_request_notes
         from zerver.middleware import async_request_timer_restart

         # We call async_request_timer_restart here in case we are
@@ -54,10 +55,12 @@ def finish_handler(
         handler = get_handler_by_id(handler_id)
         request = handler._request
         async_request_timer_restart(request)
+        log_data = get_request_notes(request).log_data
+        assert log_data is not None
         if len(contents) != 1:
-            request._log_data["extra"] = f"[{event_queue_id}/1]"
+            log_data["extra"] = f"[{event_queue_id}/1]"
         else:
-            request._log_data["extra"] = "[{}/1/{}]".format(event_queue_id, contents[0]["type"])
+            log_data["extra"] = "[{}/1/{}]".format(event_queue_id, contents[0]["type"])

         handler.zulip_finish(
             dict(result="success", msg="", events=contents, queue_id=event_queue_id),
@@ -222,13 +225,16 @@ class AsyncDjangoHandler(tornado.web.RequestHandler, base.BaseHandler):
         # HttpResponse with all Django middleware run.
         request = self.convert_tornado_request_to_django_request()
+        # We import get_request_notes during runtime to avoid
+        # cyclic import
+        from zerver.lib.request import get_request_notes

+        request_notes = get_request_notes(request)
+        old_request_notes = get_request_notes(old_request)
         # Add to this new HttpRequest logging data from the processing of
         # the original request; we will need these for logging.
         #
         # TODO: Design a cleaner way to manage these attributes,
         # perhaps via creating a ZulipHttpRequest class that contains
         # these attributes with a copy method.
-        request._log_data = old_request._log_data
+        request_notes.log_data = old_request_notes.log_data
         if hasattr(request, "_rate_limit"):
             request._rate_limit = old_request._rate_limit
         if hasattr(request, "_requestor_for_logs"):

View File

@@ -7,6 +7,7 @@ from django.utils.translation import gettext as _
 from zerver.decorator import REQ, has_request_variables, internal_notify_view, process_client
 from zerver.lib.exceptions import JsonableError
+from zerver.lib.request import get_request_notes
 from zerver.lib.response import json_success
 from zerver.lib.validator import (
     check_bool,
@@ -36,7 +37,9 @@ def cleanup_event_queue(
         raise BadEventQueueIdError(queue_id)
     if user_profile.id != client.user_profile_id:
         raise JsonableError(_("You are not authorized to access this queue"))
-    request._log_data["extra"] = f"[{queue_id}]"
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = f"[{queue_id}]"
     client.cleanup()
     return json_success()
@@ -145,7 +148,9 @@ def get_events_backend(
     result = fetch_events(events_query)
     if "extra_log_data" in result:
-        request._log_data["extra"] = result["extra_log_data"]
+        log_data = get_request_notes(request).log_data
+        assert log_data is not None
+        log_data["extra"] = result["extra_log_data"]

     if result["type"] == "async":
         # Mark this response with .asynchronous; this will result in

View File

@@ -14,6 +14,7 @@ from zerver.forms import ToSForm
 from zerver.lib.actions import do_change_tos_version, realm_user_count
 from zerver.lib.compatibility import is_outdated_desktop_app, is_unsupported_browser
 from zerver.lib.home import build_page_params_for_home_page_load, get_user_permission_info
+from zerver.lib.request import get_request_notes
 from zerver.lib.streams import access_stream_by_name
 from zerver.lib.subdomains import get_subdomain
 from zerver.lib.utils import statsd
@@ -189,7 +190,9 @@ def home_real(request: HttpRequest) -> HttpResponse:
         needs_tutorial=needs_tutorial,
     )
-    request._log_data["extra"] = "[{}]".format(queue_id)
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = "[{}]".format(queue_id)

     csp_nonce = secrets.token_hex(24)

View File

@@ -12,6 +12,7 @@ from zerver.lib.actions import check_update_message, do_delete_messages
 from zerver.lib.exceptions import JsonableError
 from zerver.lib.html_diff import highlight_html_differences
 from zerver.lib.message import access_message
+from zerver.lib.request import get_request_notes
 from zerver.lib.response import json_success
 from zerver.lib.timestamp import datetime_to_timestamp
 from zerver.lib.topic import LEGACY_PREV_TOPIC, REQ_topic
@@ -117,7 +118,9 @@ def update_message_backend(
     )

     # Include the number of messages changed in the logs
-    request._log_data["extra"] = f"[{number_changed}]"
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = f"[{number_changed}]"
     return json_success()

View File

@@ -38,6 +38,7 @@ from zerver.lib.addressee import get_user_profiles, get_user_profiles_by_ids
 from zerver.lib.exceptions import ErrorCode, JsonableError, MissingAuthenticationError
 from zerver.lib.message import get_first_visible_message_id, messages_for_ids
 from zerver.lib.narrow import is_web_public_compatible, is_web_public_narrow
+from zerver.lib.request import get_request_notes
 from zerver.lib.response import json_success
 from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
 from zerver.lib.streams import (
@@ -1043,7 +1044,9 @@ def get_messages_backend(
             verbose_operators.append("is:" + term["operand"])
         else:
             verbose_operators.append(term["operator"])
-    request._log_data["extra"] = "[{}]".format(",".join(verbose_operators))
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = "[{}]".format(",".join(verbose_operators))

     sa_conn = get_sqlalchemy_connection()

View File

@@ -10,6 +10,7 @@ from zerver.lib.actions import (
     do_update_message_flags,
 )
 from zerver.lib.exceptions import JsonableError
+from zerver.lib.request import get_request_notes
 from zerver.lib.response import json_success
 from zerver.lib.streams import access_stream_by_id
 from zerver.lib.topic import user_message_exists_for_topic
@@ -35,12 +36,14 @@ def update_message_flags(
     operation: str = REQ("op"),
     flag: str = REQ(),
 ) -> HttpResponse:
+    request_notes = get_request_notes(request)
+    assert request_notes.log_data is not None
     count = do_update_message_flags(user_profile, request.client, operation, flag, messages)
     target_count_str = str(len(messages))
     log_data_str = f"[{operation} {flag}/{target_count_str}] actually {count}"
-    request._log_data["extra"] = log_data_str
+    request_notes.log_data["extra"] = log_data_str

     return json_success({"result": "success", "messages": messages, "msg": ""})
@@ -48,9 +51,11 @@ def update_message_flags(
 @has_request_variables
 def mark_all_as_read(request: HttpRequest, user_profile: UserProfile) -> HttpResponse:
     count = do_mark_all_as_read(user_profile, request.client)
+    request_notes = get_request_notes(request)
     log_data_str = f"[{count} updated]"
-    request._log_data["extra"] = log_data_str
+    assert request_notes.log_data is not None
+    request_notes.log_data["extra"] = log_data_str

     return json_success({"result": "success", "msg": ""})
@@ -63,7 +68,9 @@ def mark_stream_as_read(
     count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id)
     log_data_str = f"[{count} updated]"
-    request._log_data["extra"] = log_data_str
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = log_data_str

     return json_success({"result": "success", "msg": ""})
@@ -90,6 +97,8 @@ def mark_topic_as_read(
     count = do_mark_stream_messages_as_read(user_profile, stream.recipient_id, topic_name)
     log_data_str = f"[{count} updated]"
-    request._log_data["extra"] = log_data_str
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = log_data_str

     return json_success({"result": "success", "msg": ""})

View File

@@ -14,7 +14,7 @@ from zerver.context_processors import get_valid_realm_from_request
 from zerver.decorator import human_users_only
 from zerver.lib.markdown import privacy_clean_markdown
 from zerver.lib.queue import queue_json_publish
-from zerver.lib.request import REQ, has_request_variables
+from zerver.lib.request import REQ, get_request_notes, has_request_variables
 from zerver.lib.response import json_success
 from zerver.lib.storage import static_path
 from zerver.lib.unminify import SourceMap
@@ -54,7 +54,9 @@ def report_send_times(
     if displayed > 0:
         displayed_str = str(displayed)
-    request._log_data[
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data[
         "extra"
     ] = f"[{time}ms/{received_str}ms/{displayed_str}ms/echo:{locally_echoed}/diff:{rendered_content_disparity}]"
@@ -79,7 +81,9 @@ def report_narrow_times(
     initial_free: int = REQ(converter=to_non_negative_int),
     network: int = REQ(converter=to_non_negative_int),
 ) -> HttpResponse:
-    request._log_data["extra"] = f"[{initial_core}ms/{initial_free}ms/{network}ms]"
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = f"[{initial_core}ms/{initial_free}ms/{network}ms]"
     realm = get_valid_realm_from_request(request)
     base_key = statsd_key(realm.string_id, clean_periods=True)
     statsd.timing(f"narrow.initial_core.{base_key}", initial_core)
@@ -95,7 +99,9 @@ def report_unnarrow_times(
     initial_core: int = REQ(converter=to_non_negative_int),
     initial_free: int = REQ(converter=to_non_negative_int),
 ) -> HttpResponse:
-    request._log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]"
+    log_data = get_request_notes(request).log_data
+    assert log_data is not None
+    log_data["extra"] = f"[{initial_core}ms/{initial_free}ms]"
     realm = get_valid_realm_from_request(request)
     base_key = statsd_key(realm.string_id, clean_periods=True)
     statsd.timing(f"unnarrow.initial_core.{base_key}", initial_core)