refactor: Rename bugdown words to markdown in stats related functions.

This commit is part of a series of commits aimed at renaming bugdown to
markdown.
This commit is contained in:
Mohit Gupta 2020-06-27 02:36:05 +05:30 committed by Tim Abbott
parent 97872591ef
commit 44d68c1840
3 changed files with 41 additions and 41 deletions

View File

@ -2386,26 +2386,26 @@ def do_convert(content: str,
_md_engine.zulip_realm = None _md_engine.zulip_realm = None
_md_engine.zulip_db_data = None _md_engine.zulip_db_data = None
bugdown_time_start = 0.0 markdown_time_start = 0.0
bugdown_total_time = 0.0 markdown_total_time = 0.0
bugdown_total_requests = 0 markdown_total_requests = 0
def get_bugdown_time() -> float: def get_markdown_time() -> float:
return bugdown_total_time return markdown_total_time
def get_bugdown_requests() -> int: def get_markdown_requests() -> int:
return bugdown_total_requests return markdown_total_requests
def bugdown_stats_start() -> None: def markdown_stats_start() -> None:
global bugdown_time_start global markdown_time_start
bugdown_time_start = time.time() markdown_time_start = time.time()
def bugdown_stats_finish() -> None: def markdown_stats_finish() -> None:
global bugdown_total_time global markdown_total_time
global bugdown_total_requests global markdown_total_requests
global bugdown_time_start global markdown_time_start
bugdown_total_requests += 1 markdown_total_requests += 1
bugdown_total_time += (time.time() - bugdown_time_start) markdown_total_time += (time.time() - markdown_time_start)
def convert(content: str, def convert(content: str,
realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None, realm_alert_words_automaton: Optional[ahocorasick.Automaton] = None,
@ -2416,10 +2416,10 @@ def convert(content: str,
mention_data: Optional[MentionData]=None, mention_data: Optional[MentionData]=None,
email_gateway: bool=False, email_gateway: bool=False,
no_previews: bool=False) -> str: no_previews: bool=False) -> str:
bugdown_stats_start() markdown_stats_start()
ret = do_convert(content, realm_alert_words_automaton, ret = do_convert(content, realm_alert_words_automaton,
message, message_realm, sent_by_bot, message, message_realm, sent_by_bot,
translate_emoticons, mention_data, email_gateway, translate_emoticons, mention_data, email_gateway,
no_previews=no_previews) no_previews=no_previews)
bugdown_stats_finish() markdown_stats_finish()
return ret return ret

View File

@ -19,7 +19,7 @@ from zerver.lib.db import reset_queries
from zerver.lib.debug import maybe_tracemalloc_listen from zerver.lib.debug import maybe_tracemalloc_listen
from zerver.lib.exceptions import ErrorCode, JsonableError, RateLimited from zerver.lib.exceptions import ErrorCode, JsonableError, RateLimited
from zerver.lib.html_to_text import get_content_description from zerver.lib.html_to_text import get_content_description
from zerver.lib.markdown import get_bugdown_requests, get_bugdown_time from zerver.lib.markdown import get_markdown_requests, get_markdown_time
from zerver.lib.rate_limiter import RateLimitResult from zerver.lib.rate_limiter import RateLimitResult
from zerver.lib.response import json_error, json_response_from_error from zerver.lib.response import json_error, json_response_from_error
from zerver.lib.subdomains import get_subdomain from zerver.lib.subdomains import get_subdomain
@ -34,8 +34,8 @@ def record_request_stop_data(log_data: MutableMapping[str, Any]) -> None:
log_data['time_stopped'] = time.time() log_data['time_stopped'] = time.time()
log_data['remote_cache_time_stopped'] = get_remote_cache_time() log_data['remote_cache_time_stopped'] = get_remote_cache_time()
log_data['remote_cache_requests_stopped'] = get_remote_cache_requests() log_data['remote_cache_requests_stopped'] = get_remote_cache_requests()
log_data['bugdown_time_stopped'] = get_bugdown_time() log_data['markdown_time_stopped'] = get_markdown_time()
log_data['bugdown_requests_stopped'] = get_bugdown_requests() log_data['markdown_requests_stopped'] = get_markdown_requests()
if settings.PROFILE_ALL_REQUESTS: if settings.PROFILE_ALL_REQUESTS:
log_data["prof"].disable() log_data["prof"].disable()
@ -48,8 +48,8 @@ def record_request_restart_data(log_data: MutableMapping[str, Any]) -> None:
log_data['time_restarted'] = time.time() log_data['time_restarted'] = time.time()
log_data['remote_cache_time_restarted'] = get_remote_cache_time() log_data['remote_cache_time_restarted'] = get_remote_cache_time()
log_data['remote_cache_requests_restarted'] = get_remote_cache_requests() log_data['remote_cache_requests_restarted'] = get_remote_cache_requests()
log_data['bugdown_time_restarted'] = get_bugdown_time() log_data['markdown_time_restarted'] = get_markdown_time()
log_data['bugdown_requests_restarted'] = get_bugdown_requests() log_data['markdown_requests_restarted'] = get_markdown_requests()
def async_request_timer_restart(request: HttpRequest) -> None: def async_request_timer_restart(request: HttpRequest) -> None:
if "time_restarted" in request._log_data: if "time_restarted" in request._log_data:
@ -67,8 +67,8 @@ def record_request_start_data(log_data: MutableMapping[str, Any]) -> None:
log_data['time_started'] = time.time() log_data['time_started'] = time.time()
log_data['remote_cache_time_start'] = get_remote_cache_time() log_data['remote_cache_time_start'] = get_remote_cache_time()
log_data['remote_cache_requests_start'] = get_remote_cache_requests() log_data['remote_cache_requests_start'] = get_remote_cache_requests()
log_data['bugdown_time_start'] = get_bugdown_time() log_data['markdown_time_start'] = get_markdown_time()
log_data['bugdown_requests_start'] = get_bugdown_requests() log_data['markdown_requests_start'] = get_markdown_requests()
def timedelta_ms(timedelta: float) -> float: def timedelta_ms(timedelta: float) -> float:
return timedelta * 1000 return timedelta * 1000
@ -157,23 +157,23 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r
if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005: if 'startup_time_delta' in log_data and log_data["startup_time_delta"] > 0.005:
startup_output = " (+start: {})".format(format_timedelta(log_data["startup_time_delta"])) startup_output = " (+start: {})".format(format_timedelta(log_data["startup_time_delta"]))
bugdown_output = "" markdown_output = ""
if 'bugdown_time_start' in log_data: if 'markdown_time_start' in log_data:
bugdown_time_delta = get_bugdown_time() - log_data['bugdown_time_start'] markdown_time_delta = get_markdown_time() - log_data['markdown_time_start']
bugdown_count_delta = get_bugdown_requests() - log_data['bugdown_requests_start'] markdown_count_delta = get_markdown_requests() - log_data['markdown_requests_start']
if 'bugdown_requests_stopped' in log_data: if 'markdown_requests_stopped' in log_data:
# (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted) # (now - restarted) + (stopped - start) = (now - start) + (stopped - restarted)
bugdown_time_delta += (log_data['bugdown_time_stopped'] - markdown_time_delta += (log_data['markdown_time_stopped'] -
log_data['bugdown_time_restarted']) log_data['markdown_time_restarted'])
bugdown_count_delta += (log_data['bugdown_requests_stopped'] - markdown_count_delta += (log_data['markdown_requests_stopped'] -
log_data['bugdown_requests_restarted']) log_data['markdown_requests_restarted'])
if (bugdown_time_delta > 0.005): if (markdown_time_delta > 0.005):
bugdown_output = f" (md: {format_timedelta(bugdown_time_delta)}/{bugdown_count_delta})" markdown_output = f" (md: {format_timedelta(markdown_time_delta)}/{markdown_count_delta})"
if not suppress_statsd: if not suppress_statsd:
statsd.timing(f"{statsd_path}.markdown.time", timedelta_ms(bugdown_time_delta)) statsd.timing(f"{statsd_path}.markdown.time", timedelta_ms(markdown_time_delta))
statsd.incr(f"{statsd_path}.markdown.count", bugdown_count_delta) statsd.incr(f"{statsd_path}.markdown.count", markdown_count_delta)
# Get the amount of time spent doing database queries # Get the amount of time spent doing database queries
db_time_output = "" db_time_output = ""
@ -193,7 +193,7 @@ def write_log_line(log_data: MutableMapping[str, Any], path: str, method: str, r
else: else:
extra_request_data = "" extra_request_data = ""
logger_client = f"({requestor_for_logs} via {client_name})" logger_client = f"({requestor_for_logs} via {client_name})"
logger_timing = f'{format_timedelta(time_delta):>5}{optional_orig_delta}{remote_cache_output}{bugdown_output}{db_time_output}{startup_output} {path}' logger_timing = f'{format_timedelta(time_delta):>5}{optional_orig_delta}{remote_cache_output}{markdown_output}{db_time_output}{startup_output} {path}'
logger_line = f'{remote_ip:<15} {method:<7} {status_code:3} {logger_timing}{extra_request_data} {logger_client}' logger_line = f'{remote_ip:<15} {method:<7} {status_code:3} {logger_timing}{extra_request_data} {logger_client}'
if (status_code in [200, 304] and method == "GET" and path.startswith("/static")): if (status_code in [200, 304] and method == "GET" and path.startswith("/static")):
logger.debug(logger_line) logger.debug(logger_line)

View File

@ -14,8 +14,8 @@ class SlowQueryTest(ZulipTestCase):
SLOW_QUERY_TIME = 10 SLOW_QUERY_TIME = 10
log_data = {'extra': '[transport=websocket]', log_data = {'extra': '[transport=websocket]',
'time_started': 0, 'time_started': 0,
'bugdown_requests_start': 0, 'markdown_requests_start': 0,
'bugdown_time_start': 0, 'markdown_time_start': 0,
'remote_cache_time_start': 0, 'remote_cache_time_start': 0,
'remote_cache_requests_start': 0} 'remote_cache_requests_start': 0}