logging: Pass more format arguments to logging.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
Anders Kaseorg 2022-06-02 20:51:16 -07:00 committed by Tim Abbott
parent dede49ad78
commit a7f9c4f958
6 changed files with 27 additions and 28 deletions

View File

@@ -214,11 +214,11 @@ while True:
# Catch up on any historical columns
while True:
rows_updated = update_fts_columns(cursor)
notice = f"process_fts_updates: Processed {rows_updated} rows catching up"
if rows_updated > 0:
logger.info(notice)
else:
logger.debug(notice)
logger.log(
logging.INFO if rows_updated > 0 else logging.DEBUG,
"process_fts_updates: Processed %d rows catching up",
rows_updated,
)
if rows_updated != BATCH_SIZE:
# We're caught up, so proceed to the listening for updates phase.

View File

@@ -86,14 +86,12 @@ def check_html_templates(templates: Iterable[str], all_dups: bool, fix: bool) ->
}
for ids, fns in ignorable_ids_dict.items():
logging.warning(
"Duplicate ID(s) detected :Id '" + ids + "' present at following files:"
)
logging.warning("Duplicate ID(s) detected: ID %r present at following files:", ids)
for fn in fns:
print(fn)
for ids, fns in bad_ids_dict.items():
logging.error("Duplicate ID(s) detected :Id '" + ids + "' present at following files:")
logging.error("Duplicate ID(s) detected: ID %r present at following files:", ids)
for fn in fns:
print(fn)
return bad_ids_dict

View File

@@ -46,12 +46,13 @@ rules:
languages: [python]
patterns:
- pattern-either:
- pattern: $LOGGER.debug($FORMATTED)
- pattern: $LOGGER.info($FORMATTED)
- pattern: $LOGGER.warning($FORMATTED)
- pattern: $LOGGER.error($FORMATTED)
- pattern: $LOGGER.critical($FORMATTED)
- pattern: $LOGGER.exception($FORMATTED)
- pattern: $LOGGER.debug($FORMATTED, ...)
- pattern: $LOGGER.info($FORMATTED, ...)
- pattern: $LOGGER.warning($FORMATTED, ...)
- pattern: $LOGGER.error($FORMATTED, ...)
- pattern: $LOGGER.critical($FORMATTED, ...)
- pattern: $LOGGER.exception($FORMATTED, ...)
- pattern: $LOGGER.log($LEVEL, $FORMATTED, ...)
- metavariable-pattern:
metavariable: $LOGGER
patterns:

View File

@@ -73,7 +73,7 @@ class Command(BaseCommand):
cnt = 0
for queue_name in queues:
if not settings.DEVELOPMENT:
logger.info("launching queue worker thread " + queue_name)
logger.info("launching queue worker thread %s", queue_name)
cnt += 1
td = ThreadedWorker(queue_name, logger)
td.start()
@@ -126,5 +126,5 @@ class ThreadedWorker(threading.Thread):
):
scope.set_tag("queue_worker", self.worker.queue_name)
self.worker.setup()
logging.debug("starting consuming " + self.worker.queue_name)
logging.debug("starting consuming %s", self.worker.queue_name)
self.worker.start()

View File

@@ -189,7 +189,6 @@ class ClientDescriptor:
def finish_current_handler(self) -> bool:
if self.current_handler_id is not None:
err_msg = f"Got error finishing handler for queue {self.event_queue.id}"
try:
finish_handler(
self.current_handler_id,
@@ -198,7 +197,9 @@ class ClientDescriptor:
self.apply_markdown,
)
except Exception:
logging.exception(err_msg, stack_info=True)
logging.exception(
"Got error finishing handler for queue %s", self.event_queue.id, stack_info=True
)
finally:
self.disconnect_handler()
return True

View File

@@ -44,7 +44,6 @@ def handler_stats_string() -> str:
def finish_handler(
handler_id: int, event_queue_id: str, contents: List[Dict[str, Any]], apply_markdown: bool
) -> None:
err_msg = f"Got error finishing handler for queue {event_queue_id}"
try:
# We do the import during runtime to avoid cyclic dependency
# with zerver.lib.request
@@ -70,14 +69,14 @@ def finish_handler(
request,
apply_markdown=apply_markdown,
)
except OSError as e:
if str(e) != "Stream is closed":
logging.exception(err_msg, stack_info=True)
except AssertionError as e:
if str(e) != "Request closed":
logging.exception(err_msg, stack_info=True)
except Exception:
logging.exception(err_msg, stack_info=True)
except Exception as e:
if not (
(isinstance(e, OSError) and str(e) == "Stream is closed")
or (isinstance(e, AssertionError) and str(e) == "Request closed")
):
logging.exception(
"Got error finishing handler for queue %s", event_queue_id, stack_info=True
)
class AsyncDjangoHandler(tornado.web.RequestHandler, base.BaseHandler):