diff --git a/corporate/views.py b/corporate/views.py
index 57f6e8d33b..fe1628ac81 100644
--- a/corporate/views.py
+++ b/corporate/views.py
@@ -128,8 +128,8 @@ def upgrade(request: HttpRequest, user: UserProfile,
                 schedule, license_management, licenses, stripe_token is not None,
             )
         return json_error(e.message, data={'error_description': e.description})
-    except Exception as e:
-        billing_logger.exception(f"Uncaught exception in billing: {e}")
+    except Exception:
+        billing_logger.exception("Uncaught exception in billing:")
         error_message = BillingError.CONTACT_SUPPORT
         error_description = "uncaught exception during upgrade"
         return json_error(error_message, data={'error_description': error_description})
diff --git a/puppet/zulip/files/postgresql/process_fts_updates b/puppet/zulip/files/postgresql/process_fts_updates
index d79a6bcc1e..3ede1606d3 100755
--- a/puppet/zulip/files/postgresql/process_fts_updates
+++ b/puppet/zulip/files/postgresql/process_fts_updates
@@ -163,7 +163,7 @@ while True:
         retries -= 1
         if retries <= 0:
             raise
-        logger.info(e)
+        logger.info(e.pgerror, exc_info=True)
         logger.info("Sleeping and reconnecting")
         time.sleep(5)
         if conn is not None:
diff --git a/tools/fetch-contributor-data b/tools/fetch-contributor-data
index 327b921e44..2a258a5466 100755
--- a/tools/fetch-contributor-data
+++ b/tools/fetch-contributor-data
@@ -49,11 +49,8 @@ def fetch_contributors(repo_link: str) -> Optional[List[Dict[str, Dict[str, Any]
 
 def write_to_disk(json_data: ContributorsJSON, out_file: str) -> None:
     with open(out_file, 'w') as f:
-        try:
-            f.write(f"{json.dumps(json_data, indent=2, sort_keys=True)}\n")
-        except OSError as e:
-            logger.warning(e)
-            sys.exit(1)
+        json.dump(json_data, f, indent=2, sort_keys=True)
+        f.write("\n")
 
 def update_contributor_data_file() -> None:
     """
diff --git a/tools/semgrep.yml b/tools/semgrep.yml
index 2841b08f74..fbe63de40f 100644
--- a/tools/semgrep.yml
+++ b/tools/semgrep.yml
@@ -59,9 +59,11 @@ rules:
       - pattern-either:
           - pattern: logging.$Y(... % ...)
          - pattern: logging.$Y(... .format(...))
+          - pattern: logging.$Y(f"...")
          - pattern: logger.$Y(... % ...)
          - pattern: logger.$Y(... .format(...))
-      - pattern-where-python: "vars['$Y'] in ['debug', 'info', 'warning', 'error', 'critical']"
+          - pattern: logger.$Y(f"...")
+      - pattern-where-python: "vars['$Y'] in ['debug', 'info', 'warning', 'error', 'critical', 'exception']"
     severity: ERROR
     message: "Pass format arguments to logging (https://docs.python.org/3/howto/logging.html#optimization)"
 
diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py
index 3ce3755027..8ef200060d 100644
--- a/zerver/data_import/import_util.py
+++ b/zerver/data_import/import_util.py
@@ -2,7 +2,6 @@ import logging
 import os
 import random
 import shutil
-import traceback
 from typing import AbstractSet, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, TypeVar
 
 import requests
@@ -586,8 +585,7 @@ def run_parallel_wrapper(f: Callable[[ListJobData], None], full_items: List[List
             try:
                 f(item)
             except Exception:
-                logging.info("Error processing item: %s", item)
-                traceback.print_exc()
+                logging.exception("Error processing item: %s", item)
             count += 1
             if count % 1000 == 0:
                 logging.info("A download thread finished %s items", count)
diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py
index e06ca39475..491eaa3709 100644
--- a/zerver/lib/actions.py
+++ b/zerver/lib/actions.py
@@ -2392,7 +2392,7 @@ def _internal_prep_message(realm: Realm,
         return check_message(sender, get_client("Internal"), addressee,
                              content, realm=realm)
     except JsonableError as e:
-        logging.exception(f"Error queueing internal message by {sender.delivery_email}: {e}")
+        logging.exception("Error queueing internal message by %s: %s", sender.delivery_email, e.msg)
 
     return None
 
diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py
index 5bb48f9a90..779ee17d70 100644
--- a/zerver/lib/bugdown/__init__.py
+++ b/zerver/lib/bugdown/__init__.py
@@ -6,7 +6,6 @@ import logging
 import os
 import re
 import time
-import traceback
 import urllib
 import urllib.parse
 from collections import defaultdict, deque
@@ -424,7 +423,7 @@ def fetch_tweet_data(tweet_id: str) -> Optional[Dict[str, Any]]:
                 # but for now it seems reasonable to log at error
                 # level (so that we get notified), but then cache the
                 # failure to proceed with our usual work
-                bugdown_logger.error(traceback.format_exc())
+                bugdown_logger.exception("Unknown error fetching tweet data")
                 return None
     return res
 
@@ -984,7 +983,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
             # We put this in its own try-except because it requires external
             # connectivity. If Twitter flakes out, we don't want to not-render
             # the entire message; we just want to not show the Twitter preview.
-            bugdown_logger.warning(traceback.format_exc())
+            bugdown_logger.warning("Error building Twitter link", exc_info=True)
             return None
 
     def get_url_data(self, e: Element) -> Optional[Tuple[str, Optional[str]]]:
@@ -2367,9 +2366,11 @@ def do_convert(content: str,
         # NOTE: Don't change this message without also changing the
         # logic in logging_handlers.py or we can create recursive
         # exceptions.
-        exception_message = ('Exception in Markdown parser: %sInput (sanitized) was: %s\n (message %s)'
-                             % (traceback.format_exc(), cleaned, logging_message_id))
-        bugdown_logger.exception(exception_message)
+        bugdown_logger.exception(
+            'Exception in Markdown parser; input (sanitized) was: %s\n (message %s)',
+            cleaned,
+            logging_message_id,
+        )
 
         raise BugdownRenderingException()
     finally:
diff --git a/zerver/lib/email_mirror.py b/zerver/lib/email_mirror.py
index bc8765b32d..0344f106aa 100644
--- a/zerver/lib/email_mirror.py
+++ b/zerver/lib/email_mirror.py
@@ -408,12 +408,11 @@ def process_message(message: EmailMessage, rcpt_to: Optional[str]=None) -> None:
             process_missed_message(to, message)
         else:
             process_stream_message(to, message)
+    except ZulipEmailForwardUserError as e:
+        # TODO: notify sender of error, retry if appropriate.
+        logging.warning(e.args[0])
     except ZulipEmailForwardError as e:
-        if isinstance(e, ZulipEmailForwardUserError):
-            # TODO: notify sender of error, retry if appropriate.
-            logging.warning(str(e))
-        else:
-            log_and_report(message, str(e), to)
+        log_and_report(message, e.args[0], to)
 
 def validate_to_address(rcpt_to: str) -> None:
     if is_missed_message_address(rcpt_to):
diff --git a/zerver/lib/outgoing_webhook.py b/zerver/lib/outgoing_webhook.py
index bfcc7e2ac9..6c74734c59 100644
--- a/zerver/lib/outgoing_webhook.py
+++ b/zerver/lib/outgoing_webhook.py
@@ -332,6 +332,6 @@ def do_rest_call(base_url: str,
         response_message = ("An exception of type *%s* occurred for message `%s`! "
                             "See the Zulip server logs for more information." % (
                                 type(e).__name__, event["command"]))
-        logging.exception(f"Outhook trigger failed:\n {e}")
+        logging.exception("Outhook trigger failed:")
         fail_with_message(event, response_message)
         notify_bot_owner(event, exception=e)
diff --git a/zerver/lib/push_notifications.py b/zerver/lib/push_notifications.py
index 1029eab664..bc255d6b82 100644
--- a/zerver/lib/push_notifications.py
+++ b/zerver/lib/push_notifications.py
@@ -279,8 +279,8 @@ def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, A
                                           priority=priority,
                                           data=data,
                                           retries=10)
-    except OSError as e:
-        logger.warning(str(e))
+    except OSError:
+        logger.warning("Error while pushing to GCM", exc_info=True)
         return
 
     if res and 'success' in res:
diff --git a/zerver/lib/queue.py b/zerver/lib/queue.py
index e5d5489598..56f7f31ec1 100644
--- a/zerver/lib/queue.py
+++ b/zerver/lib/queue.py
@@ -194,9 +194,9 @@ class ExceptionFreeTornadoConnection(pika.adapters.tornado_connection.TornadoCon
             super()._adapter_disconnect()
         except (pika.exceptions.ProbableAuthenticationError,
                 pika.exceptions.ProbableAccessDeniedError,
-                pika.exceptions.IncompatibleProtocolError) as e:
-            logging.warning("Caught exception '%r' in ExceptionFreeTornadoConnection when \
-calling _adapter_disconnect, ignoring", e)
+                pika.exceptions.IncompatibleProtocolError):
+            logging.warning("Caught exception in ExceptionFreeTornadoConnection when \
+calling _adapter_disconnect, ignoring", exc_info=True)
 
 
 class TornadoQueueClient(SimpleQueueClient):
diff --git a/zerver/lib/sessions.py b/zerver/lib/sessions.py
index 63422aadcb..5c4265654d 100644
--- a/zerver/lib/sessions.py
+++ b/zerver/lib/sessions.py
@@ -67,8 +67,8 @@ def get_expirable_session_var(session: Session, var_name: str, default_value: An
     try:
         value, expire_at = (session[var_name]['value'],
                             session[var_name]['expire_at'])
-    except (KeyError, TypeError) as e:
-        logging.warning("get_expirable_session_var: Variable %s: %s", var_name, e)
+    except (KeyError, TypeError):
+        logging.warning("get_expirable_session_var: error getting %s", var_name, exc_info=True)
         return default_value
 
     if timestamp_to_datetime(expire_at) < timezone_now():
diff --git a/zerver/management/commands/sync_ldap_user_data.py b/zerver/management/commands/sync_ldap_user_data.py
index a539b8422e..ad8874b0bf 100644
--- a/zerver/management/commands/sync_ldap_user_data.py
+++ b/zerver/management/commands/sync_ldap_user_data.py
@@ -27,7 +27,7 @@ def sync_ldap_user_data(user_profiles: List[UserProfile], deactivation_protectio
             sync_user_from_ldap(u, logger)
         except ZulipLDAPException as e:
             logger.error("Error attempting to update user %s:", u.delivery_email)
-            logger.error(e)
+            logger.error(e.args[0])
 
     if deactivation_protection:
         if not UserProfile.objects.filter(is_bot=False, is_active=True).exists():
diff --git a/zerver/tests/test_messages.py b/zerver/tests/test_messages.py
index 8928ad3df7..1ee2f3718f 100644
--- a/zerver/tests/test_messages.py
+++ b/zerver/tests/test_messages.py
@@ -538,8 +538,11 @@ class InternalPrepTest(ZulipTestCase):
                 content=bad_content,
             )
 
-        arg = m.call_args_list[0][0][0]
-        self.assertIn('Message must not be empty', arg)
+        m.assert_called_once_with(
+            "Error queueing internal message by %s: %s",
+            "cordelia@zulip.com",
+            "Message must not be empty",
+        )
 
         with mock.patch('logging.exception') as m:
             internal_send_huddle_message(
@@ -549,8 +552,11 @@ class InternalPrepTest(ZulipTestCase):
                 content=bad_content,
             )
 
-        arg = m.call_args_list[0][0][0]
-        self.assertIn('Message must not be empty', arg)
+        m.assert_called_once_with(
+            "Error queueing internal message by %s: %s",
+            "cordelia@zulip.com",
+            "Message must not be empty",
+        )
 
         with mock.patch('logging.exception') as m:
             internal_send_stream_message(
@@ -561,8 +567,11 @@ class InternalPrepTest(ZulipTestCase):
                 stream=stream,
             )
 
-        arg = m.call_args_list[0][0][0]
-        self.assertIn('Message must not be empty', arg)
+        m.assert_called_once_with(
+            "Error queueing internal message by %s: %s",
+            "cordelia@zulip.com",
+            "Message must not be empty",
+        )
 
         with mock.patch('logging.exception') as m:
             internal_send_stream_message_by_name(
@@ -573,8 +582,11 @@ class InternalPrepTest(ZulipTestCase):
                 content=bad_content,
             )
 
-        arg = m.call_args_list[0][0][0]
-        self.assertIn('Message must not be empty', arg)
+        m.assert_called_once_with(
+            "Error queueing internal message by %s: %s",
+            "cordelia@zulip.com",
+            "Message must not be empty",
+        )
 
     def test_error_handling(self) -> None:
         realm = get_realm('zulip')
@@ -606,9 +618,11 @@ class InternalPrepTest(ZulipTestCase):
                 sender=sender,
                 recipient_user=recipient_user,
                 content=content)
-            arg = logging_mock.call_args_list[0][0][0]
-            prefix = "Error queueing internal message by cordelia@zulip.com: You can't send private messages outside of your organization."
-            self.assertTrue(arg.startswith(prefix))
+            logging_mock.assert_called_once_with(
+                "Error queueing internal message by %s: %s",
+                "cordelia@zulip.com",
+                "You can't send private messages outside of your organization.",
+            )
 
     def test_ensure_stream_gets_called(self) -> None:
         realm = get_realm('zulip')
diff --git a/zerver/tests/test_push_notifications.py b/zerver/tests/test_push_notifications.py
index 6255fbc1db..0cc191112e 100644
--- a/zerver/tests/test_push_notifications.py
+++ b/zerver/tests/test_push_notifications.py
@@ -1654,7 +1654,7 @@ class GCMSendTest(PushNotificationTest):
                       mock_gcm: mock.MagicMock) -> None:
         mock_gcm.json_request.side_effect = IOError('error')
         send_android_push_notification_to_user(self.user_profile, {}, {})
-        mock_warn.assert_called_with('error')
+        mock_warn.assert_called_with("Error while pushing to GCM", exc_info=True)
 
     @mock.patch('zerver.lib.push_notifications.logger.warning')
     @mock.patch('zerver.lib.push_notifications.logger.info')
diff --git a/zerver/tests/test_queue_worker.py b/zerver/tests/test_queue_worker.py
index 241093c30d..599fb09f31 100644
--- a/zerver/tests/test_queue_worker.py
+++ b/zerver/tests/test_queue_worker.py
@@ -548,7 +548,8 @@ class WorkerTest(ZulipTestCase):
         with patch('logging.exception') as logging_exception_mock:
             worker.start()
         logging_exception_mock.assert_called_once_with(
-            "Problem handling data on queue unreliable_worker")
+            "Problem handling data on queue %s", "unreliable_worker",
+        )
 
         self.assertEqual(processed, ['good', 'fine', 'back to normal'])
         with open(fn) as f:
@@ -586,7 +587,8 @@ class WorkerTest(ZulipTestCase):
             except AbortLoop:
                 pass
         logging_exception_mock.assert_called_once_with(
-            "Problem handling data on queue unreliable_loopworker")
+            "Problem handling data on queue %s", "unreliable_loopworker",
+        )
 
         self.assertEqual(processed, ['good', 'fine'])
         with open(fn) as f:
diff --git a/zerver/tornado/event_queue.py b/zerver/tornado/event_queue.py
index 6fe5b0de80..edcea98685 100644
--- a/zerver/tornado/event_queue.py
+++ b/zerver/tornado/event_queue.py
@@ -489,14 +489,14 @@ def load_event_queues(port: int) -> None:
     except FileNotFoundError:
         pass
     except ValueError:
-        logging.exception("Tornado %d could not deserialize event queues" % (port,))
+        logging.exception("Tornado %d could not deserialize event queues", port)
     else:
         try:
             clients = {
                 qid: ClientDescriptor.from_dict(client) for (qid, client) in data
             }
         except Exception:
-            logging.exception("Tornado %d could not deserialize event queues" % (port,))
+            logging.exception("Tornado %d could not deserialize event queues", port)
 
     for client in clients.values():
         # Put code for migrations due to event queue data format changes here
diff --git a/zerver/worker/queue_processors.py b/zerver/worker/queue_processors.py
index cd31dffc43..e3a4ee8af0 100644
--- a/zerver/worker/queue_processors.py
+++ b/zerver/worker/queue_processors.py
@@ -148,7 +148,7 @@ def retry_send_email_failures(
             func(worker, data)
         except (smtplib.SMTPServerDisconnected, socket.gaierror, EmailNotDeliveredException):
             def on_failure(event: Dict[str, Any]) -> None:
-                logging.exception(f"Event {event} failed")
+                logging.exception("Event %r failed", event)
 
             retry_event(worker.queue_name, data, on_failure)
 
@@ -256,7 +256,7 @@ class QueueProcessingWorker(ABC):
             check_and_send_restart_signal()
 
     def _log_problem(self) -> None:
-        logging.exception(f"Problem handling data on queue {self.queue_name}")
+        logging.exception("Problem handling data on queue %s", self.queue_name)
 
     def setup(self) -> None:
         self.q = SimpleQueueClient()
diff --git a/zproject/backends.py b/zproject/backends.py
index 13bdffa005..4b22b1f6e7 100644
--- a/zproject/backends.py
+++ b/zproject/backends.py
@@ -1397,7 +1397,7 @@ class SocialAuthMixin(ZulipAuthMixin, ExternalAuthMethod):
             # the flow or the IdP is unreliable and returns a bad http response),
             # don't throw a 500, just send them back to the
             # login page and record the event at the info log level.
-            self.logger.info(f"{e.__class__.__name__}: {str(e)}")
+            self.logger.info("%s: %s", e.__class__.__name__, str(e))
             return None
         except SocialAuthBaseException as e:
             # Other python-social-auth exceptions are likely
@@ -1813,9 +1813,9 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
         try:
             resp = OneLogin_Saml2_Response(settings={}, response=SAMLResponse)
             issuers = resp.get_issuers()
-        except cls.SAMLRESPONSE_PARSING_EXCEPTIONS as e:
+        except cls.SAMLRESPONSE_PARSING_EXCEPTIONS:
             logger = logging.getLogger(f"zulip.auth.{cls.name}")
-            logger.info("Error while parsing SAMLResponse: %s: %s", e.__class__.__name__, e)
+            logger.info("Error while parsing SAMLResponse:", exc_info=True)
             return None
 
         for idp_name, idp_config in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
@@ -1920,9 +1920,9 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth):
 
             # Call the auth_complete method of SocialAuthMixIn
             result = super().auth_complete(*args, **kwargs)
-        except self.SAMLRESPONSE_PARSING_EXCEPTIONS as e:
+        except self.SAMLRESPONSE_PARSING_EXCEPTIONS:
             # These can be raised if SAMLResponse is missing or badly formatted.
-            self.logger.info("/complete/saml/: %s: %s", e.__class__.__name__, e)
+            self.logger.info("/complete/saml/: error while parsing SAMLResponse:", exc_info=True)
             # Fall through to returning None.
         finally:
             if result is None:
diff --git a/zproject/wsgi.py b/zproject/wsgi.py
index 75144d5732..293aa38cec 100644
--- a/zproject/wsgi.py
+++ b/zproject/wsgi.py
@@ -28,7 +28,7 @@ import django
 
 try:
     django.setup()
-except Exception as e:
+except Exception:
     # If /etc/zulip/settings.py contains invalid syntax, Django
     # initialization will fail in django.setup(). In this case, our
     # normal configuration to logs errors to /var/log/zulip/errors.log
@@ -39,7 +39,7 @@ import django
     logging.basicConfig(filename='/var/log/zulip/errors.log', level=logging.INFO,
                         format='%(asctime)s %(levelname)s %(name)s %(message)s')
    logger = logging.getLogger(__name__)
-    logger.exception(e)
+    logger.exception("django.setup() failed:")
     raise
 
 # Because import_module does not correctly handle safe circular imports we
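
The hunks above converge on a small set of logging idioms: pass format arguments to the logging call itself rather than pre-formatting with f-strings or %, and let logging.exception() or exc_info=True attach the traceback instead of interpolating str(e) or traceback.format_exc(). A minimal sketch of those idioms, using hypothetical names (demo_logger, fetch_resource, process) that are not part of the patch:

import logging

demo_logger = logging.getLogger("demo")


def fetch_resource(url: str) -> None:
    # Stand-in for a call that can fail (network, parsing, and so on).
    raise OSError(f"cannot reach {url}")


def process(url: str) -> None:
    try:
        fetch_resource(url)
    except OSError:
        # Recoverable failure: pass the format argument lazily (no f-string
        # or % formatting) and attach the traceback via exc_info=True.
        demo_logger.warning("Error fetching %s", url, exc_info=True)

    try:
        fetch_resource(url)
    except Exception:
        # Unexpected failure: exception() logs at ERROR level and includes
        # the current traceback automatically, so the exception object is
        # not interpolated into the message string.
        demo_logger.exception("Unexpected error while processing %s", url)


if __name__ == "__main__":
    logging.basicConfig(level=logging.WARNING)
    process("https://example.invalid/")

Keeping the arguments out of the message string satisfies the semgrep rule added above and, as a side effect, lets log aggregators group occurrences by the constant message template.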