ruff: Fix SIM105 Use `contextlib.suppress` instead of try-except-pass.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
Anders Kaseorg 2023-01-17 23:25:49 -05:00 committed by Tim Abbott
parent b0e569f07c
commit ff1971f5ad
22 changed files with 51 additions and 80 deletions

View File

@@ -1401,27 +1401,23 @@ class TestLoggingCountStats(AnalyticsTestCase):
assertInviteCountEquals(4) assertInviteCountEquals(4)
# Test mix of good and malformed invite emails # Test mix of good and malformed invite emails
try: with self.assertRaises(InvitationError):
do_invite_users( do_invite_users(
user, user,
["user3@domain.tld", "malformed"], ["user3@domain.tld", "malformed"],
[stream], [stream],
invite_expires_in_minutes=invite_expires_in_minutes, invite_expires_in_minutes=invite_expires_in_minutes,
) )
except InvitationError:
pass
assertInviteCountEquals(4) assertInviteCountEquals(4)
# Test inviting existing users # Test inviting existing users
try: with self.assertRaises(InvitationError):
do_invite_users( do_invite_users(
user, user,
["first@domain.tld", "user4@domain.tld"], ["first@domain.tld", "user4@domain.tld"],
[stream], [stream],
invite_expires_in_minutes=invite_expires_in_minutes, invite_expires_in_minutes=invite_expires_in_minutes,
) )
except InvitationError:
pass
assertInviteCountEquals(5) assertInviteCountEquals(5)
# Revoking invite should not give you credit # Revoking invite should not give you credit

View File

@@ -1,6 +1,7 @@
import itertools import itertools
import time import time
from collections import defaultdict from collections import defaultdict
from contextlib import suppress
from datetime import datetime, timedelta from datetime import datetime, timedelta
from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union from typing import Callable, Dict, List, Optional, Sequence, Tuple, Union
@@ -241,10 +242,8 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
hours = minutes / 60.0 hours = minutes / 60.0
total_hours += hours total_hours += hours
row["hours"] = str(int(hours)) row["hours"] = str(int(hours))
try: with suppress(Exception):
row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"]) row["hours_per_user"] = "{:.1f}".format(hours / row["dau_count"])
except Exception:
pass
# formatting # formatting
for row in rows: for row in rows:

View File

@@ -1,4 +1,5 @@
import urllib import urllib
from contextlib import suppress
from dataclasses import dataclass from dataclasses import dataclass
from datetime import timedelta from datetime import timedelta
from decimal import Decimal from decimal import Decimal
@@ -285,10 +286,8 @@ def support(
if parse_result.port: if parse_result.port:
hostname = f"{hostname}:{parse_result.port}" hostname = f"{hostname}:{parse_result.port}"
subdomain = get_subdomain_from_hostname(hostname) subdomain = get_subdomain_from_hostname(hostname)
try: with suppress(Realm.DoesNotExist):
realms.add(get_realm(subdomain)) realms.add(get_realm(subdomain))
except Realm.DoesNotExist:
pass
except ValidationError: except ValidationError:
users.update(UserProfile.objects.filter(full_name__iexact=key_word)) users.update(UserProfile.objects.filter(full_name__iexact=key_word))

View File

@@ -1,4 +1,5 @@
import logging import logging
from contextlib import suppress
from typing import Any, Callable, Dict, Union from typing import Any, Callable, Dict, Union
import stripe import stripe
@@ -86,14 +87,12 @@ def handle_checkout_session_completed_event(
session.payment_intent.status = PaymentIntent.PROCESSING session.payment_intent.status = PaymentIntent.PROCESSING
session.payment_intent.last_payment_error = () session.payment_intent.last_payment_error = ()
session.payment_intent.save(update_fields=["status", "last_payment_error"]) session.payment_intent.save(update_fields=["status", "last_payment_error"])
try: with suppress(stripe.error.CardError):
stripe.PaymentIntent.confirm( stripe.PaymentIntent.confirm(
session.payment_intent.stripe_payment_intent_id, session.payment_intent.stripe_payment_intent_id,
payment_method=payment_method, payment_method=payment_method,
off_session=True, off_session=True,
) )
except stripe.error.CardError:
pass
elif session.type in [ elif session.type in [
Session.FREE_TRIAL_UPGRADE_FROM_BILLING_PAGE, Session.FREE_TRIAL_UPGRADE_FROM_BILLING_PAGE,
Session.FREE_TRIAL_UPGRADE_FROM_ONBOARDING_PAGE, Session.FREE_TRIAL_UPGRADE_FROM_ONBOARDING_PAGE,

View File

@@ -2,6 +2,7 @@
# This tools generates /etc/zulip/zulip-secrets.conf # This tools generates /etc/zulip/zulip-secrets.conf
import os import os
import sys import sys
from contextlib import suppress
from typing import Dict, List from typing import Dict, List
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@@ -166,10 +167,8 @@ def generate_secrets(development: bool = False) -> None:
) )
break break
try: with suppress(redis.exceptions.ConnectionError):
get_redis_client().config_set("requirepass", redis_password) get_redis_client().config_set("requirepass", redis_password)
except redis.exceptions.ConnectionError:
pass
add_secret("redis_password", redis_password) add_secret("redis_password", redis_password)

View File

@@ -4,6 +4,7 @@ import html
import os import os
import sys import sys
from collections import defaultdict from collections import defaultdict
from contextlib import suppress
from typing import Any, Dict, List, Set from typing import Any, Dict, List, Set
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
@@ -72,10 +73,8 @@ def create_user_docs() -> None:
fn = "var/url_coverage.txt" # TODO: make path more robust, maybe use json suffix fn = "var/url_coverage.txt" # TODO: make path more robust, maybe use json suffix
out_dir = "var/api_docs" out_dir = "var/api_docs"
try: with suppress(OSError):
os.mkdir(out_dir) os.mkdir(out_dir)
except OSError:
pass
main_page = out_dir + "/index.html" main_page = out_dir + "/index.html"

View File

@@ -303,9 +303,8 @@ def do_update_bot_config_data(bot_profile: UserProfile, config_data: Dict[str, s
def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]: def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]:
user_profile = get_user_profile_by_id(user_profile_id) user_profile = get_user_profile_by_id(user_profile_id)
services = get_bot_services(user_profile_id) services = get_bot_services(user_profile_id)
service_dicts: List[Dict[str, Any]] = []
if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT: if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
service_dicts = [ return [
{ {
"base_url": service.base_url, "base_url": service.base_url,
"interface": service.interface, "interface": service.interface,
@@ -315,7 +314,7 @@ def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]:
] ]
elif user_profile.bot_type == UserProfile.EMBEDDED_BOT: elif user_profile.bot_type == UserProfile.EMBEDDED_BOT:
try: try:
service_dicts = [ return [
{ {
"config_data": get_bot_config(user_profile), "config_data": get_bot_config(user_profile),
"service_name": services[0].name, "service_name": services[0].name,
@@ -323,8 +322,9 @@ def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]:
] ]
# A ConfigError just means that there are no config entries for user_profile. # A ConfigError just means that there are no config entries for user_profile.
except ConfigError: except ConfigError:
pass return []
return service_dicts else:
return []
def get_service_dicts_for_bots( def get_service_dicts_for_bots(

View File

@@ -353,11 +353,10 @@ class ZulipPasswordResetForm(PasswordResetForm):
# The view will handle the RateLimit exception and render an appropriate page # The view will handle the RateLimit exception and render an appropriate page
raise raise
user: Optional[UserProfile] = None
try: try:
user = get_user_by_delivery_email(email, realm) user = get_user_by_delivery_email(email, realm)
except UserProfile.DoesNotExist: except UserProfile.DoesNotExist:
pass user = None
context = { context = {
"email": email, "email": email,

View File

@@ -9,6 +9,7 @@ the level of detail we desire or do comparison with OpenAPI types
easily with the native Python type system. easily with the native Python type system.
""" """
from contextlib import suppress
from dataclasses import dataclass from dataclasses import dataclass
from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple
@@ -206,10 +207,8 @@ class UnionType:
def check_data(self, var_name: str, val: Any) -> None: def check_data(self, var_name: str, val: Any) -> None:
for sub_type in self.sub_types: for sub_type in self.sub_types:
try: with suppress(AssertionError):
check_data(sub_type, var_name, val) check_data(sub_type, var_name, val)
except AssertionError:
pass
# We matched on one of our sub_types, so return # We matched on one of our sub_types, so return
return return

View File

@@ -13,6 +13,7 @@ import os
import shutil import shutil
import subprocess import subprocess
import tempfile import tempfile
from contextlib import suppress
from functools import lru_cache from functools import lru_cache
from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Set, Tuple, TypedDict from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Set, Tuple, TypedDict
@@ -1941,10 +1942,8 @@ def create_soft_link(source: Path, in_progress: bool = True) -> None:
if in_progress: if in_progress:
new_target = in_progress_link new_target = in_progress_link
else: else:
try: with suppress(FileNotFoundError):
os.remove(in_progress_link) os.remove(in_progress_link)
except FileNotFoundError:
pass
new_target = done_link new_target = done_link
overwrite_symlink(source, new_target) overwrite_symlink(source, new_target)

View File

@@ -3,6 +3,7 @@ import hashlib
import logging import logging
import threading import threading
import traceback import traceback
from contextlib import suppress
from datetime import datetime, timedelta, timezone from datetime import datetime, timedelta, timezone
from logging import Logger from logging import Logger
from typing import Optional, Tuple, Union from typing import Optional, Tuple, Union
@@ -238,10 +239,8 @@ class ZulipWebhookFormatter(ZulipFormatter):
else: else:
payload = request.POST["payload"] payload = request.POST["payload"]
try: with suppress(orjson.JSONDecodeError):
payload = orjson.dumps(orjson.loads(payload), option=orjson.OPT_INDENT_2).decode() payload = orjson.dumps(orjson.loads(payload), option=orjson.OPT_INDENT_2).decode()
except orjson.JSONDecodeError:
pass
header_text = "".join( header_text = "".join(
f"{header}: {value}\n" f"{header}: {value}\n"

View File

@@ -1378,7 +1378,6 @@ class CompiledInlineProcessor(markdown.inlinepatterns.InlineProcessor):
class Timestamp(markdown.inlinepatterns.Pattern): class Timestamp(markdown.inlinepatterns.Pattern):
def handleMatch(self, match: Match[str]) -> Optional[Element]: def handleMatch(self, match: Match[str]) -> Optional[Element]:
time_input_string = match.group("time") time_input_string = match.group("time")
timestamp = None
try: try:
timestamp = dateutil.parser.parse(time_input_string, tzinfos=common_timezones) timestamp = dateutil.parser.parse(time_input_string, tzinfos=common_timezones)
except ValueError: except ValueError:
@@ -1387,7 +1386,7 @@ class Timestamp(markdown.inlinepatterns.Pattern):
float(time_input_string), tz=datetime.timezone.utc float(time_input_string), tz=datetime.timezone.utc
) )
except ValueError: except ValueError:
pass timestamp = None
if not timestamp: if not timestamp:
error_element = Element("span") error_element = Element("span")

View File

@@ -1,6 +1,7 @@
import abc import abc
import json import json
import logging import logging
from contextlib import suppress
from time import perf_counter from time import perf_counter
from typing import Any, AnyStr, Dict, Optional from typing import Any, AnyStr, Dict, Optional
@@ -232,12 +233,10 @@ def fail_with_message(event: Dict[str, Any], failure_message: str) -> None:
message_info = event["message"] message_info = event["message"]
content = "Failure! " + failure_message content = "Failure! " + failure_message
response_data = dict(content=content) response_data = dict(content=content)
try:
send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
except StreamDoesNotExistError:
# If the stream has vanished while we were failing, there's no # If the stream has vanished while we were failing, there's no
# reasonable place to report the error. # reasonable place to report the error.
pass with suppress(StreamDoesNotExistError):
send_response_message(bot_id=bot_id, message_info=message_info, response_data=response_data)
def get_message_url(event: Dict[str, Any]) -> str: def get_message_url(event: Dict[str, Any]) -> str:

View File

@@ -3,6 +3,7 @@ import hashlib
import logging import logging
import os import os
import smtplib import smtplib
from contextlib import suppress
from email.headerregistry import Address from email.headerregistry import Address
from email.parser import Parser from email.parser import Parser
from email.policy import default from email.policy import default
@@ -552,7 +553,7 @@ def send_custom_email(
"realm_name": user_profile.realm.name, "realm_name": user_profile.realm.name,
"unsubscribe_link": one_click_unsubscribe_link(user_profile, "marketing"), "unsubscribe_link": one_click_unsubscribe_link(user_profile, "marketing"),
} }
try: with suppress(EmailNotDeliveredError):
send_email( send_email(
email_id, email_id,
to_user_ids=[user_profile.id], to_user_ids=[user_profile.id],
@@ -564,8 +565,6 @@
context=context, context=context,
dry_run=options["dry_run"], dry_run=options["dry_run"],
) )
except EmailNotDeliveredError:
pass
if options["dry_run"]: if options["dry_run"]:
break break

View File

@@ -2,6 +2,7 @@ import datetime
import re import re
import secrets import secrets
import time import time
from contextlib import suppress
from datetime import timedelta from datetime import timedelta
from email.headerregistry import Address from email.headerregistry import Address
from typing import ( from typing import (
@@ -1364,10 +1365,8 @@ def linkifiers_for_realm_remote_cache(realm_id: int) -> List[LinkifierDict]:
def flush_linkifiers(*, instance: RealmFilter, **kwargs: object) -> None: def flush_linkifiers(*, instance: RealmFilter, **kwargs: object) -> None:
realm_id = instance.realm_id realm_id = instance.realm_id
cache_delete(get_linkifiers_cache_key(realm_id)) cache_delete(get_linkifiers_cache_key(realm_id))
try: with suppress(KeyError):
per_request_linkifiers_cache.pop(realm_id) per_request_linkifiers_cache.pop(realm_id)
except KeyError:
pass
post_save.connect(flush_linkifiers, sender=RealmFilter) post_save.connect(flush_linkifiers, sender=RealmFilter)

View File

@@ -4,7 +4,7 @@ import os
import signal import signal
import time import time
from collections import defaultdict from collections import defaultdict
from contextlib import contextmanager from contextlib import contextmanager, suppress
from inspect import isabstract from inspect import isabstract
from typing import Any, Callable, Dict, Iterator, List, Mapping, Optional from typing import Any, Callable, Dict, Iterator, List, Mapping, Optional
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
@@ -620,10 +620,8 @@ class WorkerTest(ZulipTestCase):
fake_client.enqueue("unreliable_worker", {"type": msg}) fake_client.enqueue("unreliable_worker", {"type": msg})
fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_worker.errors") fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_worker.errors")
try: with suppress(FileNotFoundError):
os.remove(fn) os.remove(fn)
except OSError: # nocoverage # error handling for the directory not existing
pass
with simulated_queue_client(fake_client): with simulated_queue_client(fake_client):
worker = UnreliableWorker() worker = UnreliableWorker()
@@ -657,10 +655,8 @@
fake_client.enqueue("unreliable_loopworker", {"type": msg}) fake_client.enqueue("unreliable_loopworker", {"type": msg})
fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_loopworker.errors") fn = os.path.join(settings.QUEUE_ERROR_DIR, "unreliable_loopworker.errors")
try: with suppress(FileNotFoundError):
os.remove(fn) os.remove(fn)
except OSError: # nocoverage # error handling for the directory not existing
pass
with simulated_queue_client(fake_client): with simulated_queue_client(fake_client):
loopworker = UnreliableLoopWorker() loopworker = UnreliableLoopWorker()
@@ -700,10 +696,8 @@
fake_client.enqueue("timeout_worker", {"type": msg}) fake_client.enqueue("timeout_worker", {"type": msg})
fn = os.path.join(settings.QUEUE_ERROR_DIR, "timeout_worker.errors") fn = os.path.join(settings.QUEUE_ERROR_DIR, "timeout_worker.errors")
try: with suppress(FileNotFoundError):
os.remove(fn) os.remove(fn)
except OSError: # nocoverage # error handling for the directory not existing
pass
with simulated_queue_client(fake_client): with simulated_queue_client(fake_client):
worker = TimeoutWorker() worker = TimeoutWorker()

View File

@@ -8,6 +8,7 @@ import time
import traceback import traceback
import uuid import uuid
from collections import deque from collections import deque
from contextlib import suppress
from dataclasses import asdict from dataclasses import asdict
from functools import lru_cache from functools import lru_cache
from typing import ( from typing import (
@@ -640,10 +641,8 @@ async def setup_event_queue(server: tornado.httpserver.HTTPServer, port: int) ->
load_event_queues(port) load_event_queues(port)
autoreload.add_reload_hook(lambda: dump_event_queues(port)) autoreload.add_reload_hook(lambda: dump_event_queues(port))
try: with suppress(OSError):
os.rename(persistent_queue_filename(port), persistent_queue_filename(port, last=True)) os.rename(persistent_queue_filename(port), persistent_queue_filename(port, last=True))
except OSError:
pass
# Set up event queue garbage collection # Set up event queue garbage collection
pc = tornado.ioloop.PeriodicCallback(lambda: gc_event_queues(port), EVENT_QUEUE_GC_FREQ_MSECS) pc = tornado.ioloop.PeriodicCallback(lambda: gc_event_queues(port), EVENT_QUEUE_GC_FREQ_MSECS)

View File

@@ -1,5 +1,6 @@
import logging import logging
import urllib import urllib
from contextlib import suppress
from typing import Any, Dict, List, Optional from typing import Any, Dict, List, Optional
import tornado.web import tornado.web
@@ -147,12 +148,10 @@ class AsyncDjangoHandler(tornado.web.RequestHandler):
self.write(response.content) self.write(response.content)
# Close the connection. # Close the connection.
try:
await self.finish()
except StreamClosedError:
# While writing the response, we might realize that the # While writing the response, we might realize that the
# user already closed the connection; that is fine. # user already closed the connection; that is fine.
pass with suppress(StreamClosedError):
await self.finish()
async def get(self, *args: Any, **kwargs: Any) -> None: async def get(self, *args: Any, **kwargs: Any) -> None:
request = await self.convert_tornado_request_to_django_request() request = await self.convert_tornado_request_to_django_request()

View File

@@ -708,10 +708,8 @@ def redirect_to_deactivation_notice() -> HttpResponse:
def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> None: def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> None:
for key in ("email", "already_registered"): for key in ("email", "already_registered"):
try: if key in request.GET:
context[key] = request.GET[key] context[key] = request.GET[key]
except KeyError:
pass
deactivated_email = request.GET.get("is_deactivated") deactivated_email = request.GET.get("is_deactivated")
if deactivated_email is None: if deactivated_email is None:

View File

@@ -1,6 +1,7 @@
import os import os
import subprocess import subprocess
import urllib import urllib
from contextlib import suppress
from typing import Optional from typing import Optional
import orjson import orjson
@@ -44,10 +45,8 @@
def clear_emails(request: HttpRequest) -> HttpResponse: def clear_emails(request: HttpRequest) -> HttpResponse:
try: with suppress(FileNotFoundError):
os.remove(settings.EMAIL_CONTENT_LOG_PATH) os.remove(settings.EMAIL_CONTENT_LOG_PATH)
except FileNotFoundError: # nocoverage
pass
return redirect(email_page) return redirect(email_page)

View File

@@ -1,4 +1,5 @@
import os import os
from contextlib import suppress
from typing import TYPE_CHECKING, Any, Dict, List, Optional from typing import TYPE_CHECKING, Any, Dict, List, Optional
import orjson import orjson
@@ -84,10 +85,9 @@ def get_fixtures(request: HttpRequest, integration_name: str = REQ()) -> HttpRes
fixture_path = os.path.join(fixtures_dir, fixture) fixture_path = os.path.join(fixtures_dir, fixture)
with open(fixture_path) as f: with open(fixture_path) as f:
body = f.read() body = f.read()
try: # The file extension will be used to determine the type.
with suppress(orjson.JSONDecodeError):
body = orjson.loads(body) body = orjson.loads(body)
except orjson.JSONDecodeError:
pass # The file extension will be used to determine the type.
headers_raw = get_fixture_http_headers( headers_raw = get_fixture_http_headers(
valid_integration_name, "".join(fixture.split(".")[:-1]) valid_integration_name, "".join(fixture.split(".")[:-1])

View File

@@ -1,5 +1,6 @@
import logging import logging
import urllib import urllib
from contextlib import suppress
from typing import Any, Dict, Iterable, List, Optional from typing import Any, Dict, Iterable, List, Optional
from urllib.parse import urlencode from urllib.parse import urlencode
@@ -227,10 +228,8 @@ def accounts_register(
require_ldap_password = False require_ldap_password = False
if from_confirmation: if from_confirmation:
try: with suppress(KeyError):
del request.session["authenticated_full_name"] del request.session["authenticated_full_name"]
except KeyError:
pass
ldap_full_name = None ldap_full_name = None
if settings.POPULATE_PROFILE_VIA_LDAP: if settings.POPULATE_PROFILE_VIA_LDAP: