python: Delete superfluous parens.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Author: Anders Kaseorg <anders@zulip.com>
Committed by: Anders Kaseorg, 2023-09-12 12:10:57 -07:00
parent e0d3176098
commit 28597365da
24 changed files with 56 additions and 64 deletions
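
All 24 files get the same mechanical cleanup: parentheses that add no grouping are deleted, leaving behavior unchanged. Here is a minimal sketch of the three recurring cases, adapted from the hunks below (names like `amount` and `counts` are illustrative, not from the codebase):

    from typing import Dict

    # Case 1: parens around an arithmetic right-hand side; assignment already
    # takes the whole expression, so the parens group nothing.
    amount = (8000 * 150 + 8000 * 50)  # before
    amount = 8000 * 150 + 8000 * 50    # after

    # Case 2: parens around a comparison; == already yields a single value,
    # whether assigned or passed as a keyword argument.
    message_type = "private"
    is_private = (message_type == "private")  # before
    is_private = message_type == "private"    # after

    # Case 3: parens around a type annotation; an annotation is one expression.
    counts: (Dict[str, int]) = {}  # before
    counts: Dict[str, int] = {}    # after

Each before/after pair compiles to identical bytecode, which is why the diff below can delete the parens without touching any test expectations.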


@@ -3320,14 +3320,14 @@ class StripeTest(StripeTestCase):
         )
         [invoice, _] = stripe.Invoice.list(customer=stripe_customer.id)
         invoice_params = {
-            "amount_due": (8000 * 150 + 8000 * 50),
+            "amount_due": 8000 * 150 + 8000 * 50,
             "amount_paid": 0,
             "attempt_count": 0,
             "auto_advance": True,
             "collection_method": "send_invoice",
             "statement_descriptor": "Zulip Cloud Standard",
             "status": "open",
-            "total": (8000 * 150 + 8000 * 50),
+            "total": 8000 * 150 + 8000 * 50,
         }
         for key, value in invoice_params.items():
             self.assertEqual(invoice.get(key), value)
@@ -4833,7 +4833,7 @@ class InvoiceTest(StripeTestCase):
                 "start": datetime_to_timestamp(self.now + timedelta(days=366)),
                 "end": datetime_to_timestamp(self.now + timedelta(days=2 * 365 + 1)),
             },
-            "quantity": (self.seat_count + 1),
+            "quantity": self.seat_count + 1,
         }
         for key, value in line_item_params.items():
             self.assertEqual(item1.get(key), value)


@@ -70,7 +70,7 @@ IGNORED_PHRASES = [
     r"streamname",
     r"user@example\.com",
     # Fragments of larger strings
-    (r"your subscriptions on your Streams page"),
+    r"your subscriptions on your Streams page",
     r"Add global time<br />Everyone sees global times in their own time zone\.",
     r"user",
     r"an unknown operating system",


@@ -75,7 +75,7 @@ def split_for_id_and_class(element: str) -> List[str]:
 def build_id_dict(templates: List[str]) -> Dict[str, List[str]]:
-    template_id_dict: (Dict[str, List[str]]) = defaultdict(list)
+    template_id_dict: Dict[str, List[str]] = defaultdict(list)
     for fn in templates:
         with open(fn) as f:


@@ -253,17 +253,17 @@ def main() -> None:
     parser.add_argument(
         "--include-webhooks",
         action="store_true",
-        help=("Include webhook tests. By default, they are skipped for performance."),
+        help="Include webhook tests. By default, they are skipped for performance.",
     )
     parser.add_argument(
         "--include-transaction-tests",
         action="store_true",
-        help=("Include transaction tests. By default, they are skipped for performance."),
+        help="Include transaction tests. By default, they are skipped for performance.",
     )
     parser.add_argument(
         "--generate-stripe-fixtures",
         action="store_true",
-        help=("Generate Stripe test fixtures by making requests to Stripe test network"),
+        help="Generate Stripe test fixtures by making requests to Stripe test network",
     )
     parser.add_argument("args", nargs="*")
     parser.add_argument(


@@ -247,9 +247,7 @@ def get_mentions_for_message_updates(message_id: int) -> Set[int]:
             message=message_id,
             flags=~UserMessage.flags.historical,
         )
-        .filter(
-            Q(flags__andnz=(UserMessage.flags.mentioned | UserMessage.flags.wildcard_mentioned))
-        )
+        .filter(Q(flags__andnz=UserMessage.flags.mentioned | UserMessage.flags.wildcard_mentioned))
         .values_list("user_profile_id", flat=True)
     )
     return set(mentioned_user_ids)


@@ -515,7 +515,7 @@ def get_service_bot_events(
         if is_stream and user_profile_id in mentioned_user_ids:
             trigger = "mention"
         # Direct message triggers for personal and huddle messages
-        elif (not is_stream) and (user_profile_id in active_user_ids):
+        elif not is_stream and user_profile_id in active_user_ids:
             trigger = NotificationTriggers.DIRECT_MESSAGE
         else:
             return
@@ -966,7 +966,7 @@ def do_send_messages(
             UserMessageNotificationsData.from_user_id_sets(
                 user_id=user_id,
                 flags=user_flags.get(user_id, []),
-                private_message=(message_type == "private"),
+                private_message=message_type == "private",
                 disable_external_notifications=send_request.disable_external_notifications,
                 online_push_user_ids=send_request.online_push_user_ids,
                 dm_mention_push_disabled_user_ids=send_request.dm_mention_push_disabled_user_ids,


@@ -132,11 +132,9 @@ def der_encode_ticket(tkt: Dict[str, Any]) -> bytes:
             der_encode_sequence(  # EncryptedData
                 [
                     der_encode_int32(tkt["encPart"]["etype"]),
-                    (
-                        der_encode_uint32(tkt["encPart"]["kvno"])
-                        if "kvno" in tkt["encPart"]
-                        else None
-                    ),
+                    der_encode_uint32(tkt["encPart"]["kvno"])
+                    if "kvno" in tkt["encPart"]
+                    else None,
                     der_encode_octet_string(base64.b64decode(tkt["encPart"]["cipher"])),
                 ]
             ),


@@ -183,7 +183,7 @@ class APIArgumentsTablePreprocessor(Preprocessor):
                         required=required_block,
                         deprecated=deprecated_block,
                         description=md_engine.convert(description),
-                        type=(data_type),
+                        type=data_type,
                         object_details=object_block,
                     )
                 )


@@ -465,7 +465,7 @@ class FencedBlockPreprocessor(Preprocessor):
                 css_class=self.codehilite_conf["css_class"][0],
                 style=self.codehilite_conf["pygments_style"][0],
                 use_pygments=self.codehilite_conf["use_pygments"][0],
-                lang=(lang or None),
+                lang=lang or None,
                 noclasses=self.codehilite_conf["noclasses"][0],
                 # By default, the Pygments PHP lexers won't highlight
                 # code without a `<?php` marker at the start of the


@@ -158,16 +158,16 @@ class UserMessageNotificationsData:
             mention_push_notify=mention_push_notify,
             topic_wildcard_mention_push_notify=topic_wildcard_mention_push_notify,
             stream_wildcard_mention_push_notify=stream_wildcard_mention_push_notify,
-            online_push_enabled=(user_id in online_push_user_ids),
-            stream_push_notify=(user_id in stream_push_user_ids),
-            stream_email_notify=(user_id in stream_email_user_ids),
-            followed_topic_push_notify=(user_id in followed_topic_push_user_ids),
-            followed_topic_email_notify=(user_id in followed_topic_email_user_ids),
+            online_push_enabled=user_id in online_push_user_ids,
+            stream_push_notify=user_id in stream_push_user_ids,
+            stream_email_notify=user_id in stream_email_user_ids,
+            followed_topic_push_notify=user_id in followed_topic_push_user_ids,
+            followed_topic_email_notify=user_id in followed_topic_email_user_ids,
             topic_wildcard_mention_in_followed_topic_push_notify=topic_wildcard_mention_in_followed_topic_push_notify,
             topic_wildcard_mention_in_followed_topic_email_notify=topic_wildcard_mention_in_followed_topic_email_notify,
             stream_wildcard_mention_in_followed_topic_push_notify=stream_wildcard_mention_in_followed_topic_push_notify,
             stream_wildcard_mention_in_followed_topic_email_notify=stream_wildcard_mention_in_followed_topic_email_notify,
-            sender_is_muted=(user_id in muted_sender_user_ids),
+            sender_is_muted=user_id in muted_sender_user_ids,
             disable_external_notifications=disable_external_notifications,
         )


@@ -1671,12 +1671,12 @@ Output:
         do_set_realm_property(realm, "waiting_period_threshold", 1000, acting_user=None)
         new_member_user.date_joined = timezone_now() - timedelta(
-            days=(realm.waiting_period_threshold - 1)
+            days=realm.waiting_period_threshold - 1
         )
         new_member_user.save()
         member_user.date_joined = timezone_now() - timedelta(
-            days=(realm.waiting_period_threshold + 1)
+            days=realm.waiting_period_threshold + 1
         )
         member_user.save()


@@ -166,7 +166,7 @@ class RateLimitTests(ZulipTestCase):
         start_time = time.time()
         for i in range(6):
-            with mock.patch("time.time", return_value=(start_time + i * 0.1)):
+            with mock.patch("time.time", return_value=start_time + i * 0.1):
                 result = request_func()
             if i < 5:
                 self.assertNotEqual(result.status_code, 429)
@@ -176,7 +176,7 @@ class RateLimitTests(ZulipTestCase):
         # We simulate waiting a second here, rather than force-clearing our history,
         # to make sure the rate-limiting code automatically forgives a user
         # after some time has passed.
-        with mock.patch("time.time", return_value=(start_time + 1.01)):
+        with mock.patch("time.time", return_value=start_time + 1.01):
             result = request_func()
         self.assertNotEqual(result.status_code, 429)


@@ -372,7 +372,7 @@ class RealmImportExportTest(ExportFile):
         self.export_realm(realm, public_only=True)
         # The attachment row shouldn't have been exported:
-        self.assertEqual((read_json("attachment.json")["zerver_attachment"]), [])
+        self.assertEqual(read_json("attachment.json")["zerver_attachment"], [])
         # Aside of the attachment row, we also need to verify that the file itself
         # isn't included.


@@ -895,7 +895,7 @@ class InviteUserTest(InviteUserBase):
         hamlet = self.example_user("hamlet")
         hamlet.date_joined = timezone_now() - datetime.timedelta(
-            days=(realm.waiting_period_threshold - 1)
+            days=realm.waiting_period_threshold - 1
         )
         email = "issac-test@zulip.com"


@@ -133,7 +133,7 @@ class OpenGraphTest(ZulipTestCase):
         self.check_title_and_description(
             "/help/",
             "Zulip help center",
-            [("Welcome to the Zulip")],
+            ["Welcome to the Zulip"],
             [],
         )


@@ -518,7 +518,7 @@ class WorkerTest(ZulipTestCase):
             self.assertEqual(mock_mirror_email.call_count, 3)
 
             # After some time passes, emails get accepted again:
-            with patch("time.time", return_value=(start_time + 11.0)):
+            with patch("time.time", return_value=start_time + 11.0):
                 fake_client.enqueue("email_mirror", data[0])
                 worker.start()
                 self.assertEqual(mock_mirror_email.call_count, 4)


@@ -98,28 +98,28 @@ class RateLimiterBackendBase(ZulipTestCase, ABC):
         start_time = time.time()
         for i in range(3):
-            with mock.patch("time.time", return_value=(start_time + i * 0.1)):
+            with mock.patch("time.time", return_value=start_time + i * 0.1):
                 self.make_request(obj, expect_ratelimited=False)
 
-        with mock.patch("time.time", return_value=(start_time + 0.4)):
+        with mock.patch("time.time", return_value=start_time + 0.4):
             self.make_request(obj, expect_ratelimited=True)
 
-        with mock.patch("time.time", return_value=(start_time + 2.01)):
+        with mock.patch("time.time", return_value=start_time + 2.01):
             self.make_request(obj, expect_ratelimited=False)
 
     def test_clear_history(self) -> None:
         obj = self.create_object("test", [(2, 3)])
         start_time = time.time()
         for i in range(3):
-            with mock.patch("time.time", return_value=(start_time + i * 0.1)):
+            with mock.patch("time.time", return_value=start_time + i * 0.1):
                 self.make_request(obj, expect_ratelimited=False)
-        with mock.patch("time.time", return_value=(start_time + 0.4)):
+        with mock.patch("time.time", return_value=start_time + 0.4):
             self.make_request(obj, expect_ratelimited=True)
 
         obj.clear_history()
         self.requests_record[obj.key()] = []
         for i in range(3):
-            with mock.patch("time.time", return_value=(start_time + i * 0.1)):
+            with mock.patch("time.time", return_value=start_time + i * 0.1):
                 self.make_request(obj, expect_ratelimited=False)
 
     def test_block_unblock_access(self) -> None:
@@ -127,11 +127,11 @@ class RateLimiterBackendBase(ZulipTestCase, ABC):
         start_time = time.time()
 
         obj.block_access(1)
-        with mock.patch("time.time", return_value=(start_time)):
+        with mock.patch("time.time", return_value=start_time):
             self.make_request(obj, expect_ratelimited=True, verify_api_calls_left=False)
 
         obj.unblock_access()
-        with mock.patch("time.time", return_value=(start_time)):
+        with mock.patch("time.time", return_value=start_time):
             self.make_request(obj, expect_ratelimited=False, verify_api_calls_left=False)
 
     def test_api_calls_left(self) -> None:
@@ -139,15 +139,15 @@ class RateLimiterBackendBase(ZulipTestCase, ABC):
         start_time = time.time()
 
         # Check the edge case when no requests have been made yet.
-        with mock.patch("time.time", return_value=(start_time)):
+        with mock.patch("time.time", return_value=start_time):
             self.verify_api_calls_left(obj)
 
-        with mock.patch("time.time", return_value=(start_time)):
+        with mock.patch("time.time", return_value=start_time):
             self.make_request(obj)
 
         # Check the correct default values again, after the reset has happened on the first rule,
         # but not the other.
-        with mock.patch("time.time", return_value=(start_time + 2.1)):
+        with mock.patch("time.time", return_value=start_time + 2.1):
             self.make_request(obj)
@@ -158,7 +158,7 @@ class RedisRateLimiterBackendTest(RateLimiterBackendBase):
         self, history: List[float], max_window: int, max_calls: int, now: float
     ) -> Tuple[int, float]:
         latest_timestamp = history[-1]
-        relevant_requests = [t for t in history if (t >= now - max_window)]
+        relevant_requests = [t for t in history if t >= now - max_window]
         relevant_requests_amount = len(relevant_requests)
 
         return max_calls - relevant_requests_amount, latest_timestamp + max_window - now
@@ -211,10 +211,10 @@ class TornadoInMemoryRateLimiterBackendTest(RateLimiterBackendBase):
         start_time = time.time()
 
         obj.block_access(1)
-        with mock.patch("time.time", return_value=(start_time)):
+        with mock.patch("time.time", return_value=start_time):
            self.make_request(obj, expect_ratelimited=True, verify_api_calls_left=False)
 
-        with mock.patch("time.time", return_value=(start_time + 1.01)):
+        with mock.patch("time.time", return_value=start_time + 1.01):
             self.make_request(obj, expect_ratelimited=False, verify_api_calls_left=False)


@@ -1281,9 +1281,7 @@ class UserGroupAPITestCase(UserGroupTestCase):
         result = self.client_post(f"/json/user_groups/{leadership_group.id}/subgroups", info=params)
         self.assert_json_error(
             result,
-            (
-                "User group {user_group_id} is already a subgroup of one of the passed subgroups."
-            ).format(user_group_id=leadership_group.id),
+            f"User group {leadership_group.id} is already a subgroup of one of the passed subgroups.",
         )
         self.assert_subgroup_membership(support_group, [leadership_group])
@@ -1296,9 +1294,7 @@ class UserGroupAPITestCase(UserGroupTestCase):
         result = self.client_post(f"/json/user_groups/{leadership_group.id}/subgroups", info=params)
         self.assert_json_error(
             result,
-            (
-                "User group {user_group_id} is already a subgroup of one of the passed subgroups."
-            ).format(user_group_id=leadership_group.id),
+            f"User group {leadership_group.id} is already a subgroup of one of the passed subgroups.",
         )
         self.assert_subgroup_membership(test_group, [support_group])


@@ -1340,7 +1340,7 @@ def process_message_update_event(
         user_notifications_data = UserMessageNotificationsData.from_user_id_sets(
             user_id=user_profile_id,
             flags=flags,
-            private_message=(stream_name is None),
+            private_message=stream_name is None,
             disable_external_notifications=disable_external_notifications,
             online_push_user_ids=online_push_user_ids,
             dm_mention_push_disabled_user_ids=dm_mention_push_disabled_user_ids,
@@ -1361,9 +1361,9 @@ def process_message_update_event(
             user_notifications_data=user_notifications_data,
             message_id=message_id,
             acting_user_id=acting_user_id,
-            private_message=(stream_name is None),
-            presence_idle=(user_profile_id in presence_idle_user_ids),
-            prior_mentioned=(user_profile_id in prior_mention_user_ids),
+            private_message=stream_name is None,
+            presence_idle=user_profile_id in presence_idle_user_ids,
+            prior_mentioned=user_profile_id in prior_mention_user_ids,
         )
         for client in get_client_descriptors_for_user(user_profile_id):


@@ -535,7 +535,7 @@ def update_realm_user_settings_defaults(
     check_settings_values(notification_sound, email_notifications_batching_period_seconds)
 
     realm_user_default = RealmUserDefault.objects.get(realm=user_profile.realm)
-    request_settings = {k: v for k, v in locals().items() if (k in RealmUserDefault.property_types)}
+    request_settings = {k: v for k, v in locals().items() if k in RealmUserDefault.property_types}
     for k, v in request_settings.items():
         if v is not None and getattr(realm_user_default, k) != v:
             do_set_realm_user_default_setting(realm_user_default, k, v, acting_user=user_profile)


@@ -742,7 +742,7 @@ def send_confirm_registration_email(
         from_address=FromAddress.tokenized_no_reply_address(),
         language=get_language() if request is not None else None,
         context={
-            "create_realm": (realm is None),
+            "create_realm": realm is None,
             "activate_url": activation_url,
             "corporate_enabled": settings.CORPORATE_ENABLED,
         },


@@ -54,7 +54,7 @@ def sentry_tunnel(
         item_header = orjson.loads(item_header_line.decode("utf-8"))
         length = item_header.get("length")
         if length is None:
-            item_body, envelope_items = ([*rest.split(b"\n", 1), b""])[:2]
+            item_body, envelope_items = [*rest.split(b"\n", 1), b""][:2]
         else:
             item_body, envelope_items = rest[0:length], rest[length:]
         if item_header.get("type") in ("transaction", "event"):


@@ -42,7 +42,7 @@ class LibratoWebhookParser:
     def parse_violation(self, violation: Dict[str, Any]) -> Tuple[str, str]:
         metric_name = violation["metric"]
-        recorded_at = datetime.fromtimestamp((violation["recorded_at"]), tz=timezone.utc).strftime(
+        recorded_at = datetime.fromtimestamp(violation["recorded_at"], tz=timezone.utc).strftime(
             "%Y-%m-%d %H:%M:%S"
         )
         return metric_name, recorded_at
@@ -98,7 +98,7 @@ class LibratoWebhookHandler(LibratoWebhookParser):
     def handle_alert_clear_message(self) -> str:
         alert_clear_template = "Alert [alert_name]({alert_url}) has cleared at {trigger_time} UTC!"
         trigger_time = datetime.fromtimestamp(
-            (self.payload["trigger_time"]), tz=timezone.utc
+            self.payload["trigger_time"], tz=timezone.utc
         ).strftime("%Y-%m-%d %H:%M:%S")
         alert_id, alert_name, alert_url, alert_runbook_url = self.parse_alert()
         content = alert_clear_template.format(


@@ -670,7 +670,7 @@ class Command(BaseCommand):
         subscriptions_to_add: List[Subscription] = []
         event_time = timezone_now()
-        all_subscription_logs: (List[RealmAuditLog]) = []
+        all_subscription_logs: List[RealmAuditLog] = []
 
         i = 0
         for profile, recipient in subscriptions_list: