ruff: Fix B905 `zip()` without an explicit `strict=` parameter.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg 2024-07-11 17:30:30 -07:00 committed by Tim Abbott
parent 1464009fae
commit 3f29bc42b1
19 changed files with 32 additions and 22 deletions
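
For context: since Python 3.10 (PEP 618), zip() takes a strict keyword argument, and ruff's B905 check flags calls that leave it implicit. The call sites shown below all pass strict=False, which spells out the historical behavior of stopping silently at the shortest iterable; strict=True would instead raise ValueError on a length mismatch. A minimal sketch of the difference, using invented lists that are not part of the commit:

# Invented lists for illustration (not from the commit).
letters = ["a", "b", "c"]
numbers = [1, 2]

# strict=False: the unmatched "c" is silently dropped, matching pre-3.10 behavior.
print(list(zip(letters, numbers, strict=False)))  # [('a', 1), ('b', 2)]

# strict=True: the same mismatch raises instead of passing silently.
try:
    list(zip(letters, numbers, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1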

View File

@@ -69,7 +69,7 @@ def generate_time_series_data(
values = [
0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
-for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
+for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
]
if partial_sum:
for i in range(1, length):

View File

@@ -176,7 +176,7 @@ class Command(ZulipBaseCommand):
value=value,
**id_args,
)
-for end_time, value in zip(end_times, values)
+for end_time, value in zip(end_times, values, strict=False)
if value != 0
)

View File

@@ -89,7 +89,7 @@ def get_query_data(query: Composable) -> list[list[Any]]:
def dictfetchall(cursor: CursorWrapper) -> list[dict[str, Any]]:
"""Returns all rows from a cursor as a dict"""
desc = cursor.description
-return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
+return [dict(zip((col[0] for col in desc), row, strict=False)) for row in cursor.fetchall()]
def format_optional_datetime(date: datetime | None, display_none: bool = False) -> str:

View File

@@ -93,7 +93,7 @@ full_real_paths = [f"{config_file_path}/{filename}" for filename in base_files]
full_new_paths = [f"{filename}.tmp" for filename in full_real_paths]
try:
write_updated_configs()
-for old, new in zip(full_real_paths, full_new_paths):
+for old, new in zip(full_real_paths, full_new_paths, strict=False):
if not filecmp.cmp(old, new):
# There are changes; leave .tmp files and exit 0
if "SUPPRESS_SHARDING_NOTICE" not in os.environ:

View File

@@ -167,7 +167,7 @@ def send_stream_messages(
global message_thread_ids
message_thread_ids += message_ids
-for message, message_id in zip(staged_messages, message_ids):
+for message, message_id in zip(staged_messages, message_ids, strict=False):
if message.get("reactions") is not None:
reactions = message["reactions"]
for reaction, user_names in reactions.items():

View File

@@ -1610,7 +1610,7 @@ def update_message_foreign_keys(import_dir: Path, sort_by_date: bool) -> None:
new_id_list = allocate_ids(model_class=Message, count=count)
-for old_id, new_id in zip(old_id_list, new_id_list):
+for old_id, new_id in zip(old_id_list, new_id_list, strict=False):
update_id_map(
table="message",
old_id=old_id,

View File

@@ -15,7 +15,7 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str:
"""Given two hex strings of equal length, return a hex string with
the bitwise xor of the two hex strings."""
assert len(bytes_a) == len(bytes_b)
return "".join(f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip(bytes_a, bytes_b))
return "".join(f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip(bytes_a, bytes_b, strict=False))
def ascii_to_hex(input_string: str) -> str:
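
A quick illustration of xor_hex_strings above, with made-up inputs that are not part of the diff; since the assert guarantees equal-length arguments, strict=False and strict=True behave identically here:

# Invented example values of equal length, as the assert requires.
assert "".join(f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip("ff00", "0f0f", strict=False)) == "f00f"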

View File

@@ -302,7 +302,7 @@ def send_apple_push_notification(
*(apns_context.apns.send_notification(request) for request in requests),
return_exceptions=True,
)
-return zip(devices, results)
+return zip(devices, results, strict=False)
results = apns_context.loop.run_until_complete(send_all_notifications())

View File

@@ -405,7 +405,7 @@ class RedisRateLimiterBackend(RateLimiterBackend):
return True, blocking_ttl
now = time.time()
-for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules):
+for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules, strict=False):
# Check if the nth timestamp is newer than the associated rule. If so,
# it means we've hit our limit for this rule
if timestamp is None:

View File

@@ -333,7 +333,7 @@ def has_request_variables(
view_func_full_name = f"{req_func.__module__}.{req_func.__name__}"
-for name, value in zip(default_param_names, default_param_values):
+for name, value in zip(default_param_names, default_param_values, strict=False):
if isinstance(value, _REQ):
value.func_var_name = name
if value.post_var_name is None:

View File

@@ -1471,7 +1471,7 @@ Output:
self.assert_length(subscribed_streams, len(streams))
-for x, y in zip(subscribed_streams, streams):
+for x, y in zip(subscribed_streams, streams, strict=False):
self.assertEqual(x["name"], y.name)
def resolve_topic_containing_message(

View File

@@ -273,7 +273,7 @@ class Command(makemessages.Command):
return new_strings
def write_translation_strings(self, translation_strings: list[str]) -> None:
-for locale, output_path in zip(self.get_locales(), self.get_output_paths()):
+for locale, output_path in zip(self.get_locales(), self.get_output_paths(), strict=False):
self.stdout.write(f"[frontend] processing locale {locale}")
try:
with open(output_path) as reader:

View File

@@ -1274,7 +1274,9 @@ class MarkdownTest(ZulipTestCase):
self, linkifiers: list[RealmFilter], expected_linkifier_reprs: list[str]
) -> None:
self.assert_length(linkifiers, len(expected_linkifier_reprs))
-for linkifier, expected_linkifier_repr in zip(linkifiers, expected_linkifier_reprs):
+for linkifier, expected_linkifier_repr in zip(
+    linkifiers, expected_linkifier_reprs, strict=False
+):
linkifier.clean()
linkifier.save()
self.assertEqual(repr(linkifier), expected_linkifier_repr)

View File

@@ -92,7 +92,7 @@ class ReactionEmojiTest(ZulipTestCase):
emojis = ["smile", "tada"]
expected_emoji_codes = ["1f642", "1f389"]
-for sender, emoji in zip(senders, emojis):
+for sender, emoji in zip(senders, emojis, strict=False):
reaction_info = {
"emoji_name": emoji,
}
@@ -119,7 +119,9 @@ class ReactionEmojiTest(ZulipTestCase):
# It's important that we preserve the loop order in this
# test, since this is our test to verify that we're
# returning reactions in chronological order.
-for sender, emoji, emoji_code in zip(senders, emojis, expected_emoji_codes)
+for sender, emoji, emoji_code in zip(
+    senders, emojis, expected_emoji_codes, strict=False
+)
]
self.assertEqual(expected_reaction_data, message["reactions"])

View File

@@ -358,7 +358,7 @@ class SlackImporter(ZulipTestCase):
("U22222222", "foreignteam2"),
("U33333333", "foreignteam2"),
]
-for expected, found in zip(expected_users, later_users):
+for expected, found in zip(expected_users, later_users, strict=False):
self.assertEqual(found["id"], expected[0])
self.assertEqual(found["team_domain"], expected[1])
self.assertEqual(found["is_mirror_dummy"], True)

View File

@@ -302,7 +302,9 @@ class TestCreateStreams(ZulipTestCase):
"message_retention_days": -1,
"can_remove_subscribers_group": moderators_system_group,
}
-for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+for (stream_name, stream_description) in zip(
+    stream_names, stream_descriptions, strict=False
+)
],
)
@@ -325,7 +327,9 @@ class TestCreateStreams(ZulipTestCase):
realm,
[
{"name": stream_name, "description": stream_description, "invite_only": True}
-for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+for (stream_name, stream_description) in zip(
+    stream_names, stream_descriptions, strict=False
+)
],
)
@@ -799,7 +803,9 @@ class StreamAdminTest(ZulipTestCase):
stream_descriptions = ["des1", "des2", "des3"]
streams_raw: list[StreamDict] = [
{"name": stream_name, "description": stream_description, "is_web_public": True}
-for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+for (stream_name, stream_description) in zip(
+    stream_names, stream_descriptions, strict=False
+)
]
self.assertFalse(user_profile.can_create_web_public_streams())

View File

@@ -98,7 +98,7 @@ def api_bitbucket2_webhook(
topic_names = get_push_topics(payload)
bodies = get_push_bodies(request, payload)
-for b, t in zip(bodies, topic_names):
+for b, t in zip(bodies, topic_names, strict=False):
check_send_webhook_message(
request, user_profile, t, b, type, unquote_url_parameters=True
)

View File

@@ -135,7 +135,7 @@ class LibratoWebhookHandler(LibratoWebhookParser):
conditions = self.parse_conditions()
violations = self.parse_violations()
content = ""
-for condition, violation in zip(conditions, violations):
+for condition, violation in zip(conditions, violations, strict=False):
content += self.generate_violated_metric_condition(violation, condition)
return content

View File

@@ -23,7 +23,7 @@ class Command(ZulipBaseCommand):
total_count = 0
changed_count = 0
with open(options["dump1"]) as dump1, open(options["dump2"]) as dump2:
-for line1, line2 in zip(dump1, dump2):
+for line1, line2 in zip(dump1, dump2, strict=False):
m1 = orjson.loads(line1)
m2 = orjson.loads(line2)
total_count += 1