mirror of https://github.com/zulip/zulip.git
ruff: Fix B905 `zip()` without an explicit `strict=` parameter.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent 1464009fae
commit 3f29bc42b1
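Ruff's B905 check (inherited from flake8-bugbear) flags `zip()` calls that omit an explicit `strict=` argument. Passing `strict=False`, as every call site below does, keeps the historical default of silently stopping at the shortest iterable, whereas `strict=True` (available since Python 3.10) raises `ValueError` on a length mismatch. A minimal sketch of the difference:

```python
a = [1, 2, 3]
b = ["x", "y"]  # one element shorter

# strict=False matches the old implicit behavior: truncate to the shortest input.
assert list(zip(a, b, strict=False)) == [(1, "x"), (2, "y")]

# strict=True turns a silent truncation into a loud error.
try:
    list(zip(a, b, strict=True))
except ValueError:
    print("length mismatch detected")
```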
@@ -69,7 +69,7 @@ def generate_time_series_data(
     values = [
         0 if holiday else int(v + sqrt(v) * noise_scalar * spikiness)
-        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays)
+        for v, noise_scalar, holiday in zip(values_no_noise, noise_scalars, holidays, strict=False)
     ]
     if partial_sum:
         for i in range(1, length):
@@ -176,7 +176,7 @@ class Command(ZulipBaseCommand):
                 value=value,
                 **id_args,
             )
-            for end_time, value in zip(end_times, values)
+            for end_time, value in zip(end_times, values, strict=False)
             if value != 0
         )
@@ -89,7 +89,7 @@ def get_query_data(query: Composable) -> list[list[Any]]:
 def dictfetchall(cursor: CursorWrapper) -> list[dict[str, Any]]:
     """Returns all rows from a cursor as a dict"""
     desc = cursor.description
-    return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
+    return [dict(zip((col[0] for col in desc), row, strict=False)) for row in cursor.fetchall()]


 def format_optional_datetime(date: datetime | None, display_none: bool = False) -> str:
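The `dictfetchall` helper above pairs each row tuple with the column names from `cursor.description`; a cursor always yields one value per described column, so the two iterables match in length. A standalone sketch of the same pairing, with made-up column metadata and rows standing in for real cursor output:

```python
# Hypothetical stand-ins for cursor.description and cursor.fetchall().
desc = [("id",), ("full_name",)]     # first element of each tuple is the column name
rows = [(1, "Iago"), (2, "Hamlet")]  # one value per column

dicts = [dict(zip((col[0] for col in desc), row, strict=False)) for row in rows]
assert dicts == [{"id": 1, "full_name": "Iago"}, {"id": 2, "full_name": "Hamlet"}]
```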
@@ -93,7 +93,7 @@ full_real_paths = [f"{config_file_path}/{filename}" for filename in base_files]
 full_new_paths = [f"{filename}.tmp" for filename in full_real_paths]
 try:
     write_updated_configs()
-    for old, new in zip(full_real_paths, full_new_paths):
+    for old, new in zip(full_real_paths, full_new_paths, strict=False):
         if not filecmp.cmp(old, new):
             # There are changes; leave .tmp files and exit 0
             if "SUPPRESS_SHARDING_NOTICE" not in os.environ:
@@ -167,7 +167,7 @@ def send_stream_messages(
     global message_thread_ids
     message_thread_ids += message_ids

-    for message, message_id in zip(staged_messages, message_ids):
+    for message, message_id in zip(staged_messages, message_ids, strict=False):
         if message.get("reactions") is not None:
             reactions = message["reactions"]
             for reaction, user_names in reactions.items():
@@ -1610,7 +1610,7 @@ def update_message_foreign_keys(import_dir: Path, sort_by_date: bool) -> None:

     new_id_list = allocate_ids(model_class=Message, count=count)

-    for old_id, new_id in zip(old_id_list, new_id_list):
+    for old_id, new_id in zip(old_id_list, new_id_list, strict=False):
         update_id_map(
             table="message",
             old_id=old_id,
@@ -15,7 +15,7 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str:
     """Given two hex strings of equal length, return a hex string with
     the bitwise xor of the two hex strings."""
     assert len(bytes_a) == len(bytes_b)
-    return "".join(f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip(bytes_a, bytes_b))
+    return "".join(f"{int(x, 16) ^ int(y, 16):x}" for x, y in zip(bytes_a, bytes_b, strict=False))


 def ascii_to_hex(input_string: str) -> str:
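Since `xor_hex_strings` asserts that its inputs have equal length, `strict=False` and `strict=True` behave identically here; the xor proceeds hex digit by hex digit. A quick illustration with made-up values:

```python
# 0x0f XOR 0xff == 0xf0, computed one hex digit at a time.
assert xor_hex_strings("0f", "ff") == "f0"
```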
@@ -302,7 +302,7 @@ def send_apple_push_notification(
             *(apns_context.apns.send_notification(request) for request in requests),
             return_exceptions=True,
         )
-        return zip(devices, results)
+        return zip(devices, results, strict=False)

     results = apns_context.loop.run_until_complete(send_all_notifications())
@@ -405,7 +405,7 @@ class RedisRateLimiterBackend(RateLimiterBackend):
             return True, blocking_ttl

         now = time.time()
-        for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules):
+        for timestamp, (range_seconds, num_requests) in zip(rule_timestamps, rules, strict=False):
             # Check if the nth timestamp is newer than the associated rule. If so,
             # it means we've hit our limit for this rule
             if timestamp is None:
@@ -333,7 +333,7 @@ def has_request_variables(

     view_func_full_name = f"{req_func.__module__}.{req_func.__name__}"

-    for name, value in zip(default_param_names, default_param_values):
+    for name, value in zip(default_param_names, default_param_values, strict=False):
         if isinstance(value, _REQ):
             value.func_var_name = name
             if value.post_var_name is None:
@@ -1471,7 +1471,7 @@ Output:

         self.assert_length(subscribed_streams, len(streams))

-        for x, y in zip(subscribed_streams, streams):
+        for x, y in zip(subscribed_streams, streams, strict=False):
             self.assertEqual(x["name"], y.name)

     def resolve_topic_containing_message(
@@ -273,7 +273,7 @@ class Command(makemessages.Command):
         return new_strings

     def write_translation_strings(self, translation_strings: list[str]) -> None:
-        for locale, output_path in zip(self.get_locales(), self.get_output_paths()):
+        for locale, output_path in zip(self.get_locales(), self.get_output_paths(), strict=False):
             self.stdout.write(f"[frontend] processing locale {locale}")
             try:
                 with open(output_path) as reader:
@@ -1274,7 +1274,9 @@ class MarkdownTest(ZulipTestCase):
         self, linkifiers: list[RealmFilter], expected_linkifier_reprs: list[str]
     ) -> None:
         self.assert_length(linkifiers, len(expected_linkifier_reprs))
-        for linkifier, expected_linkifier_repr in zip(linkifiers, expected_linkifier_reprs):
+        for linkifier, expected_linkifier_repr in zip(
+            linkifiers, expected_linkifier_reprs, strict=False
+        ):
             linkifier.clean()
             linkifier.save()
             self.assertEqual(repr(linkifier), expected_linkifier_repr)
@@ -92,7 +92,7 @@ class ReactionEmojiTest(ZulipTestCase):
         emojis = ["smile", "tada"]
         expected_emoji_codes = ["1f642", "1f389"]

-        for sender, emoji in zip(senders, emojis):
+        for sender, emoji in zip(senders, emojis, strict=False):
             reaction_info = {
                 "emoji_name": emoji,
             }
@@ -119,7 +119,9 @@ class ReactionEmojiTest(ZulipTestCase):
             # It's important that we preserve the loop order in this
             # test, since this is our test to verify that we're
             # returning reactions in chronological order.
-            for sender, emoji, emoji_code in zip(senders, emojis, expected_emoji_codes)
+            for sender, emoji, emoji_code in zip(
+                senders, emojis, expected_emoji_codes, strict=False
+            )
         ]
         self.assertEqual(expected_reaction_data, message["reactions"])
@@ -358,7 +358,7 @@ class SlackImporter(ZulipTestCase):
             ("U22222222", "foreignteam2"),
             ("U33333333", "foreignteam2"),
         ]
-        for expected, found in zip(expected_users, later_users):
+        for expected, found in zip(expected_users, later_users, strict=False):
             self.assertEqual(found["id"], expected[0])
             self.assertEqual(found["team_domain"], expected[1])
             self.assertEqual(found["is_mirror_dummy"], True)
@@ -302,7 +302,9 @@ class TestCreateStreams(ZulipTestCase):
                     "message_retention_days": -1,
                     "can_remove_subscribers_group": moderators_system_group,
                 }
-                for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+                for (stream_name, stream_description) in zip(
+                    stream_names, stream_descriptions, strict=False
+                )
             ],
         )
@@ -325,7 +327,9 @@ class TestCreateStreams(ZulipTestCase):
             realm,
             [
                 {"name": stream_name, "description": stream_description, "invite_only": True}
-                for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+                for (stream_name, stream_description) in zip(
+                    stream_names, stream_descriptions, strict=False
+                )
             ],
         )
@@ -799,7 +803,9 @@ class StreamAdminTest(ZulipTestCase):
         stream_descriptions = ["des1", "des2", "des3"]
         streams_raw: list[StreamDict] = [
             {"name": stream_name, "description": stream_description, "is_web_public": True}
-            for (stream_name, stream_description) in zip(stream_names, stream_descriptions)
+            for (stream_name, stream_description) in zip(
+                stream_names, stream_descriptions, strict=False
+            )
         ]

         self.assertFalse(user_profile.can_create_web_public_streams())
@@ -98,7 +98,7 @@ def api_bitbucket2_webhook(
     topic_names = get_push_topics(payload)
     bodies = get_push_bodies(request, payload)

-    for b, t in zip(bodies, topic_names):
+    for b, t in zip(bodies, topic_names, strict=False):
         check_send_webhook_message(
             request, user_profile, t, b, type, unquote_url_parameters=True
         )
@@ -135,7 +135,7 @@ class LibratoWebhookHandler(LibratoWebhookParser):
         conditions = self.parse_conditions()
         violations = self.parse_violations()
         content = ""
-        for condition, violation in zip(conditions, violations):
+        for condition, violation in zip(conditions, violations, strict=False):
             content += self.generate_violated_metric_condition(violation, condition)
         return content
@@ -23,7 +23,7 @@ class Command(ZulipBaseCommand):
         total_count = 0
         changed_count = 0
         with open(options["dump1"]) as dump1, open(options["dump2"]) as dump2:
-            for line1, line2 in zip(dump1, dump2):
+            for line1, line2 in zip(dump1, dump2, strict=False):
                 m1 = orjson.loads(line1)
                 m2 = orjson.loads(line2)
                 total_count += 1