mirror of https://github.com/zulip/zulip.git
ruff: Fix SIM118 Use `key in dict` instead of `key in dict.keys()`.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
parent a6442288cf
commit e1ed44907b
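The rule being enforced is simple: a Python dict's `in`/`not in` tests and its iteration already operate on the keys, so wrapping the dict in `.keys()` only builds a redundant view object. A minimal sketch of the before/after shape that ruff's SIM118 flags (the `consumers` name echoes the first hunk below, but its contents are invented for illustration):

    # Flagged by SIM118: .keys() is redundant in a loop or membership test.
    consumers = {"user_activity": 3, "missedmessage_emails": 1}
    for queue_name in consumers.keys():
        print(queue_name)

    # Preferred: iterating a dict (or testing `key in d`) already uses its keys.
    for queue_name in consumers:
        print(queue_name)

    assert ("user_activity" in consumers) == ("user_activity" in consumers.keys())

Both loops print the same queue names; the second form simply avoids creating the extra `dict_keys` view and reads more directly.
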
@@ -53,7 +53,7 @@ for line in output.split("\n"):

 now = int(time.time())

-for queue_name in consumers.keys():
+for queue_name in consumers:
     state_file_path = "/var/lib/nagios_state/check-rabbitmq-consumers-" + queue_name
     state_file_tmp = state_file_path + "-tmp"

@@ -179,7 +179,7 @@ def main() -> None:
     # If the alias was specifically specified in custom_emoji_names, then we can keep just
     # that one, but otherwise there's no particular emoji that is an obvious candidate
     # for the alias so just remove the alias for all relevant emoji.
-    for alias in alias_to_emoji_code.keys():
+    for alias in alias_to_emoji_code:
         if len(alias_to_emoji_code[alias]) > 1:
             for emoji_code in alias_to_emoji_code[alias]:
                 if emoji_code not in CUSTOM_EMOJI_NAME_MAPS:
@@ -356,7 +356,7 @@ def get_service_dicts_for_bots(
                 for service in services
             ]
         elif bot_type == UserProfile.EMBEDDED_BOT:
-            if bot_profile_id in embedded_bot_configs.keys():
+            if bot_profile_id in embedded_bot_configs:
                 bot_config = embedded_bot_configs[bot_profile_id]
                 service_dicts = [
                     {
@@ -910,7 +910,7 @@ def channel_message_to_zerver_message(

         message_id = NEXT_ID("message")

-        if "reactions" in message.keys():
+        if "reactions" in message:
             build_reactions(
                 reaction_list,
                 message["reactions"],
@@ -894,7 +894,7 @@ def check_realm_update(

     if prop == "plan_type":
         assert isinstance(value, int)
-        assert "extra_data" in event.keys()
+        assert "extra_data" in event
         return

     assert "extra_data" not in event.keys()
@@ -1495,7 +1495,7 @@ def check_update_display_settings(
     assert isinstance(setting, setting_type)

     if setting_name == "default_language":
-        assert "language_name" in event.keys()
+        assert "language_name" in event
     else:
         assert "language_name" not in event.keys()

@@ -1516,7 +1516,7 @@ def check_user_settings_update(
     assert isinstance(value, setting_type)

     if setting_name == "default_language":
-        assert "language_name" in event.keys()
+        assert "language_name" in event
     else:
         assert "language_name" not in event.keys()

@@ -1011,7 +1011,7 @@ def apply_event(
             if permission in state:
                 state[permission] = user_profile.has_permission(policy)

-        if event["property"] in policy_permission_dict.keys():
+        if event["property"] in policy_permission_dict:
             if policy_permission_dict[event["property"]] in state:
                 state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
                     event["property"]
@@ -210,7 +210,7 @@ def build_page_params_for_home_page_load(
         no_event_queue=user_profile is None,
     )

-    for field_name in register_ret.keys():
+    for field_name in register_ret:
         page_params[field_name] = register_ret[field_name]

     if narrow_stream is not None:
@@ -77,7 +77,7 @@ def get_next_hotspots(user: UserProfile) -> List[Dict[str, object]]:
     seen_hotspots = frozenset(
         UserHotspot.objects.filter(user=user).values_list("hotspot", flat=True)
     )
-    for hotspot in INTRO_HOTSPOTS.keys():
+    for hotspot in INTRO_HOTSPOTS:
         if hotspot not in seen_hotspots:
             return [
                 {
@@ -1627,7 +1627,7 @@ def parse_message_content_edit_or_delete_limit(
     *,
     setting_name: str,
 ) -> Optional[int]:
-    if isinstance(value, str) and value in special_values_map.keys():
+    if isinstance(value, str) and value in special_values_map:
         return special_values_map[value]
     if isinstance(value, str) or value <= 0:
         raise RequestVariableConversionError(setting_name, value)
@@ -668,7 +668,7 @@ def parse_message_retention_days(
     value: Union[int, str],
     special_values_map: Mapping[str, Optional[int]],
 ) -> Optional[int]:
-    if isinstance(value, str) and value in special_values_map.keys():
+    if isinstance(value, str) and value in special_values_map:
         return special_values_map[value]
     if isinstance(value, str) or value <= 0:
         raise RequestVariableConversionError("message_retention_days", value)
@@ -208,7 +208,7 @@ def create_system_user_groups_for_realm(realm: Realm) -> Dict[int, UserGroup]:
     which is a copy of this function from when we introduced system groups.
     """
     role_system_groups_dict: Dict[int, UserGroup] = {}
-    for role in UserGroup.SYSTEM_USER_GROUP_ROLE_MAP.keys():
+    for role in UserGroup.SYSTEM_USER_GROUP_ROLE_MAP:
         user_group_params = UserGroup.SYSTEM_USER_GROUP_ROLE_MAP[role]
         user_group = UserGroup(
             name=user_group_params["name"],
@@ -17,5 +17,5 @@ class Command(BaseCommand):
             fill_remote_cache(options["cache"])
             return

-        for cache in cache_fillers.keys():
+        for cache in cache_fillers:
             fill_remote_cache(cache)
@@ -56,7 +56,7 @@ def create_role_based_system_groups(
             continue

         role_system_groups_dict = {}
-        for role in SYSTEM_USER_GROUP_ROLE_MAP.keys():
+        for role in SYSTEM_USER_GROUP_ROLE_MAP:
             user_group_params = SYSTEM_USER_GROUP_ROLE_MAP[role]
             user_group = UserGroup(
                 name=user_group_params["name"],
@@ -65,7 +65,7 @@ def create_role_based_system_groups_for_internal_realms(
         return

     role_system_groups_dict = {}
-    for role in SYSTEM_USER_GROUP_ROLE_MAP.keys():
+    for role in SYSTEM_USER_GROUP_ROLE_MAP:
         user_group_params = SYSTEM_USER_GROUP_ROLE_MAP[role]
         user_group = UserGroup(
             name=user_group_params["name"],
@@ -243,7 +243,7 @@ class AuthBackendTest(ZulipTestCase):
         clear_supported_auth_backends_cache()

         # Verify auth fails if the auth backend is disabled for the realm
-        for backend_name in AUTH_BACKEND_NAME_MAP.keys():
+        for backend_name in AUTH_BACKEND_NAME_MAP:
             if isinstance(backend, AUTH_BACKEND_NAME_MAP[backend_name]):
                 break

@@ -231,7 +231,7 @@ class DocPageTest(ZulipTestCase):
             ],
         )

-        for integration in INTEGRATIONS.keys():
+        for integration in INTEGRATIONS:
             url = f"/integrations/doc-html/{integration}"
             self._test(url, "", doc_html_str=True)

@@ -253,9 +253,9 @@ class DocPageTest(ZulipTestCase):
             self._test(url, description, doc_html_str=True)

         # Test category pages
-        for category in CATEGORIES.keys():
+        for category in CATEGORIES:
             url = f"/integrations/{category}"
-            if category in META_CATEGORY.keys():
+            if category in META_CATEGORY:
                 title = f"<title>{CATEGORIES[category]} | Zulip integrations</title>"
                 og_title = f'<meta property="og:title" content="{CATEGORIES[category]} | Zulip integrations" />'
             else:
@@ -1683,11 +1683,11 @@ class EditMessageTest(EditMessageTestCase):
         msg3 = Message.objects.get(id=id3)

         msg1_edit_history = orjson.loads(assert_is_not_none(msg1.edit_history))
-        self.assertTrue("prev_content" in msg1_edit_history[0].keys())
+        self.assertTrue("prev_content" in msg1_edit_history[0])

         for msg in [msg2, msg3]:
             self.assertFalse(
-                "prev_content" in orjson.loads(assert_is_not_none(msg.edit_history))[0].keys()
+                "prev_content" in orjson.loads(assert_is_not_none(msg.edit_history))[0]
             )

         for msg in [msg1, msg2, msg3]:
@@ -403,7 +403,7 @@ class ChangeSettingsTest(ZulipTestCase):
         )

         self.login("hamlet")
-        for setting_name in invalid_values_dict.keys():
+        for setting_name in invalid_values_dict:
             invalid_value = invalid_values_dict.get(setting_name)
             if isinstance(invalid_value, str):
                 invalid_value = orjson.dumps(invalid_value).decode()
@@ -1158,7 +1158,7 @@ def process_message_update_event(
         user_profile_id = user_data["id"]

         user_event = dict(event_template)  # shallow copy, but deep enough for our needs
-        for key in user_data.keys():
+        for key in user_data:
             if key != "id":
                 user_event[key] = user_data[key]

@@ -50,7 +50,7 @@ subject_types: Dict[str, List[List[str]]] = {

 def get_value(_obj: WildValue, key: str) -> str:
     for _key in key.lstrip("!").split("/"):
-        if _key in _obj.keys():
+        if _key in _obj:
             _obj = _obj[_key]
         else:
             return ""
@@ -117,7 +117,7 @@ def api_sonarqube_webhook(
 ) -> HttpResponse:
     project = payload["project"]["name"].tame(check_string)
     branch = None
-    if "branch" in payload.keys():
+    if "branch" in payload:
         branch = payload["branch"].get("name").tame(check_none_or(check_string))
     if branch:
         topic = TOPIC_WITH_BRANCH.format(project, branch)
@@ -667,7 +667,7 @@ class MissedMessageWorker(QueueProcessingWorker):
             else:
                 events_by_recipient[event.user_profile_id] = [entry]

-        for user_profile_id in events_by_recipient.keys():
+        for user_profile_id in events_by_recipient:
             events: List[Dict[str, Any]] = events_by_recipient[user_profile_id]

             logging.info(