python: Remove unnecessary intermediate lists.

Generated automatically by pyupgrade.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg, 2021-08-02 14:16:44 -07:00; committed by Tim Abbott
parent 7e0d26cd83
commit 3665deb93a
15 changed files with 30 additions and 32 deletions
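The hunks below all follow the same shape: pyupgrade replaces a list literal or list comprehension that exists only to feed set(), sorted(), sum(), or tuple unpacking with the equivalent set literal, set comprehension, or generator expression. A minimal sketch of that pattern, with made-up example data (only the before/after shapes match the diff):

    # Before: each call builds a throwaway intermediate list.
    scopes_old = set(["emoji:read", "users:read"])
    total_old = sum([len(word) for word in ("a", "bb", "ccc")])
    ids_old = sorted([n for n in (3, 1, 2)])

    # After: a set literal, and generator expressions fed straight to the
    # consuming call, so no intermediate list is ever allocated.
    scopes_new = {"emoji:read", "users:read"}
    total_new = sum(len(word) for word in ("a", "bb", "ccc"))
    ids_new = sorted(n for n in (3, 1, 2))

    # The results are identical; only the intermediate allocation goes away.
    assert (scopes_old, total_old, ids_old) == (scopes_new, total_new, ids_new)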

@@ -187,11 +187,11 @@ def initial_upgrade(request: HttpRequest) -> HttpResponse:
         },
         "realm_org_type": user.realm.org_type,
         "sorted_org_types": sorted(
-            [
+            (
                 [org_type_name, org_type]
                 for (org_type_name, org_type) in Realm.ORG_TYPES.items()
                 if not org_type.get("hidden")
-            ],
+            ),
             key=lambda d: d[1]["display_order"],
         ),
     }

@@ -21,13 +21,11 @@ def remove_unused_versions_dir(args: argparse.Namespace) -> None:
     """
    current_version_dir = os.path.join(YARN_CACHE_PATH, CURRENT_VERSION)
    try:
-        dirs_to_purge = set(
-            [
-                os.path.join(YARN_CACHE_PATH, directory)
-                for directory in os.listdir(YARN_CACHE_PATH)
-                if directory != CURRENT_VERSION
-            ]
-        )
+        dirs_to_purge = {
+            os.path.join(YARN_CACHE_PATH, directory)
+            for directory in os.listdir(YARN_CACHE_PATH)
+            if directory != CURRENT_VERSION
+        }
    except FileNotFoundError:
        return

@@ -1363,7 +1363,7 @@ def check_token_access(token: str) -> None:
     if data.status_code != 200 or not data.json()["ok"]:
         raise ValueError("Invalid Slack token: {}".format(token))
     has_scopes = set(data.headers.get("x-oauth-scopes", "").split(","))
-    required_scopes = set(["emoji:read", "users:read", "users:read.email", "team:read"])
+    required_scopes = {"emoji:read", "users:read", "users:read.email", "team:read"}
     missing_scopes = required_scopes - has_scopes
     if missing_scopes:
         raise ValueError(

@@ -3640,7 +3640,7 @@ def bulk_get_subscriber_user_ids(
         target_stream_dicts.append(stream_dict)

     recip_to_stream_id = {stream["recipient_id"]: stream["id"] for stream in target_stream_dicts}
-    recipient_ids = sorted([stream["recipient_id"] for stream in target_stream_dicts])
+    recipient_ids = sorted(stream["recipient_id"] for stream in target_stream_dicts)
     result: Dict[int, List[int]] = {stream["id"]: [] for stream in stream_dicts}
     if not recipient_ids:

@@ -307,7 +307,7 @@ def image_preview_enabled(
 def list_of_tlds() -> List[str]:
     # Skip a few overly-common false-positives from file extensions
-    common_false_positives = set(["java", "md", "mov", "py", "zip"])
+    common_false_positives = {"java", "md", "mov", "py", "zip"}
     tlds = list(tld_set - common_false_positives)
     tlds.sort(key=len, reverse=True)

@@ -28,7 +28,7 @@ EXCLUDE_UNDOCUMENTED_ENDPOINTS = {
 }
 # Consists of endpoints with some documentation remaining.
 # These are skipped but return true as the validator cannot exclude objects
-EXCLUDE_DOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = set([])
+EXCLUDE_DOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = set()
 # Most of our code expects allOf to be preprocessed away because that is what
 # yamole did. Its algorithm for doing so is not standards compliant, but we

@@ -173,7 +173,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
         email = "hambot-bot@zulip.testserver"
         bot = self.get_bot_user(email)
-        (event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
+        (event,) = (e for e in events if e["event"]["type"] == "realm_bot")
         self.assertEqual(result["user_id"], bot.id)
         self.assertEqual(
@@ -339,7 +339,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
         email = "hambot-bot@zulip.testserver"
         bot = self.get_bot_user(email)
-        (event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
+        (event,) = (e for e in events if e["event"]["type"] == "realm_bot")
         self.assertEqual(
             dict(
                 type="realm_bot",
@@ -435,7 +435,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
         assert profile.default_sending_stream is not None
         self.assertEqual(profile.default_sending_stream.name, "Denmark")
-        (event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
+        (event,) = (e for e in events if e["event"]["type"] == "realm_bot")
         self.assertEqual(
             dict(
                 type="realm_bot",
@@ -507,7 +507,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
         assert bot_profile.default_events_register_stream is not None
         self.assertEqual(bot_profile.default_events_register_stream.name, "Denmark")
-        (event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
+        (event,) = (e for e in events if e["event"]["type"] == "realm_bot")
         self.assertEqual(
             dict(
                 type="realm_bot",

@@ -526,12 +526,12 @@ class TestDigestTopics(ZulipTestCase):
             lengthy_topic_d,
         ]
         self.assertEqual(
-            get_hot_topics(topics, set([1, 0])),
+            get_hot_topics(topics, {1, 0}),
             [diverse_topic_a, diverse_topic_b, lengthy_topic_a, lengthy_topic_b],
         )
         self.assertEqual(
-            get_hot_topics(topics, set([1, 2])),
+            get_hot_topics(topics, {1, 2}),
             [diverse_topic_a, diverse_topic_c, lengthy_topic_a, lengthy_topic_d],
         )
-        self.assertEqual(get_hot_topics(topics, set([2])), [diverse_topic_c, lengthy_topic_d])
+        self.assertEqual(get_hot_topics(topics, {2}), [diverse_topic_c, lengthy_topic_d])
         self.assertEqual(get_hot_topics(topics, set()), [])

@@ -2147,7 +2147,7 @@ class GetOldMessagesTest(ZulipTestCase):
             '<p><a href="https://google.com">https://<span class="highlight">google.com</span></a></p>',
         )
-        (meeting_message,) = [m for m in messages if m[TOPIC_NAME] == "meetings"]
+        (meeting_message,) = (m for m in messages if m[TOPIC_NAME] == "meetings")
         self.assertEqual(meeting_message[MATCH_TOPIC], "meetings")
         self.assertEqual(
             meeting_message["match_content"],
@@ -2155,7 +2155,7 @@ class GetOldMessagesTest(ZulipTestCase):
             + '<span class="highlight">lunch</span></p>',
         )
-        (lunch_message,) = [m for m in messages if m[TOPIC_NAME] == "lunch plans"]
+        (lunch_message,) = (m for m in messages if m[TOPIC_NAME] == "lunch plans")
         self.assertEqual(lunch_message[MATCH_TOPIC], '<span class="highlight">lunch</span> plans')
         self.assertEqual(lunch_message["match_content"], "<p>I am hungry!</p>")
@@ -2200,7 +2200,7 @@ class GetOldMessagesTest(ZulipTestCase):
             '<p>昨日、<span class="highlight">日本</span>' + " のお菓子を送りました。</p>",
         )
-        (english_message,) = [m for m in messages if m[TOPIC_NAME] == "english"]
+        (english_message,) = (m for m in messages if m[TOPIC_NAME] == "english")
         self.assertEqual(english_message[MATCH_TOPIC], "english")
         self.assertIn(
             english_message["match_content"],
@@ -3649,7 +3649,7 @@ WHERE user_profile_id = {hamlet_id} AND (content ILIKE '%jumping%' OR subject IL
         self.assert_length(result["messages"], 1)
         messages = result["messages"]
-        (hello_message,) = [m for m in messages if m[TOPIC_NAME] == "say hello"]
+        (hello_message,) = (m for m in messages if m[TOPIC_NAME] == "say hello")
         self.assertEqual(hello_message[MATCH_TOPIC], "say hello")
         self.assertEqual(
             hello_message["match_content"],

@@ -271,7 +271,7 @@ class TestNotificationData(ZulipTestCase):
         # Personal and user group mentioned. Test that we don't consider the user
         # group mention for Hamlet in this case.
         result = get_user_group_mentions_data(
-            mentioned_user_ids=set([hamlet.id]),
+            mentioned_user_ids={hamlet.id},
             mentioned_user_group_ids=[hamlet_and_cordelia.id],
             mention_data=MentionData(realm.id, "hey @*hamlet_and_cordelia*!"),
         )

@@ -285,7 +285,7 @@ class OpenAPIArgumentsTest(ZulipTestCase):
     # Endpoints where the documentation is currently failing our
     # consistency tests. We aim to keep this list empty.
-    buggy_documentation_endpoints: Set[str] = set([])
+    buggy_documentation_endpoints: Set[str] = set()

     def convert_regex_to_url_pattern(self, regex_pattern: str) -> str:
         """Convert regular expressions style URL patterns to their

@@ -68,7 +68,7 @@ class WorkerTest(ZulipTestCase):
                 chunk = []

         def local_queue_size(self) -> int:
-            return sum([len(q) for q in self.queues.values()])
+            return sum(len(q) for q in self.queues.values())

     def test_UserActivityWorker(self) -> None:
         fake_client = self.FakeClient()

@@ -2965,7 +2965,7 @@ class SubscriptionAPITest(ZulipTestCase):
         expected_stream_ids = {get_stream(stream, self.test_realm).id for stream in add_streams}
-        (peer_add_event,) = [event for event in events if event["event"].get("op") == "peer_add"]
+        (peer_add_event,) = (event for event in events if event["event"].get("op") == "peer_add")
         self.assertEqual(set(peer_add_event["event"]["stream_ids"]), expected_stream_ids)
         self.assertEqual(set(peer_add_event["event"]["user_ids"]), {self.test_user.id})

@@ -1544,8 +1544,8 @@ class RecipientInfoTest(ZulipTestCase):
             stream_topic=stream_topic,
             possible_wildcard_mention=False,
         )
-        self.assertEqual(info["pm_mention_email_disabled_user_ids"], set([hamlet.id]))
-        self.assertEqual(info["pm_mention_push_disabled_user_ids"], set([hamlet.id]))
+        self.assertEqual(info["pm_mention_email_disabled_user_ids"], {hamlet.id})
+        self.assertEqual(info["pm_mention_push_disabled_user_ids"], {hamlet.id})
         hamlet.enable_offline_email_notifications = True
         hamlet.enable_offline_push_notifications = True
         hamlet.save()
@@ -1883,7 +1883,7 @@ class GetProfileTest(ZulipTestCase):
         result = self.api_get(hamlet, "/api/v1/users")
         self.assert_json_success(result)
-        (my_user,) = [user for user in result.json()["members"] if user["email"] == hamlet.email]
+        (my_user,) = (user for user in result.json()["members"] if user["email"] == hamlet.email)
         self.assertEqual(
             my_user["avatar_url"],

@@ -19,7 +19,7 @@ class TornadoAdapter(HTTPAdapter):
     def __init__(self) -> None:
         # All of the POST requests we make to Tornado are safe to
         # retry; allow retries of them, which is not the default.
-        retry_methods = Retry.DEFAULT_METHOD_WHITELIST | set(["POST"])
+        retry_methods = Retry.DEFAULT_METHOD_WHITELIST | {"POST"}
         retry = Retry(total=3, backoff_factor=1, method_whitelist=retry_methods)
         super().__init__(max_retries=retry)