mirror of https://github.com/zulip/zulip.git
python: Remove unnecessary intermediate lists.
Generated automatically by pyupgrade. Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
parent
7e0d26cd83
commit
3665deb93a
|
@@ -187,11 +187,11 @@ def initial_upgrade(request: HttpRequest) -> HttpResponse:
|
|||
},
|
||||
"realm_org_type": user.realm.org_type,
|
||||
"sorted_org_types": sorted(
|
||||
[
|
||||
(
|
||||
[org_type_name, org_type]
|
||||
for (org_type_name, org_type) in Realm.ORG_TYPES.items()
|
||||
if not org_type.get("hidden")
|
||||
],
|
||||
),
|
||||
key=lambda d: d[1]["display_order"],
|
||||
),
|
||||
}
|
||||
|
|
|
@@ -21,13 +21,11 @@ def remove_unused_versions_dir(args: argparse.Namespace) -> None:
|
|||
"""
|
||||
current_version_dir = os.path.join(YARN_CACHE_PATH, CURRENT_VERSION)
|
||||
try:
|
||||
dirs_to_purge = set(
|
||||
[
|
||||
os.path.join(YARN_CACHE_PATH, directory)
|
||||
for directory in os.listdir(YARN_CACHE_PATH)
|
||||
if directory != CURRENT_VERSION
|
||||
]
|
||||
)
|
||||
dirs_to_purge = {
|
||||
os.path.join(YARN_CACHE_PATH, directory)
|
||||
for directory in os.listdir(YARN_CACHE_PATH)
|
||||
if directory != CURRENT_VERSION
|
||||
}
|
||||
except FileNotFoundError:
|
||||
return
|
||||
|
||||
|
|
|
@@ -1363,7 +1363,7 @@ def check_token_access(token: str) -> None:
|
|||
if data.status_code != 200 or not data.json()["ok"]:
|
||||
raise ValueError("Invalid Slack token: {}".format(token))
|
||||
has_scopes = set(data.headers.get("x-oauth-scopes", "").split(","))
|
||||
required_scopes = set(["emoji:read", "users:read", "users:read.email", "team:read"])
|
||||
required_scopes = {"emoji:read", "users:read", "users:read.email", "team:read"}
|
||||
missing_scopes = required_scopes - has_scopes
|
||||
if missing_scopes:
|
||||
raise ValueError(
|
||||
|
|
|
@@ -3640,7 +3640,7 @@ def bulk_get_subscriber_user_ids(
|
|||
target_stream_dicts.append(stream_dict)
|
||||
|
||||
recip_to_stream_id = {stream["recipient_id"]: stream["id"] for stream in target_stream_dicts}
|
||||
recipient_ids = sorted([stream["recipient_id"] for stream in target_stream_dicts])
|
||||
recipient_ids = sorted(stream["recipient_id"] for stream in target_stream_dicts)
|
||||
|
||||
result: Dict[int, List[int]] = {stream["id"]: [] for stream in stream_dicts}
|
||||
if not recipient_ids:
|
||||
|
|
|
@@ -307,7 +307,7 @@ def image_preview_enabled(
|
|||
|
||||
def list_of_tlds() -> List[str]:
|
||||
# Skip a few overly-common false-positives from file extensions
|
||||
common_false_positives = set(["java", "md", "mov", "py", "zip"])
|
||||
common_false_positives = {"java", "md", "mov", "py", "zip"}
|
||||
tlds = list(tld_set - common_false_positives)
|
||||
|
||||
tlds.sort(key=len, reverse=True)
|
||||
|
|
|
@@ -28,7 +28,7 @@ EXCLUDE_UNDOCUMENTED_ENDPOINTS = {
|
|||
}
|
||||
# Consists of endpoints with some documentation remaining.
|
||||
# These are skipped but return true as the validator cannot exclude objects
|
||||
EXCLUDE_DOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = set([])
|
||||
EXCLUDE_DOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = set()
|
||||
|
||||
# Most of our code expects allOf to be preprocessed away because that is what
|
||||
# yamole did. Its algorithm for doing so is not standards compliant, but we
|
||||
|
|
|
@@ -173,7 +173,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
|
|||
email = "hambot-bot@zulip.testserver"
|
||||
bot = self.get_bot_user(email)
|
||||
|
||||
(event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
|
||||
(event,) = (e for e in events if e["event"]["type"] == "realm_bot")
|
||||
|
||||
self.assertEqual(result["user_id"], bot.id)
|
||||
self.assertEqual(
|
||||
|
@@ -339,7 +339,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
|
|||
email = "hambot-bot@zulip.testserver"
|
||||
bot = self.get_bot_user(email)
|
||||
|
||||
(event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
|
||||
(event,) = (e for e in events if e["event"]["type"] == "realm_bot")
|
||||
self.assertEqual(
|
||||
dict(
|
||||
type="realm_bot",
|
||||
|
@@ -435,7 +435,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
|
|||
assert profile.default_sending_stream is not None
|
||||
self.assertEqual(profile.default_sending_stream.name, "Denmark")
|
||||
|
||||
(event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
|
||||
(event,) = (e for e in events if e["event"]["type"] == "realm_bot")
|
||||
self.assertEqual(
|
||||
dict(
|
||||
type="realm_bot",
|
||||
|
@@ -507,7 +507,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
|
|||
assert bot_profile.default_events_register_stream is not None
|
||||
self.assertEqual(bot_profile.default_events_register_stream.name, "Denmark")
|
||||
|
||||
(event,) = [e for e in events if e["event"]["type"] == "realm_bot"]
|
||||
(event,) = (e for e in events if e["event"]["type"] == "realm_bot")
|
||||
self.assertEqual(
|
||||
dict(
|
||||
type="realm_bot",
|
||||
|
|
|
@@ -526,12 +526,12 @@ class TestDigestTopics(ZulipTestCase):
|
|||
lengthy_topic_d,
|
||||
]
|
||||
self.assertEqual(
|
||||
get_hot_topics(topics, set([1, 0])),
|
||||
get_hot_topics(topics, {1, 0}),
|
||||
[diverse_topic_a, diverse_topic_b, lengthy_topic_a, lengthy_topic_b],
|
||||
)
|
||||
self.assertEqual(
|
||||
get_hot_topics(topics, set([1, 2])),
|
||||
get_hot_topics(topics, {1, 2}),
|
||||
[diverse_topic_a, diverse_topic_c, lengthy_topic_a, lengthy_topic_d],
|
||||
)
|
||||
self.assertEqual(get_hot_topics(topics, set([2])), [diverse_topic_c, lengthy_topic_d])
|
||||
self.assertEqual(get_hot_topics(topics, {2}), [diverse_topic_c, lengthy_topic_d])
|
||||
self.assertEqual(get_hot_topics(topics, set()), [])
|
||||
|
|
|
@@ -2147,7 +2147,7 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
'<p><a href="https://google.com">https://<span class="highlight">google.com</span></a></p>',
|
||||
)
|
||||
|
||||
(meeting_message,) = [m for m in messages if m[TOPIC_NAME] == "meetings"]
|
||||
(meeting_message,) = (m for m in messages if m[TOPIC_NAME] == "meetings")
|
||||
self.assertEqual(meeting_message[MATCH_TOPIC], "meetings")
|
||||
self.assertEqual(
|
||||
meeting_message["match_content"],
|
||||
|
@@ -2155,7 +2155,7 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
+ '<span class="highlight">lunch</span></p>',
|
||||
)
|
||||
|
||||
(lunch_message,) = [m for m in messages if m[TOPIC_NAME] == "lunch plans"]
|
||||
(lunch_message,) = (m for m in messages if m[TOPIC_NAME] == "lunch plans")
|
||||
self.assertEqual(lunch_message[MATCH_TOPIC], '<span class="highlight">lunch</span> plans')
|
||||
self.assertEqual(lunch_message["match_content"], "<p>I am hungry!</p>")
|
||||
|
||||
|
@@ -2200,7 +2200,7 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
'<p>昨日、<span class="highlight">日本</span>' + " のお菓子を送りました。</p>",
|
||||
)
|
||||
|
||||
(english_message,) = [m for m in messages if m[TOPIC_NAME] == "english"]
|
||||
(english_message,) = (m for m in messages if m[TOPIC_NAME] == "english")
|
||||
self.assertEqual(english_message[MATCH_TOPIC], "english")
|
||||
self.assertIn(
|
||||
english_message["match_content"],
|
||||
|
@@ -3649,7 +3649,7 @@ WHERE user_profile_id = {hamlet_id} AND (content ILIKE '%jumping%' OR subject IL
|
|||
self.assert_length(result["messages"], 1)
|
||||
messages = result["messages"]
|
||||
|
||||
(hello_message,) = [m for m in messages if m[TOPIC_NAME] == "say hello"]
|
||||
(hello_message,) = (m for m in messages if m[TOPIC_NAME] == "say hello")
|
||||
self.assertEqual(hello_message[MATCH_TOPIC], "say hello")
|
||||
self.assertEqual(
|
||||
hello_message["match_content"],
|
||||
|
|
|
@@ -271,7 +271,7 @@ class TestNotificationData(ZulipTestCase):
|
|||
# Personal and user group mentioned. Test that we don't consider the user
|
||||
# group mention for Hamlet in this case.
|
||||
result = get_user_group_mentions_data(
|
||||
mentioned_user_ids=set([hamlet.id]),
|
||||
mentioned_user_ids={hamlet.id},
|
||||
mentioned_user_group_ids=[hamlet_and_cordelia.id],
|
||||
mention_data=MentionData(realm.id, "hey @*hamlet_and_cordelia*!"),
|
||||
)
|
||||
|
|
|
@@ -285,7 +285,7 @@ class OpenAPIArgumentsTest(ZulipTestCase):
|
|||
|
||||
# Endpoints where the documentation is currently failing our
|
||||
# consistency tests. We aim to keep this list empty.
|
||||
buggy_documentation_endpoints: Set[str] = set([])
|
||||
buggy_documentation_endpoints: Set[str] = set()
|
||||
|
||||
def convert_regex_to_url_pattern(self, regex_pattern: str) -> str:
|
||||
"""Convert regular expressions style URL patterns to their
|
||||
|
|
|
@@ -68,7 +68,7 @@ class WorkerTest(ZulipTestCase):
|
|||
chunk = []
|
||||
|
||||
def local_queue_size(self) -> int:
|
||||
return sum([len(q) for q in self.queues.values()])
|
||||
return sum(len(q) for q in self.queues.values())
|
||||
|
||||
def test_UserActivityWorker(self) -> None:
|
||||
fake_client = self.FakeClient()
|
||||
|
|
|
@@ -2965,7 +2965,7 @@ class SubscriptionAPITest(ZulipTestCase):
|
|||
|
||||
expected_stream_ids = {get_stream(stream, self.test_realm).id for stream in add_streams}
|
||||
|
||||
(peer_add_event,) = [event for event in events if event["event"].get("op") == "peer_add"]
|
||||
(peer_add_event,) = (event for event in events if event["event"].get("op") == "peer_add")
|
||||
|
||||
self.assertEqual(set(peer_add_event["event"]["stream_ids"]), expected_stream_ids)
|
||||
self.assertEqual(set(peer_add_event["event"]["user_ids"]), {self.test_user.id})
|
||||
|
|
|
@@ -1544,8 +1544,8 @@ class RecipientInfoTest(ZulipTestCase):
|
|||
stream_topic=stream_topic,
|
||||
possible_wildcard_mention=False,
|
||||
)
|
||||
self.assertEqual(info["pm_mention_email_disabled_user_ids"], set([hamlet.id]))
|
||||
self.assertEqual(info["pm_mention_push_disabled_user_ids"], set([hamlet.id]))
|
||||
self.assertEqual(info["pm_mention_email_disabled_user_ids"], {hamlet.id})
|
||||
self.assertEqual(info["pm_mention_push_disabled_user_ids"], {hamlet.id})
|
||||
hamlet.enable_offline_email_notifications = True
|
||||
hamlet.enable_offline_push_notifications = True
|
||||
hamlet.save()
|
||||
|
@@ -1883,7 +1883,7 @@ class GetProfileTest(ZulipTestCase):
|
|||
result = self.api_get(hamlet, "/api/v1/users")
|
||||
self.assert_json_success(result)
|
||||
|
||||
(my_user,) = [user for user in result.json()["members"] if user["email"] == hamlet.email]
|
||||
(my_user,) = (user for user in result.json()["members"] if user["email"] == hamlet.email)
|
||||
|
||||
self.assertEqual(
|
||||
my_user["avatar_url"],
|
||||
|
|
|
@@ -19,7 +19,7 @@ class TornadoAdapter(HTTPAdapter):
|
|||
def __init__(self) -> None:
|
||||
# All of the POST requests we make to Tornado are safe to
|
||||
# retry; allow retries of them, which is not the default.
|
||||
retry_methods = Retry.DEFAULT_METHOD_WHITELIST | set(["POST"])
|
||||
retry_methods = Retry.DEFAULT_METHOD_WHITELIST | {"POST"}
|
||||
retry = Retry(total=3, backoff_factor=1, method_whitelist=retry_methods)
|
||||
super().__init__(max_retries=retry)
|
||||
|
||||
|
|
Loading…
Reference in New Issue