mirror of https://github.com/zulip/zulip.git
python: Replace unnecessary intermediate lists with generators.

Mostly suggested by the flake8-comprehensions plugin.

Signed-off-by: Anders Kaseorg <anders@zulip.com>

parent 1ded51aa9d
commit ab120a03bc
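
The pattern throughout is the same: when a comprehension's result is immediately consumed by a callable that accepts any iterable (sorted, any, min, str.join, dict, bulk_create, ...), the intermediate list is wasted work and visual noise. A minimal sketch of the before/after shape (the names and data here are illustrative, not taken from the diff):

    # Before: the list comprehension builds a throwaway intermediate list.
    users = ["mercutio", "Hamlet", "iago"]
    assert sorted([name.lower() for name in users]) == ["hamlet", "iago", "mercutio"]

    # After: the generator expression feeds sorted() one item at a time.
    assert sorted(name.lower() for name in users) == ["hamlet", "iago", "mercutio"]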
@@ -692,7 +692,7 @@ def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
                   dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
     ]

-    return OrderedDict([(stat.property, stat) for stat in count_stats_])
+    return OrderedDict((stat.property, stat) for stat in count_stats_)

 # To avoid refactoring for now COUNT_STATS can be used as before
 COUNT_STATS = get_count_stats()
@@ -101,14 +101,14 @@ class TestGetChartData(ZulipTestCase):
         insert_time = self.end_times_day[2]
         fill_time = self.end_times_day[-1]

-        RealmCount.objects.bulk_create([
+        RealmCount.objects.bulk_create(
             RealmCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                        value=100+i, realm=self.realm)
-            for i, subgroup in enumerate(realm_subgroups)])
-        UserCount.objects.bulk_create([
+            for i, subgroup in enumerate(realm_subgroups))
+        UserCount.objects.bulk_create(
             UserCount(property=stat.property, subgroup=subgroup, end_time=insert_time,
                       value=200+i, realm=self.realm, user=self.user)
-            for i, subgroup in enumerate(user_subgroups)])
+            for i, subgroup in enumerate(user_subgroups))
         FillState.objects.create(property=stat.property, end_time=fill_time, state=FillState.DONE)

     def test_number_of_humans(self) -> None:
@@ -456,7 +456,7 @@ def dictfetchall(cursor: connection.cursor) -> List[Dict[str, Any]]:
     "Returns all rows from a cursor as a dict"
     desc = cursor.description
     return [
-        dict(list(zip([col[0] for col in desc], row)))
+        dict(zip((col[0] for col in desc), row))
         for row in cursor.fetchall()
     ]
@@ -93,8 +93,8 @@ def fixture_files_for_function(decorated_function: CallableT) -> List[str]:  # nocoverage
     decorated_function_name = decorated_function.__name__
     if decorated_function_name[:5] == 'test_':
         decorated_function_name = decorated_function_name[5:]
-    return sorted([f'{STRIPE_FIXTURES_DIR}/{f}' for f in os.listdir(STRIPE_FIXTURES_DIR)
-                   if f.startswith(decorated_function_name + '--')])
+    return sorted(f'{STRIPE_FIXTURES_DIR}/{f}' for f in os.listdir(STRIPE_FIXTURES_DIR)
+                  if f.startswith(decorated_function_name + '--'))

 def generate_and_save_stripe_fixture(decorated_function_name: str, mocked_function_name: str,
                                      mocked_function: CallableT) -> Callable[[Any, Any], Any]:  # nocoverage
@@ -105,7 +105,7 @@ def get_zulips() -> List[Dict[str, Any]]:
         last_event_id = max(last_event_id, int(event['id']))
     # If we get a heartbeat event, that means we've been hanging for
     # 40s, and we should bail.
-    if 'heartbeat' in {event['type'] for event in res['events']}:
+    if 'heartbeat' in (event['type'] for event in res['events']):
         report("CRITICAL", msg="Got heartbeat waiting for Zulip, which means get_events is hanging")
     return [event['message'] for event in res['events']]

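A note on the hunk above: the old set comprehension materialized every event type before the membership test, while `in` over a generator short-circuits at the first match. Both behave identically here because the iterable is built fresh for this single test. A small illustration with invented data:

    events = [{"type": "heartbeat"}, {"type": "message"}]
    # `in` walks the generator and stops as soon as it sees a match.
    assert "heartbeat" in (event["type"] for event in events)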
@@ -39,9 +39,9 @@ def report(state: str, short_msg: str, too_old: AbstractSet[Any] = set()) -> None:
     too_old_data = ""
     if too_old:
         too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join(
-            ["{:>16}: {}".format(user.user_profile.email,
-                                 user.last_visit.strftime("%Y-%m-%d %H:%M %Z"),
-                                 ) for user in too_old],
+            "{:>16}: {}".format(user.user_profile.email,
                                 user.last_visit.strftime("%Y-%m-%d %H:%M %Z"),
                                 ) for user in too_old
         )
     print(f"{state}: {short_msg}{too_old_data}")

@@ -203,10 +203,10 @@ def check_rabbitmq_queues() -> None:

     if status > 0:
         queue_error_template = "queue {} problem: {}:{}"
-        error_message = '; '.join([
+        error_message = '; '.join(
             queue_error_template.format(result['name'], states[result['status']], result['message'])
             for result in results if result['status'] > 0
-        ])
+        )
         print(f"{now}|{status}|{states[status]}|{error_message}")
     else:
         print(f"{now}|{status}|{states[status]}|queues normal")
@@ -1230,7 +1230,7 @@ def get_recipient_info(recipient: Recipient,
     # need this codepath to be fast (it's part of sending messages)
     query = query_for_ids(
         query=query,
-        user_ids=sorted(list(user_ids)),
+        user_ids=sorted(user_ids),
         field='id',
     )
     rows = list(query)
@@ -1502,7 +1502,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, Any]]],
     # Save the message receipts in the database
     user_message_flags: Dict[int, Dict[int, List[str]]] = defaultdict(dict)
     with transaction.atomic():
-        Message.objects.bulk_create([message['message'] for message in messages])
+        Message.objects.bulk_create(message['message'] for message in messages)

         # Claim attachments in message
         for message in messages:
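Passing a generator to Django's bulk_create is safe because the queryset method materializes its argument before batching, so only the extra Python-level list is saved. A stand-in consumer showing the shape (this bulk_create is a mock for illustration, not the Django API):

    from typing import Iterable, List

    def bulk_create(objs: Iterable[str]) -> List[str]:
        items = list(objs)  # Django's implementation materializes objs the same way.
        return items

    assert bulk_create(f"row-{i}" for i in range(3)) == ["row-0", "row-1", "row-2"]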
@@ -2635,10 +2635,10 @@ def bulk_get_subscriber_user_ids(stream_dicts: Iterable[Mapping[str, Any]],
             target_stream_dicts.append(stream_dict)

     stream_ids = [stream['id'] for stream in target_stream_dicts]
-    recipient_ids = sorted([
+    recipient_ids = sorted(
         stream_recipient.recipient_id_for(stream_id)
         for stream_id in stream_ids
-    ])
+    )

     result: Dict[int, List[int]] = {stream["id"]: [] for stream in stream_dicts}
     if not recipient_ids:
@@ -2856,7 +2856,7 @@ def bulk_add_subscriptions(streams: Iterable[Stream],
     # transaction isolation level.
     with transaction.atomic():
         occupied_streams_before = list(get_occupied_streams(realm))
-        Subscription.objects.bulk_create([sub for (sub, stream) in subs_to_add])
+        Subscription.objects.bulk_create(sub for (sub, stream) in subs_to_add)
         sub_ids = [sub.id for (sub, stream) in subs_to_activate]
         Subscription.objects.filter(id__in=sub_ids).update(active=True)
         occupied_streams_after = list(get_occupied_streams(realm))
@@ -3935,10 +3935,10 @@ def get_default_subs(user_profile: UserProfile) -> List[Stream]:

 # returns default streams in json serializeable format
 def streams_to_dicts_sorted(streams: List[Stream]) -> List[Dict[str, Any]]:
-    return sorted([stream.to_dict() for stream in streams], key=lambda elt: elt["name"])
+    return sorted((stream.to_dict() for stream in streams), key=lambda elt: elt["name"])

 def default_stream_groups_to_dicts_sorted(groups: List[DefaultStreamGroup]) -> List[Dict[str, Any]]:
-    return sorted([group.to_dict() for group in groups], key=lambda elt: elt["name"])
+    return sorted((group.to_dict() for group in groups), key=lambda elt: elt["name"])

 def do_update_user_activity_interval(user_profile: UserProfile,
                                      log_time: datetime.datetime) -> None:
@@ -5064,9 +5064,9 @@ def gather_subscriptions(
     for subs in [subscribed, unsubscribed]:
         for sub in subs:
             if 'subscribers' in sub:
-                sub['subscribers'] = sorted([
+                sub['subscribers'] = sorted(
                     email_dict[user_id] for user_id in sub['subscribers']
-                ])
+                )

     return (subscribed, unsubscribed)

@@ -5128,7 +5128,7 @@ def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
     ).exclude(client__name="ZulipMobile").distinct('user_profile_id').values('user_profile_id')
     active_user_ids = {row['user_profile_id'] for row in rows}
     idle_user_ids = user_ids - active_user_ids
-    return sorted(list(idle_user_ids))
+    return sorted(idle_user_ids)

 def do_send_confirmation_email(invitee: PreregistrationUser,
                                referrer: UserProfile) -> str:
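sorted() accepts any iterable and always returns a fresh list, so the inner list() call in the old code was pure overhead. For example:

    idle_user_ids = {42, 7, 12}
    assert sorted(idle_user_ids) == [7, 12, 42]                   # set in, list out
    assert sorted(idle_user_ids) == sorted(list(idle_user_ids))   # identical result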
@@ -20,7 +20,7 @@ def bulk_create_users(realm: Realm,
     """
     existing_users = frozenset(UserProfile.objects.filter(
         realm=realm).values_list('email', flat=True))
-    users = sorted([user_raw for user_raw in users_raw if user_raw[0] not in existing_users])
+    users = sorted(user_raw for user_raw in users_raw if user_raw[0] not in existing_users)

     # Now create user_profiles
     profiles_to_create: List[UserProfile] = []
@@ -47,9 +47,9 @@ def bulk_create_users(realm: Realm,
     user_ids = {user.id for user in profiles_to_create}

     RealmAuditLog.objects.bulk_create(
-        [RealmAuditLog(realm=realm, modified_user=profile_,
-                       event_type=RealmAuditLog.USER_CREATED, event_time=profile_.date_joined)
-         for profile_ in profiles_to_create])
+        RealmAuditLog(realm=realm, modified_user=profile_,
+                      event_type=RealmAuditLog.USER_CREATED, event_time=profile_.date_joined)
+        for profile_ in profiles_to_create)

     recipients_to_create: List[Recipient] = []
     for user_id in user_ids:
@@ -102,9 +102,10 @@ def bulk_set_users_or_streams_recipient_fields(model: Model,
 # This is only sed in populate_db, so doesn't really need tests
 def bulk_create_streams(realm: Realm,
                         stream_dict: Dict[str, Dict[str, Any]]) -> None:  # nocoverage
-    existing_streams = frozenset([name.lower() for name in
-                                  Stream.objects.filter(realm=realm)
-                                  .values_list('name', flat=True)])
+    existing_streams = {
+        name.lower()
+        for name in Stream.objects.filter(realm=realm).values_list('name', flat=True)
+    }
     streams_to_create: List[Stream] = []
     for name, options in stream_dict.items():
         if 'history_public_to_subscribers' not in options:
@@ -127,7 +127,7 @@ def gather_hot_conversations(user_profile: UserProfile, messages: List[Message])
     # out the hot conversations.
     num_convos = len(hot_conversations)
     if num_convos < 4:
-        hot_conversations.extend([elt[0] for elt in diversity_list[num_convos:4]])
+        hot_conversations.extend(elt[0] for elt in diversity_list[num_convos:4])

     hot_conversation_render_payloads = []
     for h in hot_conversations:
@@ -371,9 +371,9 @@ def apply_event(state: Dict[str, Any],

         if recipient_id not in conversations:
             conversations[recipient_id] = dict(
-                user_ids=sorted([user_dict['id'] for user_dict in
-                                 event['message']['display_recipient'] if
-                                 user_dict['id'] != user_profile.id]),
+                user_ids=sorted(user_dict['id'] for user_dict in
                                 event['message']['display_recipient'] if
                                 user_dict['id'] != user_profile.id),
             )
         conversations[recipient_id]['max_message_id'] = event['message']['id']
         return
@@ -943,11 +943,11 @@ def post_process_state(user_profile: UserProfile, ret: Dict[str, Any],

     if 'raw_recent_private_conversations' in ret:
         # Reformat recent_private_conversations to be a list of dictionaries, rather than a dict.
-        ret['recent_private_conversations'] = sorted([
+        ret['recent_private_conversations'] = sorted((
             dict(
                 **value,
             ) for (recipient_id, value) in ret['raw_recent_private_conversations'].items()
-        ], key = lambda x: -x["max_message_id"])
+        ), key = lambda x: -x["max_message_id"])
         del ret['raw_recent_private_conversations']

     if not notification_settings_null and 'subscriptions' in ret:
@@ -893,7 +893,7 @@ def fetch_attachment_data(response: TableData, realm_id: int, message_ids: Set[int]) -> None:
     '''
     for row in response['zerver_attachment']:
         filterer_message_ids = set(row['messages']).intersection(message_ids)
-        row['messages'] = sorted(list(filterer_message_ids))
+        row['messages'] = sorted(filterer_message_ids)

     '''
     Attachments can be connected to multiple messages, although
@@ -507,8 +507,8 @@ def fix_bitfield_keys(data: TableData, table: TableName, field_name: Field) -> None:
 def fix_realm_authentication_bitfield(data: TableData, table: TableName, field_name: Field) -> None:
     """Used to fixup the authentication_methods bitfield to be a string"""
     for item in data[table]:
-        values_as_bitstring = ''.join(['1' if field[1] else '0' for field in
-                                       item[field_name]])
+        values_as_bitstring = ''.join('1' if field[1] else '0' for field in
                                       item[field_name])
         values_as_int = int(values_as_bitstring, 2)
         item[field_name] = values_as_int

@@ -1594,7 +1594,7 @@ class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
             else:
                 open_fences.append(Fence(fence_str, is_code))

-            in_code_fence = any([fence.is_code for fence in open_fences])
+            in_code_fence = any(fence.is_code for fence in open_fences)

             # If we're not in a fenced block and we detect an upcoming list
             # hanging off any block (including a list of another type), add
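Beyond dropping the brackets, any() and all() gain short-circuiting from a generator: evaluation stops at the first decisive element instead of building the full list first. An invented example:

    checked = []
    def is_open_code_fence(fence: str) -> bool:
        checked.append(fence)
        return fence == "```"

    assert any(is_open_code_fence(f) for f in ["```", "~~~", "~~~"])
    assert checked == ["```"]  # the remaining fences were never evaluated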
@@ -765,7 +765,7 @@ def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]],
     '''

     for message_id, attribute_dict in input_dict.items():
-        lookup_key = tuple([attribute_dict[f] for f in lookup_fields])
+        lookup_key = tuple(attribute_dict[f] for f in lookup_fields)
         if lookup_key not in lookup_dict:
             obj = {}
             for f in lookup_fields:
@@ -783,7 +783,7 @@ def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]],
     for dct in lookup_dict.values():
         dct['unread_message_ids'].sort()
         if collect_senders:
-            dct['sender_ids'] = sorted(list(dct['sender_ids']))
+            dct['sender_ids'] = sorted(dct['sender_ids'])

     sorted_keys = sorted(lookup_dict.keys())

@@ -16,12 +16,12 @@ def xor_hex_strings(bytes_a: str, bytes_b: str) -> str:
     """Given two hex strings of equal length, return a hex string with
     the bitwise xor of the two hex strings."""
     assert len(bytes_a) == len(bytes_b)
-    return ''.join([f"{int(x, 16) ^ int(y, 16):x}"
-                    for x, y in zip(bytes_a, bytes_b)])
+    return ''.join(f"{int(x, 16) ^ int(y, 16):x}"
+                   for x, y in zip(bytes_a, bytes_b))

 def ascii_to_hex(input_string: str) -> str:
     """Given an ascii string, encode it as a hex string"""
-    return "".join([hex(ord(c))[2:].zfill(2) for c in input_string])
+    return "".join(hex(ord(c))[2:].zfill(2) for c in input_string)

 def hex_to_ascii(input_string: str) -> str:
     """Given a hex array, decode it back to a string"""
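For str.join the change is stylistic rather than a real saving: CPython's join makes two passes over its input, so it materializes a generator into a sequence internally anyway. The result is identical either way:

    assert "".join(hex(ord(c))[2:].zfill(2) for c in "ab") == "6162"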
@@ -64,7 +64,7 @@ def build_email(template_prefix: str, to_user_ids: Optional[List[int]]=None,
     if to_user_ids is not None:
         to_users = [get_user_profile_by_id(to_user_id) for to_user_id in to_user_ids]
         if realm is None:
-            assert len(set([to_user.realm_id for to_user in to_users])) == 1
+            assert len({to_user.realm_id for to_user in to_users}) == 1
             realm = to_users[0].realm
         to_emails = [str(Address(display_name=to_user.full_name, addr_spec=to_user.delivery_email)) for to_user in to_users]

@@ -31,10 +31,10 @@ class StreamRecipientMap:
         self.stream_to_recip[stream_id] = recipient_id

     def populate_for_recipient_ids(self, recipient_ids: List[int]) -> None:
-        recipient_ids = sorted([
+        recipient_ids = sorted(
             recip_id for recip_id in recipient_ids
             if recip_id not in self.recip_to_stream
-        ])
+        )

         if not recipient_ids:
             return
@@ -72,10 +72,10 @@ def create_user_group(name: str, members: List[UserProfile], realm: Realm,
     with transaction.atomic():
         user_group = UserGroup.objects.create(name=name, realm=realm,
                                               description=description)
-        UserGroupMembership.objects.bulk_create([
+        UserGroupMembership.objects.bulk_create(
             UserGroupMembership(user_profile=member, user_group=user_group)
             for member in members
-        ])
+        )
         return user_group

 def get_user_group_members(user_group: UserGroup) -> List[UserProfile]:
@@ -112,7 +112,7 @@ def generate_random_token(length: int) -> str:

 def generate_api_key() -> str:
     choices = string.ascii_letters + string.digits
-    altchars = ''.join([choices[ord(os.urandom(1)) % 62] for _ in range(2)]).encode("utf-8")
+    altchars = ''.join(choices[ord(os.urandom(1)) % 62] for _ in range(2)).encode("utf-8")
     api_key = base64.b64encode(os.urandom(24), altchars=altchars).decode("utf-8")
     return api_key

@@ -299,7 +299,7 @@ def get_all_committers(commits_data: List[Dict[str, Any]]) -> List[Tuple[str, int]]:

     # Sort by commit count, breaking ties alphabetically.
     committers_items: List[Tuple[str, int]] = sorted(
-        list(committers.items()), key=lambda item: (-item[1], item[0]),
+        committers.items(), key=lambda item: (-item[1], item[0]),
     )
     committers_values: List[int] = [c_i[1] for c_i in committers_items]

@@ -30,7 +30,7 @@ class Command(ZulipBaseCommand):
         for user_profile in user_profiles:
             stream = ensure_stream(realm, stream_name, acting_user=None)
             _ignore, already_subscribed = bulk_add_subscriptions([stream], [user_profile])
-            was_there_already = user_profile.id in {tup[0].id for tup in already_subscribed}
+            was_there_already = user_profile.id in (tup[0].id for tup in already_subscribed)
             print("{} {} to {}".format(
                 "Already subscribed" if was_there_already else "Subscribed",
                 user_profile.delivery_email, stream_name))
@@ -17,7 +17,7 @@ class Command(ZulipBaseCommand):
         assert realm is not None  # True because of required=True above

         admin_users = realm.get_admin_users_and_bots()
-        owner_user_ids = set(list(realm.get_human_owner_users().values_list("id", flat=True)))
+        owner_user_ids = set(realm.get_human_owner_users().values_list("id", flat=True))

         if admin_users:
             print('Administrators:\n')
@@ -360,14 +360,14 @@ class RateLimitMiddleware(MiddlewareMixin):
     def set_response_headers(self, response: HttpResponse,
                              rate_limit_results: List[RateLimitResult]) -> None:
         # The limit on the action that was requested is the minimum of the limits that get applied:
-        limit = min([result.entity.max_api_calls() for result in rate_limit_results])
+        limit = min(result.entity.max_api_calls() for result in rate_limit_results)
         response['X-RateLimit-Limit'] = str(limit)
         # Same principle applies to remaining api calls:
-        remaining_api_calls = min([result.remaining for result in rate_limit_results])
+        remaining_api_calls = min(result.remaining for result in rate_limit_results)
         response['X-RateLimit-Remaining'] = str(remaining_api_calls)

         # The full reset time is the maximum of the reset times for the limits that get applied:
-        reset_time = time.time() + max([result.secs_to_freedom for result in rate_limit_results])
+        reset_time = time.time() + max(result.secs_to_freedom for result in rate_limit_results)
         response['X-RateLimit-Reset'] = str(int(reset_time))

     def process_response(self, request: HttpRequest, response: HttpResponse) -> HttpResponse:
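min() and max() likewise take any iterable, and both forms raise the same ValueError on empty input, so the change cannot alter behavior here. With invented numbers:

    secs_to_freedom = [2.5, 0.5, 9.0]
    assert min(secs_to_freedom) == 0.5
    assert max(s for s in secs_to_freedom) == 9.0  # the generator works identically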
@@ -20,7 +20,7 @@ def fix_duplicate_attachments(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
     # Loop through all groups of Attachment objects with the same `path_id`
     for group in Attachment.objects.values('path_id').annotate(Count('id')).order_by().filter(id__count__gt=1):
         # Sort by the minimum message ID, to find the first attachment
-        attachments = sorted(list(Attachment.objects.filter(path_id=group['path_id']).order_by("id")),
+        attachments = sorted(Attachment.objects.filter(path_id=group['path_id']).order_by("id"),
                              key = lambda x: min(x.messages.all().values_list('id')[0]))
         surviving = attachments[0]
         to_cleanup = attachments[1:]
@@ -94,7 +94,7 @@ def update_realmauditlog_values(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
             new_value = new_value['id']

         # Sanity check that the original event has exactly the keys we expect.
-        assert set(extra_data.keys()) <= set([OLD_VALUE, NEW_VALUE])
+        assert set(extra_data.keys()) <= {OLD_VALUE, NEW_VALUE}

         ra.extra_data = json.dumps({
             OLD_VALUE: old_value,
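A set literal builds the set directly, whereas set([...]) first constructs a list and then copies it into a set. With stand-in values for the migration's constants:

    OLD_VALUE, NEW_VALUE = "old_value", "new_value"  # illustrative values only
    extra_data = {OLD_VALUE: 1}
    assert set(extra_data.keys()) <= {OLD_VALUE, NEW_VALUE}  # same check, no throwaway list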
@@ -1562,9 +1562,9 @@ class SAMLAuthBackendTest(SocialAuthBase):
             extra_attrs = ''
             for extra_attr_name, extra_attr_values in extra_attributes.items():
                 values = ''.join(
-                    ['<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" ' +
-                     'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">' +
-                     f'{value}</saml2:AttributeValue>' for value in extra_attr_values]
+                    '<saml2:AttributeValue xmlns:xs="http://www.w3.org/2001/XMLSchema" ' +
+                    'xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:type="xs:string">' +
+                    f'{value}</saml2:AttributeValue>' for value in extra_attr_values
                 )
                 extra_attrs += f'<saml2:Attribute Name="{extra_attr_name}" ' + \
                     'NameFormat="urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified">' + \
@@ -263,7 +263,7 @@ class HomeTest(ZulipTestCase):

         page_params = self._get_page_params(result)

-        actual_keys = sorted([str(k) for k in page_params.keys()])
+        actual_keys = sorted(str(k) for k in page_params.keys())

         self.assertEqual(actual_keys, expected_keys)

@@ -284,7 +284,7 @@ class HomeTest(ZulipTestCase):
             'user_id',
         ]

-        realm_bots_actual_keys = sorted([str(key) for key in page_params['realm_bots'][0].keys()])
+        realm_bots_actual_keys = sorted(str(key) for key in page_params['realm_bots'][0].keys())
         self.assertEqual(realm_bots_actual_keys, realm_bots_expected_keys)

     def test_home_under_2fa_without_otp_device(self) -> None:
@@ -3200,7 +3200,7 @@ class MessageHasKeywordsTest(ZulipTestCase):
             msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                     'Denmark', content=msg_content))
         msgs = [Message.objects.get(id=id) for id in msg_ids]
-        self.assertTrue(all([msg.has_link for msg in msgs]))
+        self.assertTrue(all(msg.has_link for msg in msgs))

     def test_finds_only_links(self) -> None:
         msg_ids = []
@@ -3209,7 +3209,7 @@ class MessageHasKeywordsTest(ZulipTestCase):
             msg_ids.append(self.send_stream_message(self.example_user('hamlet'),
                                                     'Denmark', content=msg_content))
         msgs = [Message.objects.get(id=id) for id in msg_ids]
-        self.assertFalse(all([msg.has_link for msg in msgs]))
+        self.assertFalse(all(msg.has_link for msg in msgs))

     def update_message(self, msg: Message, content: str) -> None:
         hamlet = self.example_user('hamlet')
@@ -1188,7 +1188,7 @@ earl-test@zulip.com""", ["Denmark"]))
         self.login('iago')
         invitee_emails = "1@zulip.com, 2@zulip.com"
         self.invite(invitee_emails, ["Denmark"])
-        invitee_emails = ", ".join([str(i) for i in range(get_realm("zulip").max_invites - 1)])
+        invitee_emails = ", ".join(str(i) for i in range(get_realm("zulip").max_invites - 1))
         self.assert_json_error(self.invite(invitee_emails, ["Denmark"]),
                                "You do not have enough remaining invites. "
                                "Please contact desdemona+admin@zulip.com to have your limit raised. "
@@ -4204,7 +4204,7 @@ class TestFindMyTeam(ZulipTestCase):
         self.assertEqual(len(outbox), 0)

     def test_find_team_more_than_ten_emails(self) -> None:
-        data = {'emails': ','.join([f'hamlet-{i}@zulip.com' for i in range(11)])}
+        data = {'emails': ','.join(f'hamlet-{i}@zulip.com' for i in range(11))}
         result = self.client_post('/accounts/find/', data)
         self.assertEqual(result.status_code, 200)
         self.assertIn("Please enter at most 10", result.content.decode('utf8'))
@@ -3745,8 +3745,8 @@ class GetStreamsTest(ZulipTestCase):
         self.assert_json_success(owner_subs)
         owner_subs_json = orjson.loads(owner_subs.content)

-        self.assertEqual(sorted([s["name"] for s in json["streams"]]),
-                         sorted([s["name"] for s in owner_subs_json["subscriptions"]]))
+        self.assertEqual(sorted(s["name"] for s in json["streams"]),
+                         sorted(s["name"] for s in owner_subs_json["subscriptions"]))

         # Check it correctly lists the bot owner's subs and the
         # bot's subs
@@ -3767,7 +3767,7 @@ class GetStreamsTest(ZulipTestCase):
         self.assertIn("streams", json)
         self.assertIsInstance(json["streams"], list)

-        actual = sorted([s["name"] for s in json["streams"]])
+        actual = sorted(s["name"] for s in json["streams"])
         expected = [s["name"] for s in owner_subs_json["subscriptions"]]
         expected.append('Scotland')
         expected.sort()
@@ -3787,7 +3787,7 @@ class GetStreamsTest(ZulipTestCase):
         self.assertIn("streams", json)
         self.assertIsInstance(json["streams"], list)

-        actual = sorted([s["name"] for s in json["streams"]])
+        actual = sorted(s["name"] for s in json["streams"])
         expected = [s["name"] for s in owner_subs_json["subscriptions"]]
         expected.extend(['Rome', 'Venice', 'Scotland'])
         expected.sort()
@@ -3806,7 +3806,7 @@ class GetStreamsTest(ZulipTestCase):
         self.assertIn("streams", json)
         self.assertIsInstance(json["streams"], list)

-        actual = sorted([s["name"] for s in json["streams"]])
+        actual = sorted(s["name"] for s in json["streams"])
         expected = [s["name"] for s in owner_subs_json["subscriptions"]]
         expected.extend(['Rome', 'Venice', 'Scotland', 'private_stream'])
         expected.sort()
@@ -3878,8 +3878,8 @@ class GetStreamsTest(ZulipTestCase):
         self.assert_json_success(result2)
         json2 = orjson.loads(result2.content)

-        self.assertEqual(sorted([s["name"] for s in json["streams"]]),
-                         sorted([s["name"] for s in json2["subscriptions"]]))
+        self.assertEqual(sorted(s["name"] for s in json["streams"]),
+                         sorted(s["name"] for s in json2["subscriptions"]))

         # Check it correctly lists all public streams with include_subscribed=false
         filters = dict(
@@ -332,7 +332,7 @@ class EventQueue:
         virtual_id_map: Dict[str, Dict[str, Any]] = {}
         for event_type in self.virtual_events:
             virtual_id_map[self.virtual_events[event_type]["id"]] = self.virtual_events[event_type]
-        virtual_ids = sorted(list(virtual_id_map.keys()))
+        virtual_ids = sorted(virtual_id_map.keys())

         # Merge the virtual events into their final place in the queue
         index = 0
@@ -382,7 +382,7 @@ def remove_subscriptions_backend(
 def you_were_just_subscribed_message(acting_user: UserProfile,
                                      recipient_user: UserProfile,
                                      stream_names: Set[str]) -> str:
-    subscriptions = sorted(list(stream_names))
+    subscriptions = sorted(stream_names)
     if len(subscriptions) == 1:
         with override_language(recipient_user.default_language):
             return _("{user_full_name} subscribed you to the stream {stream_name}.").format(
@@ -49,7 +49,7 @@ def api_alertmanager_webhook(request: HttpRequest, user_profile: UserProfile,
     if len(messages) == 1:
         body = f"{icon} **{title}** {messages[0]}"
     else:
-        message_list = "\n".join([f"* {m}" for m in messages])
+        message_list = "\n".join(f"* {m}" for m in messages)
         body = f"{icon} **{title}**\n{message_list}"

     check_send_webhook_message(request, user_profile, topic, body)
@@ -20,7 +20,7 @@ def api_opsgenie_webhook(request: HttpRequest, user_profile: UserProfile,
         "alert_type": payload['action'],
        "alert_id": payload['alert']['alertId'],
         "integration_name": payload['integrationName'],
-        "tags": ', '.join(['`' + tag + '`' for tag in payload['alert'].get('tags', [])]),
+        "tags": ', '.join('`' + tag + '`' for tag in payload['alert'].get('tags', [])),
     }

     topic = info['integration_name']
@@ -181,8 +181,8 @@ class QueueProcessingWorker(ABC):
             self.update_statistics(0)

     def update_statistics(self, remaining_queue_size: int) -> None:
-        total_seconds = sum([seconds for _, seconds in self.recent_consume_times])
-        total_events = sum([events_number for events_number, _ in self.recent_consume_times])
+        total_seconds = sum(seconds for _, seconds in self.recent_consume_times)
+        total_events = sum(events_number for events_number, _ in self.recent_consume_times)
         if total_events == 0:
             recent_average_consume_time = None
         else:
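sum() follows the same rule as min, max, any, and all above: it consumes any iterable of numbers, so both accumulations work unchanged without the brackets. With invented (events, seconds) pairs:

    recent_consume_times = [(3, 0.25), (5, 0.75)]
    assert sum(seconds for _, seconds in recent_consume_times) == 1.0
    assert sum(events for events, _ in recent_consume_times) == 8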