ruff: Fix PERF401 Use a list comprehension to create a transformed list.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Author: Anders Kaseorg, 2023-07-31 13:52:35 -07:00; committed by Tim Abbott
parent 0b95d83f09
commit 562a79ab76
48 changed files with 495 additions and 674 deletions
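PERF401 flags loops whose body only appends a transformed element to a list. The general shape of the fix, sketched here on invented names rather than code from this commit:

    # Before: ruff PERF401 fires on an append-only loop.
    squares = []
    for n in range(10):
        squares.append(n * n)

    # After: the same list as a comprehension; it avoids the repeated
    # list.append lookup and states the intent directly.
    squares = [n * n for n in range(10)]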

View File

@@ -607,9 +607,8 @@ def get_installation_activity(request: HttpRequest) -> HttpResponse:
     data = [
         ("Counts", counts_content),
         ("Durations", duration_content),
+        *((page["title"], page["content"]) for page in ad_hoc_queries()),
     ]
-    for page in ad_hoc_queries():
-        data.append((page["title"], page["content"]))
     title = "Activity"

View File

@@ -376,9 +376,9 @@ def sponsorship(
         return json_success(request)
     else:
-        messages = []
-        for error_list in form.errors.get_json_data().values():
-            for error in error_list:
-                messages.append(error["message"])
-        message = " ".join(messages)
+        message = " ".join(
+            error["message"]
+            for error_list in form.errors.get_json_data().values()
+            for error in error_list
+        )
         raise BillingError("Form validation error", message=message)

View File

@@ -95,8 +95,7 @@ class FilteredManagementUtility(ManagementUtility):
         for app in sorted(commands_dict):
             usage.append("")
             usage.append(style.NOTICE(f"[{app}]"))
-            for name in sorted(commands_dict[app]):
-                usage.append(f" {name}")
+            usage.extend(f" {name}" for name in sorted(commands_dict[app]))
         # Output an extra note if settings are not properly configured
         if self.settings_exception is not None:
             usage.append(

View File

@@ -17,11 +17,7 @@ from scripts.lib.zulip_tools import ENDC, FAIL, WARNING
 def find_handlebars(translatable_strings: List[str]) -> List[str]:
-    errored = []
-    for string in translatable_strings:
-        if "{{" in string:
-            errored.append(string)
-    return errored
+    return [string for string in translatable_strings if "{{" in string]


 if __name__ == "__main__":

View File

@@ -4,7 +4,7 @@ import configparser
 import os
 import re
 import sys
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional

 import requests
@@ -64,7 +64,7 @@ def check_issue_labels() -> None:
         sys.exit(1)

    next_page_url: Optional[str] = "https://api.github.com/repos/zulip/zulip/issues"
-    unlabeled_issue_urls = []
+    unlabeled_issue_urls: List[str] = []
     while next_page_url:
         try:
             if args.force:
@@ -83,9 +83,11 @@ def check_issue_labels() -> None:
             sys.exit(1)

         next_page_url = get_next_page_url(response.headers["Link"])
-        for item in response.json():
-            if is_issue(item) and not area_labeled(item):
-                unlabeled_issue_urls.append(item["html_url"])
+        unlabeled_issue_urls.extend(
+            item["html_url"]
+            for item in response.json()
+            if is_issue(item) and not area_labeled(item)
+        )

     if len(unlabeled_issue_urls):
         print("The following issues don't have any area labels associated with it")

View File

@@ -352,8 +352,7 @@ async def serve() -> None:
     else:
         children.append(start_webpack_watcher())

-    for cmd in server_processes():
-        children.append(subprocess.Popen(cmd))
+    children.extend(subprocess.Popen(cmd) for cmd in server_processes())

     client = httpclient.AsyncHTTPClient()
     app = Application(enable_logging=options.enable_tornado_logging)

View File

@@ -138,10 +138,10 @@ def deactivated_streams_by_old_name(realm: Realm, stream_name: str) -> QuerySet[
     fixed_length_prefix = ".......!DEACTIVATED:"
     truncated_name = stream_name[0 : Stream.MAX_NAME_LENGTH - len(fixed_length_prefix)]

-    old_names: List[str] = []
-    for bang_length in range(1, 21):
-        name = "!" * bang_length + "DEACTIVATED:" + stream_name
-        old_names.append(name[0 : Stream.MAX_NAME_LENGTH])
+    old_names: List[str] = [
+        ("!" * bang_length + "DEACTIVATED:" + stream_name)[: Stream.MAX_NAME_LENGTH]
+        for bang_length in range(1, 21)
+    ]

     possible_streams = Stream.objects.filter(realm=realm, deactivated=True).filter(
         # We go looking for names as they are post-1b6f68bb59dc; 8
@@ -418,31 +418,22 @@ def bulk_add_subs_to_db_with_logging(
     event_time = timezone_now()
     event_last_message_id = get_last_message_id()

-    all_subscription_logs: (List[RealmAuditLog]) = []
-    for sub_info in subs_to_add:
-        all_subscription_logs.append(
+    all_subscription_logs = [
         RealmAuditLog(
             realm=realm,
             acting_user=acting_user,
             modified_user=sub_info.user,
             modified_stream=sub_info.stream,
             event_last_message_id=event_last_message_id,
-            event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
+            event_type=event_type,
             event_time=event_time,
         )
-        )
-    for sub_info in subs_to_activate:
-        all_subscription_logs.append(
-            RealmAuditLog(
-                realm=realm,
-                acting_user=acting_user,
-                modified_user=sub_info.user,
-                modified_stream=sub_info.stream,
-                event_last_message_id=event_last_message_id,
-                event_type=RealmAuditLog.SUBSCRIPTION_ACTIVATED,
-                event_time=event_time,
-            )
-        )
+        for event_type, subs in [
+            (RealmAuditLog.SUBSCRIPTION_CREATED, subs_to_add),
+            (RealmAuditLog.SUBSCRIPTION_ACTIVATED, subs_to_activate),
+        ]
+        for sub_info in subs
+    ]

     # Now since we have all log objects generated we can do a bulk insert
     RealmAuditLog.objects.bulk_create(all_subscription_logs)
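A side note on the hunk above: the two near-identical audit-log loops fold into a single comprehension by iterating over (event_type, subs) pairs. A minimal standalone sketch of the same pattern, with illustrative names:

    rows = [
        (tag, item)
        for tag, items in [
            ("created", ["a"]),
            ("activated", ["b", "c"]),
        ]
        for item in items
    ]
    # rows == [("created", "a"), ("activated", "b"), ("activated", "c")]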
@@ -800,24 +791,20 @@ def bulk_remove_subscriptions(
         subscribed_stream_ids = {sub_info.stream.id for sub_info in user_sub_stream_info}
         not_subscribed_stream_ids = stream_ids - subscribed_stream_ids

-        for stream_id in not_subscribed_stream_ids:
-            stream = stream_dict[stream_id]
-            not_subscribed.append((user_profile, stream))
+        not_subscribed.extend(
+            (user_profile, stream_dict[stream_id]) for stream_id in not_subscribed_stream_ids
+        )

         return not_subscribed

     not_subscribed = get_non_subscribed_subs()

-    subs_to_deactivate: List[SubInfo] = []
-    sub_ids_to_deactivate: List[int] = []
-
     # This loop just flattens out our data into big lists for
     # bulk operations.
-    for sub_infos in existing_subs_by_user.values():
-        for sub_info in sub_infos:
-            subs_to_deactivate.append(sub_info)
-            sub_ids_to_deactivate.append(sub_info.sub.id)
+    subs_to_deactivate = [
+        sub_info for sub_infos in existing_subs_by_user.values() for sub_info in sub_infos
+    ]
+    sub_ids_to_deactivate = [sub_info.sub.id for sub_info in subs_to_deactivate]

     streams_to_unsubscribe = [sub_info.stream for sub_info in subs_to_deactivate]

     # We do all the database changes in a transaction to ensure
     # RealmAuditLog entries are atomically created when making changes.

View File

@@ -352,10 +352,8 @@ def add_subgroups_to_user_group(
             event_time=now,
             acting_user=acting_user,
             extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
-        )
-    ]
-    for subgroup_id in subgroup_ids:
-        audit_log_entries.append(
+        ),
+        *(
             RealmAuditLog(
                 realm=user_group.realm,
                 modified_user_group_id=subgroup_id,
@@ -364,7 +362,9 @@ def add_subgroups_to_user_group(
                 acting_user=acting_user,
                 extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
             )
-        )
+            for subgroup_id in subgroup_ids
+        ),
+    ]

     RealmAuditLog.objects.bulk_create(audit_log_entries)
     do_send_subgroups_update_event("add_subgroups", user_group, subgroup_ids)
@@ -386,10 +386,8 @@ def remove_subgroups_from_user_group(
             event_time=now,
             acting_user=acting_user,
             extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
-        )
-    ]
-    for subgroup_id in subgroup_ids:
-        audit_log_entries.append(
+        ),
+        *(
             RealmAuditLog(
                 realm=user_group.realm,
                 modified_user_group_id=subgroup_id,
@@ -398,7 +396,9 @@ def remove_subgroups_from_user_group(
                 acting_user=acting_user,
                 extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
             )
-        )
+            for subgroup_id in subgroup_ids
+        ),
+    ]

     RealmAuditLog.objects.bulk_create(audit_log_entries)
     do_send_subgroups_update_event("remove_subgroups", user_group, subgroup_ids)

View File

@@ -100,10 +100,9 @@ def bulk_create_users(
         for profile_ in profiles_to_create
     )

-    recipients_to_create: List[Recipient] = []
-    for user_id in user_ids:
-        recipient = Recipient(type_id=user_id, type=Recipient.PERSONAL)
-        recipients_to_create.append(recipient)
+    recipients_to_create = [
+        Recipient(type_id=user_id, type=Recipient.PERSONAL) for user_id in user_ids
+    ]

     Recipient.objects.bulk_create(recipients_to_create)
@@ -115,15 +114,14 @@ def bulk_create_users(
     for recipient in recipients_to_create:
         recipients_by_user_id[recipient.type_id] = recipient

-    subscriptions_to_create: List[Subscription] = []
-    for user_profile in profiles_to_create:
-        recipient = recipients_by_user_id[user_profile.id]
-        subscription = Subscription(
+    subscriptions_to_create = [
+        Subscription(
             user_profile_id=user_profile.id,
-            recipient=recipient,
+            recipient=recipients_by_user_id[user_profile.id],
             is_user_active=user_profile.is_active,
         )
-        subscriptions_to_create.append(subscription)
+        for user_profile in profiles_to_create
+    ]

     Subscription.objects.bulk_create(subscriptions_to_create)
@@ -233,10 +231,11 @@ def bulk_create_streams(realm: Realm, stream_dict: Dict[str, Dict[str, Any]]) ->
     streams_to_create.sort(key=lambda x: x.name)
     Stream.objects.bulk_create(streams_to_create)

-    recipients_to_create: List[Recipient] = []
-    for stream in Stream.objects.filter(realm=realm).values("id", "name"):
-        if stream["name"].lower() not in existing_streams:
-            recipients_to_create.append(Recipient(type_id=stream["id"], type=Recipient.STREAM))
+    recipients_to_create = [
+        Recipient(type_id=stream["id"], type=Recipient.STREAM)
+        for stream in Stream.objects.filter(realm=realm).values("id", "name")
+        if stream["name"].lower() not in existing_streams
+    ]

     Recipient.objects.bulk_create(recipients_to_create)
     bulk_set_users_or_streams_recipient_fields(Stream, streams_to_create, recipients_to_create)

View File

@@ -523,8 +523,7 @@ def delete_user_profile_caches(user_profiles: Iterable["UserProfile"], realm: "R
     keys = []
     for user_profile in user_profiles:
         keys.append(user_profile_by_id_cache_key(user_profile.id))
-        for api_key in get_all_api_keys(user_profile):
-            keys.append(user_profile_by_api_key_cache_key(api_key))
+        keys += map(user_profile_by_api_key_cache_key, get_all_api_keys(user_profile))
         keys.append(user_profile_cache_key(user_profile.email, realm))
         keys.append(user_profile_delivery_email_cache_key(user_profile.delivery_email, realm))
         if user_profile.is_bot and is_cross_realm_bot_email(user_profile.email):

View File

@@ -489,25 +489,21 @@ def fetch_initial_state_data(
     # This does not yet have an apply_event counterpart, since currently,
     # new entries for EMBEDDED_BOTS can only be added directly in the codebase.
     if want("realm_embedded_bots"):
-        realm_embedded_bots = []
-        for bot in EMBEDDED_BOTS:
-            realm_embedded_bots.append(
+        state["realm_embedded_bots"] = [
             {"name": bot.name, "config": load_bot_config_template(bot.name)}
-            )
-        state["realm_embedded_bots"] = realm_embedded_bots
+            for bot in EMBEDDED_BOTS
+        ]

     # This does not have an apply_events counterpart either since
     # this data is mostly static.
     if want("realm_incoming_webhook_bots"):
-        realm_incoming_webhook_bots = []
-        for integration in WEBHOOK_INTEGRATIONS:
-            realm_incoming_webhook_bots.append(
+        state["realm_incoming_webhook_bots"] = [
             {
                 "name": integration.name,
                 "config": {c[1]: c[0] for c in integration.config_options},
             }
-            )
-        state["realm_incoming_webhook_bots"] = realm_incoming_webhook_bots
+            for integration in WEBHOOK_INTEGRATIONS
+        ]

     if want("recent_private_conversations"):
         # A data structure containing records of this form:

View File

@@ -1,6 +1,6 @@
 import logging
 import time
-from typing import Callable, List
+from typing import Callable, List, TypeVar

 from django.db import connection
 from django.db.backends.utils import CursorWrapper
@@ -8,6 +8,8 @@ from psycopg2.sql import SQL

 from zerver.models import UserProfile

+T = TypeVar("T")
+
 """
 NOTE! Be careful modifying this library, as it is used
 in a migration, and it needs to be valid for the state
@@ -31,18 +33,17 @@ def update_unread_flags(cursor: CursorWrapper, user_message_ids: List[int]) -> N
     cursor.execute(query, {"user_message_ids": tuple(user_message_ids)})


-def get_timing(message: str, f: Callable[[], None]) -> None:
+def get_timing(message: str, f: Callable[[], T]) -> T:
     start = time.time()
     logger.info(message)
-    f()
+    ret = f()
     elapsed = time.time() - start
     logger.info("elapsed time: %.03f\n", elapsed)
+    return ret


 def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
-    recipient_ids = []
-
-    def find_recipients() -> None:
+    def find_recipients() -> List[int]:
         query = SQL(
             """
             SELECT
@@ -61,11 +62,11 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
         )
         cursor.execute(query, {"user_profile_id": user_profile.id})
         rows = cursor.fetchall()
-        for row in rows:
-            recipient_ids.append(row[0])
+        recipient_ids = [row[0] for row in rows]
         logger.info("%s", recipient_ids)
+        return recipient_ids

-    get_timing(
+    recipient_ids = get_timing(
         "get recipients",
         find_recipients,
     )
@@ -73,9 +74,7 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
     if not recipient_ids:
         return

-    user_message_ids = []
-
-    def find() -> None:
+    def find() -> List[int]:
         query = SQL(
             """
             SELECT
@@ -101,11 +100,11 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
             },
         )
         rows = cursor.fetchall()
-        for row in rows:
-            user_message_ids.append(row[0])
+        user_message_ids = [row[0] for row in rows]
         logger.info("rows found: %d", len(user_message_ids))
+        return user_message_ids

-    get_timing(
+    user_message_ids = get_timing(
         "finding unread messages for non-active streams",
         find,
     )
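Beyond the comprehension fixes, this file makes get_timing generic so the timed callback can hand its result back to the caller instead of mutating enclosing-scope lists. A minimal sketch of the idiom, with illustrative names:

    import time
    from typing import Callable, TypeVar

    T = TypeVar("T")

    def timed(label: str, f: Callable[[], T]) -> T:
        start = time.time()
        ret = f()  # pass the callback's return value through
        print(f"{label}: {time.time() - start:.3f}s")
        return ret

    total = timed("sum", lambda: sum(range(1_000_000)))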

View File

@@ -19,23 +19,21 @@ def load_config() -> Dict[str, Any]:

 def generate_topics(num_topics: int) -> List[str]:
     config = load_config()["gen_fodder"]

-    topics = []
     # Make single word topics account for 30% of total topics.
     # Single word topics are most common, thus
     # it is important we test on it.
     num_single_word_topics = num_topics // 3
-    for _ in itertools.repeat(None, num_single_word_topics):
-        topics.append(random.choice(config["nouns"]))
+    topics = random.choices(config["nouns"], k=num_single_word_topics)

     sentence = ["adjectives", "nouns", "connectors", "verbs", "adverbs"]
     for pos in sentence:
         # Add an empty string so that we can generate variable length topics.
         config[pos].append("")

-    for _ in itertools.repeat(None, num_topics - num_single_word_topics):
-        generated_topic = [random.choice(config[pos]) for pos in sentence]
-        topic = " ".join(filter(None, generated_topic))
-        topics.append(topic)
+    topics.extend(
+        " ".join(word for pos in sentence if (word := random.choice(config[pos])) != "")
+        for _ in range(num_topics - num_single_word_topics)
+    )

     # Mark a small subset of topics as resolved in some streams, and
     # many topics in a few streams. Note that these don't have the
@@ -46,14 +44,10 @@ def generate_topics(num_topics: int) -> List[str]:
     else:
         resolved_topic_probability = 0.05

-    final_topics = []
-    for topic in topics:
-        if random.random() < resolved_topic_probability:
-            final_topics.append(RESOLVED_TOPIC_PREFIX + topic)
-        else:
-            final_topics.append(topic)
-
-    return final_topics
+    return [
+        RESOLVED_TOPIC_PREFIX + topic if random.random() < resolved_topic_probability else topic
+        for topic in topics
+    ]


 def load_generators(config: Dict[str, Any]) -> Dict[str, Any]:
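The single-word-topics loop above becomes one call to random.choices, which draws k samples with replacement, while the multi-word branch uses an assignment expression to drop empty words during the join. A small sketch of the choices call, on toy data:

    import random

    nouns = ["stream", "topic", "emoji"]
    picks = random.choices(nouns, k=5)  # five draws, with replacement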

View File

@@ -278,13 +278,14 @@ def fix_customprofilefield(data: TableData) -> None:
     In CustomProfileField with 'field_type' like 'USER', the IDs need to be
     re-mapped.
     """
-    field_type_USER_id_list = []
-    for item in data["zerver_customprofilefield"]:
-        if item["field_type"] == CustomProfileField.USER:
-            field_type_USER_id_list.append(item["id"])
+    field_type_USER_ids = {
+        item["id"]
+        for item in data["zerver_customprofilefield"]
+        if item["field_type"] == CustomProfileField.USER
+    }

     for item in data["zerver_customprofilefieldvalue"]:
-        if item["field_id"] in field_type_USER_id_list:
+        if item["field_id"] in field_type_USER_ids:
             old_user_id_list = orjson.loads(item["value"])
             new_id_list = re_map_foreign_keys_many_to_many_internal(
@@ -392,10 +393,7 @@ def current_table_ids(data: TableData, table: TableName) -> List[int]:
     """
     Returns the ids present in the current table
     """
-    id_list = []
-    for item in data[table]:
-        id_list.append(item["id"])
-    return id_list
+    return [item["id"] for item in data[table]]


 def idseq(model_class: Any) -> str:
@@ -1567,16 +1565,17 @@ def import_attachments(data: TableData) -> None:
     def format_m2m_data(
         child_singular: str, child_plural: str, m2m_table_name: str, child_id: str
     ) -> Tuple[str, List[Record], str]:
-        m2m_rows: List[Record] = []
-        for parent_row in data[parent_db_table_name]:
-            for fk_id in parent_row[child_plural]:
-                m2m_row: Record = {}
-                m2m_row[parent_singular] = parent_row["id"]
+        m2m_rows = [
+            {
+                parent_singular: parent_row["id"],
                 # child_singular will generally match the model name (e.g. Message, ScheduledMessage)
                 # after lowercasing, and that's what we enter as ID_MAP keys, so this should be
                 # a reasonable assumption to make.
-                m2m_row[child_singular] = ID_MAP[child_singular][fk_id]
-                m2m_rows.append(m2m_row)
+                child_singular: ID_MAP[child_singular][fk_id],
+            }
+            for parent_row in data[parent_db_table_name]
+            for fk_id in parent_row[child_plural]
+        ]

         # Create our table data for insert.
         m2m_data: TableData = {m2m_table_name: m2m_rows}

View File

@@ -858,24 +858,21 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
         to_process: List[Dict[str, Any]] = []
         # Build dicts for URLs
         for url_data in urls:
-            short_url = url_data["url"]
-            full_url = url_data["expanded_url"]
-            for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
-                to_process.append(
+            to_process.extend(
                 {
                     "type": "url",
                     "start": match.start(),
                     "end": match.end(),
-                    "url": short_url,
-                    "text": full_url,
+                    "url": url_data["url"],
+                    "text": url_data["expanded_url"],
                 }
+                for match in re.finditer(re.escape(url_data["url"]), text, re.IGNORECASE)
             )

         # Build dicts for mentions
         for user_mention in user_mentions:
             screen_name = user_mention["screen_name"]
             mention_string = "@" + screen_name
-            for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE):
-                to_process.append(
+            to_process.extend(
                 {
                     "type": "mention",
                     "start": match.start(),
@@ -883,13 +880,13 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
                     "url": "https://twitter.com/" + urllib.parse.quote(screen_name),
                     "text": mention_string,
                 }
+                for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE)
             )

         # Build dicts for media
         for media_item in media:
             short_url = media_item["url"]
             expanded_url = media_item["expanded_url"]
-            for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
-                to_process.append(
+            to_process.extend(
                 {
                     "type": "media",
                     "start": match.start(),
@@ -897,6 +894,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
                     "url": short_url,
                     "text": expanded_url,
                 }
+                for match in re.finditer(re.escape(short_url), text, re.IGNORECASE)
             )

         # Build dicts for emojis
         for match in re.finditer(UNICODE_EMOJI_RE, text, re.IGNORECASE):
@@ -1938,10 +1936,13 @@ class StreamTopicPattern(CompiledInlineProcessor):

 def possible_linked_stream_names(content: str) -> Set[str]:
-    matches = re.findall(STREAM_LINK_REGEX, content, re.VERBOSE)
-    for match in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE):
-        matches.append(match.group("stream_name"))
-    return set(matches)
+    return {
+        *re.findall(STREAM_LINK_REGEX, content, re.VERBOSE),
+        *(
+            match.group("stream_name")
+            for match in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE)
+        ),
+    }


 class AlertWordNotificationProcessor(markdown.preprocessors.Preprocessor):
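The possible_linked_stream_names rewrite above returns a set display that unpacks two sources, a findall list and a finditer generator, into one literal. The same shape in isolation, on toy values:

    combined = {
        *["a", "b"],
        *(s.upper() for s in ["b", "c"]),
    }
    # combined == {"a", "b", "B", "C"}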

View File

@@ -347,22 +347,22 @@ def get_users_for_soft_deactivation(
         .values("user_profile_id")
         .annotate(last_visit=Max("last_visit"))
     )
-    user_ids_to_deactivate = []
     today = timezone_now()
-    for user_activity in users_activity:
-        if (today - user_activity["last_visit"]).days > inactive_for_days:
-            user_ids_to_deactivate.append(user_activity["user_profile_id"])
+    user_ids_to_deactivate = [
+        user_activity["user_profile_id"]
+        for user_activity in users_activity
+        if (today - user_activity["last_visit"]).days > inactive_for_days
+    ]
     users_to_deactivate = list(UserProfile.objects.filter(id__in=user_ids_to_deactivate))
     return users_to_deactivate


 def do_soft_activate_users(users: List[UserProfile]) -> List[UserProfile]:
-    users_soft_activated = []
-    for user_profile in users:
-        user_activated = reactivate_user_if_soft_deactivated(user_profile)
-        if user_activated:
-            users_soft_activated.append(user_activated)
-    return users_soft_activated
+    return [
+        user_activated
+        for user_profile in users
+        if (user_activated := reactivate_user_if_soft_deactivated(user_profile)) is not None
+    ]


 def do_catch_up_soft_deactivated_users(users: Iterable[UserProfile]) -> List[UserProfile]:
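do_soft_activate_users above relies on an assignment expression (Python 3.8+) to call the helper once per user and keep only non-None results. The same filter-and-capture shape on invented names:

    from typing import Optional

    def parse(s: str) -> Optional[int]:
        try:
            return int(s)
        except ValueError:
            return None

    kept = [n for s in ["1", "x", "2"] if (n := parse(s)) is not None]
    # kept == [1, 2]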

View File

@@ -303,17 +303,15 @@ def get_accounts_for_email(email: str) -> List[Account]:
         )
         .order_by("date_joined")
     )
-    accounts: List[Account] = []
-    for profile in profiles:
-        accounts.append(
+    return [
         dict(
             realm_name=profile.realm.name,
             realm_id=profile.realm.id,
             full_name=profile.full_name,
             avatar=avatar_url(profile),
         )
-        )
-    return accounts
+        for profile in profiles
+    ]


 def get_api_key(user_profile: UserProfile) -> str:
@@ -615,13 +613,12 @@ def is_2fa_verified(user: UserProfile) -> bool:

 def get_users_with_access_to_real_email(user_profile: UserProfile) -> List[int]:
     active_users = user_profile.realm.get_active_users()
-    user_ids_with_real_email_access = []
-    for user in active_users:
+    return [
+        user.id
+        for user in active_users
         if can_access_delivery_email(
             user,
             user_profile.id,
             user_profile.email_address_visibility,
-        ):
-            user_ids_with_real_email_access.append(user.id)
-    return user_ids_with_real_email_access
+        )
+    ]

View File

@@ -82,9 +82,7 @@ def get_assignee_string(assignees: List[Dict[str, Any]]) -> str:
     if len(assignees) == 1:
         assignees_string = "{username}".format(**assignees[0])
     else:
-        usernames = []
-        for a in assignees:
-            usernames.append(a["username"])
+        usernames = [a["username"] for a in assignees]
         assignees_string = ", ".join(usernames[:-1]) + " and " + usernames[-1]
     return assignees_string

View File

@@ -12,11 +12,11 @@ NAME_INVALID_CHARS = ["*", "`", "\\", ">", '"', "@"]
 def remove_name_illegal_chars(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
     UserProfile = apps.get_model("zerver", "UserProfile")
     for user in UserProfile.objects.all():
-        stripped = []
-        for char in user.full_name:
-            if (char not in NAME_INVALID_CHARS) and (category(char)[0] != "C"):
-                stripped.append(char)
-        user.full_name = "".join(stripped)
+        user.full_name = "".join(
+            char
+            for char in user.full_name
+            if (char not in NAME_INVALID_CHARS) and (category(char)[0] != "C")
+        )
         user.save(update_fields=["full_name"])

View File

@@ -13,16 +13,15 @@ def create_nobody_system_user_group_for_existing_realms(
     NOBODY_GROUP_NAME = "@role:nobody"
     NOBODY_GROUP_DESCRIPTION = "Nobody"

-    groups_to_create = []
-    for realm in Realm.objects.all():
-        groups_to_create.append(
+    groups_to_create = [
         UserGroup(
             name=NOBODY_GROUP_NAME,
             description=NOBODY_GROUP_DESCRIPTION,
             realm=realm,
             is_system_group=True,
         )
-        )
+        for realm in Realm.objects.all()
+    ]
     UserGroup.objects.bulk_create(groups_to_create)

View File

@@ -1326,17 +1326,14 @@ def linkifiers_for_realm(realm_id: int) -> List[LinkifierDict]:

 @cache_with_key(get_linkifiers_cache_key, timeout=3600 * 24 * 7)
 def linkifiers_for_realm_remote_cache(realm_id: int) -> List[LinkifierDict]:
-    linkifiers = []
-    for linkifier in RealmFilter.objects.filter(realm_id=realm_id).order_by("id"):
-        linkifiers.append(
+    return [
         LinkifierDict(
             pattern=linkifier.pattern,
             url_template=linkifier.url_template,
             id=linkifier.id,
         )
-        )
-    return linkifiers
+        for linkifier in RealmFilter.objects.filter(realm_id=realm_id).order_by("id")
+    ]


 def flush_linkifiers(*, instance: RealmFilter, **kwargs: object) -> None:
@@ -1419,17 +1416,15 @@ class RealmPlayground(models.Model):

 def get_realm_playgrounds(realm: Realm) -> List[RealmPlaygroundDict]:
-    playgrounds: List[RealmPlaygroundDict] = []
-    for playground in RealmPlayground.objects.filter(realm=realm).all():
-        playgrounds.append(
+    return [
         RealmPlaygroundDict(
             id=playground.id,
             name=playground.name,
             pygments_language=playground.pygments_language,
             url_template=playground.url_template,
         )
-        )
-    return playgrounds
+        for playground in RealmPlayground.objects.filter(realm=realm).all()
+    ]


 class Recipient(models.Model):

View File

@@ -142,9 +142,9 @@ def update_flags_message_ids() -> Dict[str, object]:
     stream_name = "Venice"
     helpers.subscribe(helpers.example_user("iago"), stream_name)

-    messages = []
-    for _ in range(3):
-        messages.append(helpers.send_stream_message(helpers.example_user("iago"), stream_name))
+    messages = [
+        helpers.send_stream_message(helpers.example_user("iago"), stream_name) for _ in range(3)
+    ]
     return {
         "messages": messages,
     }

View File

@@ -149,20 +149,16 @@ def render_python_code_example(
     snippets = extract_code_example(function_source_lines, [], PYTHON_EXAMPLE_REGEX)

-    code_example = ["{tab|python}\n"]
-    code_example.append("```python")
-    code_example.extend(config)
-
-    for snippet in snippets:
-        for line in snippet:
+    return [
+        "{tab|python}\n",
+        "```python",
+        *config,
         # Remove one level of indentation and strip newlines
-            code_example.append(line[4:].rstrip())
-
-    code_example.append("print(result)")
-    code_example.append("\n")
-    code_example.append("```")
-
-    return code_example
+        *(line[4:].rstrip() for snippet in snippets for line in snippet),
+        "print(result)",
+        "\n",
+        "```",
+    ]


 def render_javascript_code_example(
@@ -193,9 +189,8 @@ def render_javascript_code_example(
     code_example.append(" const client = await zulipInit(config);")
     for snippet in snippets:
         code_example.append("")
-        for line in snippet:
-            # Strip newlines
-            code_example.append(" " + line.rstrip())
+        code_example.extend(" " + line.rstrip() for line in snippet)
     code_example.append("})();")
     code_example.append("```")

View File

@@ -1162,9 +1162,7 @@ def update_message_flags(client: Client) -> None:
         "topic": "Castle",
         "content": "I come not, friends, to steal away your hearts.",
     }
-    message_ids = []
-    for i in range(0, 3):
-        message_ids.append(client.send_message(request)["id"])
+    message_ids = [client.send_message(request)["id"] for i in range(3)]

     # {code_example|start}
     # Add the "read" flag to the messages with IDs in "message_ids"

View File

@@ -47,15 +47,16 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
     for endpoint in endpoint_list:
         article_name = endpoint + ".md"
         file_name = os.path.join(settings.DEPLOY_ROOT, "api_docs/", article_name)
-        curl_commands_to_test = []
         if os.path.exists(file_name):
             with open(file_name) as f:
-                for line in f:
+                curl_commands_to_test = [
                     # A typical example from the Markdown source looks like this:
                     # {generate_code_example(curl)|...|...}
-                    if line.startswith("{generate_code_example(curl"):
-                        curl_commands_to_test.append(line)
+                    line
+                    for line in f
+                    if line.startswith("{generate_code_example(curl")
+                ]
         else:
             # If the file doesn't exist, then it has been
             # deleted and its page is generated by the
@@ -64,7 +65,7 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
             endpoint_path, endpoint_method = get_endpoint_from_operationid(endpoint)
             endpoint_string = endpoint_path + ":" + endpoint_method
             command = f"{{generate_code_example(curl)|{endpoint_string}|example}}"
-            curl_commands_to_test.append(command)
+            curl_commands_to_test = [command]

         for line in curl_commands_to_test:
             # To do an end-to-end test on the documentation examples

View File

@@ -1587,10 +1587,9 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase, ABC):
         realm.save()

         stream_names = ["new_stream_1", "new_stream_2"]
-        streams = []
-        for stream_name in stream_names:
-            stream = ensure_stream(realm, stream_name, acting_user=None)
-            streams.append(stream)
+        streams = [
+            ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
+        ]

         referrer = self.example_user("hamlet")
         multiuse_obj = MultiuseInvite.objects.create(realm=realm, referred_by=referrer)
@@ -4598,10 +4597,9 @@ class GoogleAuthBackendTest(SocialAuthBase):
         realm.save()

         stream_names = ["new_stream_1", "new_stream_2"]
-        streams = []
-        for stream_name in stream_names:
-            stream = ensure_stream(realm, stream_name, acting_user=None)
-            streams.append(stream)
+        streams = [
+            ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
+        ]

         # Without the invite link, we can't create an account due to invite_required
         result = self.get_log_into_subdomain(data)
@@ -5032,9 +5030,8 @@ class ExternalMethodDictsTests(ZulipTestCase):
             expected_button_id_strings = [
                 'id="{}_auth_button_github"',
                 'id="{}_auth_button_google"',
+                *(f'id="{{}}_auth_button_saml:{name}"' for name in saml_idp_names),
             ]
-            for name in saml_idp_names:
-                expected_button_id_strings.append(f'id="{{}}_auth_button_saml:{name}"')

             result = self.client_get("/login/")
             self.assert_in_success_response(
@@ -5094,9 +5091,11 @@ class FetchAuthBackends(ZulipTestCase):
         ) -> None:
             authentication_methods_list = [
                 ("password", check_bool),
+                *(
+                    (backend_name_with_case.lower(), check_bool)
+                    for backend_name_with_case in AUTH_BACKEND_NAME_MAP
+                ),
             ]
-            for backend_name_with_case in AUTH_BACKEND_NAME_MAP:
-                authentication_methods_list.append((backend_name_with_case.lower(), check_bool))

             external_auth_methods = get_external_method_dicts()

             response_dict = self.assert_json_success(result)

View File

@@ -848,9 +848,10 @@ class NormalActionsTest(BaseAction):
     def test_invite_user_event(self) -> None:
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Scotland"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         events = self.verify_action(
@@ -866,9 +867,10 @@ class NormalActionsTest(BaseAction):
     def test_create_multiuse_invite_event(self) -> None:
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Verona"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Verona"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         events = self.verify_action(
@@ -902,9 +904,10 @@ class NormalActionsTest(BaseAction):
         # We need set self.user_profile to be an admin, so that
         # we receive the invites_changed event.
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Verona"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Verona"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_invite_users(
@@ -924,9 +927,10 @@ class NormalActionsTest(BaseAction):
     def test_revoke_multiuse_invite_event(self) -> None:
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Verona"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Verona"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_create_multiuse_invite_link(
@@ -947,9 +951,10 @@ class NormalActionsTest(BaseAction):
         reset_email_visibility_to_everyone_in_zulip_realm()
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Scotland"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_invite_users(
@@ -1462,9 +1467,10 @@ class NormalActionsTest(BaseAction):
         check_user_group_remove("events[0]", events[0])

     def test_default_stream_groups_events(self) -> None:
-        streams = []
-        for stream_name in ["Scotland", "Rome", "Denmark"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Scotland", "Rome", "Denmark"]
+        ]

         events = self.verify_action(
             lambda: do_create_default_stream_group(
@@ -1509,9 +1515,10 @@ class NormalActionsTest(BaseAction):
         check_default_stream_groups("events[0]", events[0])

     def test_default_stream_group_events_guest(self) -> None:
-        streams = []
-        for stream_name in ["Scotland", "Rome", "Denmark"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Scotland", "Rome", "Denmark"]
+        ]

         do_create_default_stream_group(self.user_profile.realm, "group1", "This is group1", streams)
         group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]

View File

@@ -1202,7 +1202,6 @@ class HomeTest(ZulipTestCase):
     # performance cost of fetching /.
     @override_settings(MAX_DRAFTS_IN_REGISTER_RESPONSE=5)
     def test_limit_drafts(self) -> None:
-        draft_objects = []
         hamlet = self.example_user("hamlet")
         base_time = timezone_now()
         initial_count = Draft.objects.count()
@@ -1210,8 +1209,7 @@ class HomeTest(ZulipTestCase):
         step_value = timedelta(seconds=1)
         # Create 11 drafts.
         # TODO: This would be better done as an API request.
-        for i in range(0, settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1):
-            draft_objects.append(
+        draft_objects = [
             Draft(
                 user_profile=hamlet,
                 recipient=None,
@@ -1219,7 +1217,8 @@ class HomeTest(ZulipTestCase):
                 content="sample draft",
                 last_edit_time=base_time + i * step_value,
             )
-            )
+            for i in range(settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1)
+        ]
         Draft.objects.bulk_create(draft_objects)

         # Now fetch the drafts part of the initial state and make sure

View File

@@ -178,9 +178,7 @@ class InviteUserBase(ZulipTestCase):
         streams should be a list of strings.
         """
-        stream_ids = []
-        for stream_name in stream_names:
-            stream_ids.append(self.get_stream_id(stream_name, realm=realm))
+        stream_ids = [self.get_stream_id(stream_name, realm=realm) for stream_name in stream_names]

         invite_expires_in: Union[str, Optional[int]] = invite_expires_in_minutes
         if invite_expires_in is None:
@@ -1475,9 +1473,10 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
     def test_send_more_than_one_invite_to_same_user(self) -> None:
         self.user_profile = self.example_user("iago")
-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, self.user_profile.realm))
+        streams = [
+            get_stream(stream_name, self.user_profile.realm)
+            for stream_name in ["Denmark", "Scotland"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_invite_users(
@@ -1705,9 +1704,9 @@ class InvitationsTestCase(InviteUserBase):
         hamlet = self.example_user("hamlet")
         othello = self.example_user("othello")

-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, user_profile.realm))
+        streams = [
+            get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_invite_users(
@@ -1761,9 +1760,9 @@ class InvitationsTestCase(InviteUserBase):
         hamlet = self.example_user("hamlet")
         othello = self.example_user("othello")

-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, user_profile.realm))
+        streams = [
+            get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
+        ]

         invite_expires_in_minutes = 2 * 24 * 60
         do_invite_users(
@@ -1815,9 +1814,9 @@ class InvitationsTestCase(InviteUserBase):
         self.login("iago")
         user_profile = self.example_user("iago")

-        streams = []
-        for stream_name in ["Denmark", "Scotland"]:
-            streams.append(get_stream(stream_name, user_profile.realm))
+        streams = [
+            get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
+        ]

         with patch(
             "confirmation.models.timezone_now",

View File

@@ -672,16 +672,17 @@ class MarkdownTest(ZulipTestCase):
     @override_settings(INLINE_IMAGE_PREVIEW=True)
     def test_max_inline_preview(self) -> None:
-        image_links = []
+        image_links = [
             # Add a youtube link within a spoiler to ensure other link types are counted
-        image_links.append(
-            """```spoiler Check out this PyCon video\nhttps://www.youtube.com/watch?v=0c46YHS3RY8\n```"""
-        )
+            """```spoiler Check out this PyCon video\nhttps://www.youtube.com/watch?v=0c46YHS3RY8\n```""",
             # Add a link within blockquote to test that it does NOT get counted
-        image_links.append("> http://cdn.wallpapersafari.com/spoiler/dont_count.jpeg\n")
+            "> http://cdn.wallpapersafari.com/spoiler/dont_count.jpeg\n",
             # Using INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1 because of the one link in a spoiler added already
-        for x in range(InlineInterestingLinkProcessor.INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1):
-            image_links.append(f"http://cdn.wallpapersafari.com/{x}/6/16eVjx.jpeg")
+            *(
+                f"http://cdn.wallpapersafari.com/{x}/6/16eVjx.jpeg"
+                for x in range(InlineInterestingLinkProcessor.INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1)
+            ),
+        ]
         within_limit_content = "\n".join(image_links)
         above_limit_content = (
             within_limit_content + "\nhttp://cdn.wallpapersafari.com/above/0/6/16eVjx.jpeg"

View File

@@ -2075,9 +2075,7 @@ class GetOldMessagesTest(ZulipTestCase):
         self.login_user(me)
         self.subscribe(self.example_user("hamlet"), "Scotland")

-        message_ids = []
-        for i in range(5):
-            message_ids.append(self.send_personal_message(me, self.example_user("iago")))
+        message_ids = [self.send_personal_message(me, self.example_user("iago")) for i in range(5)]

         narrow = [dict(operator="dm", operand=self.example_user("iago").email)]
         self.message_visibility_test(narrow, message_ids, 2)
@@ -2093,64 +2091,46 @@ class GetOldMessagesTest(ZulipTestCase):
         cordelia = self.example_user("cordelia")
         othello = self.example_user("othello")

-        matching_message_ids = []
+        matching_message_ids = [
             # group direct message, sent by current user
-        matching_message_ids.append(
             self.send_huddle_message(
                 me,
                 [iago, cordelia, othello],
             ),
-        )
             # group direct message, sent by searched user
-        matching_message_ids.append(
             self.send_huddle_message(
                 cordelia,
                 [me, othello],
             ),
-        )
             # group direct message, sent by another user
-        matching_message_ids.append(
             self.send_huddle_message(
                 othello,
                 [me, cordelia],
             ),
-        )
             # direct 1:1 message, sent by current user to searched user
-        matching_message_ids.append(
             self.send_personal_message(me, cordelia),
-        )
             # direct 1:1 message, sent by searched user to current user
-        matching_message_ids.append(
             self.send_personal_message(cordelia, me),
-        )
+        ]

-        non_matching_message_ids = []
+        non_matching_message_ids = [
             # direct 1:1 message, does not include current user
-        non_matching_message_ids.append(
             self.send_personal_message(iago, cordelia),
-        )
             # direct 1:1 message, does not include searched user
-        non_matching_message_ids.append(
             self.send_personal_message(iago, me),
-        )
             # direct 1:1 message, current user to self
-        non_matching_message_ids.append(
             self.send_personal_message(me, me),
-        )
             # group direct message, sent by current user
-        non_matching_message_ids.append(
             self.send_huddle_message(
                 me,
                 [iago, othello],
             ),
-        )
             # group direct message, sent by searched user
-        non_matching_message_ids.append(
             self.send_huddle_message(
                 cordelia,
                 [iago, othello],
             ),
-        )
+        ]

         self.login_user(me)
         test_operands = [cordelia.email, cordelia.id]
@@ -2169,27 +2149,22 @@ class GetOldMessagesTest(ZulipTestCase):
         cordelia = self.example_user("cordelia")
         othello = self.example_user("othello")

-        message_ids = []
-        message_ids.append(
+        message_ids = [
             self.send_huddle_message(
                 me,
                 [iago, cordelia, othello],
             ),
-        )
-        message_ids.append(self.send_personal_message(me, cordelia))
-        message_ids.append(
+            self.send_personal_message(me, cordelia),
             self.send_huddle_message(
                 cordelia,
                 [me, othello],
             ),
-        )
-        message_ids.append(self.send_personal_message(cordelia, me))
-        message_ids.append(
+            self.send_personal_message(cordelia, me),
             self.send_huddle_message(
                 iago,
                 [cordelia, me],
             ),
-        )
+        ]

         narrow = [dict(operator="dm-including", operand=cordelia.email)]
         self.message_visibility_test(narrow, message_ids, 2)
@@ -2205,41 +2180,28 @@ class GetOldMessagesTest(ZulipTestCase):
         cordelia = self.example_user("cordelia")
         othello = self.example_user("othello")

-        matching_message_ids = []
-        matching_message_ids.append(
+        matching_message_ids = [
             self.send_huddle_message(
                 me,
                 [iago, cordelia, othello],
             ),
-        )
-        matching_message_ids.append(
             self.send_huddle_message(
                 me,
                 [cordelia, othello],
             ),
-        )
+        ]

-        non_matching_message_ids = []
-        non_matching_message_ids.append(
+        non_matching_message_ids = [
             self.send_personal_message(me, cordelia),
-        )
-        non_matching_message_ids.append(
             self.send_huddle_message(
                 me,
                 [iago, othello],
             ),
-        )
-        non_matching_message_ids.append(
             self.send_huddle_message(
                 self.example_user("cordelia"),
                 [iago, othello],
             ),
-        )
+        ]

         self.login_user(me)
         test_operands = [cordelia.email, cordelia.id]
@@ -2258,25 +2220,20 @@ class GetOldMessagesTest(ZulipTestCase):
         cordelia = self.example_user("cordelia")
         othello = self.example_user("othello")

-        message_ids = []
-        message_ids.append(
+        message_ids = [
             self.send_huddle_message(
                 me,
                 [iago, cordelia, othello],
             ),
-        )
-        message_ids.append(
             self.send_huddle_message(
                 me,
                 [cordelia, othello],
             ),
-        )
-        message_ids.append(
             self.send_huddle_message(
                 me,
                 [cordelia, iago],
             ),
-        )
+        ]

         narrow = [dict(operator="group-pm-with", operand=cordelia.email)]
         self.message_visibility_test(narrow, message_ids, 1)
@ -2352,9 +2309,9 @@ class GetOldMessagesTest(ZulipTestCase):
self.login("hamlet") self.login("hamlet")
self.subscribe(self.example_user("hamlet"), "Scotland") self.subscribe(self.example_user("hamlet"), "Scotland")
message_ids = [] message_ids = [
for i in range(5): self.send_stream_message(self.example_user("iago"), "Scotland") for i in range(5)
message_ids.append(self.send_stream_message(self.example_user("iago"), "Scotland")) ]
narrow = [dict(operator="stream", operand="Scotland")] narrow = [dict(operator="stream", operand="Scotland")]
self.message_visibility_test(narrow, message_ids, 2) self.message_visibility_test(narrow, message_ids, 2)
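Sketch of the range-based variant above, with a hypothetical send_message() helper; the loop variable is unused, so it survives only to control the repetition count:

def send_message() -> int:
    # Hypothetical helper returning a fake message id.
    return 42

# Before:
message_ids = []
for i in range(5):
    message_ids.append(send_message())

# After (PERF401): one comprehension; an unused loop variable is
# conventionally spelled _ in new code.
message_ids = [send_message() for i in range(5)]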
@ -2695,13 +2652,12 @@ class GetOldMessagesTest(ZulipTestCase):
), ),
] ]
message_ids = [] message_ids = [
for topic, content in messages_to_search:
message_ids.append(
self.send_stream_message( self.send_stream_message(
self.example_user("iago"), "Scotland", topic_name=topic, content=content self.example_user("iago"), "Scotland", topic_name=topic, content=content
) )
) for topic, content in messages_to_search
]
self._update_tsvector_index() self._update_tsvector_index()
narrow = [dict(operator="search", operand="Hogwart's")] narrow = [dict(operator="search", operand="Hogwart's")]
self.message_visibility_test(narrow, message_ids, 2) self.message_visibility_test(narrow, message_ids, 2)
@ -3045,9 +3001,9 @@ class GetOldMessagesTest(ZulipTestCase):
Message.objects.all().delete() Message.objects.all().delete()
message_ids = [] message_ids = [
for i in range(10): self.send_stream_message(self.example_user("cordelia"), "Verona") for i in range(10)
message_ids.append(self.send_stream_message(self.example_user("cordelia"), "Verona")) ]
data = self.get_messages_response(anchor=message_ids[9], num_before=9, num_after=0) data = self.get_messages_response(anchor=message_ids[9], num_before=9, num_after=0)
@ -4261,26 +4217,20 @@ class MessageHasKeywordsTest(ZulipTestCase):
assert_attachment_claimed(dummy_path_ids[1], False) assert_attachment_claimed(dummy_path_ids[1], False)
def test_finds_all_links(self) -> None: def test_finds_all_links(self) -> None:
msg_ids = []
msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"] msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
for msg_content in msg_contents: msg_ids = [
msg_ids.append( self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
self.send_stream_message( for msg_content in msg_contents
self.example_user("hamlet"), "Denmark", content=msg_content ]
)
)
msgs = [Message.objects.get(id=id) for id in msg_ids] msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertTrue(all(msg.has_link for msg in msgs)) self.assertTrue(all(msg.has_link for msg in msgs))
def test_finds_only_links(self) -> None: def test_finds_only_links(self) -> None:
msg_ids = []
msg_contents = ["`example.org`", "``example.org```", "$$https://example.org$$", "foo"] msg_contents = ["`example.org`", "``example.org```", "$$https://example.org$$", "foo"]
for msg_content in msg_contents: msg_ids = [
msg_ids.append( self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
self.send_stream_message( for msg_content in msg_contents
self.example_user("hamlet"), "Denmark", content=msg_content ]
)
)
msgs = [Message.objects.get(id=id) for id in msg_ids] msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertFalse(all(msg.has_link for msg in msgs)) self.assertFalse(all(msg.has_link for msg in msgs))
@ -4321,19 +4271,16 @@ class MessageHasKeywordsTest(ZulipTestCase):
self.assertFalse(msg.has_link) self.assertFalse(msg.has_link)
def test_has_image(self) -> None: def test_has_image(self) -> None:
msg_ids = []
msg_contents = [ msg_contents = [
"Link: foo.org", "Link: foo.org",
"Image: https://www.google.com/images/srpr/logo4w.png", "Image: https://www.google.com/images/srpr/logo4w.png",
"Image: https://www.google.com/images/srpr/logo4w.pdf", "Image: https://www.google.com/images/srpr/logo4w.pdf",
"[Google link](https://www.google.com/images/srpr/logo4w.png)", "[Google link](https://www.google.com/images/srpr/logo4w.png)",
] ]
for msg_content in msg_contents: msg_ids = [
msg_ids.append( self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
self.send_stream_message( for msg_content in msg_contents
self.example_user("hamlet"), "Denmark", content=msg_content ]
)
)
msgs = [Message.objects.get(id=id) for id in msg_ids] msgs = [Message.objects.get(id=id) for id in msg_ids]
self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs]) self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])
@ -1352,18 +1352,14 @@ class StreamMessagesTest(ZulipTestCase):
if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT
] ]
old_subscriber_messages = [] old_subscriber_messages = list(map(message_stream_count, subscribers))
for subscriber in subscribers:
old_subscriber_messages.append(message_stream_count(subscriber))
non_subscribers = [ non_subscribers = [
user_profile user_profile
for user_profile in UserProfile.objects.all() for user_profile in UserProfile.objects.all()
if user_profile not in subscribers if user_profile not in subscribers
] ]
old_non_subscriber_messages = [] old_non_subscriber_messages = list(map(message_stream_count, non_subscribers))
for non_subscriber in non_subscribers:
old_non_subscriber_messages.append(message_stream_count(non_subscriber))
non_bot_subscribers = [ non_bot_subscribers = [
user_profile for user_profile in subscribers if not user_profile.is_bot user_profile for user_profile in subscribers if not user_profile.is_bot
@ -1373,14 +1369,10 @@ class StreamMessagesTest(ZulipTestCase):
self.send_stream_message(a_subscriber, stream_name, content=content, topic_name=topic_name) self.send_stream_message(a_subscriber, stream_name, content=content, topic_name=topic_name)
# Did all of the subscribers get the message? # Did all of the subscribers get the message?
new_subscriber_messages = [] new_subscriber_messages = list(map(message_stream_count, subscribers))
for subscriber in subscribers:
new_subscriber_messages.append(message_stream_count(subscriber))
# Did non-subscribers not get the message? # Did non-subscribers not get the message?
new_non_subscriber_messages = [] new_non_subscriber_messages = list(map(message_stream_count, non_subscribers))
for non_subscriber in non_subscribers:
new_non_subscriber_messages.append(message_stream_count(non_subscriber))
self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages) self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages]) self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])
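These hunks use list(map(...)) rather than a comprehension; either form satisfies PERF401. A sketch, under the assumption that message_stream_count takes a single user argument:

from typing import List

def message_stream_count(user: str) -> int:
    # Hypothetical counter; the real helper counts a user's messages.
    return len(user)

subscribers = ["hamlet", "iago"]

# Equivalent spellings for a one-argument function:
old_counts: List[int] = list(map(message_stream_count, subscribers))
assert old_counts == [message_stream_count(user) for user in subscribers]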
@ -2009,17 +2001,12 @@ class PersonalMessageSendTest(ZulipTestCase):
test_email = self.nonreg_email("test1") test_email = self.nonreg_email("test1")
self.register(test_email, "test1") self.register(test_email, "test1")
old_messages = [] old_messages = list(map(message_stream_count, old_user_profiles))
for user_profile in old_user_profiles:
old_messages.append(message_stream_count(user_profile))
user_profile = self.nonreg_user("test1") user_profile = self.nonreg_user("test1")
self.send_personal_message(user_profile, user_profile) self.send_personal_message(user_profile, user_profile)
new_messages = [] new_messages = list(map(message_stream_count, old_user_profiles))
for user_profile in old_user_profiles:
new_messages.append(message_stream_count(user_profile))
self.assertEqual(old_messages, new_messages) self.assertEqual(old_messages, new_messages)
user_profile = self.nonreg_user("test1") user_profile = self.nonreg_user("test1")
@ -2037,17 +2024,12 @@ class PersonalMessageSendTest(ZulipTestCase):
receiver_messages = message_stream_count(receiver) receiver_messages = message_stream_count(receiver)
other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) & ~Q(id=receiver.id)) other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) & ~Q(id=receiver.id))
old_other_messages = [] old_other_messages = list(map(message_stream_count, other_user_profiles))
for user_profile in other_user_profiles:
old_other_messages.append(message_stream_count(user_profile))
self.send_personal_message(sender, receiver, content) self.send_personal_message(sender, receiver, content)
# Users outside the conversation don't get the message. # Users outside the conversation don't get the message.
new_other_messages = [] new_other_messages = list(map(message_stream_count, other_user_profiles))
for user_profile in other_user_profiles:
new_other_messages.append(message_stream_count(user_profile))
self.assertEqual(old_other_messages, new_other_messages) self.assertEqual(old_other_messages, new_other_messages)
# The personal message is in the streams of both the sender and receiver. # The personal message is in the streams of both the sender and receiver.
@ -796,9 +796,7 @@ class WorkerTest(ZulipTestCase):
base_classes = [QueueProcessingWorker] base_classes = [QueueProcessingWorker]
all_classes = [] all_classes = []
while base_classes: while base_classes:
new_subclasses = [] new_subclasses = (base_class.__subclasses__() for base_class in base_classes)
for base_class in base_classes:
new_subclasses.append(base_class.__subclasses__())
base_classes = list(itertools.chain(*new_subclasses)) base_classes = list(itertools.chain(*new_subclasses))
all_classes += base_classes all_classes += base_classes
worker_queue_names = { worker_queue_names = {
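The worker-discovery loop now builds a generator of subclass lists and flattens it in one step. A sketch of that flattening; itertools.chain.from_iterable(new_subclasses) would be an equivalent spelling:

import itertools

class Base:
    pass

class A(Base):
    pass

class B(Base):
    pass

base_classes = [Base]
new_subclasses = (base_class.__subclasses__() for base_class in base_classes)
# chain(*gen) consumes the generator and flattens it by one level.
flattened = list(itertools.chain(*new_subclasses))
assert flattened == [A, B]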
@ -278,15 +278,14 @@ class RealmExportTest(ZulipTestCase):
current_log = RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_EXPORTED) current_log = RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_EXPORTED)
self.assert_length(current_log, 0) self.assert_length(current_log, 0)
exports = [] exports = [
for i in range(0, 5):
exports.append(
RealmAuditLog( RealmAuditLog(
realm=admin.realm, realm=admin.realm,
event_type=RealmAuditLog.REALM_EXPORTED, event_type=RealmAuditLog.REALM_EXPORTED,
event_time=timezone_now(), event_time=timezone_now(),
) )
) for i in range(5)
]
RealmAuditLog.objects.bulk_create(exports) RealmAuditLog.objects.bulk_create(exports)
with self.assertRaises(JsonableError) as error: with self.assertRaises(JsonableError) as error:
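Here the comprehension builds every row before a single bulk insert, and range(0, 5) becomes the equivalent range(5). A sketch with a plain dataclass standing in for the Django model:

from dataclasses import dataclass

@dataclass
class AuditLogStub:
    # Hypothetical stand-in for a RealmAuditLog row.
    event_type: str

# The unused index only controls how many rows are built.
exports = [AuditLogStub(event_type="realm_exported") for i in range(5)]
assert len(exports) == 5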
@ -62,12 +62,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
do_soft_deactivate_users(users) do_soft_deactivate_users(users)
log_output = [] log_output = [
for user in users: *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}") f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
log_output.append( ]
f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
)
self.assertEqual(m.output, log_output) self.assertEqual(m.output, log_output)
@ -118,13 +116,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
do_soft_deactivate_users(users) do_soft_deactivate_users(users)
log_output = [] log_output = [
for user in users: *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}") f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
log_output.append( ]
f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
)
self.assertEqual(m.output, log_output) self.assertEqual(m.output, log_output)
for user in users: for user in users:
@ -133,10 +128,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
do_soft_activate_users(users) do_soft_activate_users(users)
log_output = [] log_output = [f"INFO:{logger_string}:Soft reactivated user {user.id}" for user in users]
for user in users:
log_output.append(f"INFO:{logger_string}:Soft reactivated user {user.id}")
self.assertEqual(m.output, log_output) self.assertEqual(m.output, log_output)
for user in users: for user in users:
@ -181,13 +173,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
do_soft_deactivate_users(users) do_soft_deactivate_users(users)
log_output = [] log_output = [
for user in users: *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}") f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
log_output.append( ]
f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
)
self.assertEqual(m.output, log_output) self.assertEqual(m.output, log_output)
for user in users: for user in users:
@ -243,13 +232,11 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
users_deactivated = do_auto_soft_deactivate_users(-1, realm) users_deactivated = do_auto_soft_deactivate_users(-1, realm)
log_output = [] log_output = [
for user in users: *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}") f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
log_output.append( f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users",
f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process" ]
)
log_output.append(f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users")
self.assertEqual(set(m.output), set(log_output)) self.assertEqual(set(m.output), set(log_output))
self.assert_length(users_deactivated, len(users)) self.assert_length(users_deactivated, len(users))
@ -267,8 +254,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
with self.assertLogs(logger_string, level="INFO") as m: with self.assertLogs(logger_string, level="INFO") as m:
users_deactivated = do_auto_soft_deactivate_users(-1, realm) users_deactivated = do_auto_soft_deactivate_users(-1, realm)
log_output = [] log_output = [f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users"]
log_output.append(f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users")
self.assertEqual(set(m.output), set(log_output)) self.assertEqual(set(m.output), set(log_output))
self.assert_length(users_deactivated, 0) # all users are already deactivated self.assert_length(users_deactivated, 0) # all users are already deactivated
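The soft-deactivation hunks splice a generator of per-user lines into a list literal with iterable unpacking, followed by the fixed summary lines. A sketch of the pattern:

user_ids = [1, 2, 3]

# *(...) expands the generated lines in place; trailing items follow them.
log_output = [
    *(f"Soft deactivated user {user_id}" for user_id in user_ids),
    f"Soft-deactivated batch of {len(user_ids)} users",
]
assert len(log_output) == len(user_ids) + 1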
@ -1289,15 +1289,11 @@ class StreamAdminTest(ZulipTestCase):
def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None: def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None:
realm = get_realm("zulip") realm = get_realm("zulip")
streams_to_keep = [] streams_to_keep = [
for stream_name in ["stream1", "stream2"]: ensure_stream(realm, stream_name, acting_user=None)
stream = ensure_stream(realm, stream_name, acting_user=None) for stream_name in ["stream1", "stream2"]
streams_to_keep.append(stream) ]
streams_to_remove = [ensure_stream(realm, "stream3", acting_user=None)]
streams_to_remove = []
stream = ensure_stream(realm, "stream3", acting_user=None)
streams_to_remove.append(stream)
all_streams = streams_to_keep + streams_to_remove all_streams = streams_to_keep + streams_to_remove
def get_streams(group: DefaultStreamGroup) -> List[Stream]: def get_streams(group: DefaultStreamGroup) -> List[Stream]:
@ -2471,12 +2467,7 @@ class StreamAdminTest(ZulipTestCase):
self.make_stream(stream_name, invite_only=invite_only) self.make_stream(stream_name, invite_only=invite_only)
# Set up the principal to be unsubscribed. # Set up the principal to be unsubscribed.
principals: List[Union[str, int]] = [] principals = [user.email if using_legacy_emails else user.id for user in target_users]
for user in target_users:
if using_legacy_emails:
principals.append(user.email)
else:
principals.append(user.id)
# Subscribe the admin and/or principal as specified in the flags. # Subscribe the admin and/or principal as specified in the flags.
if is_subbed: if is_subbed:
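The if/else append pair above becomes one comprehension with a conditional expression. A sketch with a minimal hypothetical user type:

class FakeUser:
    # Hypothetical user carrying the two fields the rewrite reads.
    def __init__(self, email: str, user_id: int) -> None:
        self.email = email
        self.id = user_id

target_users = [FakeUser("iago@zulip.com", 1), FakeUser("othello@zulip.com", 2)]
using_legacy_emails = True

principals = [user.email if using_legacy_emails else user.id for user in target_users]
assert principals == ["iago@zulip.com", "othello@zulip.com"]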
@ -2893,10 +2884,10 @@ class DefaultStreamGroupTest(ZulipTestCase):
default_stream_groups = get_default_stream_groups(realm) default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0) self.assert_length(default_stream_groups, 0)
streams = [] streams = [
for stream_name in ["stream1", "stream2", "stream3"]: ensure_stream(realm, stream_name, acting_user=None)
stream = ensure_stream(realm, stream_name, acting_user=None) for stream_name in ["stream1", "stream2", "stream3"]
streams.append(stream) ]
def get_streams(group: DefaultStreamGroup) -> List[Stream]: def get_streams(group: DefaultStreamGroup) -> List[Stream]:
return list(group.streams.all().order_by("name")) return list(group.streams.all().order_by("name"))
@ -2920,11 +2911,11 @@ class DefaultStreamGroupTest(ZulipTestCase):
"stream8", "stream8",
"stream9", "stream9",
] ]
new_streams = [] new_streams = [
for new_stream_name in new_stream_names: ensure_stream(realm, new_stream_name, acting_user=None)
new_stream = ensure_stream(realm, new_stream_name, acting_user=None) for new_stream_name in new_stream_names
new_streams.append(new_stream) ]
streams.append(new_stream) streams += new_streams
do_add_streams_to_default_stream_group(realm, group, new_streams) do_add_streams_to_default_stream_group(realm, group, new_streams)
default_stream_groups = get_default_stream_groups(realm) default_stream_groups = get_default_stream_groups(realm)
@ -2980,13 +2971,12 @@ class DefaultStreamGroupTest(ZulipTestCase):
stream_names = ["stream1", "stream2", "stream3"] stream_names = ["stream1", "stream2", "stream3"]
group_name = "group1" group_name = "group1"
description = "This is group1" description = "This is group1"
streams = []
default_stream_groups = get_default_stream_groups(realm) default_stream_groups = get_default_stream_groups(realm)
self.assert_length(default_stream_groups, 0) self.assert_length(default_stream_groups, 0)
for stream_name in stream_names: streams = [
stream = ensure_stream(realm, stream_name, acting_user=None) ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
streams.append(stream) ]
result = self.client_post( result = self.client_post(
"/json/default_stream_groups/create", "/json/default_stream_groups/create",
@ -3017,11 +3007,11 @@ class DefaultStreamGroupTest(ZulipTestCase):
# Test adding streams to existing default stream group # Test adding streams to existing default stream group
group_id = default_stream_groups[0].id group_id = default_stream_groups[0].id
new_stream_names = ["stream4", "stream5"] new_stream_names = ["stream4", "stream5"]
new_streams = [] new_streams = [
for new_stream_name in new_stream_names: ensure_stream(realm, new_stream_name, acting_user=None)
new_stream = ensure_stream(realm, new_stream_name, acting_user=None) for new_stream_name in new_stream_names
new_streams.append(new_stream) ]
streams.append(new_stream) streams += new_streams
result = self.client_patch( result = self.client_patch(
f"/json/default_stream_groups/{group_id}/streams", f"/json/default_stream_groups/{group_id}/streams",
@ -3169,11 +3159,8 @@ class DefaultStreamGroupTest(ZulipTestCase):
stream_names = ["stream1", "stream2", "stream3"] stream_names = ["stream1", "stream2", "stream3"]
description = "This is group1" description = "This is group1"
streams = []
for stream_name in stream_names: for stream_name in stream_names:
stream = ensure_stream(realm, stream_name, acting_user=None) ensure_stream(realm, stream_name, acting_user=None)
streams.append(stream)
result = self.client_post( result = self.client_post(
"/json/default_stream_groups/create", "/json/default_stream_groups/create",
@ -3885,15 +3872,14 @@ class SubscriptionAPITest(ZulipTestCase):
""" """
Helper function to make up random stream names. It takes Helper function to make up random stream names. It takes
existing_stream_names and randomly appends a digit to the end of each, existing_stream_names and randomly appends a digit to the end of each,
but avoids names that appear in the list names_to_avoid. but avoids names of streams already in the realm.
""" """
random_streams = []
all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.test_realm)] all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.test_realm)]
for stream in existing_stream_names: return [
random_stream = stream + str(random.randint(0, 9)) random_stream
if random_stream not in all_stream_names: for stream in existing_stream_names
random_streams.append(random_stream) if (random_stream := stream + str(random.randint(0, 9))) not in all_stream_names
return random_streams ]
def test_invalid_stream_name(self) -> None: def test_invalid_stream_name(self) -> None:
""" """
@ -5159,10 +5145,11 @@ class SubscriptionAPITest(ZulipTestCase):
""" """
self.assertGreaterEqual(len(self.streams), 2) self.assertGreaterEqual(len(self.streams), 2)
streams_to_remove = self.streams[1:] streams_to_remove = self.streams[1:]
not_subbed = [] not_subbed = [
for stream in Stream.objects.filter(realm=get_realm("zulip")): stream.name
if stream.name not in self.streams: for stream in Stream.objects.filter(realm=get_realm("zulip"))
not_subbed.append(stream.name) if stream.name not in self.streams
]
random.shuffle(not_subbed) random.shuffle(not_subbed)
self.assertNotEqual(len(not_subbed), 0) # necessary for full test coverage self.assertNotEqual(len(not_subbed), 0) # necessary for full test coverage
try_to_remove = not_subbed[:3] # attempt to remove up to 3 streams not already subbed to try_to_remove = not_subbed[:3] # attempt to remove up to 3 streams not already subbed to
@ -497,9 +497,9 @@ def remove_subscriptions_backend(
) -> HttpResponse: ) -> HttpResponse:
realm = user_profile.realm realm = user_profile.realm
streams_as_dict: List[StreamDict] = [] streams_as_dict: List[StreamDict] = [
for stream_name in streams_raw: {"name": stream_name.strip()} for stream_name in streams_raw
streams_as_dict.append({"name": stream_name.strip()}) ]
unsubscribing_others = False unsubscribing_others = False
if principals: if principals:
@ -71,10 +71,7 @@ def get_code_push_commits_body(payload: WildValue) -> str:
payload["resource"]["refUpdates"][0]["oldObjectId"].tame(check_string), payload["resource"]["refUpdates"][0]["oldObjectId"].tame(check_string),
payload["resource"]["refUpdates"][0]["newObjectId"].tame(check_string), payload["resource"]["refUpdates"][0]["newObjectId"].tame(check_string),
) )
commits_data = [] commits_data = [
if payload["resource"].get("commits"):
for commit in payload["resource"]["commits"]:
commits_data.append(
{ {
"name": commit["author"]["name"].tame(check_string), "name": commit["author"]["name"].tame(check_string),
"sha": commit["commitId"].tame(check_string), "sha": commit["commitId"].tame(check_string),
@ -83,7 +80,8 @@ def get_code_push_commits_body(payload: WildValue) -> str:
), ),
"message": commit["comment"].tame(check_string), "message": commit["comment"].tame(check_string),
} }
) for commit in payload["resource"].get("commits", [])
]
return get_push_commits_event_message( return get_push_commits_event_message(
get_code_push_user_name(payload), get_code_push_user_name(payload),
compare_url, compare_url,
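The Azure DevOps handler drops the explicit guard in favor of iterating over .get("commits", []); an absent key then yields an empty result naturally. A sketch over a plain dict (the real payload is a WildValue):

payload = {"resource": {}}  # hypothetical payload with no commits key

commits_data = [
    {"sha": commit["commitId"]}
    for commit in payload["resource"].get("commits", [])
]
assert commits_data == []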
@ -36,17 +36,15 @@ def build_message_from_gitlog(
def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]: def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]:
new_commits_list = [] return [
for commit in commits:
new_commits_list.append(
{ {
"name": commit["author"]["name"].tame(check_string), "name": commit["author"]["name"].tame(check_string),
"sha": commit["id"].tame(check_string), "sha": commit["id"].tame(check_string),
"url": commit["url"].tame(check_string), "url": commit["url"].tame(check_string),
"message": commit["message"].tame(check_string), "message": commit["message"].tame(check_string),
} }
) for commit in commits
return new_commits_list ]
@authenticated_rest_api_view( @authenticated_rest_api_view(
@ -129,9 +129,10 @@ def get_topic_for_http_request(payload: WildValue) -> str:
def get_body_for_maintenance_planned_event(payload: WildValue) -> str: def get_body_for_maintenance_planned_event(payload: WildValue) -> str:
services_data = [] services_data = [
for service in payload["affected_services"].tame(check_string).split(","): {"service_name": service}
services_data.append({"service_name": service}) for service in payload["affected_services"].tame(check_string).split(",")
]
data = { data = {
"title": payload["title"].tame(check_string), "title": payload["title"].tame(check_string),
"description": payload["description"].tame(check_string), "description": payload["description"].tame(check_string),
@ -147,9 +148,10 @@ def get_body_for_maintenance_planned_event(payload: WildValue) -> str:
def get_body_for_incident_open_event(payload: WildValue) -> str: def get_body_for_incident_open_event(payload: WildValue) -> str:
services_data = [] services_data = [
for service in payload["affected_services"].tame(check_string).split(","): {"service_name": service}
services_data.append({"service_name": service}) for service in payload["affected_services"].tame(check_string).split(",")
]
data = { data = {
"title": payload["title"].tame(check_string), "title": payload["title"].tame(check_string),
"description": payload["description"].tame(check_string), "description": payload["description"].tame(check_string),
@ -566,9 +566,6 @@ def get_pull_request_review_comment_body(helper: Helper) -> str:
def get_pull_request_review_requested_body(helper: Helper) -> str: def get_pull_request_review_requested_body(helper: Helper) -> str:
payload = helper.payload payload = helper.payload
include_title = helper.include_title include_title = helper.include_title
requested_reviewer = [payload["requested_reviewer"]] if "requested_reviewer" in payload else []
requested_team = [payload["requested_team"]] if "requested_team" in payload else []
sender = get_sender_name(payload) sender = get_sender_name(payload)
pr_number = payload["pull_request"]["number"].tame(check_int) pr_number = payload["pull_request"]["number"].tame(check_int)
@ -579,26 +576,18 @@ def get_pull_request_review_requested_body(helper: Helper) -> str:
) )
body = message_with_title if include_title else message body = message_with_title if include_title else message
all_reviewers = [] if "requested_reviewer" in payload:
reviewer = payload["requested_reviewer"]
for reviewer in requested_reviewer: reviewers = "[{login}]({html_url})".format(
all_reviewers.append(
"[{login}]({html_url})".format(
login=reviewer["login"].tame(check_string), login=reviewer["login"].tame(check_string),
html_url=reviewer["html_url"].tame(check_string), html_url=reviewer["html_url"].tame(check_string),
) )
) else:
team_reviewer = payload["requested_team"]
for team_reviewer in requested_team: reviewers = "[{name}]({html_url})".format(
all_reviewers.append(
"[{name}]({html_url})".format(
name=team_reviewer["name"].tame(check_string), name=team_reviewer["name"].tame(check_string),
html_url=team_reviewer["html_url"].tame(check_string), html_url=team_reviewer["html_url"].tame(check_string),
) )
)
reviewers = ""
reviewers = all_reviewers[0]
return body.format( return body.format(
sender=sender, sender=sender,
@ -46,9 +46,7 @@ def format_push_event(payload: WildValue) -> str:
def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]: def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]:
new_commits_list = [] return [
for commit in commits:
new_commits_list.append(
{ {
"name": commit["author"]["username"].tame(check_string) "name": commit["author"]["username"].tame(check_string)
or commit["author"]["name"].tame(check_string).split()[0], or commit["author"]["name"].tame(check_string).split()[0],
@ -56,8 +54,8 @@ def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[st
"url": commit["url"].tame(check_string), "url": commit["url"].tame(check_string),
"message": commit["message"].tame(check_string), "message": commit["message"].tame(check_string),
} }
) for commit in commits
return new_commits_list ]
def format_new_branch_event(payload: WildValue) -> str: def format_new_branch_event(payload: WildValue) -> str:
@ -100,10 +100,7 @@ def get_body_for_tracks_retagged_event(payload: WildValue) -> str:
def get_body_for_tracks_imported_upgrade_event(payload: WildValue) -> str: def get_body_for_tracks_imported_upgrade_event(payload: WildValue) -> str:
tracks_data = [] tracks_data = [{"title": track["title"].tame(check_string)} for track in payload["tracks"]]
for track in payload["tracks"]:
tracks_data.append({"title": track["title"].tame(check_string)})
data = { data = {
"artist_name": payload["artist"]["name"].tame(check_string), "artist_name": payload["artist"]["name"].tame(check_string),
"tracks_final_data": get_tracks_content(tracks_data), "tracks_final_data": get_tracks_content(tracks_data),
@ -113,10 +110,7 @@ def get_body_for_tracks_imported_upgrade_event(payload: WildValue) -> str:
def get_body_for_tracks_imported_event(payload: WildValue) -> str: def get_body_for_tracks_imported_event(payload: WildValue) -> str:
tracks_data = [] tracks_data = [{"title": track["title"].tame(check_string)} for track in payload["tracks"]]
for track in payload["tracks"]:
tracks_data.append({"title": track["title"].tame(check_string)})
data = { data = {
"artist_name": payload["artist"]["name"].tame(check_string), "artist_name": payload["artist"]["name"].tame(check_string),
"tracks_final_data": get_tracks_content(tracks_data), "tracks_final_data": get_tracks_content(tracks_data),
@ -14,16 +14,15 @@ from zerver.models import UserProfile
def get_push_commits_body(payload: WildValue) -> str: def get_push_commits_body(payload: WildValue) -> str:
commits_data = [] commits_data = [
for commit in payload["event"]["push"]["commits"]:
commits_data.append(
{ {
"name": commit["author"].tame(check_string), "name": commit["author"].tame(check_string),
"sha": commit["raw_id"].tame(check_string), "sha": commit["raw_id"].tame(check_string),
"url": commit["url"].tame(check_string), "url": commit["url"].tame(check_string),
"message": commit["message"].tame(check_string), "message": commit["message"].tame(check_string),
} }
) for commit in payload["event"]["push"]["commits"]
]
return get_push_commits_event_message( return get_push_commits_event_message(
get_user_name(payload), get_user_name(payload),
None, None,
@ -1,7 +1,7 @@
# Webhooks for external integrations. # Webhooks for external integrations.
import re import re
from itertools import zip_longest from itertools import zip_longest
from typing import Literal, Optional, TypedDict, cast from typing import List, Literal, Optional, TypedDict, cast
from django.http import HttpRequest, HttpResponse from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _ from django.utils.translation import gettext as _
@ -58,14 +58,12 @@ def api_slack_incoming_webhook(
if user_specified_topic is None: if user_specified_topic is None:
user_specified_topic = "(no topic)" user_specified_topic = "(no topic)"
pieces = [] pieces: List[str] = []
if "blocks" in payload and payload["blocks"]: if "blocks" in payload and payload["blocks"]:
for block in payload["blocks"]: pieces += map(render_block, payload["blocks"])
pieces.append(render_block(block))
if "attachments" in payload and payload["attachments"]: if "attachments" in payload and payload["attachments"]:
for attachment in payload["attachments"]: pieces += map(render_attachment, payload["attachments"])
pieces.append(render_attachment(attachment))
body = "\n\n".join(piece.strip() for piece in pieces if piece.strip() != "") body = "\n\n".join(piece.strip() for piece in pieces if piece.strip() != "")
@ -218,8 +216,7 @@ def render_attachment(attachment: WildValue) -> str:
fields.append(f"{value}") fields.append(f"{value}")
pieces.append("\n".join(fields)) pieces.append("\n".join(fields))
if "blocks" in attachment and attachment["blocks"]: if "blocks" in attachment and attachment["blocks"]:
for block in attachment["blocks"]: pieces += map(render_block, attachment["blocks"])
pieces.append(render_block(block))
if "image_url" in attachment and attachment["image_url"]: if "image_url" in attachment and attachment["image_url"]:
pieces.append("[]({})".format(attachment["image_url"].tame(check_url))) pieces.append("[]({})".format(attachment["image_url"].tame(check_url)))
if "footer" in attachment and attachment["footer"]: if "footer" in attachment and attachment["footer"]:
@ -30,10 +30,7 @@ def api_taiga_webhook(
message: WildValue = REQ(argument_type="body", converter=to_wild_value), message: WildValue = REQ(argument_type="body", converter=to_wild_value),
) -> HttpResponse: ) -> HttpResponse:
parsed_events = parse_message(message) parsed_events = parse_message(message)
content_lines = [] content = "".join(sorted(generate_content(event) + "\n" for event in parsed_events))
for event in parsed_events:
content_lines.append(generate_content(event) + "\n")
content = "".join(sorted(content_lines))
topic = "General" topic = "General"
if message["data"].get("milestone") and "name" in message["data"]["milestone"]: if message["data"].get("milestone") and "name" in message["data"]["milestone"]:
topic = message["data"]["milestone"]["name"].tame(check_string) topic = message["data"]["milestone"]["name"].tame(check_string)
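The Taiga handler now sorts the generated event lines before joining, keeping the message deterministic regardless of event order. A sketch with a hypothetical formatter:

from typing import List

def generate_content(event: str) -> str:
    # Hypothetical one-line formatter for a parsed event.
    return event

parsed_events: List[str] = ["b-event", "a-event"]
content = "".join(sorted(generate_content(event) + "\n" for event in parsed_events))
assert content == "a-event\nb-event\n"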
@ -183,13 +183,9 @@ def create_alert_words(realm_id: int) -> None:
recs: List[AlertWord] = [] recs: List[AlertWord] = []
for user_id in user_ids: for user_id in user_ids:
random.shuffle(alert_words) random.shuffle(alert_words)
for i in range(4): recs.extend(
recs.append( AlertWord(realm_id=realm_id, user_profile_id=user_id, word=word)
AlertWord( for word in alert_words[:4]
realm_id=realm_id,
user_profile_id=user_id,
word=alert_words[i],
)
) )
AlertWord.objects.bulk_create(recs) AlertWord.objects.bulk_create(recs)
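Because only four shuffled words are kept per user, the rewrite slices the list and feeds a generator to extend(), so no temporary list is built per user. A sketch with strings in place of AlertWord rows:

import random

recs = []
alert_words = ["alpha", "beta", "gamma", "delta", "epsilon"]
for user_id in (1, 2):
    random.shuffle(alert_words)
    recs.extend(f"{user_id}:{word}" for word in alert_words[:4])
assert len(recs) == 8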
@ -545,9 +541,11 @@ class Command(BaseCommand):
# are needed for the test suite. # are needed for the test suite.
zulip_realm_bots = [ zulip_realm_bots = [
("Zulip Default Bot", "default-bot@zulip.com"), ("Zulip Default Bot", "default-bot@zulip.com"),
*(
(f"Extra Bot {i}", f"extrabot{i}@zulip.com")
for i in range(options["extra_bots"])
),
] ]
for i in range(options["extra_bots"]):
zulip_realm_bots.append((f"Extra Bot {i}", f"extrabot{i}@zulip.com"))
create_users( create_users(
zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT, bot_owner=desdemona zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT, bot_owner=desdemona
@ -1159,11 +1157,7 @@ def send_messages(messages: List[Message]) -> None:
# up with queued events that reference objects from a previous # up with queued events that reference objects from a previous
# life of the database, which naturally throws exceptions. # life of the database, which naturally throws exceptions.
settings.USING_RABBITMQ = False settings.USING_RABBITMQ = False
message_dict_list = [] do_send_messages([build_message_send_dict(message=message) for message in messages])
for message in messages:
message_dict = build_message_send_dict(message=message)
message_dict_list.append(message_dict)
do_send_messages(message_dict_list)
bulk_create_reactions(messages) bulk_create_reactions(messages)
settings.USING_RABBITMQ = True settings.USING_RABBITMQ = True
@ -684,8 +684,7 @@ urls += [
# Incoming webhook URLs # Incoming webhook URLs
# We don't create URLs for particular Git integrations here # We don't create URLs for particular Git integrations here
# because of generic one below # because of generic one below
for incoming_webhook in WEBHOOK_INTEGRATIONS: urls.extend(incoming_webhook.url_object for incoming_webhook in WEBHOOK_INTEGRATIONS)
urls.append(incoming_webhook.url_object)
# Desktop-specific authentication URLs # Desktop-specific authentication URLs
urls += [ urls += [