mirror of https://github.com/zulip/zulip.git
ruff: Fix PERF401 Use a list comprehension to create a transformed list.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
parent 0b95d83f09
commit 562a79ab76
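For context, the loop-append pattern that ruff's PERF401 rule flags, and the comprehension it asks for, look roughly like the following. This is an illustrative sketch only, not code from the diff; the names records and keep are hypothetical.

# Before: building a transformed list by appending inside a loop (what PERF401 flags).
records = ["a", "bb", "ccc"]

def keep(item: str) -> bool:
    return len(item) > 1

selected = []
for item in records:
    if keep(item):
        selected.append(item.upper())

# After: the same transformation as a single list comprehension.
selected = [item.upper() for item in records if keep(item)]

# When the target list already has elements, the equivalent rewrite is
# list.extend() with a generator expression instead of repeated append() calls.
selected = ["SEED"]
selected.extend(item.upper() for item in records if keep(item))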
@@ -607,9 +607,8 @@ def get_installation_activity(request: HttpRequest) -> HttpResponse:
     data = [
         ("Counts", counts_content),
         ("Durations", duration_content),
+        *((page["title"], page["content"]) for page in ad_hoc_queries()),
     ]
-    for page in ad_hoc_queries():
-        data.append((page["title"], page["content"]))
 
     title = "Activity"
@@ -376,9 +376,9 @@ def sponsorship(
 
         return json_success(request)
     else:
-        messages = []
-        for error_list in form.errors.get_json_data().values():
-            for error in error_list:
-                messages.append(error["message"])
-        message = " ".join(messages)
+        message = " ".join(
+            error["message"]
+            for error_list in form.errors.get_json_data().values()
+            for error in error_list
+        )
         raise BillingError("Form validation error", message=message)
@@ -95,8 +95,7 @@ class FilteredManagementUtility(ManagementUtility):
         for app in sorted(commands_dict):
             usage.append("")
             usage.append(style.NOTICE(f"[{app}]"))
-            for name in sorted(commands_dict[app]):
-                usage.append(f" {name}")
+            usage.extend(f" {name}" for name in sorted(commands_dict[app]))
         # Output an extra note if settings are not properly configured
         if self.settings_exception is not None:
             usage.append(
@@ -17,11 +17,7 @@ from scripts.lib.zulip_tools import ENDC, FAIL, WARNING
 
 
 def find_handlebars(translatable_strings: List[str]) -> List[str]:
-    errored = []
-    for string in translatable_strings:
-        if "{{" in string:
-            errored.append(string)
-    return errored
+    return [string for string in translatable_strings if "{{" in string]
 
 
 if __name__ == "__main__":
@@ -4,7 +4,7 @@ import configparser
 import os
 import re
 import sys
-from typing import Any, Dict, Optional
+from typing import Any, Dict, List, Optional
 
 import requests
@@ -64,7 +64,7 @@ def check_issue_labels() -> None:
         sys.exit(1)
 
     next_page_url: Optional[str] = "https://api.github.com/repos/zulip/zulip/issues"
-    unlabeled_issue_urls = []
+    unlabeled_issue_urls: List[str] = []
     while next_page_url:
         try:
             if args.force:
@@ -83,9 +83,11 @@ def check_issue_labels() -> None:
             sys.exit(1)
 
         next_page_url = get_next_page_url(response.headers["Link"])
-        for item in response.json():
-            if is_issue(item) and not area_labeled(item):
-                unlabeled_issue_urls.append(item["html_url"])
+        unlabeled_issue_urls.extend(
+            item["html_url"]
+            for item in response.json()
+            if is_issue(item) and not area_labeled(item)
+        )
 
     if len(unlabeled_issue_urls):
         print("The following issues don't have any area labels associated with it")
@@ -352,8 +352,7 @@ async def serve() -> None:
     else:
         children.append(start_webpack_watcher())
 
-    for cmd in server_processes():
-        children.append(subprocess.Popen(cmd))
+    children.extend(subprocess.Popen(cmd) for cmd in server_processes())
 
     client = httpclient.AsyncHTTPClient()
     app = Application(enable_logging=options.enable_tornado_logging)
@@ -138,10 +138,10 @@ def deactivated_streams_by_old_name(realm: Realm, stream_name: str) -> QuerySet[
     fixed_length_prefix = ".......!DEACTIVATED:"
     truncated_name = stream_name[0 : Stream.MAX_NAME_LENGTH - len(fixed_length_prefix)]
 
-    old_names: List[str] = []
-    for bang_length in range(1, 21):
-        name = "!" * bang_length + "DEACTIVATED:" + stream_name
-        old_names.append(name[0 : Stream.MAX_NAME_LENGTH])
+    old_names: List[str] = [
+        ("!" * bang_length + "DEACTIVATED:" + stream_name)[: Stream.MAX_NAME_LENGTH]
+        for bang_length in range(1, 21)
+    ]
 
     possible_streams = Stream.objects.filter(realm=realm, deactivated=True).filter(
         # We go looking for names as they are post-1b6f68bb59dc; 8
@@ -418,31 +418,22 @@ def bulk_add_subs_to_db_with_logging(
     event_time = timezone_now()
     event_last_message_id = get_last_message_id()
 
-    all_subscription_logs: (List[RealmAuditLog]) = []
-    for sub_info in subs_to_add:
-        all_subscription_logs.append(
+    all_subscription_logs = [
         RealmAuditLog(
             realm=realm,
             acting_user=acting_user,
             modified_user=sub_info.user,
             modified_stream=sub_info.stream,
             event_last_message_id=event_last_message_id,
-            event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
+            event_type=event_type,
             event_time=event_time,
         )
-        )
-    for sub_info in subs_to_activate:
-        all_subscription_logs.append(
-            RealmAuditLog(
-                realm=realm,
-                acting_user=acting_user,
-                modified_user=sub_info.user,
-                modified_stream=sub_info.stream,
-                event_last_message_id=event_last_message_id,
-                event_type=RealmAuditLog.SUBSCRIPTION_ACTIVATED,
-                event_time=event_time,
-            )
-        )
+        for event_type, subs in [
+            (RealmAuditLog.SUBSCRIPTION_CREATED, subs_to_add),
+            (RealmAuditLog.SUBSCRIPTION_ACTIVATED, subs_to_activate),
+        ]
+        for sub_info in subs
+    ]
     # Now since we have all log objects generated we can do a bulk insert
     RealmAuditLog.objects.bulk_create(all_subscription_logs)
@@ -800,24 +791,20 @@ def bulk_remove_subscriptions(
             subscribed_stream_ids = {sub_info.stream.id for sub_info in user_sub_stream_info}
             not_subscribed_stream_ids = stream_ids - subscribed_stream_ids
 
-            for stream_id in not_subscribed_stream_ids:
-                stream = stream_dict[stream_id]
-                not_subscribed.append((user_profile, stream))
+            not_subscribed.extend(
+                (user_profile, stream_dict[stream_id]) for stream_id in not_subscribed_stream_ids
+            )
 
         return not_subscribed
 
     not_subscribed = get_non_subscribed_subs()
 
-    subs_to_deactivate: List[SubInfo] = []
-    sub_ids_to_deactivate: List[int] = []
-
-    # This loop just flattens out our data into big lists for
-    # bulk operations.
-    for sub_infos in existing_subs_by_user.values():
-        for sub_info in sub_infos:
-            subs_to_deactivate.append(sub_info)
-            sub_ids_to_deactivate.append(sub_info.sub.id)
-
+    subs_to_deactivate = [
+        sub_info for sub_infos in existing_subs_by_user.values() for sub_info in sub_infos
+    ]
+    sub_ids_to_deactivate = [sub_info.sub.id for sub_info in subs_to_deactivate]
     streams_to_unsubscribe = [sub_info.stream for sub_info in subs_to_deactivate]
     # We do all the database changes in a transaction to ensure
     # RealmAuditLog entries are atomically created when making changes.
@@ -352,10 +352,8 @@ def add_subgroups_to_user_group(
             event_time=now,
             acting_user=acting_user,
             extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
-        )
-    ]
-    for subgroup_id in subgroup_ids:
-        audit_log_entries.append(
+        ),
+        *(
             RealmAuditLog(
                 realm=user_group.realm,
                 modified_user_group_id=subgroup_id,
@@ -364,7 +362,9 @@ def add_subgroups_to_user_group(
                 acting_user=acting_user,
                 extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
             )
-        )
+            for subgroup_id in subgroup_ids
+        ),
+    ]
     RealmAuditLog.objects.bulk_create(audit_log_entries)
 
     do_send_subgroups_update_event("add_subgroups", user_group, subgroup_ids)
@@ -386,10 +386,8 @@ def remove_subgroups_from_user_group(
             event_time=now,
             acting_user=acting_user,
             extra_data=orjson.dumps({"subgroup_ids": subgroup_ids}).decode(),
-        )
-    ]
-    for subgroup_id in subgroup_ids:
-        audit_log_entries.append(
+        ),
+        *(
             RealmAuditLog(
                 realm=user_group.realm,
                 modified_user_group_id=subgroup_id,
@@ -398,7 +396,9 @@ def remove_subgroups_from_user_group(
                 acting_user=acting_user,
                 extra_data=orjson.dumps({"supergroup_ids": [user_group.id]}).decode(),
            )
-        )
+            for subgroup_id in subgroup_ids
+        ),
+    ]
     RealmAuditLog.objects.bulk_create(audit_log_entries)
 
     do_send_subgroups_update_event("remove_subgroups", user_group, subgroup_ids)
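The two user-group hunks above keep a literal first entry and splice the generated entries into the same list with iterable unpacking. A standalone sketch of that shape, using a plain dict in place of RealmAuditLog and hypothetical field names:

subgroup_ids = [4, 7]
audit_log_entries = [
    {"event": "supergroup_changed", "subgroup_ids": subgroup_ids},
    *(
        {"event": "subgroup_changed", "subgroup_id": subgroup_id}
        for subgroup_id in subgroup_ids
    ),
]
# audit_log_entries now holds one fixed entry followed by one entry per subgroup.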
@@ -100,10 +100,9 @@ def bulk_create_users(
         for profile_ in profiles_to_create
     )
 
-    recipients_to_create: List[Recipient] = []
-    for user_id in user_ids:
-        recipient = Recipient(type_id=user_id, type=Recipient.PERSONAL)
-        recipients_to_create.append(recipient)
+    recipients_to_create = [
+        Recipient(type_id=user_id, type=Recipient.PERSONAL) for user_id in user_ids
+    ]
 
     Recipient.objects.bulk_create(recipients_to_create)
@@ -115,15 +114,14 @@ def bulk_create_users(
     for recipient in recipients_to_create:
         recipients_by_user_id[recipient.type_id] = recipient
 
-    subscriptions_to_create: List[Subscription] = []
-    for user_profile in profiles_to_create:
-        recipient = recipients_by_user_id[user_profile.id]
-        subscription = Subscription(
+    subscriptions_to_create = [
+        Subscription(
             user_profile_id=user_profile.id,
-            recipient=recipient,
+            recipient=recipients_by_user_id[user_profile.id],
             is_user_active=user_profile.is_active,
         )
-        subscriptions_to_create.append(subscription)
+        for user_profile in profiles_to_create
+    ]
 
     Subscription.objects.bulk_create(subscriptions_to_create)
@@ -233,10 +231,11 @@ def bulk_create_streams(realm: Realm, stream_dict: Dict[str, Dict[str, Any]]) ->
     streams_to_create.sort(key=lambda x: x.name)
     Stream.objects.bulk_create(streams_to_create)
 
-    recipients_to_create: List[Recipient] = []
-    for stream in Stream.objects.filter(realm=realm).values("id", "name"):
-        if stream["name"].lower() not in existing_streams:
-            recipients_to_create.append(Recipient(type_id=stream["id"], type=Recipient.STREAM))
+    recipients_to_create = [
+        Recipient(type_id=stream["id"], type=Recipient.STREAM)
+        for stream in Stream.objects.filter(realm=realm).values("id", "name")
+        if stream["name"].lower() not in existing_streams
+    ]
     Recipient.objects.bulk_create(recipients_to_create)
 
     bulk_set_users_or_streams_recipient_fields(Stream, streams_to_create, recipients_to_create)
@@ -523,8 +523,7 @@ def delete_user_profile_caches(user_profiles: Iterable["UserProfile"], realm: "R
     keys = []
     for user_profile in user_profiles:
         keys.append(user_profile_by_id_cache_key(user_profile.id))
-        for api_key in get_all_api_keys(user_profile):
-            keys.append(user_profile_by_api_key_cache_key(api_key))
+        keys += map(user_profile_by_api_key_cache_key, get_all_api_keys(user_profile))
         keys.append(user_profile_cache_key(user_profile.email, realm))
         keys.append(user_profile_delivery_email_cache_key(user_profile.delivery_email, realm))
         if user_profile.is_bot and is_cross_realm_bot_email(user_profile.email):
@ -489,25 +489,21 @@ def fetch_initial_state_data(
|
|||
# This does not yet have an apply_event counterpart, since currently,
|
||||
# new entries for EMBEDDED_BOTS can only be added directly in the codebase.
|
||||
if want("realm_embedded_bots"):
|
||||
realm_embedded_bots = []
|
||||
for bot in EMBEDDED_BOTS:
|
||||
realm_embedded_bots.append(
|
||||
state["realm_embedded_bots"] = [
|
||||
{"name": bot.name, "config": load_bot_config_template(bot.name)}
|
||||
)
|
||||
state["realm_embedded_bots"] = realm_embedded_bots
|
||||
for bot in EMBEDDED_BOTS
|
||||
]
|
||||
|
||||
# This does not have an apply_events counterpart either since
|
||||
# this data is mostly static.
|
||||
if want("realm_incoming_webhook_bots"):
|
||||
realm_incoming_webhook_bots = []
|
||||
for integration in WEBHOOK_INTEGRATIONS:
|
||||
realm_incoming_webhook_bots.append(
|
||||
state["realm_incoming_webhook_bots"] = [
|
||||
{
|
||||
"name": integration.name,
|
||||
"config": {c[1]: c[0] for c in integration.config_options},
|
||||
}
|
||||
)
|
||||
state["realm_incoming_webhook_bots"] = realm_incoming_webhook_bots
|
||||
for integration in WEBHOOK_INTEGRATIONS
|
||||
]
|
||||
|
||||
if want("recent_private_conversations"):
|
||||
# A data structure containing records of this form:
|
||||
|
|
|
@@ -1,6 +1,6 @@
 import logging
 import time
-from typing import Callable, List
+from typing import Callable, List, TypeVar
 
 from django.db import connection
 from django.db.backends.utils import CursorWrapper
@@ -8,6 +8,8 @@ from psycopg2.sql import SQL
 
 from zerver.models import UserProfile
 
+T = TypeVar("T")
+
 """
 NOTE! Be careful modifying this library, as it is used
 in a migration, and it needs to be valid for the state
@@ -31,18 +33,17 @@ def update_unread_flags(cursor: CursorWrapper, user_message_ids: List[int]) -> N
     cursor.execute(query, {"user_message_ids": tuple(user_message_ids)})
 
 
-def get_timing(message: str, f: Callable[[], None]) -> None:
+def get_timing(message: str, f: Callable[[], T]) -> T:
     start = time.time()
     logger.info(message)
-    f()
+    ret = f()
     elapsed = time.time() - start
     logger.info("elapsed time: %.03f\n", elapsed)
+    return ret
 
 
 def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
-    recipient_ids = []
-
-    def find_recipients() -> None:
+    def find_recipients() -> List[int]:
         query = SQL(
             """
             SELECT
@ -61,11 +62,11 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
|
|||
)
|
||||
cursor.execute(query, {"user_profile_id": user_profile.id})
|
||||
rows = cursor.fetchall()
|
||||
for row in rows:
|
||||
recipient_ids.append(row[0])
|
||||
recipient_ids = [row[0] for row in rows]
|
||||
logger.info("%s", recipient_ids)
|
||||
return recipient_ids
|
||||
|
||||
get_timing(
|
||||
recipient_ids = get_timing(
|
||||
"get recipients",
|
||||
find_recipients,
|
||||
)
|
||||
|
@ -73,9 +74,7 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
|
|||
if not recipient_ids:
|
||||
return
|
||||
|
||||
user_message_ids = []
|
||||
|
||||
def find() -> None:
|
||||
def find() -> List[int]:
|
||||
query = SQL(
|
||||
"""
|
||||
SELECT
|
||||
|
@ -101,11 +100,11 @@ def fix_unsubscribed(cursor: CursorWrapper, user_profile: UserProfile) -> None:
|
|||
},
|
||||
)
|
||||
rows = cursor.fetchall()
|
||||
for row in rows:
|
||||
user_message_ids.append(row[0])
|
||||
user_message_ids = [row[0] for row in rows]
|
||||
logger.info("rows found: %d", len(user_message_ids))
|
||||
return user_message_ids
|
||||
|
||||
get_timing(
|
||||
user_message_ids = get_timing(
|
||||
"finding unread messages for non-active streams",
|
||||
find,
|
||||
)
|
||||
|
|
|
@ -19,23 +19,21 @@ def load_config() -> Dict[str, Any]:
|
|||
def generate_topics(num_topics: int) -> List[str]:
|
||||
config = load_config()["gen_fodder"]
|
||||
|
||||
topics = []
|
||||
# Make single word topics account for 30% of total topics.
|
||||
# Single word topics are most common, thus
|
||||
# it is important we test on it.
|
||||
num_single_word_topics = num_topics // 3
|
||||
for _ in itertools.repeat(None, num_single_word_topics):
|
||||
topics.append(random.choice(config["nouns"]))
|
||||
topics = random.choices(config["nouns"], k=num_single_word_topics)
|
||||
|
||||
sentence = ["adjectives", "nouns", "connectors", "verbs", "adverbs"]
|
||||
for pos in sentence:
|
||||
# Add an empty string so that we can generate variable length topics.
|
||||
config[pos].append("")
|
||||
|
||||
for _ in itertools.repeat(None, num_topics - num_single_word_topics):
|
||||
generated_topic = [random.choice(config[pos]) for pos in sentence]
|
||||
topic = " ".join(filter(None, generated_topic))
|
||||
topics.append(topic)
|
||||
topics.extend(
|
||||
" ".join(word for pos in sentence if (word := random.choice(config[pos])) != "")
|
||||
for _ in range(num_topics - num_single_word_topics)
|
||||
)
|
||||
|
||||
# Mark a small subset of topics as resolved in some streams, and
|
||||
# many topics in a few streams. Note that these don't have the
|
||||
|
@ -46,14 +44,10 @@ def generate_topics(num_topics: int) -> List[str]:
|
|||
else:
|
||||
resolved_topic_probability = 0.05
|
||||
|
||||
final_topics = []
|
||||
for topic in topics:
|
||||
if random.random() < resolved_topic_probability:
|
||||
final_topics.append(RESOLVED_TOPIC_PREFIX + topic)
|
||||
else:
|
||||
final_topics.append(topic)
|
||||
|
||||
return final_topics
|
||||
return [
|
||||
RESOLVED_TOPIC_PREFIX + topic if random.random() < resolved_topic_probability else topic
|
||||
for topic in topics
|
||||
]
|
||||
|
||||
|
||||
def load_generators(config: Dict[str, Any]) -> Dict[str, Any]:
|
||||
|
|
|
@ -278,13 +278,14 @@ def fix_customprofilefield(data: TableData) -> None:
|
|||
In CustomProfileField with 'field_type' like 'USER', the IDs need to be
|
||||
re-mapped.
|
||||
"""
|
||||
field_type_USER_id_list = []
|
||||
for item in data["zerver_customprofilefield"]:
|
||||
if item["field_type"] == CustomProfileField.USER:
|
||||
field_type_USER_id_list.append(item["id"])
|
||||
field_type_USER_ids = {
|
||||
item["id"]
|
||||
for item in data["zerver_customprofilefield"]
|
||||
if item["field_type"] == CustomProfileField.USER
|
||||
}
|
||||
|
||||
for item in data["zerver_customprofilefieldvalue"]:
|
||||
if item["field_id"] in field_type_USER_id_list:
|
||||
if item["field_id"] in field_type_USER_ids:
|
||||
old_user_id_list = orjson.loads(item["value"])
|
||||
|
||||
new_id_list = re_map_foreign_keys_many_to_many_internal(
|
||||
|
@ -392,10 +393,7 @@ def current_table_ids(data: TableData, table: TableName) -> List[int]:
|
|||
"""
|
||||
Returns the ids present in the current table
|
||||
"""
|
||||
id_list = []
|
||||
for item in data[table]:
|
||||
id_list.append(item["id"])
|
||||
return id_list
|
||||
return [item["id"] for item in data[table]]
|
||||
|
||||
|
||||
def idseq(model_class: Any) -> str:
|
||||
|
@ -1567,16 +1565,17 @@ def import_attachments(data: TableData) -> None:
|
|||
def format_m2m_data(
|
||||
child_singular: str, child_plural: str, m2m_table_name: str, child_id: str
|
||||
) -> Tuple[str, List[Record], str]:
|
||||
m2m_rows: List[Record] = []
|
||||
for parent_row in data[parent_db_table_name]:
|
||||
for fk_id in parent_row[child_plural]:
|
||||
m2m_row: Record = {}
|
||||
m2m_row[parent_singular] = parent_row["id"]
|
||||
m2m_rows = [
|
||||
{
|
||||
parent_singular: parent_row["id"],
|
||||
# child_singular will generally match the model name (e.g. Message, ScheduledMessage)
|
||||
# after lowercasing, and that's what we enter as ID_MAP keys, so this should be
|
||||
# a reasonable assumption to make.
|
||||
m2m_row[child_singular] = ID_MAP[child_singular][fk_id]
|
||||
m2m_rows.append(m2m_row)
|
||||
child_singular: ID_MAP[child_singular][fk_id],
|
||||
}
|
||||
for parent_row in data[parent_db_table_name]
|
||||
for fk_id in parent_row[child_plural]
|
||||
]
|
||||
|
||||
# Create our table data for insert.
|
||||
m2m_data: TableData = {m2m_table_name: m2m_rows}
|
||||
|
|
|
@ -858,24 +858,21 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
to_process: List[Dict[str, Any]] = []
|
||||
# Build dicts for URLs
|
||||
for url_data in urls:
|
||||
short_url = url_data["url"]
|
||||
full_url = url_data["expanded_url"]
|
||||
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
|
||||
to_process.append(
|
||||
to_process.extend(
|
||||
{
|
||||
"type": "url",
|
||||
"start": match.start(),
|
||||
"end": match.end(),
|
||||
"url": short_url,
|
||||
"text": full_url,
|
||||
"url": url_data["url"],
|
||||
"text": url_data["expanded_url"],
|
||||
}
|
||||
for match in re.finditer(re.escape(url_data["url"]), text, re.IGNORECASE)
|
||||
)
|
||||
# Build dicts for mentions
|
||||
for user_mention in user_mentions:
|
||||
screen_name = user_mention["screen_name"]
|
||||
mention_string = "@" + screen_name
|
||||
for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE):
|
||||
to_process.append(
|
||||
to_process.extend(
|
||||
{
|
||||
"type": "mention",
|
||||
"start": match.start(),
|
||||
|
@ -883,13 +880,13 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
"url": "https://twitter.com/" + urllib.parse.quote(screen_name),
|
||||
"text": mention_string,
|
||||
}
|
||||
for match in re.finditer(re.escape(mention_string), text, re.IGNORECASE)
|
||||
)
|
||||
# Build dicts for media
|
||||
for media_item in media:
|
||||
short_url = media_item["url"]
|
||||
expanded_url = media_item["expanded_url"]
|
||||
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE):
|
||||
to_process.append(
|
||||
to_process.extend(
|
||||
{
|
||||
"type": "media",
|
||||
"start": match.start(),
|
||||
|
@ -897,6 +894,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
|
|||
"url": short_url,
|
||||
"text": expanded_url,
|
||||
}
|
||||
for match in re.finditer(re.escape(short_url), text, re.IGNORECASE)
|
||||
)
|
||||
# Build dicts for emojis
|
||||
for match in re.finditer(UNICODE_EMOJI_RE, text, re.IGNORECASE):
|
||||
|
@ -1938,10 +1936,13 @@ class StreamTopicPattern(CompiledInlineProcessor):
|
|||
|
||||
|
||||
def possible_linked_stream_names(content: str) -> Set[str]:
|
||||
matches = re.findall(STREAM_LINK_REGEX, content, re.VERBOSE)
|
||||
for match in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE):
|
||||
matches.append(match.group("stream_name"))
|
||||
return set(matches)
|
||||
return {
|
||||
*re.findall(STREAM_LINK_REGEX, content, re.VERBOSE),
|
||||
*(
|
||||
match.group("stream_name")
|
||||
for match in re.finditer(STREAM_TOPIC_LINK_REGEX, content, re.VERBOSE)
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class AlertWordNotificationProcessor(markdown.preprocessors.Preprocessor):
|
||||
|
|
|
@ -347,22 +347,22 @@ def get_users_for_soft_deactivation(
|
|||
.values("user_profile_id")
|
||||
.annotate(last_visit=Max("last_visit"))
|
||||
)
|
||||
user_ids_to_deactivate = []
|
||||
today = timezone_now()
|
||||
for user_activity in users_activity:
|
||||
if (today - user_activity["last_visit"]).days > inactive_for_days:
|
||||
user_ids_to_deactivate.append(user_activity["user_profile_id"])
|
||||
user_ids_to_deactivate = [
|
||||
user_activity["user_profile_id"]
|
||||
for user_activity in users_activity
|
||||
if (today - user_activity["last_visit"]).days > inactive_for_days
|
||||
]
|
||||
users_to_deactivate = list(UserProfile.objects.filter(id__in=user_ids_to_deactivate))
|
||||
return users_to_deactivate
|
||||
|
||||
|
||||
def do_soft_activate_users(users: List[UserProfile]) -> List[UserProfile]:
|
||||
users_soft_activated = []
|
||||
for user_profile in users:
|
||||
user_activated = reactivate_user_if_soft_deactivated(user_profile)
|
||||
if user_activated:
|
||||
users_soft_activated.append(user_activated)
|
||||
return users_soft_activated
|
||||
return [
|
||||
user_activated
|
||||
for user_profile in users
|
||||
if (user_activated := reactivate_user_if_soft_deactivated(user_profile)) is not None
|
||||
]
|
||||
|
||||
|
||||
def do_catch_up_soft_deactivated_users(users: Iterable[UserProfile]) -> List[UserProfile]:
|
||||
|
|
|
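The do_soft_activate_users hunk above folds the call-then-check-then-append loop into one comprehension by binding the helper's result with an assignment expression. A minimal standalone sketch of that form, with hypothetical names (maybe_activate stands in for reactivate_user_if_soft_deactivated):

from typing import List, Optional

def maybe_activate(name: str) -> Optional[str]:
    # Hypothetical helper: returns a transformed value when work was done, else None.
    return name.upper() if name.startswith("z") else None

users = ["zoe", "ann", "zed"]

# The walrus operator lets the result be tested in the filter and reused as the
# produced element, so the helper is called only once per item.
activated = [
    result for name in users if (result := maybe_activate(name)) is not None
]
# activated == ["ZOE", "ZED"]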
@ -303,17 +303,15 @@ def get_accounts_for_email(email: str) -> List[Account]:
|
|||
)
|
||||
.order_by("date_joined")
|
||||
)
|
||||
accounts: List[Account] = []
|
||||
for profile in profiles:
|
||||
accounts.append(
|
||||
return [
|
||||
dict(
|
||||
realm_name=profile.realm.name,
|
||||
realm_id=profile.realm.id,
|
||||
full_name=profile.full_name,
|
||||
avatar=avatar_url(profile),
|
||||
)
|
||||
)
|
||||
return accounts
|
||||
for profile in profiles
|
||||
]
|
||||
|
||||
|
||||
def get_api_key(user_profile: UserProfile) -> str:
|
||||
|
@ -615,13 +613,12 @@ def is_2fa_verified(user: UserProfile) -> bool:
|
|||
|
||||
def get_users_with_access_to_real_email(user_profile: UserProfile) -> List[int]:
|
||||
active_users = user_profile.realm.get_active_users()
|
||||
user_ids_with_real_email_access = []
|
||||
for user in active_users:
|
||||
return [
|
||||
user.id
|
||||
for user in active_users
|
||||
if can_access_delivery_email(
|
||||
user,
|
||||
user_profile.id,
|
||||
user_profile.email_address_visibility,
|
||||
):
|
||||
user_ids_with_real_email_access.append(user.id)
|
||||
|
||||
return user_ids_with_real_email_access
|
||||
)
|
||||
]
|
||||
|
|
|
@ -82,9 +82,7 @@ def get_assignee_string(assignees: List[Dict[str, Any]]) -> str:
|
|||
if len(assignees) == 1:
|
||||
assignees_string = "{username}".format(**assignees[0])
|
||||
else:
|
||||
usernames = []
|
||||
for a in assignees:
|
||||
usernames.append(a["username"])
|
||||
usernames = [a["username"] for a in assignees]
|
||||
|
||||
assignees_string = ", ".join(usernames[:-1]) + " and " + usernames[-1]
|
||||
return assignees_string
|
||||
|
|
|
@ -12,11 +12,11 @@ NAME_INVALID_CHARS = ["*", "`", "\\", ">", '"', "@"]
|
|||
def remove_name_illegal_chars(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:
|
||||
UserProfile = apps.get_model("zerver", "UserProfile")
|
||||
for user in UserProfile.objects.all():
|
||||
stripped = []
|
||||
for char in user.full_name:
|
||||
if (char not in NAME_INVALID_CHARS) and (category(char)[0] != "C"):
|
||||
stripped.append(char)
|
||||
user.full_name = "".join(stripped)
|
||||
user.full_name = "".join(
|
||||
char
|
||||
for char in user.full_name
|
||||
if (char not in NAME_INVALID_CHARS) and (category(char)[0] != "C")
|
||||
)
|
||||
user.save(update_fields=["full_name"])
|
||||
|
||||
|
||||
|
|
|
@ -13,16 +13,15 @@ def create_nobody_system_user_group_for_existing_realms(
|
|||
NOBODY_GROUP_NAME = "@role:nobody"
|
||||
NOBODY_GROUP_DESCRIPTION = "Nobody"
|
||||
|
||||
groups_to_create = []
|
||||
for realm in Realm.objects.all():
|
||||
groups_to_create.append(
|
||||
groups_to_create = [
|
||||
UserGroup(
|
||||
name=NOBODY_GROUP_NAME,
|
||||
description=NOBODY_GROUP_DESCRIPTION,
|
||||
realm=realm,
|
||||
is_system_group=True,
|
||||
)
|
||||
)
|
||||
for realm in Realm.objects.all()
|
||||
]
|
||||
|
||||
UserGroup.objects.bulk_create(groups_to_create)
|
||||
|
||||
|
|
|
@ -1326,17 +1326,14 @@ def linkifiers_for_realm(realm_id: int) -> List[LinkifierDict]:
|
|||
|
||||
@cache_with_key(get_linkifiers_cache_key, timeout=3600 * 24 * 7)
|
||||
def linkifiers_for_realm_remote_cache(realm_id: int) -> List[LinkifierDict]:
|
||||
linkifiers = []
|
||||
for linkifier in RealmFilter.objects.filter(realm_id=realm_id).order_by("id"):
|
||||
linkifiers.append(
|
||||
return [
|
||||
LinkifierDict(
|
||||
pattern=linkifier.pattern,
|
||||
url_template=linkifier.url_template,
|
||||
id=linkifier.id,
|
||||
)
|
||||
)
|
||||
|
||||
return linkifiers
|
||||
for linkifier in RealmFilter.objects.filter(realm_id=realm_id).order_by("id")
|
||||
]
|
||||
|
||||
|
||||
def flush_linkifiers(*, instance: RealmFilter, **kwargs: object) -> None:
|
||||
|
@ -1419,17 +1416,15 @@ class RealmPlayground(models.Model):
|
|||
|
||||
|
||||
def get_realm_playgrounds(realm: Realm) -> List[RealmPlaygroundDict]:
|
||||
playgrounds: List[RealmPlaygroundDict] = []
|
||||
for playground in RealmPlayground.objects.filter(realm=realm).all():
|
||||
playgrounds.append(
|
||||
return [
|
||||
RealmPlaygroundDict(
|
||||
id=playground.id,
|
||||
name=playground.name,
|
||||
pygments_language=playground.pygments_language,
|
||||
url_template=playground.url_template,
|
||||
)
|
||||
)
|
||||
return playgrounds
|
||||
for playground in RealmPlayground.objects.filter(realm=realm).all()
|
||||
]
|
||||
|
||||
|
||||
class Recipient(models.Model):
|
||||
|
|
|
@ -142,9 +142,9 @@ def update_flags_message_ids() -> Dict[str, object]:
|
|||
stream_name = "Venice"
|
||||
helpers.subscribe(helpers.example_user("iago"), stream_name)
|
||||
|
||||
messages = []
|
||||
for _ in range(3):
|
||||
messages.append(helpers.send_stream_message(helpers.example_user("iago"), stream_name))
|
||||
messages = [
|
||||
helpers.send_stream_message(helpers.example_user("iago"), stream_name) for _ in range(3)
|
||||
]
|
||||
return {
|
||||
"messages": messages,
|
||||
}
|
||||
|
|
|
@ -149,20 +149,16 @@ def render_python_code_example(
|
|||
|
||||
snippets = extract_code_example(function_source_lines, [], PYTHON_EXAMPLE_REGEX)
|
||||
|
||||
code_example = ["{tab|python}\n"]
|
||||
code_example.append("```python")
|
||||
code_example.extend(config)
|
||||
|
||||
for snippet in snippets:
|
||||
for line in snippet:
|
||||
return [
|
||||
"{tab|python}\n",
|
||||
"```python",
|
||||
*config,
|
||||
# Remove one level of indentation and strip newlines
|
||||
code_example.append(line[4:].rstrip())
|
||||
|
||||
code_example.append("print(result)")
|
||||
code_example.append("\n")
|
||||
code_example.append("```")
|
||||
|
||||
return code_example
|
||||
*(line[4:].rstrip() for snippet in snippets for line in snippet),
|
||||
"print(result)",
|
||||
"\n",
|
||||
"```",
|
||||
]
|
||||
|
||||
|
||||
def render_javascript_code_example(
|
||||
|
@ -193,9 +189,8 @@ def render_javascript_code_example(
|
|||
code_example.append(" const client = await zulipInit(config);")
|
||||
for snippet in snippets:
|
||||
code_example.append("")
|
||||
for line in snippet:
|
||||
# Strip newlines
|
||||
code_example.append(" " + line.rstrip())
|
||||
code_example.extend(" " + line.rstrip() for line in snippet)
|
||||
code_example.append("})();")
|
||||
|
||||
code_example.append("```")
|
||||
|
|
|
@ -1162,9 +1162,7 @@ def update_message_flags(client: Client) -> None:
|
|||
"topic": "Castle",
|
||||
"content": "I come not, friends, to steal away your hearts.",
|
||||
}
|
||||
message_ids = []
|
||||
for i in range(0, 3):
|
||||
message_ids.append(client.send_message(request)["id"])
|
||||
message_ids = [client.send_message(request)["id"] for i in range(3)]
|
||||
|
||||
# {code_example|start}
|
||||
# Add the "read" flag to the messages with IDs in "message_ids"
|
||||
|
|
|
@ -47,15 +47,16 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
|
|||
for endpoint in endpoint_list:
|
||||
article_name = endpoint + ".md"
|
||||
file_name = os.path.join(settings.DEPLOY_ROOT, "api_docs/", article_name)
|
||||
curl_commands_to_test = []
|
||||
|
||||
if os.path.exists(file_name):
|
||||
with open(file_name) as f:
|
||||
for line in f:
|
||||
curl_commands_to_test = [
|
||||
# A typical example from the Markdown source looks like this:
|
||||
# {generate_code_example(curl)|...|...}
|
||||
if line.startswith("{generate_code_example(curl"):
|
||||
curl_commands_to_test.append(line)
|
||||
line
|
||||
for line in f
|
||||
if line.startswith("{generate_code_example(curl")
|
||||
]
|
||||
else:
|
||||
# If the file doesn't exist, then it has been
|
||||
# deleted and its page is generated by the
|
||||
|
@ -64,7 +65,7 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
|
|||
endpoint_path, endpoint_method = get_endpoint_from_operationid(endpoint)
|
||||
endpoint_string = endpoint_path + ":" + endpoint_method
|
||||
command = f"{{generate_code_example(curl)|{endpoint_string}|example}}"
|
||||
curl_commands_to_test.append(command)
|
||||
curl_commands_to_test = [command]
|
||||
|
||||
for line in curl_commands_to_test:
|
||||
# To do an end-to-end test on the documentation examples
|
||||
|
|
|
@ -1587,10 +1587,9 @@ class SocialAuthBase(DesktopFlowTestingLib, ZulipTestCase, ABC):
|
|||
realm.save()
|
||||
|
||||
stream_names = ["new_stream_1", "new_stream_2"]
|
||||
streams = []
|
||||
for stream_name in stream_names:
|
||||
stream = ensure_stream(realm, stream_name, acting_user=None)
|
||||
streams.append(stream)
|
||||
streams = [
|
||||
ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
|
||||
]
|
||||
|
||||
referrer = self.example_user("hamlet")
|
||||
multiuse_obj = MultiuseInvite.objects.create(realm=realm, referred_by=referrer)
|
||||
|
@ -4598,10 +4597,9 @@ class GoogleAuthBackendTest(SocialAuthBase):
|
|||
realm.save()
|
||||
|
||||
stream_names = ["new_stream_1", "new_stream_2"]
|
||||
streams = []
|
||||
for stream_name in stream_names:
|
||||
stream = ensure_stream(realm, stream_name, acting_user=None)
|
||||
streams.append(stream)
|
||||
streams = [
|
||||
ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
|
||||
]
|
||||
|
||||
# Without the invite link, we can't create an account due to invite_required
|
||||
result = self.get_log_into_subdomain(data)
|
||||
|
@ -5032,9 +5030,8 @@ class ExternalMethodDictsTests(ZulipTestCase):
|
|||
expected_button_id_strings = [
|
||||
'id="{}_auth_button_github"',
|
||||
'id="{}_auth_button_google"',
|
||||
*(f'id="{{}}_auth_button_saml:{name}"' for name in saml_idp_names),
|
||||
]
|
||||
for name in saml_idp_names:
|
||||
expected_button_id_strings.append(f'id="{{}}_auth_button_saml:{name}"')
|
||||
|
||||
result = self.client_get("/login/")
|
||||
self.assert_in_success_response(
|
||||
|
@ -5094,9 +5091,11 @@ class FetchAuthBackends(ZulipTestCase):
|
|||
) -> None:
|
||||
authentication_methods_list = [
|
||||
("password", check_bool),
|
||||
*(
|
||||
(backend_name_with_case.lower(), check_bool)
|
||||
for backend_name_with_case in AUTH_BACKEND_NAME_MAP
|
||||
),
|
||||
]
|
||||
for backend_name_with_case in AUTH_BACKEND_NAME_MAP:
|
||||
authentication_methods_list.append((backend_name_with_case.lower(), check_bool))
|
||||
external_auth_methods = get_external_method_dicts()
|
||||
|
||||
response_dict = self.assert_json_success(result)
|
||||
|
|
|
@ -848,9 +848,10 @@ class NormalActionsTest(BaseAction):
|
|||
|
||||
def test_invite_user_event(self) -> None:
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
events = self.verify_action(
|
||||
|
@ -866,9 +867,10 @@ class NormalActionsTest(BaseAction):
|
|||
|
||||
def test_create_multiuse_invite_event(self) -> None:
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Verona"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Verona"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
events = self.verify_action(
|
||||
|
@ -902,9 +904,10 @@ class NormalActionsTest(BaseAction):
|
|||
# We need set self.user_profile to be an admin, so that
|
||||
# we receive the invites_changed event.
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Verona"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Verona"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_invite_users(
|
||||
|
@ -924,9 +927,10 @@ class NormalActionsTest(BaseAction):
|
|||
|
||||
def test_revoke_multiuse_invite_event(self) -> None:
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Verona"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Verona"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_create_multiuse_invite_link(
|
||||
|
@ -947,9 +951,10 @@ class NormalActionsTest(BaseAction):
|
|||
reset_email_visibility_to_everyone_in_zulip_realm()
|
||||
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_invite_users(
|
||||
|
@ -1462,9 +1467,10 @@ class NormalActionsTest(BaseAction):
|
|||
check_user_group_remove("events[0]", events[0])
|
||||
|
||||
def test_default_stream_groups_events(self) -> None:
|
||||
streams = []
|
||||
for stream_name in ["Scotland", "Rome", "Denmark"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Scotland", "Rome", "Denmark"]
|
||||
]
|
||||
|
||||
events = self.verify_action(
|
||||
lambda: do_create_default_stream_group(
|
||||
|
@ -1509,9 +1515,10 @@ class NormalActionsTest(BaseAction):
|
|||
check_default_stream_groups("events[0]", events[0])
|
||||
|
||||
def test_default_stream_group_events_guest(self) -> None:
|
||||
streams = []
|
||||
for stream_name in ["Scotland", "Rome", "Denmark"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Scotland", "Rome", "Denmark"]
|
||||
]
|
||||
|
||||
do_create_default_stream_group(self.user_profile.realm, "group1", "This is group1", streams)
|
||||
group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
|
||||
|
|
|
@ -1202,7 +1202,6 @@ class HomeTest(ZulipTestCase):
|
|||
# performance cost of fetching /.
|
||||
@override_settings(MAX_DRAFTS_IN_REGISTER_RESPONSE=5)
|
||||
def test_limit_drafts(self) -> None:
|
||||
draft_objects = []
|
||||
hamlet = self.example_user("hamlet")
|
||||
base_time = timezone_now()
|
||||
initial_count = Draft.objects.count()
|
||||
|
@ -1210,8 +1209,7 @@ class HomeTest(ZulipTestCase):
|
|||
step_value = timedelta(seconds=1)
|
||||
# Create 11 drafts.
|
||||
# TODO: This would be better done as an API request.
|
||||
for i in range(0, settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1):
|
||||
draft_objects.append(
|
||||
draft_objects = [
|
||||
Draft(
|
||||
user_profile=hamlet,
|
||||
recipient=None,
|
||||
|
@ -1219,7 +1217,8 @@ class HomeTest(ZulipTestCase):
|
|||
content="sample draft",
|
||||
last_edit_time=base_time + i * step_value,
|
||||
)
|
||||
)
|
||||
for i in range(settings.MAX_DRAFTS_IN_REGISTER_RESPONSE + 1)
|
||||
]
|
||||
Draft.objects.bulk_create(draft_objects)
|
||||
|
||||
# Now fetch the drafts part of the initial state and make sure
|
||||
|
|
|
@ -178,9 +178,7 @@ class InviteUserBase(ZulipTestCase):
|
|||
|
||||
streams should be a list of strings.
|
||||
"""
|
||||
stream_ids = []
|
||||
for stream_name in stream_names:
|
||||
stream_ids.append(self.get_stream_id(stream_name, realm=realm))
|
||||
stream_ids = [self.get_stream_id(stream_name, realm=realm) for stream_name in stream_names]
|
||||
|
||||
invite_expires_in: Union[str, Optional[int]] = invite_expires_in_minutes
|
||||
if invite_expires_in is None:
|
||||
|
@ -1475,9 +1473,10 @@ so we didn't send them an invitation. We did send invitations to everyone else!"
|
|||
|
||||
def test_send_more_than_one_invite_to_same_user(self) -> None:
|
||||
self.user_profile = self.example_user("iago")
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, self.user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, self.user_profile.realm)
|
||||
for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_invite_users(
|
||||
|
@ -1705,9 +1704,9 @@ class InvitationsTestCase(InviteUserBase):
|
|||
hamlet = self.example_user("hamlet")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_invite_users(
|
||||
|
@ -1761,9 +1760,9 @@ class InvitationsTestCase(InviteUserBase):
|
|||
hamlet = self.example_user("hamlet")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
invite_expires_in_minutes = 2 * 24 * 60
|
||||
do_invite_users(
|
||||
|
@ -1815,9 +1814,9 @@ class InvitationsTestCase(InviteUserBase):
|
|||
self.login("iago")
|
||||
user_profile = self.example_user("iago")
|
||||
|
||||
streams = []
|
||||
for stream_name in ["Denmark", "Scotland"]:
|
||||
streams.append(get_stream(stream_name, user_profile.realm))
|
||||
streams = [
|
||||
get_stream(stream_name, user_profile.realm) for stream_name in ["Denmark", "Scotland"]
|
||||
]
|
||||
|
||||
with patch(
|
||||
"confirmation.models.timezone_now",
|
||||
|
|
|
@ -672,16 +672,17 @@ class MarkdownTest(ZulipTestCase):
|
|||
|
||||
@override_settings(INLINE_IMAGE_PREVIEW=True)
|
||||
def test_max_inline_preview(self) -> None:
|
||||
image_links = []
|
||||
image_links = [
|
||||
# Add a youtube link within a spoiler to ensure other link types are counted
|
||||
image_links.append(
|
||||
"""```spoiler Check out this PyCon video\nhttps://www.youtube.com/watch?v=0c46YHS3RY8\n```"""
|
||||
)
|
||||
"""```spoiler Check out this PyCon video\nhttps://www.youtube.com/watch?v=0c46YHS3RY8\n```""",
|
||||
# Add a link within blockquote to test that it does NOT get counted
|
||||
image_links.append("> http://cdn.wallpapersafari.com/spoiler/dont_count.jpeg\n")
|
||||
"> http://cdn.wallpapersafari.com/spoiler/dont_count.jpeg\n",
|
||||
# Using INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1 because of the one link in a spoiler added already
|
||||
for x in range(InlineInterestingLinkProcessor.INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1):
|
||||
image_links.append(f"http://cdn.wallpapersafari.com/{x}/6/16eVjx.jpeg")
|
||||
*(
|
||||
f"http://cdn.wallpapersafari.com/{x}/6/16eVjx.jpeg"
|
||||
for x in range(InlineInterestingLinkProcessor.INLINE_PREVIEW_LIMIT_PER_MESSAGE - 1)
|
||||
),
|
||||
]
|
||||
within_limit_content = "\n".join(image_links)
|
||||
above_limit_content = (
|
||||
within_limit_content + "\nhttp://cdn.wallpapersafari.com/above/0/6/16eVjx.jpeg"
|
||||
|
|
|
@ -2075,9 +2075,7 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
self.login_user(me)
|
||||
self.subscribe(self.example_user("hamlet"), "Scotland")
|
||||
|
||||
message_ids = []
|
||||
for i in range(5):
|
||||
message_ids.append(self.send_personal_message(me, self.example_user("iago")))
|
||||
message_ids = [self.send_personal_message(me, self.example_user("iago")) for i in range(5)]
|
||||
|
||||
narrow = [dict(operator="dm", operand=self.example_user("iago").email)]
|
||||
self.message_visibility_test(narrow, message_ids, 2)
|
||||
|
@ -2093,64 +2091,46 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
cordelia = self.example_user("cordelia")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
matching_message_ids = []
|
||||
matching_message_ids = [
|
||||
# group direct message, sent by current user
|
||||
matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, cordelia, othello],
|
||||
),
|
||||
)
|
||||
# group direct message, sent by searched user
|
||||
matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
cordelia,
|
||||
[me, othello],
|
||||
),
|
||||
)
|
||||
# group direct message, sent by another user
|
||||
matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
othello,
|
||||
[me, cordelia],
|
||||
),
|
||||
)
|
||||
# direct 1:1 message, sent by current user to searched user
|
||||
matching_message_ids.append(
|
||||
self.send_personal_message(me, cordelia),
|
||||
)
|
||||
# direct 1:1 message, sent by searched user to current user
|
||||
matching_message_ids.append(
|
||||
self.send_personal_message(cordelia, me),
|
||||
)
|
||||
]
|
||||
|
||||
non_matching_message_ids = []
|
||||
non_matching_message_ids = [
|
||||
# direct 1:1 message, does not include current user
|
||||
non_matching_message_ids.append(
|
||||
self.send_personal_message(iago, cordelia),
|
||||
)
|
||||
# direct 1:1 message, does not include searched user
|
||||
non_matching_message_ids.append(
|
||||
self.send_personal_message(iago, me),
|
||||
)
|
||||
# direct 1:1 message, current user to self
|
||||
non_matching_message_ids.append(
|
||||
self.send_personal_message(me, me),
|
||||
)
|
||||
# group direct message, sent by current user
|
||||
non_matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, othello],
|
||||
),
|
||||
)
|
||||
# group direct message, sent by searched user
|
||||
non_matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
cordelia,
|
||||
[iago, othello],
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
self.login_user(me)
|
||||
test_operands = [cordelia.email, cordelia.id]
|
||||
|
@ -2169,27 +2149,22 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
cordelia = self.example_user("cordelia")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
message_ids = []
|
||||
message_ids.append(
|
||||
message_ids = [
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, cordelia, othello],
|
||||
),
|
||||
)
|
||||
message_ids.append(self.send_personal_message(me, cordelia))
|
||||
message_ids.append(
|
||||
self.send_personal_message(me, cordelia),
|
||||
self.send_huddle_message(
|
||||
cordelia,
|
||||
[me, othello],
|
||||
),
|
||||
)
|
||||
message_ids.append(self.send_personal_message(cordelia, me))
|
||||
message_ids.append(
|
||||
self.send_personal_message(cordelia, me),
|
||||
self.send_huddle_message(
|
||||
iago,
|
||||
[cordelia, me],
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
narrow = [dict(operator="dm-including", operand=cordelia.email)]
|
||||
self.message_visibility_test(narrow, message_ids, 2)
|
||||
|
@ -2205,41 +2180,28 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
cordelia = self.example_user("cordelia")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
matching_message_ids = []
|
||||
|
||||
matching_message_ids.append(
|
||||
matching_message_ids = [
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, cordelia, othello],
|
||||
),
|
||||
)
|
||||
|
||||
matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[cordelia, othello],
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
non_matching_message_ids = []
|
||||
|
||||
non_matching_message_ids.append(
|
||||
non_matching_message_ids = [
|
||||
self.send_personal_message(me, cordelia),
|
||||
)
|
||||
|
||||
non_matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, othello],
|
||||
),
|
||||
)
|
||||
|
||||
non_matching_message_ids.append(
|
||||
self.send_huddle_message(
|
||||
self.example_user("cordelia"),
|
||||
[iago, othello],
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
self.login_user(me)
|
||||
test_operands = [cordelia.email, cordelia.id]
|
||||
|
@ -2258,25 +2220,20 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
cordelia = self.example_user("cordelia")
|
||||
othello = self.example_user("othello")
|
||||
|
||||
message_ids = []
|
||||
message_ids.append(
|
||||
message_ids = [
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[iago, cordelia, othello],
|
||||
),
|
||||
)
|
||||
message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[cordelia, othello],
|
||||
),
|
||||
)
|
||||
message_ids.append(
|
||||
self.send_huddle_message(
|
||||
me,
|
||||
[cordelia, iago],
|
||||
),
|
||||
)
|
||||
]
|
||||
|
||||
narrow = [dict(operator="group-pm-with", operand=cordelia.email)]
|
||||
self.message_visibility_test(narrow, message_ids, 1)
|
||||
|
@ -2352,9 +2309,9 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
self.login("hamlet")
|
||||
self.subscribe(self.example_user("hamlet"), "Scotland")
|
||||
|
||||
message_ids = []
|
||||
for i in range(5):
|
||||
message_ids.append(self.send_stream_message(self.example_user("iago"), "Scotland"))
|
||||
message_ids = [
|
||||
self.send_stream_message(self.example_user("iago"), "Scotland") for i in range(5)
|
||||
]
|
||||
|
||||
narrow = [dict(operator="stream", operand="Scotland")]
|
||||
self.message_visibility_test(narrow, message_ids, 2)
|
||||
|
@ -2695,13 +2652,12 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
),
|
||||
]
|
||||
|
||||
message_ids = []
|
||||
for topic, content in messages_to_search:
|
||||
message_ids.append(
|
||||
message_ids = [
|
||||
self.send_stream_message(
|
||||
self.example_user("iago"), "Scotland", topic_name=topic, content=content
|
||||
)
|
||||
)
|
||||
for topic, content in messages_to_search
|
||||
]
|
||||
self._update_tsvector_index()
|
||||
narrow = [dict(operator="search", operand="Hogwart's")]
|
||||
self.message_visibility_test(narrow, message_ids, 2)
|
||||
|
@ -3045,9 +3001,9 @@ class GetOldMessagesTest(ZulipTestCase):
|
|||
|
||||
Message.objects.all().delete()
|
||||
|
||||
message_ids = []
|
||||
for i in range(10):
|
||||
message_ids.append(self.send_stream_message(self.example_user("cordelia"), "Verona"))
|
||||
message_ids = [
|
||||
self.send_stream_message(self.example_user("cordelia"), "Verona") for i in range(10)
|
||||
]
|
||||
|
||||
data = self.get_messages_response(anchor=message_ids[9], num_before=9, num_after=0)
|
||||
|
||||
|
@ -4261,26 +4217,20 @@ class MessageHasKeywordsTest(ZulipTestCase):
|
|||
assert_attachment_claimed(dummy_path_ids[1], False)
|
||||
|
||||
def test_finds_all_links(self) -> None:
|
||||
msg_ids = []
|
||||
msg_contents = ["foo.org", "[bar](baz.gov)", "http://quux.ca"]
|
||||
for msg_content in msg_contents:
|
||||
msg_ids.append(
|
||||
self.send_stream_message(
|
||||
self.example_user("hamlet"), "Denmark", content=msg_content
|
||||
)
|
||||
)
|
||||
msg_ids = [
|
||||
self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
|
||||
for msg_content in msg_contents
|
||||
]
|
||||
msgs = [Message.objects.get(id=id) for id in msg_ids]
|
||||
self.assertTrue(all(msg.has_link for msg in msgs))
|
||||
|
||||
def test_finds_only_links(self) -> None:
|
||||
msg_ids = []
|
||||
msg_contents = ["`example.org`", "``example.org```", "$$https://example.org$$", "foo"]
|
||||
for msg_content in msg_contents:
|
||||
msg_ids.append(
|
||||
self.send_stream_message(
|
||||
self.example_user("hamlet"), "Denmark", content=msg_content
|
||||
)
|
||||
)
|
||||
msg_ids = [
|
||||
self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
|
||||
for msg_content in msg_contents
|
||||
]
|
||||
msgs = [Message.objects.get(id=id) for id in msg_ids]
|
||||
self.assertFalse(all(msg.has_link for msg in msgs))
|
||||
|
||||
|
@ -4321,19 +4271,16 @@ class MessageHasKeywordsTest(ZulipTestCase):
|
|||
self.assertFalse(msg.has_link)
|
||||
|
||||
def test_has_image(self) -> None:
|
||||
msg_ids = []
|
||||
msg_contents = [
|
||||
"Link: foo.org",
|
||||
"Image: https://www.google.com/images/srpr/logo4w.png",
|
||||
"Image: https://www.google.com/images/srpr/logo4w.pdf",
|
||||
"[Google link](https://www.google.com/images/srpr/logo4w.png)",
|
||||
]
|
||||
for msg_content in msg_contents:
|
||||
msg_ids.append(
|
||||
self.send_stream_message(
|
||||
self.example_user("hamlet"), "Denmark", content=msg_content
|
||||
)
|
||||
)
|
||||
msg_ids = [
|
||||
self.send_stream_message(self.example_user("hamlet"), "Denmark", content=msg_content)
|
||||
for msg_content in msg_contents
|
||||
]
|
||||
msgs = [Message.objects.get(id=id) for id in msg_ids]
|
||||
self.assertEqual([False, True, False, True], [msg.has_image for msg in msgs])
|
||||
|
||||
|
|
|
@ -1352,18 +1352,14 @@ class StreamMessagesTest(ZulipTestCase):
|
|||
if subscriber.bot_type != UserProfile.OUTGOING_WEBHOOK_BOT
|
||||
]
|
||||
|
||||
old_subscriber_messages = []
|
||||
for subscriber in subscribers:
|
||||
old_subscriber_messages.append(message_stream_count(subscriber))
|
||||
old_subscriber_messages = list(map(message_stream_count, subscribers))
|
||||
|
||||
non_subscribers = [
|
||||
user_profile
|
||||
for user_profile in UserProfile.objects.all()
|
||||
if user_profile not in subscribers
|
||||
]
|
||||
old_non_subscriber_messages = []
|
||||
for non_subscriber in non_subscribers:
|
||||
old_non_subscriber_messages.append(message_stream_count(non_subscriber))
|
||||
old_non_subscriber_messages = list(map(message_stream_count, non_subscribers))
|
||||
|
||||
non_bot_subscribers = [
|
||||
user_profile for user_profile in subscribers if not user_profile.is_bot
|
||||
|
@ -1373,14 +1369,10 @@ class StreamMessagesTest(ZulipTestCase):
|
|||
self.send_stream_message(a_subscriber, stream_name, content=content, topic_name=topic_name)
|
||||
|
||||
# Did all of the subscribers get the message?
|
||||
new_subscriber_messages = []
|
||||
for subscriber in subscribers:
|
||||
new_subscriber_messages.append(message_stream_count(subscriber))
|
||||
new_subscriber_messages = list(map(message_stream_count, subscribers))
|
||||
|
||||
# Did non-subscribers not get the message?
|
||||
new_non_subscriber_messages = []
|
||||
for non_subscriber in non_subscribers:
|
||||
new_non_subscriber_messages.append(message_stream_count(non_subscriber))
|
||||
new_non_subscriber_messages = list(map(message_stream_count, non_subscribers))
|
||||
|
||||
self.assertEqual(old_non_subscriber_messages, new_non_subscriber_messages)
|
||||
self.assertEqual(new_subscriber_messages, [elt + 1 for elt in old_subscriber_messages])
|
||||
|
@ -2009,17 +2001,12 @@ class PersonalMessageSendTest(ZulipTestCase):
|
|||
test_email = self.nonreg_email("test1")
|
||||
self.register(test_email, "test1")
|
||||
|
||||
old_messages = []
|
||||
for user_profile in old_user_profiles:
|
||||
old_messages.append(message_stream_count(user_profile))
|
||||
old_messages = list(map(message_stream_count, old_user_profiles))
|
||||
|
||||
user_profile = self.nonreg_user("test1")
|
||||
self.send_personal_message(user_profile, user_profile)
|
||||
|
||||
new_messages = []
|
||||
for user_profile in old_user_profiles:
|
||||
new_messages.append(message_stream_count(user_profile))
|
||||
|
||||
new_messages = list(map(message_stream_count, old_user_profiles))
|
||||
self.assertEqual(old_messages, new_messages)
|
||||
|
||||
user_profile = self.nonreg_user("test1")
|
||||
|
@ -2037,17 +2024,12 @@ class PersonalMessageSendTest(ZulipTestCase):
|
|||
receiver_messages = message_stream_count(receiver)
|
||||
|
||||
other_user_profiles = UserProfile.objects.filter(~Q(id=sender.id) & ~Q(id=receiver.id))
|
||||
old_other_messages = []
|
||||
for user_profile in other_user_profiles:
|
||||
old_other_messages.append(message_stream_count(user_profile))
|
||||
old_other_messages = list(map(message_stream_count, other_user_profiles))
|
||||
|
||||
self.send_personal_message(sender, receiver, content)
|
||||
|
||||
# Users outside the conversation don't get the message.
|
||||
new_other_messages = []
|
||||
for user_profile in other_user_profiles:
|
||||
new_other_messages.append(message_stream_count(user_profile))
|
||||
|
||||
new_other_messages = list(map(message_stream_count, other_user_profiles))
|
||||
self.assertEqual(old_other_messages, new_other_messages)
|
||||
|
||||
# The personal message is in the streams of both the sender and receiver.
|
||||
|
|
|
@@ -796,9 +796,7 @@ class WorkerTest(ZulipTestCase):
        base_classes = [QueueProcessingWorker]
        all_classes = []
        while base_classes:
            new_subclasses = []
            for base_class in base_classes:
                new_subclasses.append(base_class.__subclasses__())
            new_subclasses = (base_class.__subclasses__() for base_class in base_classes)
            base_classes = list(itertools.chain(*new_subclasses))
            all_classes += base_classes
        worker_queue_names = {

@@ -278,15 +278,14 @@ class RealmExportTest(ZulipTestCase):
        current_log = RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_EXPORTED)
        self.assert_length(current_log, 0)

        exports = []
        for i in range(0, 5):
            exports.append(
        exports = [
            RealmAuditLog(
                realm=admin.realm,
                event_type=RealmAuditLog.REALM_EXPORTED,
                event_time=timezone_now(),
            )
            )
            for i in range(5)
        ]
        RealmAuditLog.objects.bulk_create(exports)

        with self.assertRaises(JsonableError) as error:

@@ -62,12 +62,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            do_soft_deactivate_users(users)

        log_output = []
        for user in users:
            log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
        log_output.append(
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
        )
        log_output = [
            *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
        ]

        self.assertEqual(m.output, log_output)

@@ -118,13 +116,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            do_soft_deactivate_users(users)

        log_output = []
        for user in users:
            log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
        log_output.append(
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
        )

        log_output = [
            *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
        ]
        self.assertEqual(m.output, log_output)

        for user in users:

@@ -133,10 +128,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            do_soft_activate_users(users)

        log_output = []
        for user in users:
            log_output.append(f"INFO:{logger_string}:Soft reactivated user {user.id}")

        log_output = [f"INFO:{logger_string}:Soft reactivated user {user.id}" for user in users]
        self.assertEqual(m.output, log_output)

        for user in users:

@@ -181,13 +173,10 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            do_soft_deactivate_users(users)

        log_output = []
        for user in users:
            log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
        log_output.append(
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
        )

        log_output = [
            *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
        ]
        self.assertEqual(m.output, log_output)

        for user in users:

@@ -243,13 +232,11 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            users_deactivated = do_auto_soft_deactivate_users(-1, realm)

        log_output = []
        for user in users:
            log_output.append(f"INFO:{logger_string}:Soft deactivated user {user.id}")
        log_output.append(
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process"
        )
        log_output.append(f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users")
        log_output = [
            *(f"INFO:{logger_string}:Soft deactivated user {user.id}" for user in users),
            f"INFO:{logger_string}:Soft-deactivated batch of {len(users[:100])} users; {len(users[100:])} remain to process",
            f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users",
        ]
        self.assertEqual(set(m.output), set(log_output))

        self.assert_length(users_deactivated, len(users))

@@ -267,8 +254,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
        with self.assertLogs(logger_string, level="INFO") as m:
            users_deactivated = do_auto_soft_deactivate_users(-1, realm)

        log_output = []
        log_output.append(f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users")
        log_output = [f"INFO:{logger_string}:Caught up {len(users)} soft-deactivated users"]
        self.assertEqual(set(m.output), set(log_output))

        self.assert_length(users_deactivated, 0)  # all users are already deactivated

@@ -1289,15 +1289,11 @@ class StreamAdminTest(ZulipTestCase):

    def test_deactivate_stream_removes_stream_from_default_stream_groups(self) -> None:
        realm = get_realm("zulip")
        streams_to_keep = []
        for stream_name in ["stream1", "stream2"]:
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams_to_keep.append(stream)

        streams_to_remove = []
        stream = ensure_stream(realm, "stream3", acting_user=None)
        streams_to_remove.append(stream)

        streams_to_keep = [
            ensure_stream(realm, stream_name, acting_user=None)
            for stream_name in ["stream1", "stream2"]
        ]
        streams_to_remove = [ensure_stream(realm, "stream3", acting_user=None)]
        all_streams = streams_to_keep + streams_to_remove

        def get_streams(group: DefaultStreamGroup) -> List[Stream]:

@@ -2471,12 +2467,7 @@ class StreamAdminTest(ZulipTestCase):
        self.make_stream(stream_name, invite_only=invite_only)

        # Set up the principal to be unsubscribed.
        principals: List[Union[str, int]] = []
        for user in target_users:
            if using_legacy_emails:
                principals.append(user.email)
            else:
                principals.append(user.id)
        principals = [user.email if using_legacy_emails else user.id for user in target_users]

        # Subscribe the admin and/or principal as specified in the flags.
        if is_subbed:

@@ -2893,10 +2884,10 @@ class DefaultStreamGroupTest(ZulipTestCase):
        default_stream_groups = get_default_stream_groups(realm)
        self.assert_length(default_stream_groups, 0)

        streams = []
        for stream_name in ["stream1", "stream2", "stream3"]:
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams.append(stream)
        streams = [
            ensure_stream(realm, stream_name, acting_user=None)
            for stream_name in ["stream1", "stream2", "stream3"]
        ]

        def get_streams(group: DefaultStreamGroup) -> List[Stream]:
            return list(group.streams.all().order_by("name"))

@@ -2920,11 +2911,11 @@ class DefaultStreamGroupTest(ZulipTestCase):
            "stream8",
            "stream9",
        ]
        new_streams = []
        for new_stream_name in new_stream_names:
            new_stream = ensure_stream(realm, new_stream_name, acting_user=None)
            new_streams.append(new_stream)
            streams.append(new_stream)
        new_streams = [
            ensure_stream(realm, new_stream_name, acting_user=None)
            for new_stream_name in new_stream_names
        ]
        streams += new_streams

        do_add_streams_to_default_stream_group(realm, group, new_streams)
        default_stream_groups = get_default_stream_groups(realm)

@@ -2980,13 +2971,12 @@ class DefaultStreamGroupTest(ZulipTestCase):
        stream_names = ["stream1", "stream2", "stream3"]
        group_name = "group1"
        description = "This is group1"
        streams = []
        default_stream_groups = get_default_stream_groups(realm)
        self.assert_length(default_stream_groups, 0)

        for stream_name in stream_names:
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams.append(stream)
        streams = [
            ensure_stream(realm, stream_name, acting_user=None) for stream_name in stream_names
        ]

        result = self.client_post(
            "/json/default_stream_groups/create",

@@ -3017,11 +3007,11 @@ class DefaultStreamGroupTest(ZulipTestCase):
        # Test adding streams to existing default stream group
        group_id = default_stream_groups[0].id
        new_stream_names = ["stream4", "stream5"]
        new_streams = []
        for new_stream_name in new_stream_names:
            new_stream = ensure_stream(realm, new_stream_name, acting_user=None)
            new_streams.append(new_stream)
            streams.append(new_stream)
        new_streams = [
            ensure_stream(realm, new_stream_name, acting_user=None)
            for new_stream_name in new_stream_names
        ]
        streams += new_streams

        result = self.client_patch(
            f"/json/default_stream_groups/{group_id}/streams",

@@ -3169,11 +3159,8 @@ class DefaultStreamGroupTest(ZulipTestCase):

        stream_names = ["stream1", "stream2", "stream3"]
        description = "This is group1"
        streams = []

        for stream_name in stream_names:
            stream = ensure_stream(realm, stream_name, acting_user=None)
            streams.append(stream)
            ensure_stream(realm, stream_name, acting_user=None)

        result = self.client_post(
            "/json/default_stream_groups/create",

@@ -3885,15 +3872,14 @@ class SubscriptionAPITest(ZulipTestCase):
        """
        Helper function to make up random stream names. It takes
        existing_stream_names and randomly appends a digit to the end of each,
        but avoids names that appear in the list names_to_avoid.
        but avoids names of streams already in the realm.
        """
        random_streams = []
        all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.test_realm)]
        for stream in existing_stream_names:
            random_stream = stream + str(random.randint(0, 9))
            if random_stream not in all_stream_names:
                random_streams.append(random_stream)
        return random_streams
        return [
            random_stream
            for stream in existing_stream_names
            if (random_stream := stream + str(random.randint(0, 9))) not in all_stream_names
        ]

    def test_invalid_stream_name(self) -> None:
        """

@@ -5159,10 +5145,11 @@ class SubscriptionAPITest(ZulipTestCase):
        """
        self.assertGreaterEqual(len(self.streams), 2)
        streams_to_remove = self.streams[1:]
        not_subbed = []
        for stream in Stream.objects.filter(realm=get_realm("zulip")):
            if stream.name not in self.streams:
                not_subbed.append(stream.name)
        not_subbed = [
            stream.name
            for stream in Stream.objects.filter(realm=get_realm("zulip"))
            if stream.name not in self.streams
        ]
        random.shuffle(not_subbed)
        self.assertNotEqual(len(not_subbed), 0)  # necessary for full test coverage
        try_to_remove = not_subbed[:3]  # attempt to remove up to 3 streams not already subbed to

@@ -497,9 +497,9 @@ def remove_subscriptions_backend(
) -> HttpResponse:
    realm = user_profile.realm

    streams_as_dict: List[StreamDict] = []
    for stream_name in streams_raw:
        streams_as_dict.append({"name": stream_name.strip()})
    streams_as_dict: List[StreamDict] = [
        {"name": stream_name.strip()} for stream_name in streams_raw
    ]

    unsubscribing_others = False
    if principals:

@@ -71,10 +71,7 @@ def get_code_push_commits_body(payload: WildValue) -> str:
        payload["resource"]["refUpdates"][0]["oldObjectId"].tame(check_string),
        payload["resource"]["refUpdates"][0]["newObjectId"].tame(check_string),
    )
    commits_data = []
    if payload["resource"].get("commits"):
        for commit in payload["resource"]["commits"]:
            commits_data.append(
    commits_data = [
        {
            "name": commit["author"]["name"].tame(check_string),
            "sha": commit["commitId"].tame(check_string),

@@ -83,7 +80,8 @@ def get_code_push_commits_body(payload: WildValue) -> str:
            ),
            "message": commit["comment"].tame(check_string),
        }
        )
        for commit in payload["resource"].get("commits", [])
    ]
    return get_push_commits_event_message(
        get_code_push_user_name(payload),
        compare_url,

@@ -36,17 +36,15 @@ def build_message_from_gitlog(


def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]:
    new_commits_list = []
    for commit in commits:
        new_commits_list.append(
    return [
        {
            "name": commit["author"]["name"].tame(check_string),
            "sha": commit["id"].tame(check_string),
            "url": commit["url"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }
        )
    return new_commits_list
        for commit in commits
    ]


@authenticated_rest_api_view(

@@ -129,9 +129,10 @@ def get_topic_for_http_request(payload: WildValue) -> str:


def get_body_for_maintenance_planned_event(payload: WildValue) -> str:
    services_data = []
    for service in payload["affected_services"].tame(check_string).split(","):
        services_data.append({"service_name": service})
    services_data = [
        {"service_name": service}
        for service in payload["affected_services"].tame(check_string).split(",")
    ]
    data = {
        "title": payload["title"].tame(check_string),
        "description": payload["description"].tame(check_string),

@@ -147,9 +148,10 @@ def get_body_for_maintenance_planned_event(payload: WildValue) -> str:


def get_body_for_incident_open_event(payload: WildValue) -> str:
    services_data = []
    for service in payload["affected_services"].tame(check_string).split(","):
        services_data.append({"service_name": service})
    services_data = [
        {"service_name": service}
        for service in payload["affected_services"].tame(check_string).split(",")
    ]
    data = {
        "title": payload["title"].tame(check_string),
        "description": payload["description"].tame(check_string),

@@ -566,9 +566,6 @@ def get_pull_request_review_comment_body(helper: Helper) -> str:
def get_pull_request_review_requested_body(helper: Helper) -> str:
    payload = helper.payload
    include_title = helper.include_title
    requested_reviewer = [payload["requested_reviewer"]] if "requested_reviewer" in payload else []

    requested_team = [payload["requested_team"]] if "requested_team" in payload else []

    sender = get_sender_name(payload)
    pr_number = payload["pull_request"]["number"].tame(check_int)

@@ -579,26 +576,18 @@ def get_pull_request_review_requested_body(helper: Helper) -> str:
    )
    body = message_with_title if include_title else message

    all_reviewers = []

    for reviewer in requested_reviewer:
        all_reviewers.append(
            "[{login}]({html_url})".format(
    if "requested_reviewer" in payload:
        reviewer = payload["requested_reviewer"]
        reviewers = "[{login}]({html_url})".format(
            login=reviewer["login"].tame(check_string),
            html_url=reviewer["html_url"].tame(check_string),
        )
        )

    for team_reviewer in requested_team:
        all_reviewers.append(
            "[{name}]({html_url})".format(
    else:
        team_reviewer = payload["requested_team"]
        reviewers = "[{name}]({html_url})".format(
            name=team_reviewer["name"].tame(check_string),
            html_url=team_reviewer["html_url"].tame(check_string),
        )
        )

    reviewers = ""
    reviewers = all_reviewers[0]

    return body.format(
        sender=sender,

@@ -46,9 +46,7 @@ def format_push_event(payload: WildValue) -> str:


def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[str, str]]:
    new_commits_list = []
    for commit in commits:
        new_commits_list.append(
    return [
        {
            "name": commit["author"]["username"].tame(check_string)
            or commit["author"]["name"].tame(check_string).split()[0],

@@ -56,8 +54,8 @@ def _transform_commits_list_to_common_format(commits: WildValue) -> List[Dict[st
            "url": commit["url"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }
        )
    return new_commits_list
        for commit in commits
    ]


def format_new_branch_event(payload: WildValue) -> str:

@@ -100,10 +100,7 @@ def get_body_for_tracks_retagged_event(payload: WildValue) -> str:


def get_body_for_tracks_imported_upgrade_event(payload: WildValue) -> str:
    tracks_data = []
    for track in payload["tracks"]:
        tracks_data.append({"title": track["title"].tame(check_string)})

    tracks_data = [{"title": track["title"].tame(check_string)} for track in payload["tracks"]]
    data = {
        "artist_name": payload["artist"]["name"].tame(check_string),
        "tracks_final_data": get_tracks_content(tracks_data),

@@ -113,10 +110,7 @@ def get_body_for_tracks_imported_upgrade_event(payload: WildValue) -> str:


def get_body_for_tracks_imported_event(payload: WildValue) -> str:
    tracks_data = []
    for track in payload["tracks"]:
        tracks_data.append({"title": track["title"].tame(check_string)})

    tracks_data = [{"title": track["title"].tame(check_string)} for track in payload["tracks"]]
    data = {
        "artist_name": payload["artist"]["name"].tame(check_string),
        "tracks_final_data": get_tracks_content(tracks_data),

@@ -14,16 +14,15 @@ from zerver.models import UserProfile


def get_push_commits_body(payload: WildValue) -> str:
    commits_data = []
    for commit in payload["event"]["push"]["commits"]:
        commits_data.append(
    commits_data = [
        {
            "name": commit["author"].tame(check_string),
            "sha": commit["raw_id"].tame(check_string),
            "url": commit["url"].tame(check_string),
            "message": commit["message"].tame(check_string),
        }
        )
        for commit in payload["event"]["push"]["commits"]
    ]
    return get_push_commits_event_message(
        get_user_name(payload),
        None,

@@ -1,7 +1,7 @@
# Webhooks for external integrations.
import re
from itertools import zip_longest
from typing import Literal, Optional, TypedDict, cast
from typing import List, Literal, Optional, TypedDict, cast

from django.http import HttpRequest, HttpResponse
from django.utils.translation import gettext as _

@@ -58,14 +58,12 @@ def api_slack_incoming_webhook(
    if user_specified_topic is None:
        user_specified_topic = "(no topic)"

    pieces = []
    pieces: List[str] = []
    if "blocks" in payload and payload["blocks"]:
        for block in payload["blocks"]:
            pieces.append(render_block(block))
        pieces += map(render_block, payload["blocks"])

    if "attachments" in payload and payload["attachments"]:
        for attachment in payload["attachments"]:
            pieces.append(render_attachment(attachment))
        pieces += map(render_attachment, payload["attachments"])

    body = "\n\n".join(piece.strip() for piece in pieces if piece.strip() != "")

@@ -218,8 +216,7 @@ def render_attachment(attachment: WildValue) -> str:
            fields.append(f"{value}")
        pieces.append("\n".join(fields))
    if "blocks" in attachment and attachment["blocks"]:
        for block in attachment["blocks"]:
            pieces.append(render_block(block))
        pieces += map(render_block, attachment["blocks"])
    if "image_url" in attachment and attachment["image_url"]:
        pieces.append("[]({})".format(attachment["image_url"].tame(check_url)))
    if "footer" in attachment and attachment["footer"]:

@@ -30,10 +30,7 @@ def api_taiga_webhook(
    message: WildValue = REQ(argument_type="body", converter=to_wild_value),
) -> HttpResponse:
    parsed_events = parse_message(message)
    content_lines = []
    for event in parsed_events:
        content_lines.append(generate_content(event) + "\n")
    content = "".join(sorted(content_lines))
    content = "".join(sorted(generate_content(event) + "\n" for event in parsed_events))
    topic = "General"
    if message["data"].get("milestone") and "name" in message["data"]["milestone"]:
        topic = message["data"]["milestone"]["name"].tame(check_string)

@@ -183,13 +183,9 @@ def create_alert_words(realm_id: int) -> None:
    recs: List[AlertWord] = []
    for user_id in user_ids:
        random.shuffle(alert_words)
        for i in range(4):
            recs.append(
                AlertWord(
                    realm_id=realm_id,
                    user_profile_id=user_id,
                    word=alert_words[i],
                )
        recs.extend(
            AlertWord(realm_id=realm_id, user_profile_id=user_id, word=word)
            for word in alert_words[:4]
        )

    AlertWord.objects.bulk_create(recs)

@@ -545,9 +541,11 @@ class Command(BaseCommand):
        # are needed for the test suite.
        zulip_realm_bots = [
            ("Zulip Default Bot", "default-bot@zulip.com"),
            *(
                (f"Extra Bot {i}", f"extrabot{i}@zulip.com")
                for i in range(options["extra_bots"])
            ),
        ]
        for i in range(options["extra_bots"]):
            zulip_realm_bots.append((f"Extra Bot {i}", f"extrabot{i}@zulip.com"))

        create_users(
            zulip_realm, zulip_realm_bots, bot_type=UserProfile.DEFAULT_BOT, bot_owner=desdemona

@@ -1159,11 +1157,7 @@ def send_messages(messages: List[Message]) -> None:
    # up with queued events that reference objects from a previous
    # life of the database, which naturally throws exceptions.
    settings.USING_RABBITMQ = False
    message_dict_list = []
    for message in messages:
        message_dict = build_message_send_dict(message=message)
        message_dict_list.append(message_dict)
    do_send_messages(message_dict_list)
    do_send_messages([build_message_send_dict(message=message) for message in messages])
    bulk_create_reactions(messages)
    settings.USING_RABBITMQ = True

@@ -684,8 +684,7 @@ urls += [
# Incoming webhook URLs
# We don't create URLs for particular Git integrations here
# because of generic one below
for incoming_webhook in WEBHOOK_INTEGRATIONS:
    urls.append(incoming_webhook.url_object)
urls.extend(incoming_webhook.url_object for incoming_webhook in WEBHOOK_INTEGRATIONS)

# Desktop-specific authentication URLs
urls += [