diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py
index 9c788eb6ef..1f6f556873 100644
--- a/zerver/data_import/import_util.py
+++ b/zerver/data_import/import_util.py
@@ -634,12 +634,10 @@ def run_parallel_wrapper(
     logging.info("Distributing %s items across %s threads", len(full_items), threads)
 
     with ProcessPoolExecutor(max_workers=threads) as executor:
-        count = 0
-        for future in as_completed(
-            executor.submit(wrapping_function, f, item) for item in full_items
+        for count, future in enumerate(
+            as_completed(executor.submit(wrapping_function, f, item) for item in full_items), 1
         ):
             future.result()
-            count += 1
             if count % 1000 == 0:
                 logging.info("Finished %s items", count)
 
diff --git a/zerver/data_import/slack.py b/zerver/data_import/slack.py
index efa2d5008e..8f98f6cb32 100644
--- a/zerver/data_import/slack.py
+++ b/zerver/data_import/slack.py
@@ -1,3 +1,4 @@
+import itertools
 import logging
 import os
 import posixpath
@@ -754,17 +755,7 @@ def convert_slack_workspace_messages(
         zerver_subscription=realm["zerver_subscription"],
     )
 
-    while True:
-        message_data = []
-        _counter = 0
-        for msg in all_messages:
-            _counter += 1
-            message_data.append(msg)
-            if _counter == chunk_size:
-                break
-        if len(message_data) == 0:
-            break
-
+    while message_data := list(itertools.islice(all_messages, chunk_size)):
         (
             zerver_message,
             zerver_usermessage,
diff --git a/zerver/lib/export.py b/zerver/lib/export.py
index 136e582f86..d96d37433b 100644
--- a/zerver/lib/export.py
+++ b/zerver/lib/export.py
@@ -1715,9 +1715,8 @@ def export_files_from_s3(
 def export_uploads_from_local(
     realm: Realm, local_dir: Path, output_dir: Path, attachments: List[Attachment]
 ) -> None:
-    count = 0
     records = []
-    for attachment in attachments:
+    for count, attachment in enumerate(attachments, 1):
         # Use 'mark_sanitized' to work around false positive caused by Pysa
         # thinking that 'realm' (and thus 'attachment' and 'attachment.path_id')
         # are user controlled
@@ -1741,8 +1740,6 @@
         )
         records.append(record)
 
-        count += 1
-
         if count % 100 == 0:
             logging.info("Finished %s", count)
 
@@ -1831,9 +1828,8 @@ def get_emoji_path(realm_emoji: RealmEmoji) -> str:
 def export_emoji_from_local(
     realm: Realm, local_dir: Path, output_dir: Path, realm_emojis: List[RealmEmoji]
 ) -> None:
-    count = 0
     records = []
-    for realm_emoji in realm_emojis:
+    for count, realm_emoji in enumerate(realm_emojis, 1):
         emoji_path = get_emoji_path(realm_emoji)
 
         # Use 'mark_sanitized' to work around false positive caused by Pysa
@@ -1862,7 +1858,6 @@
         )
         records.append(record)
 
-        count += 1
         if count % 100 == 0:
             logging.info("Finished %s", count)
 
diff --git a/zerver/lib/import_realm.py b/zerver/lib/import_realm.py
index c368739640..bfe13219a8 100644
--- a/zerver/lib/import_realm.py
+++ b/zerver/lib/import_realm.py
@@ -786,12 +786,7 @@ def import_uploads(
             bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
         bucket = get_bucket(bucket_name)
 
-    count = 0
-    for record in records:
-        count += 1
-        if count % 1000 == 0:
-            logging.info("Processed %s/%s uploads", count, len(records))
-
+    for count, record in enumerate(records, 1):
         if processing_avatars:
             # For avatars, we need to rehash the user ID with the
             # new server's avatar salt
@@ -878,6 +873,9 @@
             os.makedirs(os.path.dirname(file_path), exist_ok=True)
             shutil.copy(orig_file_path, file_path)
 
+        if count % 1000 == 0:
+            logging.info("Processed %s/%s uploads", count, len(records))
+
     if processing_avatars:
         # Ensure that we have medium-size avatar images for every
         # avatar. TODO: This implementation is hacky, both in that it
diff --git a/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py b/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py
index bcc1bc354c..a56649209e 100644
--- a/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py
+++ b/zerver/migrations/0177_user_message_add_and_index_is_private_flag.py
@@ -15,10 +15,9 @@ def reset_is_private_flag(apps: StateApps, schema_editor: BaseDatabaseSchemaEdit
     # zerver/migrations/0100_usermessage_remove_is_me_message.py
     # didn't clean the field after removing it.
 
-    i = 0
     total = len(user_profile_ids)
     print("Setting default values for the new flag...", flush=True)
-    for user_id in user_profile_ids:
+    for i, user_id in enumerate(user_profile_ids, 1):
         while True:
             # Ideally, we'd just do a single database query per user.
             # Unfortunately, Django doesn't use the fancy new index on
@@ -39,7 +38,6 @@
             if count < 1000:
                 break
 
-        i += 1
         if i % 50 == 0 or i == total:
             percent = round((i / total) * 100, 2)
             print(f"Processed {i}/{total} {percent}%", flush=True)
diff --git a/zerver/tests/test_message_edit.py b/zerver/tests/test_message_edit.py
index 1ce1e8cacf..31d8746329 100644
--- a/zerver/tests/test_message_edit.py
+++ b/zerver/tests/test_message_edit.py
@@ -996,8 +996,7 @@ class EditMessageTest(EditMessageTestCase):
         # We reverse the message history view output so that the IDs line up with the above.
         message_history = list(reversed(json_response["message_history"]))
 
-        i = 0
-        for entry in message_history:
+        for i, entry in enumerate(message_history):
             expected_entries = {"content", "rendered_content", "topic", "timestamp", "user_id"}
             if i in {0, 2, 4}:
                 expected_entries.add("prev_topic")
@@ -1009,7 +1008,6 @@
             if i in {0, 3}:
                 expected_entries.add("prev_stream")
                 expected_entries.add("stream")
-            i += 1
             self.assertEqual(expected_entries, set(entry.keys()))
         self.assert_length(message_history, 7)
         self.assertEqual(message_history[0]["topic"], "topic 4")
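
Note: the patch leans on two standard-library patterns: enumerate(iterable, 1) in place of a manual "count = 0 / count += 1" pair, and itertools.islice with the walrus operator to consume an iterator in fixed-size chunks (as in the slack.py hunk). A minimal standalone sketch of both patterns follows; the chunked() helper and the sample data are purely illustrative and not part of the patch:

    import itertools
    from typing import Iterator, List, TypeVar

    T = TypeVar("T")

    def chunked(iterator: Iterator[T], chunk_size: int) -> Iterator[List[T]]:
        # islice pulls up to chunk_size items per pass; it returns an empty
        # list once the iterator is exhausted, which ends the loop.
        while chunk := list(itertools.islice(iterator, chunk_size)):
            yield chunk

    items = iter(range(10))
    for count, chunk in enumerate(chunked(items, 4), 1):
        # enumerate(..., 1) starts the counter at 1, matching the old manual counters.
        print(f"chunk {count}: {chunk}")
    # chunk 1: [0, 1, 2, 3]
    # chunk 2: [4, 5, 6, 7]
    # chunk 3: [8, 9]

The walrus operator (:=) requires Python 3.8+, which the patched code assumes.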