From e331a758c3142e39d8877b7c96eee7c12f174f05 Mon Sep 17 00:00:00 2001 From: Wyatt Hoodes Date: Sun, 14 Jul 2019 09:37:08 -1000 Subject: [PATCH] python: Migrate open statements to use with. This is low priority, but it's nice to be consistently using the best practice pattern. Fixes: #12419. --- .../zulip_app_frontend/check_cron_file | 3 +- .../check_personal_zephyr_mirrors | 3 +- .../zulip_zephyr_mirror/check_zephyr_mirror | 3 +- scripts/lib/setup_venv.py | 13 ++++----- scripts/nagios/cron_file_helper.py | 3 +- scripts/setup/generate_secrets.py | 9 +++--- tools/get-handlebar-vars | 3 +- tools/lib/html_branches.py | 3 +- tools/lib/html_grep.py | 6 ++-- tools/lib/pretty_print.py | 5 ++-- tools/lib/template_parser.py | 3 +- tools/lib/test_script.py | 3 +- tools/pretty-print-html | 10 +++---- tools/run-dev.py | 5 ++-- tools/setup/emoji/build_emoji | 12 ++++---- tools/setup/emoji/export_emoji_names_to_csv | 4 +-- tools/zulip-export/zulip-export | 5 ++-- zerver/lib/api_test_helpers.py | 28 +++++++++---------- zerver/lib/debug.py | 3 +- zerver/lib/export.py | 6 ++-- zerver/lib/upload.py | 6 ++-- zerver/logging_handlers.py | 3 +- zerver/management/commands/create_user.py | 3 +- zerver/tests/test_auth_backends.py | 3 +- zerver/tests/test_bugdown.py | 4 +-- zerver/tests/test_import_export.py | 16 +++++++---- zerver/tests/test_queue_worker.py | 3 +- zerver/tests/test_slack_message_conversion.py | 5 ++-- zerver/tests/test_upload.py | 3 +- zerver/views/development/integrations.py | 6 ++-- 30 files changed, 101 insertions(+), 81 deletions(-) diff --git a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file index 6aca1d95e0..123fe8f257 100755 --- a/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file +++ b/puppet/zulip/files/nagios_plugins/zulip_app_frontend/check_cron_file @@ -18,7 +18,8 @@ def nagios_from_file(results_file: str, max_time_diff: int=60 * 2) -> 'Tuple[int This file is created by various nagios checking cron jobs such as check-rabbitmq-queues and check-rabbitmq-consumers""" - data = open(results_file).read().strip() + with open(results_file, 'r') as f: + data = f.read().strip() pieces = data.split('|') if not len(pieces) == 4: diff --git a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors index 1e339eb5d9..c8e78d84d3 100755 --- a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors +++ b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_personal_zephyr_mirrors @@ -34,7 +34,8 @@ down_count = 0 for results_file_name in os.listdir(RESULTS_DIR): this_state = "OK" results_file = os.path.join(RESULTS_DIR, results_file_name) - data = open(results_file).read().strip() + with open(results_file, 'r') as f: + data = f.read().strip() last_check = os.stat(results_file).st_mtime time_since_last_check = time.time() - last_check # time_since_last_check threshold needs to be strictly greater diff --git a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror index 66b70ccdd4..69569d9360 100755 --- a/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror +++ b/puppet/zulip_ops/files/nagios_plugins/zulip_zephyr_mirror/check_zephyr_mirror @@ -32,7 +32,8 @@ def report(state, data, last_check): data)) 
exit(states[state]) -data = open(RESULTS_FILE).read().strip() +with open(RESULTS_FILE, 'r') as f: + data = f.read().strip() if data.split("\n")[-1].strip() == "0": state = "OK" else: diff --git a/scripts/lib/setup_venv.py b/scripts/lib/setup_venv.py index 533ad2dda5..92c8c2d55e 100644 --- a/scripts/lib/setup_venv.py +++ b/scripts/lib/setup_venv.py @@ -260,16 +260,14 @@ def do_patch_activate_script(venv_path): # venv_path should be what we want to have in VIRTUAL_ENV after patching script_path = os.path.join(venv_path, "bin", "activate") - file_obj = open(script_path) - lines = file_obj.readlines() + with open(script_path, 'r') as f: + lines = f.readlines() for i, line in enumerate(lines): if line.startswith('VIRTUAL_ENV='): lines[i] = 'VIRTUAL_ENV="%s"\n' % (venv_path,) - file_obj.close() - file_obj = open(script_path, 'w') - file_obj.write("".join(lines)) - file_obj.close() + with open(script_path, 'w') as f: + f.write("".join(lines)) def setup_virtualenv(target_venv_path, requirements_file, virtualenv_args=None, patch_activate_script=False): # type: (Optional[str], str, Optional[List[str]], bool) -> str @@ -285,7 +283,8 @@ def setup_virtualenv(target_venv_path, requirements_file, virtualenv_args=None, success_stamp = os.path.join(cached_venv_path, "success-stamp") if not os.path.exists(success_stamp): do_setup_virtualenv(cached_venv_path, requirements_file, virtualenv_args or []) - open(success_stamp, 'w').close() + with open(success_stamp, 'w') as f: + f.close() print("Using cached Python venv from %s" % (cached_venv_path,)) if target_venv_path is not None: diff --git a/scripts/nagios/cron_file_helper.py b/scripts/nagios/cron_file_helper.py index abdf90dcc5..268274050a 100644 --- a/scripts/nagios/cron_file_helper.py +++ b/scripts/nagios/cron_file_helper.py @@ -14,7 +14,8 @@ def nagios_from_file(results_file): This file is created by various nagios checking cron jobs such as check-rabbitmq-queues and check-rabbitmq-consumers""" - data = open(results_file).read().strip() + with open(results_file) as f: + data = f.read().strip() pieces = data.split('|') if not len(pieces) == 4: diff --git a/scripts/setup/generate_secrets.py b/scripts/setup/generate_secrets.py index 0596ddd549..5f8c2aa0ae 100755 --- a/scripts/setup/generate_secrets.py +++ b/scripts/setup/generate_secrets.py @@ -108,11 +108,10 @@ def generate_secrets(development=False): print("generate_secrets: No new secrets to generate.") return - out = open(OUTPUT_SETTINGS_FILENAME, 'a') - # Write a newline at the start, in case there was no newline at - # the end of the file due to human editing. - out.write("\n" + "".join(lines)) - out.close() + with open(OUTPUT_SETTINGS_FILENAME, 'a') as f: + # Write a newline at the start, in case there was no newline at + # the end of the file due to human editing. + f.write("\n" + "".join(lines)) print("Generated new secrets in %s." 
% (OUTPUT_SETTINGS_FILENAME,)) diff --git a/tools/get-handlebar-vars b/tools/get-handlebar-vars index f4a049dc19..75a64093ea 100755 --- a/tools/get-handlebar-vars +++ b/tools/get-handlebar-vars @@ -15,7 +15,8 @@ def debug(obj): def parse_file(fn): # type: (str) -> Dict[str, Any] - text = open(fn).read() + with open(fn, 'r') as f: + text = f.read() tags = re.findall(r'{+\s*(.*?)\s*}+', text) root = {} # type: Dict[str, Any] context = root diff --git a/tools/lib/html_branches.py b/tools/lib/html_branches.py index 23b8e1431f..702914dd04 100644 --- a/tools/lib/html_branches.py +++ b/tools/lib/html_branches.py @@ -193,7 +193,8 @@ def build_id_dict(templates): template_id_dict = defaultdict(list) # type: (Dict[str, List[str]]) for fn in templates: - text = open(fn).read() + with open(fn, 'r') as f: + text = f.read() list_tags = tokenize(text) for tag in list_tags: diff --git a/tools/lib/html_grep.py b/tools/lib/html_grep.py index 4f68ad82cf..7a9387ed15 100644 --- a/tools/lib/html_grep.py +++ b/tools/lib/html_grep.py @@ -7,7 +7,8 @@ def show_all_branches(fns): # type: (List[str]) -> None for fn in fns: print(fn) - text = open(fn).read() + with open(fn, 'r') as f: + text = f.read() branches = html_branches(text, fn=fn) for branch in branches: print(branch.text()) @@ -25,7 +26,8 @@ class Grepper: all_branches = [] # type: List[HtmlTreeBranch] for fn in fns: - text = open(fn).read() + with open(fn, 'r') as f: + text = f.read() branches = html_branches(text, fn=fn) all_branches += branches diff --git a/tools/lib/pretty_print.py b/tools/lib/pretty_print.py index 3c257941e8..a44ae6a7b1 100644 --- a/tools/lib/pretty_print.py +++ b/tools/lib/pretty_print.py @@ -194,10 +194,9 @@ def pretty_print_html(html, num_spaces=4): def validate_indent_html(fn, fix): # type: (str, bool) -> int - file = open(fn) - html = file.read() + with open(fn, 'r') as f: + html = f.read() phtml = pretty_print_html(html) - file.close() if not html.split('\n') == phtml.split('\n'): if fix: print(GREEN + "Automatically fixing problems..." + ENDC) diff --git a/tools/lib/template_parser.py b/tools/lib/template_parser.py index b49f53e78a..24510dc2f9 100644 --- a/tools/lib/template_parser.py +++ b/tools/lib/template_parser.py @@ -214,7 +214,8 @@ def validate(fn=None, text=None, check_indent=True): fn = '' if text is None: - text = open(fn).read() + with open(fn, 'r') as f: + text = f.read() tokens = tokenize(text) diff --git a/tools/lib/test_script.py b/tools/lib/test_script.py index 51139de3ac..4d4b1b6ca0 100644 --- a/tools/lib/test_script.py +++ b/tools/lib/test_script.py @@ -56,7 +56,8 @@ def get_provisioning_status(): # their own dependencies and not running provision. return True, None - version = open(version_file).read().strip() + with open(version_file, 'r') as f: + version = f.read().strip() # Normal path for people that provision--we're all good! if version == PROVISION_VERSION: diff --git a/tools/pretty-print-html b/tools/pretty-print-html index 6352d35c4f..2d104e701e 100755 --- a/tools/pretty-print-html +++ b/tools/pretty-print-html @@ -7,13 +7,11 @@ def clean_html(filenames): # type: (List[str]) -> None for fn in filenames: print('Prettifying: %s' % (fn,)) - file = open(fn) - html = file.read() + with open(fn, 'r') as f: + html = f.read() phtml = pretty_print_html(html) - file.close() - file = open(fn, 'w') - file.write(phtml) - file.close() + with open(fn, 'w') as f: + f.write(phtml) if __name__ == '__main__': # If command arguments are provided, we only check those filenames. 
diff --git a/tools/run-dev.py b/tools/run-dev.py index d8861a76eb..4b3e978bd3 100755 --- a/tools/run-dev.py +++ b/tools/run-dev.py @@ -133,9 +133,8 @@ else: # Required for compatibility python versions. if not os.path.exists(os.path.dirname(pid_file_path)): os.makedirs(os.path.dirname(pid_file_path)) -pid_file = open(pid_file_path, 'w+') -pid_file.write(str(os.getpgrp()) + "\n") -pid_file.close() +with open(pid_file_path, 'w+') as f: + f.write(str(os.getpgrp()) + "\n") # Pass --nostatic because we configure static serving ourselves in # zulip/urls.py. diff --git a/tools/setup/emoji/build_emoji b/tools/setup/emoji/build_emoji index e7308b1d7d..3cc4e5f996 100755 --- a/tools/setup/emoji/build_emoji +++ b/tools/setup/emoji/build_emoji @@ -92,7 +92,8 @@ def main() -> None: if not os.path.exists(success_stamp): print("Dumping emojis ...") dump_emojis(source_emoji_dump) - open(success_stamp, 'w').close() + with open(success_stamp, 'w') as f: + f.close() print("Using cached emojis from {}".format(source_emoji_dump)) if os.path.lexists(TARGET_EMOJI_DUMP): @@ -135,11 +136,10 @@ def generate_sprite_css_files(cache_path: str, } SPRITE_CSS_PATH = os.path.join(cache_path, '%s-sprite.css' % (emojiset,)) - sprite_css_file = open(SPRITE_CSS_PATH, 'w') - sprite_css_file.write(SPRITE_CSS_FILE_TEMPLATE % {'emojiset': emojiset, - 'emoji_positions': emoji_positions, - }) - sprite_css_file.close() + with open(SPRITE_CSS_PATH, 'w') as f: + f.write(SPRITE_CSS_FILE_TEMPLATE % {'emojiset': emojiset, + 'emoji_positions': emoji_positions, + }) def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None: def ensure_emoji_image(emoji_dict: Dict[str, Any], diff --git a/tools/setup/emoji/export_emoji_names_to_csv b/tools/setup/emoji/export_emoji_names_to_csv index b3cfe9d668..db90880e2d 100755 --- a/tools/setup/emoji/export_emoji_names_to_csv +++ b/tools/setup/emoji/export_emoji_names_to_csv @@ -115,8 +115,8 @@ def main() -> None: explanation_line = match.group('explanation_line').strip() explanation_lines.append(explanation_line) - fp = open(args.output_file_path, 'w') - writer = csv.writer(fp, dialect='excel') + with open(args.output_file_path, 'w') as f: + writer = csv.writer(f, dialect='excel') writer.writerows(output_data) # The CSV file exported by google sheets doesn't have a newline # character in the end. 
So we also strip the last newline character diff --git a/tools/zulip-export/zulip-export b/tools/zulip-export/zulip-export index e531354f2d..feb79524b3 100755 --- a/tools/zulip-export/zulip-export +++ b/tools/zulip-export/zulip-export @@ -83,8 +83,7 @@ for msg in result['messages']: messages.append(msg) filename = "zulip-%s.json" % (options.stream,) -f = open(filename, mode="wb") -f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8')) -f.close() +with open(filename, 'wb') as f: + f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8')) print("%d messages exported to %s" % (len(messages), filename,)) sys.exit(0) diff --git a/zerver/lib/api_test_helpers.py b/zerver/lib/api_test_helpers.py index 23d7986ae9..635859bc7b 100644 --- a/zerver/lib/api_test_helpers.py +++ b/zerver/lib/api_test_helpers.py @@ -800,12 +800,12 @@ def upload_file(client): # {code_example|start} # Upload a file - fp = open(path_to_file, 'rb') - result = client.call_endpoint( - 'user_uploads', - method='POST', - files=[fp] - ) + with open(path_to_file, 'rb') as fp: + result = client.call_endpoint( + 'user_uploads', + method='POST', + files=[fp] + ) # {code_example|end} validate_against_openapi_schema(result, '/user_uploads', 'post', '200') @@ -851,15 +851,15 @@ def upload_custom_emoji(client): # {code_example|start} # Upload a custom emoji; assume `emoji_path` is the path to your image. - fp = open(emoji_path, 'rb') - emoji_name = 'my_custom_emoji' - result = client.call_endpoint( - 'realm/emoji/{}'.format(emoji_name), - method='POST', - files=[fp] - ) + with open(emoji_path, 'rb') as fp: + emoji_name = 'my_custom_emoji' + result = client.call_endpoint( + 'realm/emoji/{}'.format(emoji_name), + method='POST', + files=[fp] + ) # {code_example|end} - fp.close() + validate_against_openapi_schema(result, '/realm/emoji/{emoji_name}', 'post', '200') diff --git a/zerver/lib/debug.py b/zerver/lib/debug.py index 165a158726..fc112ccffe 100644 --- a/zerver/lib/debug.py +++ b/zerver/lib/debug.py @@ -53,7 +53,8 @@ def tracemalloc_dump() -> None: gc.collect() tracemalloc.take_snapshot().dump(path) - procstat = open('/proc/{}/stat'.format(os.getpid()), 'rb').read().split() + with open('/proc/{}/stat'.format(os.getpid()), 'rb') as f: + procstat = f.read().split() rss_pages = int(procstat[23]) logger.info("tracemalloc dump: tracing {} MiB ({} MiB peak), using {} MiB; rss {} MiB; dumped {}" .format(tracemalloc.get_traced_memory()[0] // 1048576, diff --git a/zerver/lib/export.py b/zerver/lib/export.py index df6556c7ff..5856eccb54 100644 --- a/zerver/lib/export.py +++ b/zerver/lib/export.py @@ -1375,7 +1375,8 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None: with open(stats_file, 'w') as f: for fn in fns: f.write(os.path.basename(fn) + '\n') - payload = open(fn).read() + with open(fn, 'r') as filename: + payload = filename.read() data = ujson.loads(payload) for k in sorted(data): f.write('%5d %s\n' % (len(data[k]), k)) @@ -1386,7 +1387,8 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None: for fn in [avatar_file, uploads_file]: f.write(fn+'\n') - payload = open(fn).read() + with open(fn, 'r') as filename: + payload = filename.read() data = ujson.loads(payload) f.write('%5d records\n' % (len(data),)) f.write('\n') diff --git a/zerver/lib/upload.py b/zerver/lib/upload.py index 9bab2bd94d..d08c4b1562 100644 --- a/zerver/lib/upload.py +++ b/zerver/lib/upload.py @@ -735,7 +735,8 @@ class LocalUploadBackend(ZulipUploadBackend): return image_path = 
os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original") - image_data = open(image_path, "rb").read() + with open(image_path, "rb") as f: + image_data = f.read() resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE) write_local_file('avatars', file_path + '-medium.png', resized_medium) @@ -748,7 +749,8 @@ class LocalUploadBackend(ZulipUploadBackend): return image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original") - image_data = open(image_path, "rb").read() + with open(image_path, "rb") as f: + image_data = f.read() resized_avatar = resize_avatar(image_data) write_local_file('avatars', file_path + '.png', resized_avatar) diff --git a/zerver/logging_handlers.py b/zerver/logging_handlers.py index a2b8290c14..1bf6b78740 100644 --- a/zerver/logging_handlers.py +++ b/zerver/logging_handlers.py @@ -31,7 +31,8 @@ def add_deployment_metadata(report: Dict[str, Any]) -> None: version_path = os.path.join(os.path.dirname(__file__), '../version') if os.path.exists(version_path): - report['zulip_version_file'] = open(version_path).read().strip() # nocoverage + with open(version_path, 'r') as f: # nocoverage + report['zulip_version_file'] = f.read().strip() def add_request_metadata(report: Dict[str, Any], request: HttpRequest) -> None: report['has_request'] = True diff --git a/zerver/management/commands/create_user.py b/zerver/management/commands/create_user.py index 1b6a2dacce..72e8c11497 100644 --- a/zerver/management/commands/create_user.py +++ b/zerver/management/commands/create_user.py @@ -77,7 +77,8 @@ parameters, or specify no parameters for interactive user creation.""") try: if options['password_file']: - pw = open(options['password_file'], 'r').read() + with open(options['password_file'], 'r') as f: + pw = f.read() elif options['password']: pw = options['password'] else: diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py index 2668ea5927..2eb1782ea4 100644 --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -3062,7 +3062,8 @@ class TestZulipLDAPUserPopulator(ZulipLDAPTestCase): @use_s3_backend def test_update_user_avatar_for_s3(self) -> None: bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0] - test_image_data = open(get_test_image_file('img.png').name, 'rb').read() + with open(get_test_image_file('img.png').name, 'rb') as f: + test_image_data = f.read() self.mock_ldap.directory = { 'uid=hamlet,ou=users,dc=zulip,dc=com': { diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py index f0e4246449..add925debf 100644 --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -262,8 +262,8 @@ class BugdownTest(ZulipTestCase): def load_bugdown_tests(self) -> Tuple[Dict[str, Any], List[List[str]]]: test_fixtures = {} - data_file = open(os.path.join(os.path.dirname(__file__), 'fixtures/markdown_test_cases.json'), 'r') - data = ujson.loads('\n'.join(data_file.readlines())) + with open(os.path.join(os.path.dirname(__file__), 'fixtures/markdown_test_cases.json'), 'r') as f: + data = ujson.loads('\n'.join(f.readlines())) for test in data['regular_tests']: test_fixtures[test['name']] = test diff --git a/zerver/tests/test_import_export.py b/zerver/tests/test_import_export.py index 715ea24558..f0bf3ac70d 100644 --- a/zerver/tests/test_import_export.py +++ b/zerver/tests/test_import_export.py @@ -301,7 +301,8 @@ class ImportExportTest(ZulipTestCase): upload_emoji_image(img_file, '1.png', user_profile) with get_test_image_file('img.png') as 
img_file: upload_avatar_image(img_file, user_profile, user_profile) - test_image = open(get_test_image_file('img.png').name, 'rb').read() + with open(get_test_image_file('img.png').name, 'rb') as f: + test_image = f.read() message.sender.avatar_source = 'U' message.sender.save() @@ -323,7 +324,7 @@ class ImportExportTest(ZulipTestCase): # Test uploads fn = os.path.join(full_data['uploads_dir'], path_id) - with open(fn) as f: + with open(fn, 'r') as f: self.assertEqual(f.read(), 'zulip!') records = full_data['uploads_dir_records'] self.assertEqual(records[0]['path'], path_id) @@ -340,7 +341,8 @@ class ImportExportTest(ZulipTestCase): # Test avatars fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id) - fn_data = open(fn, 'rb').read() + with open(fn, 'rb') as fb: + fn_data = fb.read() self.assertEqual(fn_data, test_image) records = full_data['avatar_dir_records'] record_path = [record['path'] for record in records] @@ -370,7 +372,7 @@ class ImportExportTest(ZulipTestCase): # Test uploads fields = attachment_path_id.split('/') fn = os.path.join(full_data['uploads_dir'], os.path.join(fields[0], fields[1], fields[2])) - with open(fn) as f: + with open(fn, 'r') as f: self.assertEqual(f.read(), 'zulip!') records = full_data['uploads_dir_records'] self.assertEqual(records[0]['path'], os.path.join(fields[0], fields[1], fields[2])) @@ -390,7 +392,8 @@ class ImportExportTest(ZulipTestCase): # Test avatars fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id) - fn_data = open(fn, 'rb').read() + with open(fn, 'rb') as file: + fn_data = file.read() self.assertEqual(fn_data, test_image) records = full_data['avatar_dir_records'] record_path = [record['path'] for record in records] @@ -994,7 +997,8 @@ class ImportExportTest(ZulipTestCase): do_import_realm(os.path.join(settings.TEST_WORKER_DIR, 'test-export'), 'test-zulip') imported_realm = Realm.objects.get(string_id='test-zulip') - test_image_data = open(get_test_image_file('img.png').name, 'rb').read() + with open(get_test_image_file('img.png').name, 'rb') as f: + test_image_data = f.read() # Test attachments uploaded_file = Attachment.objects.get(realm=imported_realm) diff --git a/zerver/tests/test_queue_worker.py b/zerver/tests/test_queue_worker.py index c486e480cd..a6cc0e4116 100644 --- a/zerver/tests/test_queue_worker.py +++ b/zerver/tests/test_queue_worker.py @@ -490,7 +490,8 @@ class WorkerTest(ZulipTestCase): "Problem handling data on queue unreliable_worker") self.assertEqual(processed, ['good', 'fine', 'back to normal']) - line = open(fn).readline().strip() + with open(fn, 'r') as f: + line = f.readline().strip() event = ujson.loads(line.split('\t')[1]) self.assertEqual(event["type"], 'unexpected behaviour') diff --git a/zerver/tests/test_slack_message_conversion.py b/zerver/tests/test_slack_message_conversion.py index a3003b497c..28e31ead57 100644 --- a/zerver/tests/test_slack_message_conversion.py +++ b/zerver/tests/test_slack_message_conversion.py @@ -24,8 +24,9 @@ class SlackMessageConversion(ZulipTestCase): def load_slack_message_conversion_tests(self) -> Dict[Any, Any]: test_fixtures = {} - data_file = open(os.path.join(os.path.dirname(__file__), 'fixtures/slack_message_conversion.json'), 'r') - data = ujson.loads('\n'.join(data_file.readlines())) + with open(os.path.join(os.path.dirname(__file__), + 'fixtures/slack_message_conversion.json'), 'r') as f: + data = ujson.loads('\n'.join(f.readlines())) for test in data['regular_tests']: test_fixtures[test['name']] = test diff --git a/zerver/tests/test_upload.py 
b/zerver/tests/test_upload.py index cb0be85fe9..648254e993 100644 --- a/zerver/tests/test_upload.py +++ b/zerver/tests/test_upload.py @@ -1558,7 +1558,8 @@ class S3Test(ZulipTestCase): with get_test_image_file('img.png') as image_file: zerver.lib.upload.upload_backend.upload_avatar_image(image_file, user_profile, user_profile) - test_image_data = open(get_test_image_file('img.png').name, 'rb').read() + with open(get_test_image_file('img.png').name, 'rb') as f: + test_image_data = f.read() test_medium_image_data = resize_avatar(test_image_data, MEDIUM_AVATAR_SIZE) original_image_key = bucket.get_key(original_image_path_id) diff --git a/zerver/views/development/integrations.py b/zerver/views/development/integrations.py index 6561d5a491..fba4308be9 100644 --- a/zerver/views/development/integrations.py +++ b/zerver/views/development/integrations.py @@ -60,7 +60,8 @@ def get_fixtures(request: HttpResponse, for fixture in os.listdir(fixtures_dir): fixture_path = os.path.join(fixtures_dir, fixture) - body = open(fixture_path).read() + with open(fixture_path, 'r') as f: + body = f.read() try: body = ujson.loads(body) except ValueError: @@ -115,7 +116,8 @@ def send_all_webhook_fixture_messages(request: HttpRequest, responses = [] for fixture in os.listdir(fixtures_dir): fixture_path = os.path.join(fixtures_dir, fixture) - content = open(fixture_path).read() + with open(fixture_path, 'r') as f: + content = f.read() x = fixture.split(".") fixture_name, fixture_format = "".join(_ for _ in x[:-1]), x[-1] headers = get_fixture_http_headers(integration_name, fixture_name)
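
For reference, the pattern applied throughout this patch is the context-manager form of open(): the file is closed deterministically as soon as the with block exits, even when an exception is raised inside it, rather than whenever the file object happens to be garbage-collected. A minimal sketch of the before/after shape, using a scratch path rather than any file touched by the patch:

    import os
    import tempfile

    # Write a scratch file the way the patch now writes files: the handle
    # is closed (and buffered output flushed) as soon as the block exits,
    # even if an exception is raised inside it.
    path = os.path.join(tempfile.gettempdir(), "with-open-demo.txt")
    with open(path, "w") as f:
        f.write("0\n")

    # Before the patch, reads looked like this and relied on garbage
    # collection to close the descriptor:
    #     data = open(path).read().strip()
    # After the patch, the same read becomes:
    with open(path) as f:
        data = f.read().strip()

    print(data)  # -> "0"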
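
One hunk worth calling out is zerver/lib/api_test_helpers.py: in the upload examples, client.call_endpoint(...) is moved inside the with block because the client reads from the open handle while sending the request body. A sketch of that shape, assuming the zulip Python bindings are installed and a ~/.zuliprc is configured; the upload path here is only a placeholder:

    import zulip

    # Assumes a configured ~/.zuliprc; mirrors the call shown in the patch.
    client = zulip.Client(config_file="~/.zuliprc")

    path_to_file = "/tmp/screenshot.png"  # placeholder path
    # The endpoint call happens inside the with block so the file handle
    # is still open while the request body is read from it.
    with open(path_to_file, "rb") as fp:
        result = client.call_endpoint(
            "user_uploads",
            method="POST",
            files=[fp],
        )
    print(result)

Keeping the call inside the block, and letting the block close the handle afterwards, is also why the explicit fp.close() in the old upload_custom_emoji example could simply be dropped.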