python: Replace universal_newlines with text.

This is supported in Python ≥ 3.7.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
Anders Kaseorg 2022-01-21 22:52:54 -08:00 committed by Tim Abbott
parent fd6d048efd
commit 97e4e9886c
28 changed files with 31 additions and 45 deletions

View File

@@ -57,9 +57,7 @@ def run_sql_query(query: str) -> List[List[str]]:
f"SELECT {query}", f"SELECT {query}",
] ]
try: try:
output = subprocess.check_output( output = subprocess.check_output(command, stderr=subprocess.STDOUT, text=True).strip()
command, stderr=subprocess.STDOUT, universal_newlines=True
).strip()
if not output: if not output:
return [] return []
return [x.split("\0") for x in output.split("\n")] return [x.split("\0") for x in output.split("\n")]

View File

@@ -21,7 +21,7 @@ def run(args: List[str], dry_run: bool = False) -> str:
print("Would have run: " + " ".join(map(shlex.quote, args))) print("Would have run: " + " ".join(map(shlex.quote, args)))
return "" return ""
return subprocess.check_output(args, stdin=subprocess.DEVNULL, universal_newlines=True) return subprocess.check_output(args, stdin=subprocess.DEVNULL, text=True)
recovery_val = run( recovery_val = run(

View File

@@ -134,7 +134,7 @@ def check_rabbitmq_queues() -> None:
list_queues_output = subprocess.check_output( list_queues_output = subprocess.check_output(
["/usr/sbin/rabbitmqctl", "list_queues", "name", "messages", "consumers"], ["/usr/sbin/rabbitmqctl", "list_queues", "name", "messages", "consumers"],
universal_newlines=True, text=True,
) )
queue_counts_rabbitmqctl = {} queue_counts_rabbitmqctl = {}
queues_with_consumers = [] queues_with_consumers = []
@@ -151,7 +151,7 @@ def check_rabbitmq_queues() -> None:
queue_stats_dir = subprocess.check_output( queue_stats_dir = subprocess.check_output(
[os.path.join(ZULIP_PATH, "scripts/get-django-setting"), "QUEUE_STATS_DIR"], [os.path.join(ZULIP_PATH, "scripts/get-django-setting"), "QUEUE_STATS_DIR"],
universal_newlines=True, text=True,
).strip() ).strip()
queue_stats: Dict[str, Dict[str, Any]] = {} queue_stats: Dict[str, Dict[str, Any]] = {}
queues_to_check = set(normal_queues).intersection(set(queues_with_consumers)) queues_to_check = set(normal_queues).intersection(set(queues_with_consumers))

View File

@@ -35,7 +35,7 @@ def python_version() -> str:
""" """
Returns the Python version as string 'Python major.minor.patchlevel' Returns the Python version as string 'Python major.minor.patchlevel'
""" """
return subprocess.check_output(["/usr/bin/python3", "-VV"], universal_newlines=True) return subprocess.check_output(["/usr/bin/python3", "-VV"], text=True)
def hash_deps(deps: Iterable[str]) -> str: def hash_deps(deps: Iterable[str]) -> str:

View File

@@ -42,9 +42,7 @@ def generate_sha1sum_node_modules(
data[YARN_LOCK_FILE_PATH] = f.read().strip() data[YARN_LOCK_FILE_PATH] = f.read().strip()
with open(YARN_PACKAGE_JSON) as f: with open(YARN_PACKAGE_JSON) as f:
data["yarn-package-version"] = json.load(f)["version"] data["yarn-package-version"] = json.load(f)["version"]
data["node-version"] = subprocess.check_output( data["node-version"] = subprocess.check_output(["node", "--version"], text=True).strip()
["node", "--version"], universal_newlines=True
).strip()
data["yarn-args"] = get_yarn_args(production=production) data["yarn-args"] = get_yarn_args(production=production)
sha1sum = hashlib.sha1() sha1sum = hashlib.sha1()

View File

@@ -23,7 +23,7 @@ def generate_sha1sum_puppet_modules() -> str:
data["puppet-version"] = subprocess.check_output( data["puppet-version"] = subprocess.check_output(
# This is 10x faster than `puppet --version` # This is 10x faster than `puppet --version`
["ruby", "-r", "puppet/version", "-e", "puts Puppet.version"], ["ruby", "-r", "puppet/version", "-e", "puts Puppet.version"],
universal_newlines=True, text=True,
).strip() ).strip()
sha1sum = hashlib.sha1() sha1sum = hashlib.sha1()

View File

@@ -178,9 +178,7 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
venv_python3 = os.path.join(curr_venv_path, "bin", "python3") venv_python3 = os.path.join(curr_venv_path, "bin", "python3")
if not os.path.exists(venv_python3): if not os.path.exists(venv_python3):
continue continue
venv_python_version = subprocess.check_output( venv_python_version = subprocess.check_output([venv_python3, "-VV"], text=True)
[venv_python3, "-VV"], universal_newlines=True
)
if desired_python_version != venv_python_version: if desired_python_version != venv_python_version:
continue continue
@@ -282,7 +280,7 @@ def do_patch_activate_script(venv_path: str) -> None:
def generate_hash(requirements_file: str) -> str: def generate_hash(requirements_file: str) -> str:
path = os.path.join(ZULIP_PATH, "scripts", "lib", "hash_reqs.py") path = os.path.join(ZULIP_PATH, "scripts", "lib", "hash_reqs.py")
output = subprocess.check_output([path, requirements_file], universal_newlines=True) output = subprocess.check_output([path, requirements_file], text=True)
return output.split()[0] return output.split()[0]

View File

@@ -53,7 +53,7 @@ def write_updated_configs() -> None:
shard_map: Dict[str, int] = {} shard_map: Dict[str, int] = {}
external_host = subprocess.check_output( external_host = subprocess.check_output(
[os.path.join(BASE_DIR, "scripts/get-django-setting"), "EXTERNAL_HOST"], [os.path.join(BASE_DIR, "scripts/get-django-setting"), "EXTERNAL_HOST"],
universal_newlines=True, text=True,
).strip() ).strip()
for port in config_file["tornado_sharding"]: for port in config_file["tornado_sharding"]:
shards = config_file["tornado_sharding"][port].strip() shards = config_file["tornado_sharding"][port].strip()

View File

@@ -54,7 +54,7 @@ try:
logging.info("Unpacking the tarball") logging.info("Unpacking the tarball")
unpack_zulip = os.path.realpath(os.path.join(os.path.dirname(__file__), "unpack-zulip")) unpack_zulip = os.path.realpath(os.path.join(os.path.dirname(__file__), "unpack-zulip"))
deploy_path = subprocess.check_output( deploy_path = subprocess.check_output(
[unpack_zulip, archived_tarball_path], preexec_fn=su_to_zulip, universal_newlines=True [unpack_zulip, archived_tarball_path], preexec_fn=su_to_zulip, text=True
) )
# Chdir to deploy_path and then run upgrade-zulip-stage-2 from the # Chdir to deploy_path and then run upgrade-zulip-stage-2 from the

View File

@@ -263,7 +263,7 @@ migrations_needed = False
if not args.skip_migrations: if not args.skip_migrations:
logging.info("Checking for needed migrations") logging.info("Checking for needed migrations")
migrations_output = subprocess.check_output( migrations_output = subprocess.check_output(
["./manage.py", "showmigrations"], preexec_fn=su_to_zulip, universal_newlines=True ["./manage.py", "showmigrations"], preexec_fn=su_to_zulip, text=True
) )
need_create_large_indexes = False need_create_large_indexes = False
for ln in migrations_output.split("\n"): for ln in migrations_output.split("\n"):

View File

@@ -620,7 +620,7 @@ def has_application_server(once: bool = False) -> bool:
def list_supervisor_processes(*args: str) -> List[str]: def list_supervisor_processes(*args: str) -> List[str]:
worker_status = subprocess.run( worker_status = subprocess.run(
["supervisorctl", "status", *args], ["supervisorctl", "status", *args],
universal_newlines=True, text=True,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
) )
# `supervisorctl status` returns 3 if any are stopped, which is # `supervisorctl status` returns 3 if any are stopped, which is

View File

@@ -33,9 +33,7 @@ options = parser.parse_args()
config_file = get_config_file() config_file = get_config_file()
TORNADO_PROCESSES = len(get_tornado_ports(config_file)) TORNADO_PROCESSES = len(get_tornado_ports(config_file))
output = subprocess.check_output( output = subprocess.check_output(["/usr/sbin/rabbitmqctl", "list_consumers"], text=True)
["/usr/sbin/rabbitmqctl", "list_consumers"], universal_newlines=True
)
consumers: Dict[str, int] = defaultdict(int) consumers: Dict[str, int] = defaultdict(int)

View File

@@ -124,7 +124,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
os.path.join(settings.DEPLOY_ROOT, "scripts", "get-django-setting"), os.path.join(settings.DEPLOY_ROOT, "scripts", "get-django-setting"),
"RABBITMQ_HOST", "RABBITMQ_HOST",
], ],
universal_newlines=True, text=True,
).strip() ).strip()
if rabbitmq_host in ["127.0.0.1", "::1", "localhost", "localhost6"]: if rabbitmq_host in ["127.0.0.1", "::1", "localhost", "localhost6"]:
run([os.path.join(settings.DEPLOY_ROOT, "scripts", "setup", "configure-rabbitmq")]) run([os.path.join(settings.DEPLOY_ROOT, "scripts", "setup", "configure-rabbitmq")])

View File

@@ -114,7 +114,7 @@ def check_venv() -> bool:
cache_dir = "/srv/zulip-venv-cache/" cache_dir = "/srv/zulip-venv-cache/"
for fn in ["dev.txt"]: for fn in ["dev.txt"]:
requirements_file = os.path.join(ROOT_DIR, "requirements", fn) requirements_file = os.path.join(ROOT_DIR, "requirements", fn)
output = subprocess.check_output([path, requirements_file], universal_newlines=True) output = subprocess.check_output([path, requirements_file], text=True)
sha1sum = output.split()[0] sha1sum = output.split()[0]
print(fn, "venv sha: ", sha1sum) print(fn, "venv sha: ", sha1sum)
if not os.path.exists(os.path.join(cache_dir, sha1sum)): if not os.path.exists(os.path.join(cache_dir, sha1sum)):

View File

@@ -11,7 +11,7 @@ def get_json_filename(locale: str) -> str:
def get_locales() -> List[str]: def get_locales() -> List[str]:
output = check_output(["git", "ls-files", "locale"], universal_newlines=True) output = check_output(["git", "ls-files", "locale"], text=True)
tracked_files = output.split() tracked_files = output.split()
regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po") regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
locales = ["en"] locales = ["en"]

View File

@@ -177,7 +177,7 @@ Proposed {BOLDRED}diff{ENDC} for {CYAN}{fn}{ENDC}:
""", """,
flush=True, flush=True,
) )
subprocess.run(["diff", fn, "-"], input=phtml, universal_newlines=True) subprocess.run(["diff", fn, "-"], input=phtml, text=True)
print( print(
f""" f"""
--- ---

View File

@@ -17,7 +17,7 @@ def run(command: List[str]) -> None:
def check_output(command: List[str]) -> str: def check_output(command: List[str]) -> str:
return subprocess.check_output(command, universal_newlines=True) return subprocess.check_output(command, text=True)
def get_git_branch() -> str: def get_git_branch() -> str:

View File

@@ -66,7 +66,7 @@ if __name__ == "__main__":
bufsize=1, # line buffered bufsize=1, # line buffered
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT, stderr=subprocess.STDOUT,
universal_newlines=True, text=True,
) )
failed = check_worker_launch(run_dev) failed = check_worker_launch(run_dev)

View File

@@ -62,9 +62,7 @@ for zuliprc_path in zuliprc_paths_list:
arguments = ["./manage.py", "print_initial_password", email] arguments = ["./manage.py", "print_initial_password", email]
# We redirect 'stderr' to 'stdout' to avoid 'Connection to 127.0.0.1 closed' # We redirect 'stderr' to 'stdout' to avoid 'Connection to 127.0.0.1 closed'
# appearing after this script finishes. # appearing after this script finishes.
output = subprocess.check_output( output = subprocess.check_output(arguments, stderr=subprocess.STDOUT, text=True)
arguments, stderr=subprocess.STDOUT, universal_newlines=True
)
new_key = output.split()[6] new_key = output.split()[6]
if new_key != key: if new_key != key:

View File

@@ -16,5 +16,5 @@ def diff_strings(output: str, expected_output: str) -> str:
raise DiffException(msg) raise DiffException(msg)
command = ["node", mdiff_path, output, expected_output] command = ["node", mdiff_path, output, expected_output]
diff = subprocess.check_output(command, universal_newlines=True) diff = subprocess.check_output(command, text=True)
return diff return diff

View File

@@ -370,7 +370,7 @@ def destroy_leaked_test_databases(expiry_time: int = 60 * 60) -> int:
["psql", "-q", "-v", "ON_ERROR_STOP=1", "-h", "localhost", "postgres", "zulip_test"], ["psql", "-q", "-v", "ON_ERROR_STOP=1", "-h", "localhost", "postgres", "zulip_test"],
input=commands, input=commands,
check=True, check=True,
universal_newlines=True, text=True,
) )
return len(databases_to_drop) return len(databases_to_drop)

View File

@@ -34,9 +34,7 @@ def render_tex(tex: str, is_inline: bool = True) -> Optional[str]:
if not is_inline: if not is_inline:
command.extend(["--display-mode"]) command.extend(["--display-mode"])
try: try:
stdout = subprocess.check_output( stdout = subprocess.check_output(command, input=tex, stderr=subprocess.DEVNULL, text=True)
command, input=tex, stderr=subprocess.DEVNULL, universal_newlines=True
)
# stdout contains a newline at the end # stdout contains a newline at the end
return stdout.strip() return stdout.strip()
except subprocess.CalledProcessError: except subprocess.CalledProcessError:

View File

@@ -25,7 +25,7 @@ def try_git_describe() -> Optional[str]:
["git", "describe", "--tags", "--match=[0-9]*", "--always", "--dirty", "--long"], ["git", "describe", "--tags", "--match=[0-9]*", "--always", "--dirty", "--long"],
stderr=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=os.path.join(os.path.dirname(__file__), ".."), cwd=os.path.join(os.path.dirname(__file__), ".."),
universal_newlines=True, text=True,
).strip() ).strip()
except (FileNotFoundError, subprocess.CalledProcessError): # nocoverage except (FileNotFoundError, subprocess.CalledProcessError): # nocoverage
return None return None

View File

@@ -70,7 +70,7 @@ class Command(compilemessages.Command):
raise Exception(f"Unknown language {locale}") raise Exception(f"Unknown language {locale}")
def get_locales(self) -> List[str]: def get_locales(self) -> List[str]:
output = check_output(["git", "ls-files", "locale"], universal_newlines=True) output = check_output(["git", "ls-files", "locale"], text=True)
tracked_files = output.split() tracked_files = output.split()
regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po") regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
locales = ["en"] locales = ["en"]

View File

@@ -20,7 +20,7 @@ def test_js_bindings(client: Client) -> None:
output = subprocess.check_output( output = subprocess.check_output(
args=["node", "--unhandled-rejections=strict", "zerver/openapi/javascript_examples.js"], args=["node", "--unhandled-rejections=strict", "zerver/openapi/javascript_examples.js"],
universal_newlines=True, text=True,
) )
endpoint_responses = json.loads(output) endpoint_responses = json.loads(output)

View File

@@ -98,9 +98,7 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
try: try:
# We split this across two lines so if curl fails and # We split this across two lines so if curl fails and
# returns non-JSON output, we'll still print it. # returns non-JSON output, we'll still print it.
response_json = subprocess.check_output( response_json = subprocess.check_output(generated_curl_command, text=True)
generated_curl_command, universal_newlines=True
)
response = json.loads(response_json) response = json.loads(response_json)
assert response["result"] == "success" assert response["result"] == "success"
except (AssertionError, Exception): except (AssertionError, Exception):

View File

@@ -1283,7 +1283,7 @@ class TestScriptMTA(ZulipTestCase):
[script, "-r", stream_to_address, "-s", settings.SHARED_SECRET, "-t"], [script, "-r", stream_to_address, "-s", settings.SHARED_SECRET, "-t"],
input=mail, input=mail,
check=True, check=True,
universal_newlines=True, text=True,
) )
def test_error_no_recipient(self) -> None: def test_error_no_recipient(self) -> None:
@@ -1298,7 +1298,7 @@ class TestScriptMTA(ZulipTestCase):
[script, "-s", settings.SHARED_SECRET, "-t"], [script, "-s", settings.SHARED_SECRET, "-t"],
input=mail, input=mail,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
universal_newlines=True, text=True,
) )
self.assertEqual( self.assertEqual(
p.stdout, p.stdout,

View File

@@ -134,7 +134,7 @@ def report_error(
try: try:
version: Optional[str] = subprocess.check_output( version: Optional[str] = subprocess.check_output(
["git", "show", "-s", "--oneline"], ["git", "show", "-s", "--oneline"],
universal_newlines=True, text=True,
) )
except (FileNotFoundError, subprocess.CalledProcessError): except (FileNotFoundError, subprocess.CalledProcessError):
version = None version = None