python: Replace universal_newlines with text.

This is supported in Python ≥ 3.7.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Authored by Anders Kaseorg on 2022-01-21 22:52:54 -08:00; committed by Tim Abbott
parent fd6d048efd
commit 97e4e9886c
28 changed files with 31 additions and 45 deletions
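
For reference, text=True (added in Python 3.7) is the newer spelling of universal_newlines=True: both make subprocess decode the child process's input and output as str instead of bytes, with newline normalization. A minimal sketch of the equivalence, using an illustrative echo command that is not part of this commit:

    import subprocess

    # Both keyword arguments enable text mode: captured output is decoded to str
    # and newlines are normalized. `text` is the alias introduced in Python 3.7.
    old_style = subprocess.check_output(["echo", "hello"], universal_newlines=True)
    new_style = subprocess.check_output(["echo", "hello"], text=True)
    assert old_style == new_style == "hello\n"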

@@ -57,9 +57,7 @@ def run_sql_query(query: str) -> List[List[str]]:
         f"SELECT {query}",
     ]
     try:
-        output = subprocess.check_output(
-            command, stderr=subprocess.STDOUT, universal_newlines=True
-        ).strip()
+        output = subprocess.check_output(command, stderr=subprocess.STDOUT, text=True).strip()
         if not output:
             return []
         return [x.split("\0") for x in output.split("\n")]

@@ -21,7 +21,7 @@ def run(args: List[str], dry_run: bool = False) -> str:
         print("Would have run: " + " ".join(map(shlex.quote, args)))
         return ""
-    return subprocess.check_output(args, stdin=subprocess.DEVNULL, universal_newlines=True)
+    return subprocess.check_output(args, stdin=subprocess.DEVNULL, text=True)
 recovery_val = run(

@@ -134,7 +134,7 @@ def check_rabbitmq_queues() -> None:
     list_queues_output = subprocess.check_output(
         ["/usr/sbin/rabbitmqctl", "list_queues", "name", "messages", "consumers"],
-        universal_newlines=True,
+        text=True,
     )
     queue_counts_rabbitmqctl = {}
     queues_with_consumers = []
@@ -151,7 +151,7 @@ def check_rabbitmq_queues() -> None:
     queue_stats_dir = subprocess.check_output(
         [os.path.join(ZULIP_PATH, "scripts/get-django-setting"), "QUEUE_STATS_DIR"],
-        universal_newlines=True,
+        text=True,
     ).strip()
     queue_stats: Dict[str, Dict[str, Any]] = {}
     queues_to_check = set(normal_queues).intersection(set(queues_with_consumers))

@@ -35,7 +35,7 @@ def python_version() -> str:
     """
     Returns the Python version as string 'Python major.minor.patchlevel'
     """
-    return subprocess.check_output(["/usr/bin/python3", "-VV"], universal_newlines=True)
+    return subprocess.check_output(["/usr/bin/python3", "-VV"], text=True)
 def hash_deps(deps: Iterable[str]) -> str:

@@ -42,9 +42,7 @@ def generate_sha1sum_node_modules(
         data[YARN_LOCK_FILE_PATH] = f.read().strip()
     with open(YARN_PACKAGE_JSON) as f:
         data["yarn-package-version"] = json.load(f)["version"]
-    data["node-version"] = subprocess.check_output(
-        ["node", "--version"], universal_newlines=True
-    ).strip()
+    data["node-version"] = subprocess.check_output(["node", "--version"], text=True).strip()
     data["yarn-args"] = get_yarn_args(production=production)
     sha1sum = hashlib.sha1()

@@ -23,7 +23,7 @@ def generate_sha1sum_puppet_modules() -> str:
     data["puppet-version"] = subprocess.check_output(
         # This is 10x faster than `puppet --version`
         ["ruby", "-r", "puppet/version", "-e", "puts Puppet.version"],
-        universal_newlines=True,
+        text=True,
     ).strip()
     sha1sum = hashlib.sha1()

@@ -178,9 +178,7 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
         venv_python3 = os.path.join(curr_venv_path, "bin", "python3")
         if not os.path.exists(venv_python3):
             continue
-        venv_python_version = subprocess.check_output(
-            [venv_python3, "-VV"], universal_newlines=True
-        )
+        venv_python_version = subprocess.check_output([venv_python3, "-VV"], text=True)
         if desired_python_version != venv_python_version:
             continue
@@ -282,7 +280,7 @@ def do_patch_activate_script(venv_path: str) -> None:
 def generate_hash(requirements_file: str) -> str:
     path = os.path.join(ZULIP_PATH, "scripts", "lib", "hash_reqs.py")
-    output = subprocess.check_output([path, requirements_file], universal_newlines=True)
+    output = subprocess.check_output([path, requirements_file], text=True)
     return output.split()[0]

@@ -53,7 +53,7 @@ def write_updated_configs() -> None:
         shard_map: Dict[str, int] = {}
         external_host = subprocess.check_output(
             [os.path.join(BASE_DIR, "scripts/get-django-setting"), "EXTERNAL_HOST"],
-            universal_newlines=True,
+            text=True,
         ).strip()
         for port in config_file["tornado_sharding"]:
             shards = config_file["tornado_sharding"][port].strip()

@@ -54,7 +54,7 @@ try:
     logging.info("Unpacking the tarball")
     unpack_zulip = os.path.realpath(os.path.join(os.path.dirname(__file__), "unpack-zulip"))
     deploy_path = subprocess.check_output(
-        [unpack_zulip, archived_tarball_path], preexec_fn=su_to_zulip, universal_newlines=True
+        [unpack_zulip, archived_tarball_path], preexec_fn=su_to_zulip, text=True
     )
     # Chdir to deploy_path and then run upgrade-zulip-stage-2 from the

@@ -263,7 +263,7 @@ migrations_needed = False
 if not args.skip_migrations:
     logging.info("Checking for needed migrations")
     migrations_output = subprocess.check_output(
-        ["./manage.py", "showmigrations"], preexec_fn=su_to_zulip, universal_newlines=True
+        ["./manage.py", "showmigrations"], preexec_fn=su_to_zulip, text=True
     )
     need_create_large_indexes = False
     for ln in migrations_output.split("\n"):

@@ -620,7 +620,7 @@ def has_application_server(once: bool = False) -> bool:
 def list_supervisor_processes(*args: str) -> List[str]:
     worker_status = subprocess.run(
         ["supervisorctl", "status", *args],
-        universal_newlines=True,
+        text=True,
         stdout=subprocess.PIPE,
     )
     # `supervisorctl status` returns 3 if any are stopped, which is

@@ -33,9 +33,7 @@ options = parser.parse_args()
 config_file = get_config_file()
 TORNADO_PROCESSES = len(get_tornado_ports(config_file))
-output = subprocess.check_output(
-    ["/usr/sbin/rabbitmqctl", "list_consumers"], universal_newlines=True
-)
+output = subprocess.check_output(["/usr/sbin/rabbitmqctl", "list_consumers"], text=True)
 consumers: Dict[str, int] = defaultdict(int)

@@ -124,7 +124,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
                 os.path.join(settings.DEPLOY_ROOT, "scripts", "get-django-setting"),
                 "RABBITMQ_HOST",
             ],
-            universal_newlines=True,
+            text=True,
         ).strip()
         if rabbitmq_host in ["127.0.0.1", "::1", "localhost", "localhost6"]:
             run([os.path.join(settings.DEPLOY_ROOT, "scripts", "setup", "configure-rabbitmq")])

@@ -114,7 +114,7 @@ def check_venv() -> bool:
     cache_dir = "/srv/zulip-venv-cache/"
     for fn in ["dev.txt"]:
         requirements_file = os.path.join(ROOT_DIR, "requirements", fn)
-        output = subprocess.check_output([path, requirements_file], universal_newlines=True)
+        output = subprocess.check_output([path, requirements_file], text=True)
         sha1sum = output.split()[0]
         print(fn, "venv sha: ", sha1sum)
         if not os.path.exists(os.path.join(cache_dir, sha1sum)):

@@ -11,7 +11,7 @@ def get_json_filename(locale: str) -> str:
 def get_locales() -> List[str]:
-    output = check_output(["git", "ls-files", "locale"], universal_newlines=True)
+    output = check_output(["git", "ls-files", "locale"], text=True)
     tracked_files = output.split()
     regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
     locales = ["en"]

@@ -177,7 +177,7 @@ Proposed {BOLDRED}diff{ENDC} for {CYAN}{fn}{ENDC}:
 """,
             flush=True,
         )
-        subprocess.run(["diff", fn, "-"], input=phtml, universal_newlines=True)
+        subprocess.run(["diff", fn, "-"], input=phtml, text=True)
         print(
             f"""
 ---

@@ -17,7 +17,7 @@ def run(command: List[str]) -> None:
 def check_output(command: List[str]) -> str:
-    return subprocess.check_output(command, universal_newlines=True)
+    return subprocess.check_output(command, text=True)
 def get_git_branch() -> str:

@@ -66,7 +66,7 @@ if __name__ == "__main__":
         bufsize=1,  # line buffered
         stdout=subprocess.PIPE,
         stderr=subprocess.STDOUT,
-        universal_newlines=True,
+        text=True,
     )
     failed = check_worker_launch(run_dev)

@@ -62,9 +62,7 @@ for zuliprc_path in zuliprc_paths_list:
         arguments = ["./manage.py", "print_initial_password", email]
         # We redirect 'stderr' to 'stdout' to avoid 'Connection to 127.0.0.1 closed'
         # appearing after this script finishes.
-        output = subprocess.check_output(
-            arguments, stderr=subprocess.STDOUT, universal_newlines=True
-        )
+        output = subprocess.check_output(arguments, stderr=subprocess.STDOUT, text=True)
         new_key = output.split()[6]
         if new_key != key:

@@ -16,5 +16,5 @@ def diff_strings(output: str, expected_output: str) -> str:
         raise DiffException(msg)
     command = ["node", mdiff_path, output, expected_output]
-    diff = subprocess.check_output(command, universal_newlines=True)
+    diff = subprocess.check_output(command, text=True)
     return diff

@@ -370,7 +370,7 @@ def destroy_leaked_test_databases(expiry_time: int = 60 * 60) -> int:
             ["psql", "-q", "-v", "ON_ERROR_STOP=1", "-h", "localhost", "postgres", "zulip_test"],
             input=commands,
             check=True,
-            universal_newlines=True,
+            text=True,
         )
     return len(databases_to_drop)
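
With either spelling, text mode also applies to input=: the value written to the child's stdin must be a str, as in the hunk above. A minimal sketch of that behavior, using an illustrative cat command that is not part of this commit:

    import subprocess

    # With text=True, input= takes a str (encoded before being written to stdin),
    # and captured output comes back as str rather than bytes.
    result = subprocess.run(["cat"], input="hello\n", text=True, check=True, capture_output=True)
    assert result.stdout == "hello\n"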

@@ -34,9 +34,7 @@ def render_tex(tex: str, is_inline: bool = True) -> Optional[str]:
     if not is_inline:
         command.extend(["--display-mode"])
     try:
-        stdout = subprocess.check_output(
-            command, input=tex, stderr=subprocess.DEVNULL, universal_newlines=True
-        )
+        stdout = subprocess.check_output(command, input=tex, stderr=subprocess.DEVNULL, text=True)
         # stdout contains a newline at the end
         return stdout.strip()
     except subprocess.CalledProcessError:

@@ -25,7 +25,7 @@ def try_git_describe() -> Optional[str]:
             ["git", "describe", "--tags", "--match=[0-9]*", "--always", "--dirty", "--long"],
             stderr=subprocess.PIPE,
             cwd=os.path.join(os.path.dirname(__file__), ".."),
-            universal_newlines=True,
+            text=True,
         ).strip()
     except (FileNotFoundError, subprocess.CalledProcessError):  # nocoverage
         return None

@@ -70,7 +70,7 @@ class Command(compilemessages.Command):
             raise Exception(f"Unknown language {locale}")
     def get_locales(self) -> List[str]:
-        output = check_output(["git", "ls-files", "locale"], universal_newlines=True)
+        output = check_output(["git", "ls-files", "locale"], text=True)
         tracked_files = output.split()
         regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
         locales = ["en"]

@@ -20,7 +20,7 @@ def test_js_bindings(client: Client) -> None:
     output = subprocess.check_output(
         args=["node", "--unhandled-rejections=strict", "zerver/openapi/javascript_examples.js"],
-        universal_newlines=True,
+        text=True,
     )
     endpoint_responses = json.loads(output)

@@ -98,9 +98,7 @@ def test_generated_curl_examples_for_success(client: Client) -> None:
         try:
             # We split this across two lines so if curl fails and
             # returns non-JSON output, we'll still print it.
-            response_json = subprocess.check_output(
-                generated_curl_command, universal_newlines=True
-            )
+            response_json = subprocess.check_output(generated_curl_command, text=True)
             response = json.loads(response_json)
             assert response["result"] == "success"
         except (AssertionError, Exception):

@@ -1283,7 +1283,7 @@ class TestScriptMTA(ZulipTestCase):
             [script, "-r", stream_to_address, "-s", settings.SHARED_SECRET, "-t"],
             input=mail,
             check=True,
-            universal_newlines=True,
+            text=True,
         )
     def test_error_no_recipient(self) -> None:
@@ -1298,7 +1298,7 @@ class TestScriptMTA(ZulipTestCase):
             [script, "-s", settings.SHARED_SECRET, "-t"],
             input=mail,
             stdout=subprocess.PIPE,
-            universal_newlines=True,
+            text=True,
         )
         self.assertEqual(
             p.stdout,

@@ -134,7 +134,7 @@ def report_error(
     try:
         version: Optional[str] = subprocess.check_output(
             ["git", "show", "-s", "--oneline"],
-            universal_newlines=True,
+            text=True,
         )
     except (FileNotFoundError, subprocess.CalledProcessError):
         version = None