mirror of https://github.com/zulip/zulip.git
python: Prefer --flag=option over --flag option.
For less inflation by Black.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent bef46dab3c
commit bb4fc3c4c7
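Every hunk below follows the same pattern: a flag and its value that were passed as two separate list elements are folded into a single --flag=value element (or an equivalent f-string), so the argument list has fewer items and Black has less reason to spread the call across many lines. A minimal sketch of the motivation, using a made-up command line rather than one taken from the diff:

    # Two list elements per option: once the call grows long, Black may
    # put every element on its own line, inflating the vertical size.
    cmd_separate = ["pg_dump", "--format", "directory", "--file", "backup/database"]

    # One element per option: half as many items, so the formatted call
    # stays compact and is more likely to fit on a single line.
    cmd_folded = ["pg_dump", "--format=directory", "--file=backup/database"]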
@@ -101,8 +101,8 @@ def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
 
 def install_venv_deps(pip: str, requirements_file: str) -> None:
     pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
-    run([pip, "install", "--force-reinstall", "--require-hashes", "--requirement", pip_requirements])
-    run([pip, "install", "--no-deps", "--require-hashes", "--requirement", requirements_file])
+    run([pip, "install", "--force-reinstall", "--require-hashes", "-r", pip_requirements])
+    run([pip, "install", "--no-deps", "--require-hashes", "-r", requirements_file])
 
 def get_index_filename(venv_path: str) -> str:
     return os.path.join(venv_path, 'package_index')
@@ -203,7 +203,7 @@ elif args.from_git:
     # update-prod-static with the git upgrade process. But it'll fail
     # safely; this seems like a worthwhile tradeoff to minimize downtime.
     logging.info("Building static assets...")
-    subprocess.check_call(["./tools/update-prod-static", "--prev-deploy",
+    subprocess.check_call(["./tools/update-prod-static", "--prev-deploy=" +
                            os.path.join(DEPLOYMENTS_DIR, 'current')],
                           preexec_fn=su_to_zulip)
     logging.info("Caching zulip git version...")
@@ -34,11 +34,10 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
     subprocess.call(
         [
             "tar",
-            "-C",
-            "/etc/zulip",
+            "--directory=/etc/zulip",
             "--strip-components=2",
             "-xz",
-            "zulip-backup/settings",
+            "zulip-backup/settings"
         ],
         stdin=tarball_file,
     )
@@ -105,7 +104,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
             env=postgres_env,
         )
         run(["dropdb", "--if-exists", "--", db["NAME"]], cwd="/", env=postgres_env)
-        run(["createdb", "-O", "zulip", "-T", "template0", "--", db["NAME"]], cwd="/", env=postgres_env)
+        run(["createdb", "--owner=zulip", "--template=template0", "--", db["NAME"]], cwd="/", env=postgres_env)
         os.setresuid(0, 0, 0)
 
         if settings.PRODUCTION:
@@ -145,7 +144,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
         # needs to run after zulip-puppet-apply to ensure full-text
         # search extensions are available and installed.
         os.setresuid(POSTGRES_PWENT.pw_uid, POSTGRES_PWENT.pw_uid, 0)
-        run(["pg_restore", "-d", db["NAME"], "--", db_dir], cwd="/", env=postgres_env)
+        run(["pg_restore", "--dbname=" + db["NAME"], "--", db_dir], cwd="/", env=postgres_env)
         os.setresuid(0, 0, 0)
         run(["chown", "-R", str(uid), "--", tmp])
         os.setresuid(uid, uid, 0)
@@ -35,7 +35,7 @@ for pclass in re.split(r'\s*,\s*', config.get('machine', 'puppet_classes')):
 # We use the puppet configuration from the same Zulip checkout as this script
 scripts_path = os.path.join(BASE_DIR, "scripts")
 puppet_module_path = os.path.join(BASE_DIR, "puppet")
-puppet_cmd = ["puppet", "apply", "--modulepath", puppet_module_path, "-e", puppet_config]
+puppet_cmd = ["puppet", "apply", f"--modulepath={puppet_module_path}", "-e", puppet_config]
 if args.noop:
     puppet_cmd += ["--noop"]
 puppet_cmd += extra_args
@@ -150,8 +150,8 @@ def server_processes() -> List[List[str]]:
         ['env', 'PGHOST=127.0.0.1',  # Force password authentication using .pgpass
          './puppet/zulip/files/postgresql/process_fts_updates', '--quiet'],
         ['./manage.py', 'deliver_scheduled_messages'],
-        ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor_settings.py',
-         '-p', f'{thumbor_port}'],
+        ['/srv/zulip-thumbor-venv/bin/thumbor', '--conf=./zthumbor/thumbor_settings.py',
+         f'--port={thumbor_port}'],
     ]
 
     # NORMAL (but slower) operation:
@@ -165,7 +165,7 @@ def do_one_time_webpack_compile() -> None:
     subprocess.check_call(['./tools/webpack', '--quiet', '--test'])
 
 def start_webpack_watcher() -> None:
-    webpack_cmd = ['./tools/webpack', '--watch', '--port', str(webpack_port)]
+    webpack_cmd = ['./tools/webpack', '--watch', f'--port={webpack_port}']
     if options.minify:
         webpack_cmd.append('--minify')
     if options.interface is None:
@@ -173,9 +173,9 @@ def start_webpack_watcher() -> None:
         # to disable the webpack host check so that webpack will serve assets.
         webpack_cmd.append('--disable-host-check')
     if options.interface:
-        webpack_cmd += ["--host", options.interface]
+        webpack_cmd.append(f"--host={options.interface}")
     else:
-        webpack_cmd += ["--host", "0.0.0.0"]
+        webpack_cmd.append("--host=0.0.0.0")
     subprocess.Popen(webpack_cmd)
 
 def transform_url(protocol: str, path: str, query: str, target_port: int, target_host: str) -> str:
@@ -19,21 +19,21 @@ def generate_files(source_file: str, tmp_dir: str) -> None:
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'NAME_MAP_A')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'import_emoji_names_from_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)
 
     # Regenerate the CSV file from name map.
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'CSV_B')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'export_emoji_names_to_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)
 
     # Regenerate the name map file from the regenerated CSV file.
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'NAME_MAP_B')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'import_emoji_names_from_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)
 
 def print_diff(path_file1: str, path_file2: str) -> None:
@@ -52,12 +52,12 @@ run([
     './manage.py', 'collectstatic', '--no-default-ignore',
     '-v0',
     '--noinput',
-    '-i', 'assets',
-    '-i', 'emoji-styles',
-    '-i', 'html',
-    '-i', 'js',
-    '-i', 'styles',
-    '-i', 'templates',
+    '--ignore=assets',
+    '--ignore=emoji-styles',
+    '--ignore=html',
+    '--ignore=js',
+    '--ignore=styles',
+    '--ignore=templates',
 ])
 
 # Compile translation strings to generate `.mo` files.
@@ -14,9 +14,9 @@ def build_for_prod_or_casper(quiet: bool) -> NoReturn:
 
     webpack_args = ['node', 'node_modules/.bin/webpack-cli',
                     '-p',
-                    '--env', 'production']
+                    '--env=production']
     if quiet:
-        webpack_args += ['--display', 'errors-only']
+        webpack_args.append('--display=errors-only')
     os.execvp(webpack_args[0], webpack_args)
 
 def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check: bool) -> None:
@@ -29,10 +29,10 @@ def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check:
     webpack_args += [
         # webpack-cli has a bug where it ignores --watch-poll with
         # multi-config, and we don't need the katex-cli part anyway.
-        '--config-name', 'frontend',
-        '--allowed-hosts', ','.join([host, '.zulipdev.com', '.zulipdev.org']),
-        '--host', host,
-        '--port', port,
+        '--config-name=frontend',
+        '--allowed-hosts=' + ','.join([host, '.zulipdev.com', '.zulipdev.org']),
+        f'--host={host}',
+        f'--port={port}',
         # We add the hot flag using the cli because it takes care
         # of addition to entry points and adding the plugin
         # automatically
@@ -1602,11 +1602,11 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path,
         arguments = [
             os.path.join(settings.DEPLOY_ROOT, "manage.py"),
             'export_usermessage_batch',
-            '--path', str(output_dir),
-            '--thread', str(shard_id),
+            f'--path={output_dir}',
+            f'--thread={shard_id}',
         ]
         if consent_message_id is not None:
-            arguments.extend(['--consent-message-id', str(consent_message_id)])
+            arguments.append(f'--consent-message-id={consent_message_id}')
 
         process = subprocess.Popen(arguments)
         pids[process.pid] = shard_id
@@ -68,11 +68,11 @@ class Command(ZulipBaseCommand):
             pg_dump_command = [
                 "pg_dump",
                 "--format=directory",
-                "--file", os.path.join(tmp, "zulip-backup", "database"),
-                "--host", settings.DATABASES["default"]["HOST"],
-                "--port", settings.DATABASES["default"]["PORT"],
-                "--username", settings.DATABASES["default"]["USER"],
-                "--dbname", settings.DATABASES["default"]["NAME"],
+                "--file=" + os.path.join(tmp, "zulip-backup", "database"),
+                "--host=" + settings.DATABASES["default"]["HOST"],
+                "--port=" + settings.DATABASES["default"]["PORT"],
+                "--username=" + settings.DATABASES["default"]["USER"],
+                "--dbname=" + settings.DATABASES["default"]["NAME"],
                 "--no-password",
             ]
             os.environ["PGPASSWORD"] = settings.DATABASES["default"]["PASSWORD"]
@@ -115,7 +115,7 @@ class Command(ZulipBaseCommand):
             tarball_path = options["output"]
 
             run(
-                ["tar", "-C", tmp, "-cPzf", tarball_path,
+                ["tar", f"--directory={tmp}", "-cPzf", tarball_path,
                  *transform_args,
                  "--",
                  *members]