mirror of https://github.com/zulip/zulip.git
python: Prefer --flag=option over --flag option.

This produces less line inflation when the code is reformatted by Black.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
parent bef46dab3c
commit bb4fc3c4c7
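
The motivation is easiest to see by running Black over an over-long call written both ways. The sketch below is not part of the commit: it assumes the black package is installed, and the pip_requirements / cache_dir names inside the snippets are illustrative only (the snippets are parsed and reformatted, never executed).

# Minimal sketch, assuming the `black` package is installed.  The two source
# strings are only formatted, not run, so their undefined names are harmless.
import black

separate = ('run([pip, "install", "--force-reinstall", "--require-hashes",'
            ' "--requirement", pip_requirements, "--cache-dir", cache_dir])\n')
combined = ('run([pip, "install", "--force-reinstall", "--require-hashes",'
            ' f"--requirement={pip_requirements}", f"--cache-dir={cache_dir}"])\n')

for label, src in [("separate", separate), ("combined", combined)]:
    formatted = black.format_str(src, mode=black.Mode(line_length=88))
    print(f"{label}: {formatted.count(chr(10))} lines")
    print(formatted)

Because Black explodes an over-long call onto one list element per line, fewer elements means fewer lines, which is the "inflation" the commit message refers to.
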
@@ -101,8 +101,8 @@ def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
 def install_venv_deps(pip: str, requirements_file: str) -> None:
     pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip.txt")
-    run([pip, "install", "--force-reinstall", "--require-hashes", "--requirement", pip_requirements])
-    run([pip, "install", "--no-deps", "--require-hashes", "--requirement", requirements_file])
+    run([pip, "install", "--force-reinstall", "--require-hashes", "-r", pip_requirements])
+    run([pip, "install", "--no-deps", "--require-hashes", "-r", requirements_file])


 def get_index_filename(venv_path: str) -> str:
     return os.path.join(venv_path, 'package_index')

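
These rewrites are behavior-preserving because GNU-style option parsers treat "--flag value", "--flag=value", and the short "-r value" spelling identically. A minimal illustration with Python's argparse (not from the commit, and argparse here is only a stand-in for the tools' own parsers):

# Sketch only: shows that all three spellings parse to the same result.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("-r", "--requirement")

spellings = [
    ["--requirement", "requirements/pip.txt"],
    ["--requirement=requirements/pip.txt"],
    ["-r", "requirements/pip.txt"],
]
results = [parser.parse_args(argv) for argv in spellings]
assert results[0] == results[1] == results[2]
print(results[0])  # Namespace(requirement='requirements/pip.txt')
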
@@ -203,7 +203,7 @@ elif args.from_git:
     # update-prod-static with the git upgrade process. But it'll fail
     # safely; this seems like a worthwhile tradeoff to minimize downtime.
     logging.info("Building static assets...")
-    subprocess.check_call(["./tools/update-prod-static", "--prev-deploy",
+    subprocess.check_call(["./tools/update-prod-static", "--prev-deploy=" +
                            os.path.join(DEPLOYMENTS_DIR, 'current')],
                           preexec_fn=su_to_zulip)
     logging.info("Caching zulip git version...")

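
When the value is an expression rather than a literal, the commit concatenates it onto the flag so that flag and value travel as a single argv element. A small sketch of the result (not from the commit; the DEPLOYMENTS_DIR value is an assumption for illustration):

# Sketch only: DEPLOYMENTS_DIR is an assumed example path.
# shlex.join requires Python 3.8+.
import os.path
import shlex

DEPLOYMENTS_DIR = "/home/zulip/deployments"
cmd = ["./tools/update-prod-static",
       "--prev-deploy=" + os.path.join(DEPLOYMENTS_DIR, "current")]
print(len(cmd), "argv elements")  # 2, down from 3 with the old spelling
print(shlex.join(cmd))
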
@@ -34,11 +34,10 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
     subprocess.call(
         [
             "tar",
-            "-C",
-            "/etc/zulip",
+            "--directory=/etc/zulip",
             "--strip-components=2",
             "-xz",
-            "zulip-backup/settings",
+            "zulip-backup/settings"
         ],
         stdin=tarball_file,
     )

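
For GNU tar, --directory= is the long spelling of -C, and folding the directory into the flag also drops a list element, as the hunk above shows. A throwaway round trip demonstrating the behavior (not from the commit; it runs in temporary directories rather than touching /etc/zulip):

# Sketch only: creates and extracts a tiny archive in temp directories to
# show that --directory=<dir> behaves like -C <dir>.
import pathlib
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as src, tempfile.TemporaryDirectory() as dst:
    (pathlib.Path(src) / "settings.py").write_text("# dummy settings\n")
    archive = pathlib.Path(dst) / "backup.tar.gz"
    subprocess.check_call(["tar", "-czf", str(archive), "--directory=" + src, "settings.py"])
    subprocess.check_call(["tar", "-xzf", str(archive), "--directory=" + dst])
    print((pathlib.Path(dst) / "settings.py").read_text())
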
@@ -105,7 +104,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
         env=postgres_env,
     )
     run(["dropdb", "--if-exists", "--", db["NAME"]], cwd="/", env=postgres_env)
-    run(["createdb", "-O", "zulip", "-T", "template0", "--", db["NAME"]], cwd="/", env=postgres_env)
+    run(["createdb", "--owner=zulip", "--template=template0", "--", db["NAME"]], cwd="/", env=postgres_env)
     os.setresuid(0, 0, 0)

     if settings.PRODUCTION:

@@ -145,7 +144,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
     # needs to run after zulip-puppet-apply to ensure full-text
     # search extensions are available and installed.
     os.setresuid(POSTGRES_PWENT.pw_uid, POSTGRES_PWENT.pw_uid, 0)
-    run(["pg_restore", "-d", db["NAME"], "--", db_dir], cwd="/", env=postgres_env)
+    run(["pg_restore", "--dbname=" + db["NAME"], "--", db_dir], cwd="/", env=postgres_env)
     os.setresuid(0, 0, 0)
     run(["chown", "-R", str(uid), "--", tmp])
     os.setresuid(uid, uid, 0)

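
The createdb and pg_restore rewrites use the documented long spellings (--owner for -O, --template for -T, --dbname for -d), and --dbname=<name> additionally keeps the value glued to its flag, which composes cleanly with the existing "--" end-of-options marker. A sketch of the resulting argv (not from the commit; the database name and dump directory are placeholders):

# Sketch only: prints the argv lists these calls would pass to run().
import shlex

db_name = "zulip"
db_dir = "/tmp/zulip-backup/database"
print(shlex.join(["createdb", "--owner=zulip", "--template=template0", "--", db_name]))
print(shlex.join(["pg_restore", "--dbname=" + db_name, "--", db_dir]))
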
@@ -35,7 +35,7 @@ for pclass in re.split(r'\s*,\s*', config.get('machine', 'puppet_classes')):
 # We use the puppet configuration from the same Zulip checkout as this script
 scripts_path = os.path.join(BASE_DIR, "scripts")
 puppet_module_path = os.path.join(BASE_DIR, "puppet")
-puppet_cmd = ["puppet", "apply", "--modulepath", puppet_module_path, "-e", puppet_config]
+puppet_cmd = ["puppet", "apply", f"--modulepath={puppet_module_path}", "-e", puppet_config]
 if args.noop:
     puppet_cmd += ["--noop"]
 puppet_cmd += extra_args

@@ -150,8 +150,8 @@ def server_processes() -> List[List[str]]:
         ['env', 'PGHOST=127.0.0.1', # Force password authentication using .pgpass
          './puppet/zulip/files/postgresql/process_fts_updates', '--quiet'],
         ['./manage.py', 'deliver_scheduled_messages'],
-        ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor_settings.py',
-         '-p', f'{thumbor_port}'],
+        ['/srv/zulip-thumbor-venv/bin/thumbor', '--conf=./zthumbor/thumbor_settings.py',
+         f'--port={thumbor_port}'],
     ]

     # NORMAL (but slower) operation:

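
The old spelling needed a separate f'{thumbor_port}' element just to turn the integer port into a string; the new f'--port={port}' form does the conversion and the flag/value combination in one element. A sketch (not from the commit; the port number is a placeholder):

# Sketch only: thumbor_port is a placeholder value.
thumbor_port = 9995
old_style = ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor_settings.py',
             '-p', f'{thumbor_port}']
new_style = ['/srv/zulip-thumbor-venv/bin/thumbor', '--conf=./zthumbor/thumbor_settings.py',
             f'--port={thumbor_port}']
assert new_style[-1] == '--port=9995'
print(len(old_style), '->', len(new_style), 'argv elements')
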
@@ -165,7 +165,7 @@ def do_one_time_webpack_compile() -> None:
     subprocess.check_call(['./tools/webpack', '--quiet', '--test'])

 def start_webpack_watcher() -> None:
-    webpack_cmd = ['./tools/webpack', '--watch', '--port', str(webpack_port)]
+    webpack_cmd = ['./tools/webpack', '--watch', f'--port={webpack_port}']
     if options.minify:
         webpack_cmd.append('--minify')
     if options.interface is None:

@@ -173,9 +173,9 @@ def start_webpack_watcher() -> None:
         # to disable the webpack host check so that webpack will serve assets.
         webpack_cmd.append('--disable-host-check')
     if options.interface:
-        webpack_cmd += ["--host", options.interface]
+        webpack_cmd.append(f"--host={options.interface}")
     else:
-        webpack_cmd += ["--host", "0.0.0.0"]
+        webpack_cmd.append("--host=0.0.0.0")
     subprocess.Popen(webpack_cmd)

 def transform_url(protocol: str, path: str, query: str, target_port: int, target_host: str) -> str:

@@ -19,21 +19,21 @@ def generate_files(source_file: str, tmp_dir: str) -> None:
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'NAME_MAP_A')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'import_emoji_names_from_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)

     # Regenerate the CSV file from name map.
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'CSV_B')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'export_emoji_names_to_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)

     # Regenerate the name map file from the regenerated CSV file.
     input_file_path = output_file_path
     output_file_path = os.path.join(tmp_dir, 'NAME_MAP_B')
     subprocess.check_call([os.path.join(TOOLS_DIR, 'setup', 'emoji', 'import_emoji_names_from_csv'),
-                           '--input-file', input_file_path, '--output-file', output_file_path],
+                           f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                           stdout=subprocess.DEVNULL)

 def print_diff(path_file1: str, path_file2: str) -> None:

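
A side benefit of carrying the value inside the flag is that the three near-identical calls above become easy to fold into a loop, should anyone want to. A generic sketch of that pattern (not from the commit; echo stands in for the real emoji conversion scripts, and the stage names are illustrative):

# Sketch only: a loop over pipeline stages where each stage's output becomes
# the next stage's input; echo is a harmless stand-in for the real tools.
import os
import subprocess
import tempfile

with tempfile.TemporaryDirectory() as tmp_dir:
    input_file_path = os.path.join(tmp_dir, 'CSV_A')
    for stage in ['NAME_MAP_A', 'CSV_B', 'NAME_MAP_B']:
        output_file_path = os.path.join(tmp_dir, stage)
        subprocess.check_call(['echo',
                               f'--input-file={input_file_path}', f'--output-file={output_file_path}'],
                              stdout=subprocess.DEVNULL)
        input_file_path = output_file_path
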
@@ -52,12 +52,12 @@ run([
     './manage.py', 'collectstatic', '--no-default-ignore',
     '-v0',
     '--noinput',
-    '-i', 'assets',
-    '-i', 'emoji-styles',
-    '-i', 'html',
-    '-i', 'js',
-    '-i', 'styles',
-    '-i', 'templates',
+    '--ignore=assets',
+    '--ignore=emoji-styles',
+    '--ignore=html',
+    '--ignore=js',
+    '--ignore=styles',
+    '--ignore=templates',
])

# Compile translation strings to generate `.mo` files.

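
Once each value is folded into its flag, the repeated --ignore options become natural candidates for a comprehension. A sketch of how the same list could be generated (not part of the commit, which keeps the explicit spelling):

# Sketch only: builds the same --ignore= arguments from a list of patterns.
ignore_patterns = ['assets', 'emoji-styles', 'html', 'js', 'styles', 'templates']
collectstatic_cmd = [
    './manage.py', 'collectstatic', '--no-default-ignore', '-v0', '--noinput',
    *(f'--ignore={pattern}' for pattern in ignore_patterns),
]
print(collectstatic_cmd)
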
@@ -14,9 +14,9 @@ def build_for_prod_or_casper(quiet: bool) -> NoReturn:

     webpack_args = ['node', 'node_modules/.bin/webpack-cli',
                     '-p',
-                    '--env', 'production']
+                    '--env=production']
     if quiet:
-        webpack_args += ['--display', 'errors-only']
+        webpack_args.append('--display=errors-only')
     os.execvp(webpack_args[0], webpack_args)

 def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check: bool) -> None:

@@ -29,10 +29,10 @@ def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check:
     webpack_args += [
         # webpack-cli has a bug where it ignores --watch-poll with
         # multi-config, and we don't need the katex-cli part anyway.
-        '--config-name', 'frontend',
-        '--allowed-hosts', ','.join([host, '.zulipdev.com', '.zulipdev.org']),
-        '--host', host,
-        '--port', port,
+        '--config-name=frontend',
+        '--allowed-hosts=' + ','.join([host, '.zulipdev.com', '.zulipdev.org']),
+        f'--host={host}',
+        f'--port={port}',
         # We add the hot flag using the cli because it takes care
         # of addition to entry points and adding the plugin
         # automatically

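
Here the flag value is itself assembled with ','.join, so concatenation keeps the whole host list in one argv element. A sketch of the element produced (not from the commit; the host value is a placeholder):

# Sketch only: host is a placeholder; shows the single combined element.
host = '127.0.0.1'
allowed_hosts_arg = '--allowed-hosts=' + ','.join([host, '.zulipdev.com', '.zulipdev.org'])
print(allowed_hosts_arg)
# --allowed-hosts=127.0.0.1,.zulipdev.com,.zulipdev.org
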
@@ -1602,11 +1602,11 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path,
         arguments = [
             os.path.join(settings.DEPLOY_ROOT, "manage.py"),
             'export_usermessage_batch',
-            '--path', str(output_dir),
-            '--thread', str(shard_id),
+            f'--path={output_dir}',
+            f'--thread={shard_id}',
         ]
         if consent_message_id is not None:
-            arguments.extend(['--consent-message-id', str(consent_message_id)])
+            arguments.append(f'--consent-message-id={consent_message_id}')

         process = subprocess.Popen(arguments)
         pids[process.pid] = shard_id

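
The old spelling wrapped output_dir and shard_id in str(); an f-string performs that conversion implicitly, so f'--path={output_dir}' works for a pathlib.Path exactly as str(output_dir) did. A sketch with placeholder values (not from the commit):

# Sketch only: placeholder Path and shard id; f-strings format non-string
# values for you, so the explicit str(...) wrappers go away.
from pathlib import Path

output_dir = Path('/tmp/zulip-export')
shard_id = 7
assert f'--path={output_dir}' == '--path=' + str(output_dir)
assert f'--thread={shard_id}' == '--thread=' + str(shard_id)
print(f'--path={output_dir}', f'--thread={shard_id}')
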
@@ -68,11 +68,11 @@ class Command(ZulipBaseCommand):
            pg_dump_command = [
                "pg_dump",
                "--format=directory",
-               "--file", os.path.join(tmp, "zulip-backup", "database"),
-               "--host", settings.DATABASES["default"]["HOST"],
-               "--port", settings.DATABASES["default"]["PORT"],
-               "--username", settings.DATABASES["default"]["USER"],
-               "--dbname", settings.DATABASES["default"]["NAME"],
+               "--file=" + os.path.join(tmp, "zulip-backup", "database"),
+               "--host=" + settings.DATABASES["default"]["HOST"],
+               "--port=" + settings.DATABASES["default"]["PORT"],
+               "--username=" + settings.DATABASES["default"]["USER"],
+               "--dbname=" + settings.DATABASES["default"]["NAME"],
                "--no-password",
            ]
            os.environ["PGPASSWORD"] = settings.DATABASES["default"]["PASSWORD"]

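
One subtlety of the concatenated spelling: "--port=" + settings.DATABASES["default"]["PORT"] requires the setting to already be a string, as it conventionally is in Django's DATABASES; an f-string would also tolerate an integer. A sketch with a stand-in settings dict (not the project's real configuration; all values are illustrative):

# Sketch only: a stand-in for settings.DATABASES["default"].  Concatenation
# needs str values; f-strings would accept ints as well.
database = {"HOST": "localhost", "PORT": "5432", "USER": "zulip", "NAME": "zulip"}
pg_dump_command = [
    "pg_dump",
    "--format=directory",
    "--file=" + "/tmp/zulip-backup/database",
    "--host=" + database["HOST"],
    "--port=" + database["PORT"],  # PORT must be a str for "+" to work
    "--username=" + database["USER"],
    "--dbname=" + database["NAME"],
    "--no-password",
]
print(pg_dump_command)
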
@@ -115,7 +115,7 @@ class Command(ZulipBaseCommand):
            tarball_path = options["output"]

            run(
-               ["tar", "-C", tmp, "-cPzf", tarball_path,
+               ["tar", f"--directory={tmp}", "-cPzf", tarball_path,
                 *transform_args,
                 "--",
                 *members]