2019-07-19 08:06:34 +02:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import argparse
|
|
|
|
import glob
|
|
|
|
import shutil
|
|
|
|
|
2020-04-20 13:18:06 +02:00
|
|
|
from typing import List
|
|
|
|
|
2019-07-19 08:06:34 +02:00
|
|
|
# Root of the Zulip checkout: three directory levels above this file.
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Make repository-root packages (scripts, tools, zerver, version) importable.
sys.path.append(ZULIP_PATH)
|
provision: Let build_emoji build its own cache.
We no longer need to maintain duplicate code
related to where we set up the emoji
cache directory.
And we no longer need two extra steps for
people doing advanced (i.e. manual) setup.
There was no clear benefit to having provision
build the cache directory for `build_emoji`,
when it was easy to make `build_emoji` more
self-sufficient. The `build_emoji` tool
was already importing the library that has
`run_as_root`, and it was already responsible
for 99% of the create-directory kind of tasks.
(We always call `build_emoji` unconditionally from
`provision`, so there's no rationale in terms
of avoiding startup time or something.)
ASIDE:
It's not completely clear to me why we need
to put this directory in "/srv", instead of
somewhere more local (like we already do for
Travis), but maybe it's just to be like
its siblings in "/srv":
node_modules
yarn.lock
zulip-emoji-cache
zulip-npm-cache
zulip-py3-venv
zulip-thumbor-venv
zulip-venv-cache
zulip-yarn
I guess the caches that we keep in var are
dev-only, although I think some of what's under
`zulip-emoji-cache` is also dev-only in nature?
./var/webpack-cache
./var/mypy-cache
In `docs/subsystems/emoji.md` we say this:
```
The `build_emoji` tool generates the set of files under
`static/generated/emoji` (or really, it generates the
`/srv/zulip-emoji-cache/<sha1>/emoji` tree, and
`static/generated/emoji` is a symlink to that tree; we do this in
order to cache old versions to make provisioning and production
deployments super fast in the common case that we haven't changed the
emoji tooling). [...]
```
I don't really understand that rationale for the development
case, since `static/generated` is as much ignored by `git` as
'/srv' is, without the complications of needing `sudo` to create it.
And in production, I'm not sure how much time we're really saving,
as it takes me about 1.4s to fully rebuild the cache in dev, not to
mention we're taking on upgrade risk by sharing files between versions.
2020-04-17 14:08:55 +02:00
|
|
|
from scripts.lib.zulip_tools import run, OKBLUE, ENDC, \
|
2020-04-20 15:16:16 +02:00
|
|
|
get_dev_uuid_var_path, is_digest_obsolete, write_new_digest
|
2019-07-19 08:06:34 +02:00
|
|
|
|
|
|
|
from version import PROVISION_VERSION
|
2020-04-20 14:31:45 +02:00
|
|
|
from pygments import __version__ as pygments_version
|
2019-07-19 08:06:34 +02:00
|
|
|
|
|
|
|
from tools.setup.generate_zulip_bots_static_files import generate_zulip_bots_static_files
|
|
|
|
|
|
|
|
# Location of the development Python 3 virtualenv; sourced into shell profiles below.
VENV_PATH = "/srv/zulip-py3-venv"

# Writable per-environment state directory, as computed by
# scripts.lib.zulip_tools.get_dev_uuid_var_path().
UUID_VAR_PATH = get_dev_uuid_var_path()
|
|
|
|
|
2020-04-17 12:42:06 +02:00
|
|
|
def create_var_directories() -> None:
    """Ensure the working subdirectories under var/ (coverage, log, etc.) exist."""
    base_dir = os.path.join(ZULIP_PATH, 'var')
    for name in (
        'coverage',
        'log',
        'node-coverage',
        'test_uploads',
        'uploads',
        'xunit-test-results',
    ):
        os.makedirs(os.path.join(base_dir, name), exist_ok=True)
|
|
|
|
|
2020-04-20 13:18:06 +02:00
|
|
|
def build_pygments_data_paths() -> List[str]:
    """Input files whose changes should trigger a pygments data rebuild."""
    return [
        "tools/setup/build_pygments_data",
        "tools/setup/lang.json",
    ]
|
|
|
|
|
|
|
|
def compilemessages_paths() -> List[str]:
    """Input files whose changes should trigger re-running compilemessages."""
    return (
        ['zerver/management/commands/compilemessages.py']
        + glob.glob('locale/*/LC_MESSAGES/*.po')
        + glob.glob('locale/*/translations.json')
    )
|
|
|
|
|
|
|
|
def inline_email_css_paths() -> List[str]:
    """Input files whose changes should trigger re-running email CSS inlining."""
    fixed_inputs = [
        "scripts/setup/inline_email_css.py",
        "templates/zerver/emails/email.css",
    ]
    return fixed_inputs + glob.glob('templates/zerver/emails/*.source.html')
|
|
|
|
|
2020-04-29 12:03:15 +02:00
|
|
|
def configure_rabbitmq_paths() -> List[str]:
    """Input files whose changes should trigger reconfiguring RabbitMQ."""
    return ["scripts/setup/configure-rabbitmq"]
|
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def setup_shell_profile(shell_profile: str) -> None:
    """Idempotently add development-environment setup commands to a shell
    profile file (e.g. ~/.bash_profile or ~/.zprofile).

    Each command is appended only if the profile doesn't already contain it.
    """
    shell_profile_path = os.path.expanduser(shell_profile)

    def write_command(command: str) -> None:
        # Append `command` to the profile unless a stripped line already
        # matches it exactly; create the file if it doesn't exist yet.
        if os.path.exists(shell_profile_path):
            with open(shell_profile_path) as shell_profile_file:
                lines = [line.strip() for line in shell_profile_file.readlines()]
            if command not in lines:
                with open(shell_profile_path, 'a+') as shell_profile_file:
                    # Fixed: previously this passed a str to writelines(),
                    # which only happened to produce the same output because
                    # a str iterates as single characters; write() is the
                    # correct API for a single string.
                    shell_profile_file.write(command + '\n')
        else:
            with open(shell_profile_path, 'w') as shell_profile_file:
                shell_profile_file.write(command + '\n')

    # Activate the development virtualenv on login.
    source_activate_command = "source " + os.path.join(VENV_PATH, "bin", "activate")
    write_command(source_activate_command)

    # If the standard /srv/zulip checkout exists, also cd there on login.
    if os.path.exists('/srv/zulip'):
        write_command('cd /srv/zulip')
|
|
|
|
|
2019-09-07 01:57:44 +02:00
|
|
|
def setup_bash_profile() -> None:
    """Select a bash profile file to add setup code to."""

    BASH_PROFILES = [
        os.path.expanduser(path)
        for path in ("~/.bash_profile", "~/.bash_login", "~/.profile")
    ]

    def clear_old_profile() -> None:
        # An earlier version of this script would output a fresh
        # .bash_profile even though a .profile existed in the image used.
        # As a convenience to existing developers (and, perhaps, future
        # developers git-bisecting the provisioning scripts), check for
        # this situation, and blow away the created .bash_profile if one
        # is found.
        bash_profile, _, dot_profile = BASH_PROFILES
        old_profile_text = (
            "source /srv/zulip-py3-venv/bin/activate\n"
            "cd /srv/zulip\n"
        )
        if not os.path.exists(dot_profile):
            return
        try:
            with open(bash_profile) as f:
                contents = f.read()
            if contents == old_profile_text:
                os.unlink(bash_profile)
        except FileNotFoundError:
            pass

    clear_old_profile()

    # Add setup code to the first profile file that already exists; if
    # none exist, claim .bash_profile.
    for candidate_profile in BASH_PROFILES:
        if os.path.exists(candidate_profile):
            setup_shell_profile(candidate_profile)
            break
    else:
        setup_shell_profile(BASH_PROFILES[0])
|
|
|
|
|
2020-04-16 15:00:48 +02:00
|
|
|
def need_to_run_build_pygments_data() -> bool:
    """Decide whether tools/setup/build_pygments_data must be re-run."""
    if os.path.exists("static/generated/pygments_data.json"):
        # Output exists; re-run only when the inputs (or the installed
        # pygments version) have changed since the recorded digest.
        return is_digest_obsolete(
            "build_pygments_data_hash",
            build_pygments_data_paths(),
            [pygments_version],
        )
    # A missing output file always forces a rebuild.
    return True
|
|
|
|
|
2020-04-16 13:27:47 +02:00
|
|
|
def need_to_run_compilemessages() -> bool:
    """Decide whether `manage.py compilemessages` must be re-run."""
    if os.path.exists('locale/language_name_map.json'):
        # Re-run only when a translation source file has changed since
        # the recorded digest.
        return is_digest_obsolete(
            "last_compilemessages_hash",
            compilemessages_paths(),
        )
    # User may have cleaned their git checkout.
    print('Need to run compilemessages due to missing language_name_map.json')
    return True
|
2020-04-16 13:27:47 +02:00
|
|
|
|
2020-04-16 17:39:12 +02:00
|
|
|
def need_to_run_inline_email_css() -> bool:
    """Decide whether scripts/setup/inline_email_css.py must be re-run."""
    if os.path.exists('templates/zerver/emails/compiled/'):
        # Re-run only when an email source file has changed since the
        # recorded digest.
        return is_digest_obsolete(
            "last_email_source_files_hash",
            inline_email_css_paths(),
        )
    # A missing output directory always forces a run.
    return True
|
2020-04-16 17:39:12 +02:00
|
|
|
|
2020-04-30 07:52:54 +02:00
|
|
|
def need_to_run_configure_rabbitmq(settings_list: List[str]) -> bool:
    """Decide whether scripts/setup/configure-rabbitmq must be re-run.

    Returns True when the script or the relevant settings changed since
    the recorded digest, or when the queue client cannot be instantiated
    with the current configuration.
    """
    if is_digest_obsolete(
        'last_configure_rabbitmq_hash',
        configure_rabbitmq_paths(),
        settings_list,
    ):
        return True

    # Even with an up-to-date digest, sanity-check by instantiating the
    # queue client; any failure means we should reconfigure.
    try:
        from zerver.lib.queue import SimpleQueueClient
        SimpleQueueClient()
    except Exception:
        return True
    return False
|
|
|
|
|
2020-04-30 19:12:13 +02:00
|
|
|
|
|
|
|
def clean_unused_caches() -> None:
    """Prune stale venv, node, and emoji caches from old provisions."""
    # The defaults here should match parse_cache_script_args in zulip_tools.py
    args = argparse.Namespace(
        threshold_days=6,
        dry_run=False,
        verbose=False,
        no_headings=True,
    )
    from scripts.lib import clean_venv_cache, clean_node_cache, clean_emoji_cache
    for cache_cleaner in (clean_venv_cache, clean_node_cache, clean_emoji_cache):
        cache_cleaner.main(args)
|
|
|
|
|
2019-07-19 08:06:34 +02:00
|
|
|
def main(options: argparse.Namespace) -> int:
    """Run the (inner) development-environment provisioning steps.

    Expects `options` with `is_force` (ignore all caching optimizations)
    and `is_production_test_suite` (skip dev-only steps).  Returns 0 on
    success.
    """
    setup_bash_profile()
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    create_var_directories()

    # The `build_emoji` script requires `emoji-datasource` package
    # which we install via npm; thus this step is after installing npm
    # packages.
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()

    if options.is_force or need_to_run_build_pygments_data():
        run(["tools/setup/build_pygments_data"])
        write_new_digest(
            'build_pygments_data_hash',
            build_pygments_data_paths(),
            [pygments_version]
        )
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    if options.is_force or need_to_run_inline_email_css():
        run(["scripts/setup/inline_email_css.py"])
        write_new_digest(
            "last_email_source_files_hash",
            inline_email_css_paths(),
        )
    else:
        print("No need to run `scripts/setup/inline_email_css.py`.")

    if not options.is_production_test_suite:
        # The following block is skipped for the production test
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import (
            DEV_DATABASE,
            TEST_DATABASE,
            destroy_leaked_test_databases,
        )
        from django.conf import settings

        if options.is_force or need_to_run_configure_rabbitmq(
                [settings.RABBITMQ_PASSWORD]):
            run(["scripts/setup/configure-rabbitmq"])
            write_new_digest(
                'last_configure_rabbitmq_hash',
                configure_rabbitmq_paths(),
                [settings.RABBITMQ_PASSWORD]
            )
        else:
            # Fixed: the message was missing its closing backtick, unlike
            # every sibling "No need to run" message.
            print("No need to run `scripts/setup/configure-rabbitmq`.")

        dev_template_db_status = DEV_DATABASE.template_status()
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/rebuild-dev-database"])
            DEV_DATABASE.write_new_db_digest()
        elif dev_template_db_status == 'run_migrations':
            DEV_DATABASE.run_db_migrations()
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = TEST_DATABASE.template_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/rebuild-test-database"])
            TEST_DATABASE.write_new_db_digest()
        elif test_template_db_status == 'run_migrations':
            TEST_DATABASE.run_db_migrations()
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        if options.is_force or need_to_run_compilemessages():
            run(["./manage.py", "compilemessages"])
            write_new_digest(
                "last_compilemessages_hash",
                compilemessages_paths(),
            )
        else:
            print("No need to run `manage.py compilemessages`.")

        destroyed = destroy_leaked_test_databases()
        if destroyed:
            print("Dropped %s stale test databases!" % (destroyed,))

    clean_unused_caches()

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    # Clean up the root of the `var/` directory for various
    # testing-related files that we have migrated to
    # `var/<uuid>/test-backend`.
    print("Cleaning var/ directory files...")
    var_paths = glob.glob('var/test*')
    var_paths.append('var/bot_avatar')
    for path in var_paths:
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        except FileNotFoundError:
            pass

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    # Fixed: use a context manager instead of leaking the open file
    # handle from `open(...).write(...)`.
    with open(version_file, 'w') as f:
        f.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Parse command-line flags and run provisioning.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--force',
        action='store_true',
        dest='is_force',
        default=False,
        help="Ignore all provisioning optimizations.",
    )
    parser.add_argument(
        '--production-test-suite',
        action='store_true',
        dest='is_production_test_suite',
        default=False,
        help="Provision for test suite with production settings.",
    )
    sys.exit(main(parser.parse_args()))
|