py3: Switch almost all shebang lines to use `python3`.
This causes `upgrade-zulip-from-git`, as well as a no-option run of
`tools/build-release-tarball`, to produce a Zulip install running
Python 3, rather than Python 2. In particular this means that the
virtualenv we create, in which all application code runs, is Python 3.
One shebang line, on `zulip-ec2-configure-interfaces`, explicitly
keeps Python 2, and at least one external ops script, `wal-e`, also
still runs on Python 2. See discussion on the respective previous
commits that made those explicit. There may also be some other
third-party scripts we use, outside of this source tree and running
outside our virtualenv, that still run on Python 2.
2017-08-02 23:15:16 +02:00
|
|
|
#!/usr/bin/env python3
|
2015-08-17 06:37:14 +02:00
|
|
|
import os
|
2015-08-20 02:46:50 +02:00
|
|
|
import sys
|
2015-08-19 04:18:08 +02:00
|
|
|
import logging
|
2016-10-21 09:45:21 +02:00
|
|
|
import argparse
|
2015-08-19 04:18:08 +02:00
|
|
|
import platform
|
2016-04-06 20:12:32 +02:00
|
|
|
import subprocess
|
2017-06-14 11:32:34 +02:00
|
|
|
import glob
|
2017-06-15 12:13:35 +02:00
|
|
|
import hashlib
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2016-04-06 17:15:31 +02:00
|
|
|
os.environ["PYTHONUNBUFFERED"] = "y"
|
|
|
|
|
2017-01-14 11:19:26 +01:00
|
|
|
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
2016-06-27 23:50:38 +02:00
|
|
|
|
|
|
|
sys.path.append(ZULIP_PATH)
|
2017-10-18 04:14:06 +02:00
|
|
|
from scripts.lib.zulip_tools import run, subprocess_text_output, OKBLUE, ENDC, WARNING, \
|
|
|
|
get_dev_uuid_var_path
|
2017-09-30 14:40:18 +02:00
|
|
|
from scripts.lib.setup_venv import VENV_DEPENDENCIES
|
2017-07-22 00:51:36 +02:00
|
|
|
from scripts.lib.node_cache import setup_node_modules, NODE_MODULES_CACHE_PATH
|
2016-08-18 13:53:16 +02:00
|
|
|
|
2016-10-14 00:53:01 +02:00
|
|
|
from version import PROVISION_VERSION
|
2016-10-21 09:45:21 +02:00
|
|
|
if False:
|
|
|
|
from typing import Any
|
2016-10-14 00:53:01 +02:00
|
|
|
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2015-08-19 04:18:08 +02:00
|
|
|
# Vendor -> [codenames] we support provisioning on; validated against
# `lsb_release` output further down in this file.
SUPPORTED_PLATFORMS = {
    "Ubuntu": [
        "trusty",
        "xenial",
        # Platforms that are blocked on tsearch_extras
        # "stretch",
        # "zesty",
    ],
}
|
|
|
|
|
2017-08-09 00:38:30 +02:00
|
|
|
# All application code runs inside this Python 3 virtualenv.
VENV_PATH = "/srv/zulip-py3-venv"
# Writable development-environment state lives under var/ in the checkout.
VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
LOG_DIR_PATH = os.path.join(VAR_DIR_PATH, 'log')
UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'uploads')
TEST_UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'test_uploads')
COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'coverage')
LINECOVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'linecoverage-report')
NODE_TEST_COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'node-coverage')

# TODO: De-duplicate this with emoji_dump.py
EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"

# Travis CI exports TRAVIS in the environment of every build.
is_travis = 'TRAVIS' in os.environ

if is_travis:
    # In Travis CI, we don't have root access
    EMOJI_CACHE_PATH = "/home/travis/zulip-emoji-cache"
|
|
|
|
|
2016-04-06 19:09:16 +02:00
|
|
|
# Provisioning only works from a git checkout; a production release
# tarball has no .git directory, so bail out early with guidance.
if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
    print("Error: No Zulip git repository present!")
    print("To setup the Zulip development environment, you should clone the code")
    print("from GitHub, rather than using a Zulip production release tarball.")
    sys.exit(1)
|
|
|
|
|
2016-12-01 00:23:05 +01:00
|
|
|
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
    # First line of /proc/meminfo is e.g. "MemTotal:  NNN kB"; the
    # second-to-last whitespace token is the size in kB.
    ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
    print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
        round(ram_gb, 2),))
    print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
    sys.exit(1)
|
|
|
|
|
2016-11-09 00:10:46 +01:00
|
|
|
# Smoke-test that we can create symlinks under var/: create one pointing
# at README.md and immediately remove it.  Symlink creation fails on some
# host/VM shared-folder setups (see the linked doc page), and the rest of
# provisioning depends on symlinks working.
try:
    UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
    run(["mkdir", "-p", UUID_VAR_PATH])
    # Remove any stale test symlink left over from a previous failed run.
    if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
        os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
    os.symlink(
        os.path.join(ZULIP_PATH, 'README.md'),
        os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
    )
    os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError as err:
    print("Error: Unable to create symlinks. Make sure you have permission to create symbolic links.")
    print("See this page for more information:")
    print("  https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
    sys.exit(1)
|
|
|
|
|
2016-04-06 19:27:42 +02:00
|
|
|
# Map the interpreter word size reported by platform.architecture() onto
# apt architecture names.  NOTE(review): this reflects pointer size, not
# the CPU ISA, so any 64-bit build maps to amd64 — confirm that is intended.
if platform.architecture()[0] == '64bit':
    arch = 'amd64'
elif platform.architecture()[0] == '32bit':
    arch = "i386"
else:
    # Bug fix: the two adjacent string literals previously concatenated
    # with no separator, logging "...supported;ping zulip-devel...".
    logging.critical("Only x86 is supported; "
                     "ping zulip-devel@googlegroups.com if you want another architecture.")
    sys.exit(1)
|
|
|
|
|
2016-04-06 20:12:32 +02:00
|
|
|
# Ideally we wouldn't need to install a dependency here, before we
# know the codename.
subprocess.check_call(["sudo", "apt-get", "install", "-y", "lsb-release"])
# Distribution vendor (e.g. "Ubuntu") and release codename (e.g. "xenial").
vendor = subprocess_text_output(["lsb_release", "-is"])
codename = subprocess_text_output(["lsb_release", "-cs"])
# Abort on anything outside the SUPPORTED_PLATFORMS whitelist above.
if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
    logging.critical("Unsupported platform: {} {}".format(vendor, codename))
    sys.exit(1)
|
|
|
|
|
2016-04-06 19:30:16 +02:00
|
|
|
# Postgres major version shipped by each supported release codename.
POSTGRES_VERSION_MAP = {
    "stretch": "9.6",
    "trusty": "9.3",
    "xenial": "9.5",
    "zesty": "9.6",
}
# Version for the platform we are actually provisioning on.
POSTGRES_VERSION = POSTGRES_VERSION_MAP[codename]
|
|
|
|
|
2016-04-04 23:22:14 +02:00
|
|
|
# Apt packages needed on every supported release, plus the
# virtualenv-related packages from scripts.lib.setup_venv.
UBUNTU_COMMON_APT_DEPENDENCIES = [
    "closure-compiler",
    "memcached",
    "rabbitmq-server",
    "redis-server",
    "hunspell-en-us",
    "supervisor",
    "git",
    "libssl-dev",
    "yui-compressor",
    "wget",
    "ca-certificates",  # Explicit dependency in case e.g. wget is already installed
    "puppet",  # Used by lint
    "gettext",  # Used by makemessages i18n
    "curl",  # Used for fetching PhantomJS as wget occasionally fails on redirects
    "netcat",  # Used for flushing memcached
    "moreutils",  # Used for sponge command
] + VENV_DEPENDENCIES
|
2016-04-04 23:22:14 +02:00
|
|
|
|
|
|
|
# Full apt package list per codename: the common set above plus the
# matching postgres version and its extensions.
APT_DEPENDENCIES = {
    "stretch": UBUNTU_COMMON_APT_DEPENDENCIES + [
        "postgresql-9.6",
        # tsearch-extras removed because there's no apt repository hosting it for Debian.
        # "postgresql-9.6-tsearch-extras",
        "postgresql-9.6-pgroonga",
        # Technically, this should be in VENV_DEPENDENCIES, but it
        # doesn't exist in trusty and we don't have a conditional on
        # platform there.
        "virtualenv",
    ],
    "trusty": UBUNTU_COMMON_APT_DEPENDENCIES + [
        "postgresql-9.3",
        "postgresql-9.3-tsearch-extras",
        "postgresql-9.3-pgroonga",
    ],
    "xenial": UBUNTU_COMMON_APT_DEPENDENCIES + [
        "postgresql-9.5",
        "postgresql-9.5-tsearch-extras",
        "postgresql-9.5-pgroonga",
    ],
    "zesty": UBUNTU_COMMON_APT_DEPENDENCIES + [
        "postgresql-9.6",
        "postgresql-9.6-pgroonga",
        # virtualenv needed here for the same reason as on stretch.
        "virtualenv",
    ],
}
|
|
|
|
|
2016-04-06 19:30:16 +02:00
|
|
|
# System tsearch data directory for the selected postgres version; the
# repo's stopwords file is copied there during provisioning.
TSEARCH_STOPWORDS_PATH = "/usr/share/postgresql/%s/tsearch_data/" % (POSTGRES_VERSION,)
# The stopwords file shipped in this repository.
REPO_STOPWORDS_PATH = os.path.join(
    ZULIP_PATH,
    "puppet",
    "zulip",
    "files",
    "postgresql",
    "zulip_english.stop",
)
|
|
|
|
|
2015-08-21 03:06:18 +02:00
|
|
|
# Keyword args that stream a subprocess's output straight to our own
# stdout/stderr.  NOTE(review): `_out`/`_err` look like `sh`-library
# conventions and this dict appears unused in this file — confirm.
LOUD = dict(_out=sys.stdout, _err=sys.stderr)

# Numeric uid of the invoking user; used to chown caches created via sudo.
user_id = os.getuid()
|
2016-06-14 10:33:23 +02:00
|
|
|
|
2017-01-16 13:00:54 +01:00
|
|
|
def setup_shell_profile(shell_profile):
    # type: (str) -> None
    """Ensure the given shell startup file activates the Zulip virtualenv
    and cd's into /srv/zulip, adding each line only if it is not already
    present (file is created when missing)."""
    profile_path = os.path.expanduser(shell_profile)

    def ensure_line(line):
        # type: (str) -> None
        # Create the file outright when it does not exist yet.
        if not os.path.exists(profile_path):
            with open(profile_path, 'w') as profile_file:
                profile_file.write(line + '\n')
            return
        # Otherwise append the line only when it is not already there.
        with open(profile_path, 'r') as profile_file:
            existing_lines = [existing.strip() for existing in profile_file.readlines()]
        if line not in existing_lines:
            with open(profile_path, 'a+') as profile_file:
                profile_file.write(line + '\n')

    ensure_line("source " + os.path.join(VENV_PATH, "bin", "activate"))
    ensure_line('cd /srv/zulip')
|
2017-01-16 13:00:54 +01:00
|
|
|
|
2017-06-13 19:02:39 +02:00
|
|
|
def install_apt_deps():
    # type: () -> None
    """Refresh the apt repositories and install this codename's packages."""
    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    install_cmd = ["sudo", "apt-get", "-y", "install", "--no-install-recommends"]
    run(install_cmd + APT_DEPENDENCIES[codename])
|
|
|
|
|
2016-10-21 09:45:21 +02:00
|
|
|
def main(options):
    # type: (Any) -> int
    """Provision the Zulip development environment.

    Installs apt, node and Python dependencies, sets up caches, the dev
    and test databases, and generated static assets.  Expensive steps are
    skipped when their inputs are unchanged, tracked via content hashes
    stored under UUID_VAR_PATH, unless options.is_force is set.  Returns 0
    on success (the caller passes this to sys.exit).
    """
    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # Hash the apt dependency list plus the setup-apt-repo script, so we
    # can skip all apt operations when nothing relevant has changed.
    sha_sum = hashlib.sha1()
    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo (with-block so the handle is closed;
    # the original leaked it)
    with open('scripts/lib/setup-apt-repo') as apt_repo_file:
        sha_sum.update(apt_repo_file.read().encode('utf8'))

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    try:
        with open(apt_hash_file_path, 'r') as hash_file:
            last_apt_dependencies_hash = hash_file.read()
    except IOError:
        # First run: create the file so the write below cannot fail on a
        # missing parent entry; last_apt_dependencies_hash stays None so
        # the install branch is taken.
        run(['touch', apt_hash_file_path])

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            # Since a common failure mode is for the caching in
            # `setup-apt-repo` to optimize the fast code path to skip
            # running `apt-get update` when the target apt repository
            # is out of date, we run it explicitly here so that we
            # recover automatically.
            run(['sudo', 'apt-get', 'update'])
            install_apt_deps()
        # Bug fix: the original wrote through the same 'r+' handle it had
        # just read from, so the new hash was *appended* after the old
        # contents and the file never matched again.  Rewrite it instead.
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
    run(["sudo", "scripts/lib/install-node"])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        run(["sudo", "mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Import tools/setup_venv.py instead of running it so that we get an
    # activated virtualenv for the rest of the provisioning process.
    from tools.setup import setup_venvs
    setup_venvs.main()

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # Install our postgres full-text-search stopwords file system-wide.
    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    run(["mkdir", "-p", LOG_DIR_PATH])
    # create upload directory `var/uploads`
    run(["mkdir", "-p", UPLOAD_DIR_PATH])
    # create test upload directory `var/test_upload`
    run(["mkdir", "-p", TEST_UPLOAD_DIR_PATH])
    # create coverage directory `var/coverage`
    run(["mkdir", "-p", COVERAGE_DIR_PATH])
    # create linecoverage directory `var/linecoverage-report`
    run(["mkdir", "-p", LINECOVERAGE_DIR_PATH])
    # create node coverage directory `var/node-coverage`
    run(["mkdir", "-p", NODE_TEST_COVERAGE_DIR_PATH])

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    run(["tools/setup/generate_zulip_bots_static_files"])

    run(["tools/generate-custom-icon-webfont"])
    run(["tools/setup/build_pygments_data"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])
    run(["tools/inline-email-css"])
    if is_travis and not options.is_production_travis:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.str_utils import force_bytes
        from zerver.lib.test_fixtures import is_template_database_current

        # Probe whether RabbitMQ already has our credentials configured by
        # attempting a connection; any failure means "not configured".
        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
        if options.is_force or not is_template_database_current(
                migration_status=migration_status_path,
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.  Skip when none of the
        # inputs (compilemessages command, .po/.json sources) changed.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        for path in paths:
            with open(path, 'r') as file_to_hash:
                sha1sum.update(force_bytes(file_to_hash.read()))

        compilemessages_hash_path = os.path.join(UUID_VAR_PATH, "last_compilemessages_hash")
        new_compilemessages_hash = sha1sum.hexdigest()
        run(['touch', compilemessages_hash_path])
        with open(compilemessages_hash_path, 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open(compilemessages_hash_path, 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    # Record the provision version so tools can detect a stale environment.
    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    # with-block so the handle is closed deterministically (the original
    # relied on refcounting via open(...).write(...)).
    with open(version_file, 'w') as f:
        f.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2015-08-20 02:46:50 +02:00
|
|
|
if __name__ == "__main__":
    # Command-line entry point: parse provisioning flags, run main(), and
    # exit with its status code.
    parser = argparse.ArgumentParser(description="Provision script to install Zulip")
    parser.add_argument('--force', action='store_true', dest='is_force',
                        default=False,
                        help="Ignore all provisioning optimizations.")
    parser.add_argument('--production-travis', action='store_true',
                        dest='is_production_travis',
                        default=False,
                        help="Provision for Travis with production settings.")
    parser.add_argument('--docker', action='store_true',
                        dest='is_docker',
                        default=False,
                        help="Provision for Docker.")
    sys.exit(main(parser.parse_args()))
|