2016-07-01 07:34:58 +02:00
|
|
|
#!/usr/bin/env python
|
2015-11-01 17:11:06 +01:00
|
|
|
from __future__ import print_function
|
2015-08-17 06:37:14 +02:00
|
|
|
import os
|
2015-08-20 02:46:50 +02:00
|
|
|
import sys
|
2015-08-19 04:18:08 +02:00
|
|
|
import logging
|
2016-10-21 09:45:21 +02:00
|
|
|
import argparse
|
2015-08-19 04:18:08 +02:00
|
|
|
import platform
|
2016-04-06 20:12:32 +02:00
|
|
|
import subprocess
|
2017-06-14 11:32:34 +02:00
|
|
|
import glob
|
2017-06-15 12:13:35 +02:00
|
|
|
import hashlib
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2016-04-06 17:15:31 +02:00
|
|
|
# Stream provisioning output immediately instead of buffering it.
os.environ["PYTHONUNBUFFERED"] = "y"

# True when this script runs under a Python 2 interpreter.
PY2 = (sys.version_info[0] == 2)

# Repository root: three directory levels above this script.
ZULIP_PATH = os.path.dirname(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# Make the checkout importable (scripts.lib.*, version, etc.).
sys.path.append(ZULIP_PATH)
|
2016-08-13 17:46:19 +02:00
|
|
|
from scripts.lib.zulip_tools import run, subprocess_text_output, OKBLUE, ENDC, WARNING
|
2016-06-22 18:17:46 +02:00
|
|
|
from scripts.lib.setup_venv import setup_virtualenv, VENV_DEPENDENCIES
|
2016-10-03 07:45:23 +02:00
|
|
|
from scripts.lib.node_cache import setup_node_modules, NPM_CACHE_PATH
|
2016-08-18 13:53:16 +02:00
|
|
|
|
2016-10-14 00:53:01 +02:00
|
|
|
from version import PROVISION_VERSION
|
2016-10-21 09:45:21 +02:00
|
|
|
if False:
|
|
|
|
from typing import Any
|
2016-10-14 00:53:01 +02:00
|
|
|
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2015-08-19 04:18:08 +02:00
|
|
|
# Vendor -> list of release codenames this provisioning script supports.
SUPPORTED_PLATFORMS = {
    "Ubuntu": ["trusty", "xenial"],
}
|
|
|
|
|
2016-07-02 22:15:55 +02:00
|
|
|
# Locations of the Python 2 and Python 3 virtualenvs.
PY2_VENV_PATH = "/srv/zulip-venv"
PY3_VENV_PATH = "/srv/zulip-py3-venv"

# All writable development-environment state lives under var/ in the checkout.
VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
LOG_DIR_PATH = os.path.join(VAR_DIR_PATH, 'log')
UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'uploads')
TEST_UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'test_uploads')
COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'coverage')
LINECOVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'linecoverage-report')
NODE_TEST_COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'node-coverage')

# TODO: De-duplicate this with emoji_dump.py
EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
if 'TRAVIS' in os.environ:
    # In Travis CI, we don't have root access
    EMOJI_CACHE_PATH = "/home/travis/zulip-emoji-cache"

# Provision into the venv matching the interpreter running this script.
VENV_PATH = PY2_VENV_PATH if PY2 else PY3_VENV_PATH
|
|
|
|
|
2016-04-06 19:09:16 +02:00
|
|
|
# Refuse to provision from a release tarball: the dev tooling assumes a
# real git checkout is present.
if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
    for error_line in [
        "Error: No Zulip git repository present!",
        "To setup the Zulip development environment, you should clone the code",
        "from GitHub, rather than using a Zulip production release tarball.",
    ]:
        print(error_line)
    sys.exit(1)
|
|
|
|
|
2016-12-01 00:23:05 +01:00
|
|
|
# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
with open("/proc/meminfo") as meminfo:
    # First line is "MemTotal: <kB> kB"; the second-to-last
    # space-separated field is the number of kilobytes.
    ram_size = meminfo.readline().strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
    print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
        round(ram_gb, 2),))
    print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
    sys.exit(1)
|
|
|
|
|
2016-11-09 00:10:46 +01:00
|
|
|
# Verify this filesystem/OS can create symlinks (e.g. some Windows/VM
# setups cannot), since the test suite and node_modules rely on them.
try:
    run(["mkdir", "-p", VAR_DIR_PATH])
    test_link_path = os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
    if os.path.exists(test_link_path):
        os.remove(test_link_path)
    os.symlink(os.path.join(ZULIP_PATH, 'README.md'), test_link_path)
    os.remove(test_link_path)
except OSError:
    print("Error: Unable to create symlinks. Make sure you have permission to create symbolic links.")
    print("See this page for more information:")
    print(" http://zulip.readthedocs.io/en/latest/dev-env-first-time-contributors.html#os-symlink-error")
    sys.exit(1)
|
|
|
|
|
2016-04-06 19:27:42 +02:00
|
|
|
# Translate the interpreter's word size into the matching apt architecture.
machine_bits = platform.architecture()[0]
if machine_bits == '64bit':
    arch = 'amd64'
elif machine_bits == '32bit':
    arch = "i386"
else:
    logging.critical("Only x86 is supported; ping zulip-devel@googlegroups.com if you want another architecture.")
    sys.exit(1)
|
|
|
|
|
2016-04-06 20:12:32 +02:00
|
|
|
# Ideally we wouldn't need to install a dependency here, before we
# know the codename.
subprocess.check_call(["sudo", "apt-get", "install", "-y", "lsb-release"])

# Identify the distribution vendor and release codename, then confirm
# this is a combination we support.
vendor = subprocess_text_output(["lsb_release", "-is"])
codename = subprocess_text_output(["lsb_release", "-cs"])
if vendor not in SUPPORTED_PLATFORMS or codename not in SUPPORTED_PLATFORMS[vendor]:
    logging.critical("Unsupported platform: {} {}".format(vendor, codename))
    sys.exit(1)

# Postgres major version shipped by each supported release codename.
POSTGRES_VERSION_MAP = {
    "stretch": "9.6",
    "trusty": "9.3",
    "xenial": "9.5",
}
POSTGRES_VERSION = POSTGRES_VERSION_MAP[codename]
|
|
|
|
|
2016-04-04 23:22:14 +02:00
|
|
|
# Packages required on every supported release, independent of codename.
UBUNTU_COMMON_APT_DEPENDENCIES = [
    "closure-compiler",
    "memcached",
    "rabbitmq-server",
    "redis-server",
    "hunspell-en-us",
    "supervisor",
    "git",
    "libssl-dev",
    "yui-compressor",
    "wget",
    "ca-certificates",  # Explicit dependency in case e.g. wget is already installed
    "puppet",  # Used by lint
    "gettext",  # Used by makemessages i18n
    "curl",  # Used for fetching PhantomJS as wget occasionally fails on redirects
    "netcat",  # Used for flushing memcached
] + VENV_DEPENDENCIES

# Release-specific Postgres packages layered on top of the common set.
POSTGRES_APT_EXTRAS = {
    "stretch": [
        "postgresql-9.6",
        # tsearch-extras removed because there's no apt repository hosting it for Debian.
        # "postgresql-9.6-tsearch-extras",
        "postgresql-9.6-pgroonga",
    ],
    "trusty": [
        "postgresql-9.3",
        "postgresql-9.3-tsearch-extras",
        "postgresql-9.3-pgroonga",
    ],
    "xenial": [
        "postgresql-9.5",
        "postgresql-9.5-tsearch-extras",
        "postgresql-9.5-pgroonga",
    ],
}

APT_DEPENDENCIES = {
    release: UBUNTU_COMMON_APT_DEPENDENCIES + extras
    for release, extras in POSTGRES_APT_EXTRAS.items()
}
|
|
|
|
|
2016-04-06 19:30:16 +02:00
|
|
|
# Where Postgres expects tsearch stopword files, and our English stopword
# list in the repository that gets copied there.
TSEARCH_STOPWORDS_PATH = "/usr/share/postgresql/%s/tsearch_data/" % (POSTGRES_VERSION,)
REPO_STOPWORDS_PATH = os.path.join(
    ZULIP_PATH, "puppet", "zulip", "files", "postgresql", "zulip_english.stop")

# Convenience kwargs for sh-style helpers that should stream their output.
LOUD = dict(_out=sys.stdout, _err=sys.stderr)

# Numeric uid of the invoking user; used to chown caches created via sudo.
user_id = os.getuid()
|
2016-06-14 10:33:23 +02:00
|
|
|
|
2017-01-16 13:00:54 +01:00
|
|
|
def setup_shell_profile(shell_profile):
    # type: (str) -> None
    """Ensure the user's shell profile activates the Zulip environment.

    Appends a `source .../bin/activate` line and a `cd /srv/zulip` line
    to the given profile file (e.g. ``~/.bash_profile``), creating the
    file if needed and skipping commands that are already present.
    """
    shell_profile_path = os.path.expanduser(shell_profile)

    def write_command(command):
        # type: (str) -> None
        # Append `command` unless it already appears in the profile.
        if os.path.exists(shell_profile_path):
            with open(shell_profile_path, 'a+') as shell_profile_file:
                # Bug fix: in 'a+' mode the file position starts at EOF, so
                # read() returned '' and the dedup check never matched,
                # appending duplicate lines on every provision.  Seek to the
                # start before reading; appends still go to EOF.
                shell_profile_file.seek(0)
                if command not in shell_profile_file.read():
                    shell_profile_file.writelines(command + '\n')
        else:
            with open(shell_profile_path, 'w') as shell_profile_file:
                shell_profile_file.writelines(command + '\n')

    source_activate_command = "source " + os.path.join(VENV_PATH, "bin", "activate")
    write_command(source_activate_command)
    write_command('cd /srv/zulip')
|
2017-01-16 13:00:54 +01:00
|
|
|
|
2017-06-13 19:02:39 +02:00
|
|
|
def install_apt_deps():
    # type: () -> None
    """Install every system package required for this release via apt."""
    # setup-apt-repo does an `apt-get update`
    run(["sudo", "./scripts/lib/setup-apt-repo"])
    apt_command = ["sudo", "apt-get", "-y", "install", "--no-install-recommends"]
    run(apt_command + APT_DEPENDENCIES[codename])
|
|
|
|
|
2016-10-21 09:45:21 +02:00
|
|
|
def main(options):
    # type: (Any) -> int
    """Provision the Zulip development environment.

    Installs apt and npm dependencies (skipping apt when nothing changed
    since the last run), sets up virtualenvs and shell profiles, creates
    the var/ directory tree, builds generated assets, and (re)initializes
    RabbitMQ and the dev/test databases as needed.  Returns 0 on success;
    failures raise or call sys.exit.
    """

    # npm install and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # Hash the apt dependency list plus the setup-apt-repo script so the
    # slow apt steps can be skipped when nothing relevant has changed.
    sha_sum = hashlib.sha1()
    for apt_dependency in APT_DEPENDENCIES[codename]:
        sha_sum.update(apt_dependency.encode('utf8'))
    # hash the content of setup-apt-repo
    with open('scripts/lib/setup-apt-repo') as apt_repo_file:
        sha_sum.update(apt_repo_file.read().encode('utf8'))
    new_apt_dependencies_hash = sha_sum.hexdigest()

    # NOTE: the misspelled filename is load-bearing: it matches state
    # written by earlier provision runs, so we keep it.
    apt_hash_file_path = 'var/apt_dependenices_hash'
    last_apt_dependencies_hash = None
    try:
        with open(apt_hash_file_path, 'r') as hash_file:
            last_apt_dependencies_hash = hash_file.read()
    except IOError:
        run(['touch', apt_hash_file_path])

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_apt_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
            install_apt_deps()
        # Bug fix: the old code opened the file 'r+', read it, and then
        # wrote -- which *appended* the new hash after the old one, so the
        # stored value never matched and apt always re-ran.  Rewrite the
        # file from scratch instead.
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No need to run apt operations.")

    # Here we install nvm, node, and npm.
    run(["sudo", "scripts/lib/install-node"])

    # Install NPM packages before running other scripts so that if a script
    # requires any NPM package it can use it.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run(["sudo", "rm", "-f", "node_modules"])
        if not os.path.isdir(NPM_CACHE_PATH):
            run(["sudo", "mkdir", NPM_CACHE_PATH])
        run(["sudo", "chown", "%s:%s" % (user_id, user_id), NPM_CACHE_PATH])
        setup_node_modules()
    except subprocess.CalledProcessError:
        # Might be a failure due to network connection issues. Retrying...
        print(WARNING + "`npm install` failed; retrying..." + ENDC)
        setup_node_modules()

    if options.is_travis:
        if PY2:
            # On Python 2 Travis we also need a Python 3 venv for mypy.
            MYPY_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "mypy.txt")
            setup_virtualenv(PY3_VENV_PATH, MYPY_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py2_dev.txt")
            setup_virtualenv(PY2_VENV_PATH, DEV_REQS_FILE, patch_activate_script=True)
        else:
            DEV_REQS_FILE = os.path.join(ZULIP_PATH, "requirements", "py3_dev.txt")
            setup_virtualenv(VENV_PATH, DEV_REQS_FILE, patch_activate_script=True,
                             virtualenv_args=['-p', 'python3'])
    else:
        # Import tools/setup_venv.py instead of running it so that we get an
        # activated virtualenv for the rest of the provisioning process.
        from tools.setup import setup_venvs
        setup_venvs.main()

    # Put virtualenv activation in .bash_profile and .zprofile (for Zsh users).
    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    run(["sudo", "cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # Create the var/ directory tree used for logs, uploads, and
    # coverage reports.
    for directory_path in [LOG_DIR_PATH, UPLOAD_DIR_PATH, TEST_UPLOAD_DIR_PATH,
                           COVERAGE_DIR_PATH, LINECOVERAGE_DIR_PATH,
                           NODE_TEST_COVERAGE_DIR_PATH]:
        run(["mkdir", "-p", directory_path])

    # `build_emoji` script requires `emoji-datasource` package which we install
    # via npm and hence it should be executed after we are done installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run(["sudo", "mkdir", EMOJI_CACHE_PATH])
    run(["sudo", "chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    run(["tools/setup/build_pygments_data.py"])
    run(["scripts/setup/generate_secrets.py", "--development"])
    run(["tools/update-authors-json", "--use-fixture"])

    if options.is_travis and not options.is_production_travis:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])
    elif options.is_docker:
        run(["sudo", "service", "rabbitmq-server", "restart"])
        run(["sudo", "pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run(["sudo", "pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run(["sudo", "service", "redis-server", "restart"])
        run(["sudo", "service", "memcached", "restart"])

    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using is_template_database_current
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.str_utils import force_bytes
        from zerver.lib.test_fixtures import is_template_database_current

        # Probe whether RabbitMQ already has working credentials; any
        # connection failure means we need to (re)configure it.
        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        if options.is_force or not is_template_database_current(
                migration_status="var/migration_status_dev",
                settings="zproject.settings",
                database_name="zulip",
        ):
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        else:
            print("No need to regenerate the dev DB.")

        if options.is_force or not is_template_database_current():
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        else:
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        sha1sum = hashlib.sha1()
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')
        for path in paths:
            with open(path, 'r') as file_to_hash:
                sha1sum.update(force_bytes(file_to_hash.read()))
        new_compilemessages_hash = sha1sum.hexdigest()

        run(['touch', 'var/last_compilemessages_hash'])
        with open('var/last_compilemessages_hash', 'r') as hash_file:
            last_compilemessages_hash = hash_file.read()

        if options.is_force or (new_compilemessages_hash != last_compilemessages_hash):
            with open('var/last_compilemessages_hash', 'w') as hash_file:
                hash_file.write(new_compilemessages_hash)
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    # Record the provision version so tooling can detect a stale environment.
    version_file = os.path.join(ZULIP_PATH, 'var/provision_version')
    print('writing to %s\n' % (version_file,))
    with open(version_file, 'w') as version_file_handle:
        version_file_handle.write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0
|
2015-08-17 06:37:14 +02:00
|
|
|
|
2015-08-20 02:46:50 +02:00
|
|
|
if __name__ == "__main__":
    # Parse the provisioning flags and hand off to main().
    parser = argparse.ArgumentParser(
        description=("Provision script to install Zulip"))
    parser.add_argument(
        '--force', action='store_true', dest='is_force', default=False,
        help="Ignore all provisioning optimizations.")
    parser.add_argument(
        '--travis', action='store_true', dest='is_travis', default=False,
        help="Provision for Travis but without production settings.")
    parser.add_argument(
        '--production-travis', action='store_true',
        dest='is_production_travis', default=False,
        help="Provision for Travis but with production settings.")
    parser.add_argument(
        '--docker', action='store_true', dest='is_docker', default=False,
        help="Provision for Docker.")
    options = parser.parse_args()
    sys.exit(main(options))
|