#!/usr/bin/env python3
import os
import sys
import logging
import argparse
import platform
import subprocess
import glob
import hashlib

os.environ["PYTHONUNBUFFERED"] = "y"

ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import run, run_as_root, OKBLUE, ENDC, WARNING, \
    get_dev_uuid_var_path, FAIL, parse_lsb_release, file_or_package_hash_updated, \
    overwrite_symlink
from scripts.lib.setup_venv import (
    VENV_DEPENDENCIES, REDHAT_VENV_DEPENDENCIES,
    THUMBOR_VENV_DEPENDENCIES, YUM_THUMBOR_VENV_DEPENDENCIES,
    FEDORA_VENV_DEPENDENCIES
)
from scripts.lib.node_cache import setup_node_modules, NODE_MODULES_CACHE_PATH

from version import PROVISION_VERSION
if False:
    # See https://zulip.readthedocs.io/en/latest/testing/mypy.html#mypy-in-production-scripts
    from typing import Any, List

from tools.setup.generate_zulip_bots_static_files import generate_zulip_bots_static_files

SUPPORTED_PLATFORMS = {
    "Ubuntu": [
        "trusty",
        "xenial",
        "bionic",
        "cosmic",
    ],
    "Debian": [
        "stretch",
        "buster",
    ],
    "CentOS": [
        "centos7",
    ],
    "Fedora": [
        "fedora29",
    ],
    "RedHat": [
        "rhel7",
    ]
}

VENV_PATH = "/srv/zulip-py3-venv"
VAR_DIR_PATH = os.path.join(ZULIP_PATH, 'var')
LOG_DIR_PATH = os.path.join(VAR_DIR_PATH, 'log')
UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'uploads')
TEST_UPLOAD_DIR_PATH = os.path.join(VAR_DIR_PATH, 'test_uploads')
COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'coverage')
NODE_TEST_COVERAGE_DIR_PATH = os.path.join(VAR_DIR_PATH, 'node-coverage')

is_travis = 'TRAVIS' in os.environ
is_circleci = 'CIRCLECI' in os.environ

# TODO: De-duplicate this with emoji_dump.py
EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
if is_travis:
    # In Travis CI, we don't have root access
    EMOJI_CACHE_PATH = "/home/travis/zulip-emoji-cache"

if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
    print(FAIL + "Error: No Zulip git repository present!" + ENDC)
    print("To set up the Zulip development environment, you should clone the code")
    print("from GitHub, rather than using a Zulip production release tarball.")
    sys.exit(1)

# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
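# The first line of /proc/meminfo looks like "MemTotal:  16326716 kB",
# so the second-to-last field is the total memory in kilobytes.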
with open("/proc/meminfo") as meminfo:
    ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
ram_gb = float(ram_size) / 1024.0 / 1024.0
if ram_gb < 1.5:
    print("You have insufficient RAM (%s GB) to run the Zulip development environment." % (
        round(ram_gb, 2),))
    print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
    sys.exit(1)

try:
    UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
    os.makedirs(UUID_VAR_PATH, exist_ok=True)
    if os.path.exists(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')):
        os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
    os.symlink(
        os.path.join(ZULIP_PATH, 'README.md'),
        os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
    )
    os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
except OSError:
    print(FAIL + "Error: Unable to create symlinks. "
          "Make sure you have permission to create symbolic links." + ENDC)
    print("See this page for more information:")
    print("  https://zulip.readthedocs.io/en/latest/development/setup-vagrant.html#os-symlink-error")
    sys.exit(1)

if platform.architecture()[0] == '64bit':
    arch = 'amd64'
elif platform.architecture()[0] == '32bit':
    arch = "i386"
else:
    logging.critical("Only x86 is supported;"
                     " ask on chat.zulip.org if you want another architecture.")
    # Note: It's probably actually not hard to add additional
    # architectures; the main problem is that we may not have
    # tsearch_extras binaries compiled (and some testing is required).
    sys.exit(1)

# Ideally we wouldn't need to install a dependency here, before we
# know the codename.
is_rhel_based = os.path.exists("/etc/redhat-release")
if (not is_rhel_based) and (not os.path.exists("/usr/bin/lsb_release")):
    run_as_root(["apt-get", "install", "-y", "lsb-release"])

distro_info = parse_lsb_release()
vendor = distro_info['DISTRIB_ID']
codename = distro_info['DISTRIB_CODENAME']
family = distro_info['DISTRIB_FAMILY']
if not (vendor in SUPPORTED_PLATFORMS and codename in SUPPORTED_PLATFORMS[vendor]):
    logging.critical("Unsupported platform: {} {}".format(vendor, codename))
    sys.exit(1)

POSTGRES_VERSION_MAP = {
    "stretch": "9.6",
    "buster": "11",
    "trusty": "9.3",
    "xenial": "9.5",
    "bionic": "10",
    "cosmic": "10",
    "centos7": "10",
    "fedora29": "10",
    "rhel7": "10",
}
POSTGRES_VERSION = POSTGRES_VERSION_MAP[codename]

COMMON_DEPENDENCIES = [
    "closure-compiler",
    "memcached",
    "rabbitmq-server",
    "supervisor",
    "git",
    "wget",
    "ca-certificates",      # Explicit dependency in case e.g. wget is already installed
    "puppet",               # Used by lint (`puppet parser validate`)
    "gettext",              # Used by makemessages i18n
    "curl",                 # Used for fetching PhantomJS as wget occasionally fails on redirects
    "moreutils",            # Used for sponge command
]

UBUNTU_COMMON_APT_DEPENDENCIES = COMMON_DEPENDENCIES + [
    "redis-server",
    "hunspell-en-us",
    "yui-compressor",
    "puppet-lint",
    "netcat",               # Used for flushing memcached
    "libfontconfig1",       # Required by phantomjs
] + VENV_DEPENDENCIES + THUMBOR_VENV_DEPENDENCIES

COMMON_YUM_DEPENDENCIES = COMMON_DEPENDENCIES + [
    "redis",
    "hunspell-en-US",
    "yuicompressor",
    "rubygem-puppet-lint",
    "nmap-ncat",
    "fontconfig",  # phantomjs dependencies from here until libstdc++
    "freetype",
    "freetype-devel",
    "fontconfig-devel",
    "libstdc++"
] + YUM_THUMBOR_VENV_DEPENDENCIES

BUILD_TSEARCH_FROM_SOURCE = False
BUILD_PGROONGA_FROM_SOURCE = False
if vendor in ["Ubuntu", "Debian"]:
    if codename == "cosmic":
        # For platforms without a tsearch-extras package distributed
        # from our PPA, we need to build from source.
        BUILD_TSEARCH_FROM_SOURCE = True
        SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
            pkg.format(POSTGRES_VERSION) for pkg in [
                "postgresql-{0}",
                "postgresql-{0}-pgroonga",
                # Dependency for building tsearch_extras from source
                "postgresql-server-dev-{0}",
            ]
        ]
    elif codename == "buster":
        # For platforms without a tsearch-extras package distributed
        # from our PPA or a pgroonga release, we need to build both
        # from source.
        BUILD_PGROONGA_FROM_SOURCE = True
        BUILD_TSEARCH_FROM_SOURCE = True
        SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
            pkg.format(POSTGRES_VERSION) for pkg in [
                "postgresql-{0}",
                # Dependency for building tsearch_extras from source
                "postgresql-server-dev-{0}",
                # Dependency for building pgroonga from source
                "libgroonga-dev",
                "libmsgpack-dev",
            ]
        ]
    else:
        SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
            pkg.format(POSTGRES_VERSION) for pkg in [
                "postgresql-{0}",
                "postgresql-{0}-pgroonga",
                "postgresql-{0}-tsearch-extras",
            ]
        ]
elif vendor in ["CentOS", "RedHat"]:
    SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
        pkg.format(POSTGRES_VERSION) for pkg in [
            "postgresql{0}-server",
            "postgresql{0}",
            "postgresql{0}-devel",
            "postgresql{0}-pgroonga",
        ]
    ] + REDHAT_VENV_DEPENDENCIES
    BUILD_TSEARCH_FROM_SOURCE = True
elif vendor == "Fedora":
    SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
        pkg.format(POSTGRES_VERSION) for pkg in [
            "postgresql{0}-server",
            "postgresql{0}",
            "postgresql{0}-devel",
            # Needed to build pgroonga from source
            "groonga-devel",
            "msgpack-devel",
        ]
    ] + FEDORA_VENV_DEPENDENCIES
    BUILD_TSEARCH_FROM_SOURCE = True
    BUILD_PGROONGA_FROM_SOURCE = True

if family == 'redhat':
    TSEARCH_STOPWORDS_PATH = "/usr/pgsql-%s/share/tsearch_data/" % (POSTGRES_VERSION,)
else:
    TSEARCH_STOPWORDS_PATH = "/usr/share/postgresql/%s/tsearch_data/" % (POSTGRES_VERSION,)
REPO_STOPWORDS_PATH = os.path.join(
    ZULIP_PATH,
    "puppet",
    "zulip",
    "files",
    "postgresql",
    "zulip_english.stop",
)

user_id = os.getuid()

def setup_shell_profile(shell_profile):
    # type: (str) -> None
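    """Ensure the given shell profile activates the Zulip virtualenv
    and changes into /srv/zulip, adding each command only if it isn't
    already present."""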
    shell_profile_path = os.path.expanduser(shell_profile)

    def write_command(command):
        # type: (str) -> None
        if os.path.exists(shell_profile_path):
            with open(shell_profile_path, 'r') as shell_profile_file:
                lines = [line.strip() for line in shell_profile_file.readlines()]
            if command not in lines:
                with open(shell_profile_path, 'a+') as shell_profile_file:
                    shell_profile_file.writelines(command + '\n')
        else:
            with open(shell_profile_path, 'w') as shell_profile_file:
                shell_profile_file.writelines(command + '\n')

    source_activate_command = "source " + os.path.join(VENV_PATH, "bin", "activate")
    write_command(source_activate_command)
    write_command('cd /srv/zulip')

def install_system_deps(retry=False):
    # type: (bool) -> None
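    """Install SYSTEM_DEPENDENCIES via the platform's package manager,
    then build any postgres extensions that lack prebuilt packages for
    this platform."""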

    # By doing list -> set -> list conversion, we remove duplicates.
    deps_to_install = list(set(SYSTEM_DEPENDENCIES))

    if family == 'redhat':
        install_yum_deps(deps_to_install, retry=retry)
    elif vendor in ["Debian", "Ubuntu"]:
        install_apt_deps(deps_to_install, retry=retry)
    else:
        raise AssertionError("Invalid vendor")

    # For some platforms, there aren't published pgroonga or
    # tsearch-extras packages available, so we build them from source.
    if BUILD_PGROONGA_FROM_SOURCE:
        run_as_root(["./scripts/lib/build-pgroonga"])
    if BUILD_TSEARCH_FROM_SOURCE:
        run_as_root(["./scripts/lib/build-tsearch-extras"])

def install_apt_deps(deps_to_install, retry=False):
    # type: (List[str], bool) -> None
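    """Set up the Zulip apt repository and noninteractively install the
    given packages; on a retry, force an `apt-get update` first."""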
    if retry:
        print(WARNING + "`apt-get -y install` failed while installing dependencies; retrying..." + ENDC)
        # Since a common failure mode is for the caching in
        # `setup-apt-repo` to optimize the fast code path to skip
        # running `apt-get update` when the target apt repository
        # is out of date, we run it explicitly here so that we
        # recover automatically.
        run_as_root(['apt-get', 'update'])

    # setup-apt-repo does an `apt-get update`
    run_as_root(["./scripts/lib/setup-apt-repo"])
    run_as_root(
        [
            "env", "DEBIAN_FRONTEND=noninteractive",
            "apt-get", "-y", "install", "--no-install-recommends",
        ]
        + deps_to_install
    )

def install_yum_deps(deps_to_install, retry=False):
    # type: (List[str], bool) -> None
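    """Install dependencies via yum, then paper over the differences
    from Debian-based platforms: python3/pip3 names, postgres binary
    paths, the initial initdb, and tsearch data files."""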
    print(WARNING + "RedHat support is still experimental." + ENDC)
    run_as_root(["./scripts/lib/setup-yum-repo"])

    # Hack specific to unregistered RHEL system. The moreutils
    # package requires a perl module package, which isn't available in
    # the unregistered RHEL repositories.
    #
    # Error: Package: moreutils-0.49-2.el7.x86_64 (epel)
    #        Requires: perl(IPC::Run)
    yum_extra_flags = []  # type: List[str]
    if vendor == 'RedHat':
        exitcode, subs_status = subprocess.getstatusoutput("sudo subscription-manager status")
        if exitcode == 1:
            # TODO: this might be overkill, since `subscription-manager`
            # is already called in setup-yum-repo
            if 'Status' in subs_status:
                # The output is well-formed
                yum_extra_flags = ["--skip-broken"]
            else:
                print("Unrecognized output. `subscription-manager` might not be available")

    run_as_root(["yum", "install", "-y"] + yum_extra_flags + deps_to_install)
    if vendor in ["CentOS", "RedHat"]:
        # This is how pip3 is installed to /usr/bin in CentOS/RHEL
        # for python35 and later.
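        # (`ensurepip` bootstraps pip from the wheel bundled with the
        # interpreter, so no separate pip RPM is needed.)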
        run_as_root(["python36", "-m", "ensurepip"])
        # `python36` is not aliased to `python3` by default
        run_as_root(["ln", "-nsf", "/usr/bin/python36", "/usr/bin/python3"])
    postgres_dir = 'pgsql-%s' % (POSTGRES_VERSION,)
    for cmd in ['pg_config', 'pg_isready', 'psql']:
        # Our tooling expects these postgres scripts to be at
        # well-known paths. There's an argument for eventually
        # making our tooling auto-detect, but this is simpler.
        run_as_root(["ln", "-nsf", "/usr/%s/bin/%s" % (postgres_dir, cmd),
                     "/usr/bin/%s" % (cmd,)])

    # From here, we do the first-time setup/initialization for the postgres database.
    pg_datadir = "/var/lib/pgsql/%s/data" % (POSTGRES_VERSION,)
    pg_hba_conf = os.path.join(pg_datadir, "pg_hba.conf")

    # We can't just check if the file exists with os.path, since the
    # current user likely doesn't have permission to read the
    # pg_datadir directory.
    if subprocess.call(["sudo", "test", "-e", pg_hba_conf]) == 0:
        # Skip setup if it has been applied previously
        return

    run_as_root(["/usr/%s/bin/postgresql-%s-setup" % (postgres_dir, POSTGRES_VERSION), "initdb"],
                sudo_args=['-H'])
    # Use vendored pg_hba.conf, which enables password authentication.
    run_as_root(["cp", "-a", "puppet/zulip/files/postgresql/centos_pg_hba.conf", pg_hba_conf])
    # Later steps will ensure postgres is started

    # Link in tsearch data files
    overwrite_symlink("/usr/share/myspell/en_US.dic", "/usr/pgsql-%s/share/tsearch_data/en_us.dict"
                      % (POSTGRES_VERSION,))
    overwrite_symlink("/usr/share/myspell/en_US.aff", "/usr/pgsql-%s/share/tsearch_data/en_us.affix"
                      % (POSTGRES_VERSION,))

def main(options):
    # type: (Any) -> int
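    """Run the provisioning sequence: system packages, node and yarn,
    virtualenvs, generated static assets, services, and (outside
    production-tarball builds) the dev and test databases."""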

    # yarn and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode('utf8'))
    if vendor in ["Ubuntu", "Debian"]:
        sha_sum.update(open('scripts/lib/setup-apt-repo', 'rb').read())
    else:
        # hash the content of setup-yum-repo and build-*
        sha_sum.update(open('scripts/lib/setup-yum-repo', 'rb').read())
        build_paths = glob.glob("scripts/lib/build-*")
        for bp in build_paths:
            sha_sum.update(open(bp, 'rb').read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
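    # Opening with 'a+' creates the hash file on the first provision;
    # seek(0) then lets us read whatever a previous run recorded.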
    with open(apt_hash_file_path, 'a+') as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()

    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            # Might be a failure due to network connection issues. Retrying...
            install_system_deps(retry=True)
        with open(apt_hash_file_path, 'w') as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
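    # `sudo` normally scrubs the environment, so any proxy settings are
    # forwarded explicitly for install-node's downloads.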
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args=['-H'])

    # This is a wrapper around `yarn`, which we run last since
    # it can often fail due to network issues beyond our control.
    try:
        # Hack: We remove `node_modules` as root to work around an
        # issue with the symlinks being improperly owned by root.
        if os.path.islink("node_modules"):
            run_as_root(["rm", "-f", "node_modules"])
        run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
        run_as_root(["chown", "%s:%s" % (user_id, user_id), NODE_MODULES_CACHE_PATH])
        setup_node_modules(prefer_offline=True)
    except subprocess.CalledProcessError:
        print(WARNING + "`yarn install` failed; retrying..." + ENDC)
        setup_node_modules()

    # Install shellcheck.
    run_as_root(["scripts/lib/install-shellcheck"])

    from tools.setup import setup_venvs
    setup_venvs.main()

    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
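    # activate_this.py has always documented that it should be exec()ed
    # with locals = globals, and in virtualenv 16.0.0 it raises a
    # NameError otherwise: top-level assignments go into locals, but a
    # list comprehension's scope reads names out of globals.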
    exec(open(activate_this).read(), dict(__file__=activate_this))

    setup_shell_profile('~/.bash_profile')
    setup_shell_profile('~/.zprofile')

    # This needs to happen before anything that imports zproject.settings.
    run(["scripts/setup/generate_secrets.py", "--development"])

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])

    # create log directory `zulip/var/log`
    os.makedirs(LOG_DIR_PATH, exist_ok=True)
    # create upload directory `var/uploads`
    os.makedirs(UPLOAD_DIR_PATH, exist_ok=True)
    # create test upload directory `var/test_uploads`
    os.makedirs(TEST_UPLOAD_DIR_PATH, exist_ok=True)
    # create coverage directory `var/coverage`
    os.makedirs(COVERAGE_DIR_PATH, exist_ok=True)
    # create node coverage directory `var/node-coverage`
    os.makedirs(NODE_TEST_COVERAGE_DIR_PATH, exist_ok=True)

    # The `build_emoji` script requires the `emoji-datasource` package,
    # which we install via npm; thus this step is after installing npm
    # packages.
    if not os.path.isdir(EMOJI_CACHE_PATH):
        run_as_root(["mkdir", EMOJI_CACHE_PATH])
    run_as_root(["chown", "%s:%s" % (user_id, user_id), EMOJI_CACHE_PATH])
    run(["tools/setup/emoji/build_emoji"])

    # copy over static files from the zulip_bots package
    generate_zulip_bots_static_files()
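
    # Each step below reruns only when the hash of its input files
    # (recorded under var/) has changed since the last provision;
    # --force ignores these optimizations.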
    webfont_paths = ["tools/setup/generate-custom-icon-webfont", "static/icons/fonts/template.hbs"]
    webfont_paths += glob.glob('static/assets/icons/*')
    if file_or_package_hash_updated(webfont_paths, "webfont_files_hash", options.is_force):
        run(["tools/setup/generate-custom-icon-webfont"])
    else:
        print("No need to run `tools/setup/generate-custom-icon-webfont`.")

    build_pygments_data_paths = ["tools/setup/build_pygments_data", "tools/setup/lang.json"]
    from pygments import __version__ as pygments_version
    if file_or_package_hash_updated(build_pygments_data_paths, "build_pygments_data_hash", options.is_force,
                                    [pygments_version]):
        run(["tools/setup/build_pygments_data"])
    else:
        print("No need to run `tools/setup/build_pygments_data`.")

    update_authors_json_paths = ["tools/update-authors-json", "zerver/tests/fixtures/authors.json"]
    if file_or_package_hash_updated(update_authors_json_paths, "update_authors_json_hash", options.is_force):
        run(["tools/update-authors-json", "--use-fixture"])
    else:
        print("No need to run `tools/update-authors-json`.")

    email_source_paths = ["tools/inline-email-css", "templates/zerver/emails/email.css"]
    email_source_paths += glob.glob('templates/zerver/emails/*.source.html')
    if file_or_package_hash_updated(email_source_paths, "last_email_source_files_hash", options.is_force):
        run(["tools/inline-email-css"])
    else:
        print("No need to run `tools/inline-email-css`.")

    if is_circleci or (is_travis and not options.is_production_travis):
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])
        run_as_root(["service", "postgresql", "restart"])
    elif family == 'redhat':
        for service in ["postgresql-%s" % (POSTGRES_VERSION,), "rabbitmq-server", "memcached", "redis"]:
            run_as_root(["systemctl", "enable", service], sudo_args=['-H'])
            run_as_root(["systemctl", "start", service], sudo_args=['-H'])
    elif options.is_docker:
        run_as_root(["service", "rabbitmq-server", "restart"])
        run_as_root(["pg_dropcluster", "--stop", POSTGRES_VERSION, "main"])
        run_as_root(["pg_createcluster", "-e", "utf8", "--start", POSTGRES_VERSION, "main"])
        run_as_root(["service", "redis-server", "restart"])
        run_as_root(["service", "memcached", "restart"])

    if not options.is_production_travis:
        # The following block is skipped for the production Travis
        # suite, because that suite doesn't make use of these elements
        # of the development environment (it just uses the development
        # environment to build a release tarball).

        # Need to set up Django before using template_database_status
        os.environ.setdefault("DJANGO_SETTINGS_MODULE", "zproject.settings")
        import django
        django.setup()

        from zerver.lib.test_fixtures import template_database_status, run_db_migrations

        try:
            from zerver.lib.queue import SimpleQueueClient
            SimpleQueueClient()
            rabbitmq_is_configured = True
        except Exception:
            rabbitmq_is_configured = False

        if options.is_force or not rabbitmq_is_configured:
            run(["scripts/setup/configure-rabbitmq"])
        else:
            print("RabbitMQ is already configured.")

        migration_status_path = os.path.join(UUID_VAR_PATH, "migration_status_dev")
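        # template_database_status() reports 'needs_rebuild',
        # 'run_migrations', or 'current'; each branch below does only
        # the work that state requires.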
        dev_template_db_status = template_database_status(
            migration_status=migration_status_path,
            settings="zproject.settings",
            database_name="zulip",
        )
        if options.is_force or dev_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-dev-db"])
            run(["tools/do-destroy-rebuild-database"])
        elif dev_template_db_status == 'run_migrations':
            run_db_migrations('dev')
        elif dev_template_db_status == 'current':
            print("No need to regenerate the dev DB.")

        test_template_db_status = template_database_status()
        if options.is_force or test_template_db_status == 'needs_rebuild':
            run(["tools/setup/postgres-init-test-db"])
            run(["tools/do-destroy-rebuild-test-database"])
        elif test_template_db_status == 'run_migrations':
            run_db_migrations('test')
        elif test_template_db_status == 'current':
            print("No need to regenerate the test DB.")

        # Consider updating generated translations data: both `.mo`
        # files and `language-options.json`.
        paths = ['zerver/management/commands/compilemessages.py']
        paths += glob.glob('static/locale/*/LC_MESSAGES/*.po')
        paths += glob.glob('static/locale/*/translations.json')

        if file_or_package_hash_updated(paths, "last_compilemessages_hash", options.is_force):
            run(["./manage.py", "compilemessages"])
        else:
            print("No need to run `manage.py compilemessages`.")

    run(["scripts/lib/clean-unused-caches"])

    # Keeping this cache file around can cause eslint to throw
    # random TypeErrors when new/updated dependencies are added
    if os.path.isfile('.eslintcache'):
        # Remove this block when
        # https://github.com/eslint/eslint/issues/11639 is fixed
        # upstream.
        os.remove('.eslintcache')

    version_file = os.path.join(UUID_VAR_PATH, 'provision_version')
    print('writing to %s\n' % (version_file,))
    open(version_file, 'w').write(PROVISION_VERSION + '\n')

    print()
    print(OKBLUE + "Zulip development environment setup succeeded!" + ENDC)
    return 0

if __name__ == "__main__":
    description = "Provision script to install Zulip"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('--force', action='store_true', dest='is_force',
                        default=False,
                        help="Ignore all provisioning optimizations.")

    parser.add_argument('--production-travis', action='store_true',
                        dest='is_production_travis',
                        default=False,
                        help="Provision for Travis with production settings.")

    parser.add_argument('--docker', action='store_true',
                        dest='is_docker',
                        default=False,
                        help="Provision for Docker.")

    options = parser.parse_args()
    sys.exit(main(options))