#!/usr/bin/env python3
import argparse
import hashlib
import logging
import os
import platform
import subprocess
import sys
from typing import NoReturn
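
# PYTHONUNBUFFERED forces unbuffered stdout/stderr for this process and for any
# child Python processes; presumably so that provisioning output streams to the
# terminal in real time rather than sitting in buffers.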
os.environ["PYTHONUNBUFFERED"] = "y"
|
|
|
|
|
|
2017-01-14 11:19:26 +01:00
|
|
|
|
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
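
# Make the repository root importable so that the scripts.lib and tools.setup
# imports below resolve when this script is run directly.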
sys.path.append(ZULIP_PATH)

from scripts.lib.node_cache import setup_node_modules
from scripts.lib.setup_venv import get_venv_dependencies
from scripts.lib.zulip_tools import (
    ENDC,
    FAIL,
    WARNING,
    get_dev_uuid_var_path,
    os_families,
    parse_os_release,
    run_as_root,
)
from tools.setup import setup_venvs

VAR_DIR_PATH = os.path.join(ZULIP_PATH, "var")

CONTINUOUS_INTEGRATION = "GITHUB_ACTIONS" in os.environ

if not os.path.exists(os.path.join(ZULIP_PATH, ".git")):
    print(FAIL + "Error: No Zulip Git repository present!" + ENDC)
    print("To set up the Zulip development environment, you should clone the code")
    print("from GitHub, rather than using a Zulip production release tarball.")
    sys.exit(1)

# Check the RAM on the user's system, and throw an error if <1.5GB.
# This avoids users getting segfaults running `pip install` that are
# generally more annoying to debug.
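# The first line of /proc/meminfo is the MemTotal line; grab its kilobyte count
# and convert it to gigabytes.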
with open("/proc/meminfo") as meminfo:
|
|
|
|
|
ram_size = meminfo.readlines()[0].strip().split(" ")[-2]
|
|
|
|
|
ram_gb = float(ram_size) / 1024.0 / 1024.0
|
|
|
|
|
if ram_gb < 1.5:
|
2021-02-12 08:19:30 +01:00
|
|
|
|
print(
|
2024-03-21 03:43:05 +01:00
|
|
|
|
f"You have insufficient RAM ({round(ram_gb, 2)} GB) to run the Zulip development environment."
|
2021-02-12 08:19:30 +01:00
|
|
|
|
)
|
2016-12-01 00:23:05 +01:00
|
|
|
|
print("We recommend at least 2 GB of RAM, and require at least 1.5 GB.")
|
|
|
|
|
sys.exit(1)
|
|
|
|
|
|
2016-11-09 00:10:46 +01:00
|
|
|
|
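
# Verify that we can create symbolic links, since the development environment
# depends on them; this can fail on, e.g., filesystems or shared folders where
# symlink creation is restricted (hence the documentation link below).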
try:
    UUID_VAR_PATH = get_dev_uuid_var_path(create_if_missing=True)
    os.makedirs(UUID_VAR_PATH, exist_ok=True)
    if os.path.exists(os.path.join(VAR_DIR_PATH, "zulip-test-symlink")):
        os.remove(os.path.join(VAR_DIR_PATH, "zulip-test-symlink"))
    os.symlink(
        os.path.join(ZULIP_PATH, "README.md"),
        os.path.join(VAR_DIR_PATH, "zulip-test-symlink"),
    )
    os.remove(os.path.join(VAR_DIR_PATH, "zulip-test-symlink"))
except OSError:
    print(
        FAIL + "Error: Unable to create symlinks. "
        "Make sure you have permission to create symbolic links." + ENDC
    )
    print("See this page for more information:")
    print(
        " https://zulip.readthedocs.io/en/latest/development/setup-recommended.html#os-symlink-error"
    )
    sys.exit(1)

distro_info = parse_os_release()
vendor = distro_info["ID"]
os_version = distro_info["VERSION_ID"]
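# Pick the PostgreSQL major version to provision for each supported distro release.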
if vendor == "debian" and os_version == "12": # bookworm
|
2023-05-10 23:20:46 +02:00
|
|
|
|
POSTGRESQL_VERSION = "15"
|
2022-02-25 03:13:16 +01:00
|
|
|
|
elif vendor == "ubuntu" and os_version == "22.04": # jammy
|
|
|
|
|
POSTGRESQL_VERSION = "14"
|
2024-02-27 00:57:25 +01:00
|
|
|
|
elif vendor == "ubuntu" and os_version == "24.04": # noble
|
|
|
|
|
POSTGRESQL_VERSION = "16"
|
2023-08-22 20:26:29 +02:00
|
|
|
|
elif vendor == "fedora" and os_version == "38":
|
|
|
|
|
POSTGRESQL_VERSION = "15"
|
2019-08-30 00:14:43 +02:00
|
|
|
|
elif vendor == "rhel" and os_version.startswith("7."):
|
2020-10-26 22:54:50 +01:00
|
|
|
|
POSTGRESQL_VERSION = "10"
|
2019-08-30 00:14:43 +02:00
|
|
|
|
elif vendor == "centos" and os_version == "7":
|
2020-10-26 22:54:50 +01:00
|
|
|
|
POSTGRESQL_VERSION = "10"
|
2019-08-30 00:14:43 +02:00
|
|
|
|
else:
|
2020-05-02 08:44:14 +02:00
|
|
|
|
logging.critical("Unsupported platform: %s %s", vendor, os_version)
|
2016-04-06 19:27:42 +02:00
|
|
|
|
sys.exit(1)
|
|
|
|
|
|
2020-03-17 20:27:35 +01:00
|
|
|
|
VENV_DEPENDENCIES = get_venv_dependencies(vendor, os_version)
|
|
|
|
|
|
2018-12-08 22:53:46 +01:00
|
|
|
|
COMMON_DEPENDENCIES = [
|
2016-04-04 23:22:14 +02:00
|
|
|
|
"memcached",
|
|
|
|
|
"rabbitmq-server",
|
|
|
|
|
"supervisor",
|
|
|
|
|
"git",
|
2021-06-25 01:28:27 +02:00
|
|
|
|
"curl",
|
|
|
|
|
"ca-certificates", # Explicit dependency in case e.g. curl is already installed
|
2021-02-12 08:19:30 +01:00
|
|
|
|
"puppet", # Used by lint (`puppet parser validate`)
|
|
|
|
|
"gettext", # Used by makemessages i18n
|
|
|
|
|
"curl", # Used for testing our API documentation
|
|
|
|
|
"moreutils", # Used for sponge command
|
|
|
|
|
"unzip", # Needed for Slack import
|
|
|
|
|
"crudini", # Used for shell tooling w/ zulip.conf
|
2020-03-12 15:46:09 +01:00
|
|
|
|
# Puppeteer dependencies from here
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
|
"xdg-utils",
|
2020-03-12 15:46:09 +01:00
|
|
|
|
# Puppeteer dependencies end here.
|
2018-12-08 22:53:46 +01:00
|
|
|
|
]
|
|
|
|
|
|
2020-09-02 06:59:07 +02:00
|
|
|
|
UBUNTU_COMMON_APT_DEPENDENCIES = [
|
|
|
|
|
*COMMON_DEPENDENCIES,
|
2018-12-08 22:53:46 +01:00
|
|
|
|
"redis-server",
|
|
|
|
|
"hunspell-en-us",
|
|
|
|
|
"puppet-lint",
|
2019-08-09 01:06:42 +02:00
|
|
|
|
"default-jre-headless", # Required by vnu-jar
|
2021-03-22 05:46:27 +01:00
|
|
|
|
# Puppeteer dependencies from here
|
|
|
|
|
"fonts-freefont-ttf",
|
|
|
|
|
"libatk-bridge2.0-0",
|
|
|
|
|
"libgbm1",
|
|
|
|
|
"libgtk-3-0",
|
|
|
|
|
"libx11-xcb1",
|
|
|
|
|
"libxcb-dri3-0",
|
|
|
|
|
"libxss1",
|
|
|
|
|
"xvfb",
|
|
|
|
|
# Puppeteer dependencies end here.
|
2020-09-02 06:59:07 +02:00
|
|
|
|
]
|
2016-04-04 23:22:14 +02:00
|
|
|
|
|
2020-09-02 06:59:07 +02:00
|
|
|
|
COMMON_YUM_DEPENDENCIES = [
|
|
|
|
|
*COMMON_DEPENDENCIES,
|
2018-12-08 22:53:46 +01:00
|
|
|
|
"redis",
|
|
|
|
|
"hunspell-en-US",
|
|
|
|
|
"rubygem-puppet-lint",
|
|
|
|
|
"nmap-ncat",
|
2021-03-22 05:46:27 +01:00
|
|
|
|
"ccache", # Required to build pgroonga from source.
|
|
|
|
|
# Puppeteer dependencies from here
|
|
|
|
|
"at-spi2-atk",
|
|
|
|
|
"GConf2",
|
|
|
|
|
"gtk3",
|
|
|
|
|
"libX11-xcb",
|
|
|
|
|
"libxcb",
|
|
|
|
|
"libXScrnSaver",
|
|
|
|
|
"mesa-libgbm",
|
|
|
|
|
"xorg-x11-server-Xvfb",
|
|
|
|
|
# Puppeteer dependencies end here.
|
2020-09-02 06:59:07 +02:00
|
|
|
|
]
|
2018-12-08 22:53:46 +01:00
|
|
|
|
|
2023-08-22 20:26:29 +02:00
|
|
|
|
BUILD_GROONGA_FROM_SOURCE = False
|
2019-03-06 00:27:45 +01:00
|
|
|
|
BUILD_PGROONGA_FROM_SOURCE = False
|
2024-02-27 00:57:25 +01:00
|
|
|
|
if (vendor == "debian" and os_version in []) or (vendor == "ubuntu" and os_version in ["24.04"]):
|
2020-10-23 02:43:28 +02:00
|
|
|
|
# For platforms without a PGroonga release, we need to build it
|
2019-08-25 01:23:14 +02:00
|
|
|
|
# from source.
|
|
|
|
|
BUILD_PGROONGA_FROM_SOURCE = True
|
2020-09-02 06:59:07 +02:00
|
|
|
|
SYSTEM_DEPENDENCIES = [
|
|
|
|
|
*UBUNTU_COMMON_APT_DEPENDENCIES,
|
2020-10-26 22:54:50 +01:00
|
|
|
|
f"postgresql-{POSTGRESQL_VERSION}",
|
2020-10-23 02:43:28 +02:00
|
|
|
|
# Dependency for building PGroonga from source
|
2020-10-26 22:54:50 +01:00
|
|
|
|
f"postgresql-server-dev-{POSTGRESQL_VERSION}",
|
2020-09-02 06:59:07 +02:00
|
|
|
|
"libgroonga-dev",
|
|
|
|
|
"libmsgpack-dev",
|
2022-04-30 00:13:04 +02:00
|
|
|
|
"clang",
|
2020-09-02 06:59:07 +02:00
|
|
|
|
*VENV_DEPENDENCIES,
|
|
|
|
|
]
|
2019-08-30 00:14:43 +02:00
|
|
|
|
elif "debian" in os_families():
|
2023-01-02 20:50:23 +01:00
|
|
|
|
DEBIAN_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES
|
2021-05-26 23:28:58 +02:00
|
|
|
|
|
|
|
|
|
# If we are on an aarch64 processor, ninja will be built from source,
|
|
|
|
|
# so cmake is required
|
|
|
|
|
if platform.machine() == "aarch64":
|
2023-01-02 20:50:23 +01:00
|
|
|
|
DEBIAN_DEPENDENCIES.append("cmake")
|
2021-05-26 23:28:58 +02:00
|
|
|
|
|
2020-09-02 06:59:07 +02:00
|
|
|
|
SYSTEM_DEPENDENCIES = [
|
2023-01-02 20:50:23 +01:00
|
|
|
|
*DEBIAN_DEPENDENCIES,
|
2020-10-26 22:54:50 +01:00
|
|
|
|
f"postgresql-{POSTGRESQL_VERSION}",
|
2022-01-14 01:44:53 +01:00
|
|
|
|
f"postgresql-{POSTGRESQL_VERSION}-pgroonga",
|
2020-09-02 06:59:07 +02:00
|
|
|
|
*VENV_DEPENDENCIES,
|
|
|
|
|
]
|
2019-08-30 00:14:43 +02:00
|
|
|
|
elif "rhel" in os_families():
|
2020-09-02 06:59:07 +02:00
|
|
|
|
SYSTEM_DEPENDENCIES = [
|
|
|
|
|
*COMMON_YUM_DEPENDENCIES,
|
2020-10-26 22:54:50 +01:00
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}-server",
|
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}",
|
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}-devel",
|
2020-11-30 01:27:45 +01:00
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}-pgdg-pgroonga",
|
2020-09-02 06:59:07 +02:00
|
|
|
|
*VENV_DEPENDENCIES,
|
|
|
|
|
]
|
2019-08-30 00:14:43 +02:00
|
|
|
|
elif "fedora" in os_families():
|
2020-09-02 06:59:07 +02:00
|
|
|
|
SYSTEM_DEPENDENCIES = [
|
|
|
|
|
*COMMON_YUM_DEPENDENCIES,
|
2020-10-26 22:54:50 +01:00
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}-server",
|
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}",
|
|
|
|
|
f"postgresql{POSTGRESQL_VERSION}-devel",
|
2020-10-23 02:43:28 +02:00
|
|
|
|
# Needed to build PGroonga from source
|
2020-09-02 06:59:07 +02:00
|
|
|
|
"msgpack-devel",
|
|
|
|
|
*VENV_DEPENDENCIES,
|
|
|
|
|
]
|
2023-08-22 20:26:29 +02:00
|
|
|
|
BUILD_GROONGA_FROM_SOURCE = True
|
2019-03-06 00:27:45 +01:00
|
|
|
|
BUILD_PGROONGA_FROM_SOURCE = True
|
2016-04-04 23:22:14 +02:00
|
|
|
|
|
2019-08-30 00:14:43 +02:00
|
|
|
|
if "fedora" in os_families():
|
2020-10-26 22:54:50 +01:00
|
|
|
|
TSEARCH_STOPWORDS_PATH = f"/usr/pgsql-{POSTGRESQL_VERSION}/share/tsearch_data/"
|
2018-12-15 04:24:14 +01:00
|
|
|
|
else:
|
2020-10-26 22:54:50 +01:00
|
|
|
|
TSEARCH_STOPWORDS_PATH = f"/usr/share/postgresql/{POSTGRESQL_VERSION}/tsearch_data/"
|
2015-08-17 06:37:14 +02:00
|
|
|
|
REPO_STOPWORDS_PATH = os.path.join(
|
|
|
|
|
ZULIP_PATH,
|
|
|
|
|
"puppet",
|
|
|
|
|
"zulip",
|
|
|
|
|
"files",
|
|
|
|
|
"postgresql",
|
|
|
|
|
"zulip_english.stop",
|
|
|
|
|
)


def install_system_deps() -> None:
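    """Install the platform's system packages, then build Groonga/PGroonga from
    source on platforms that need it."""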
    # By doing list -> set -> list conversion, we remove duplicates.
    deps_to_install = sorted(set(SYSTEM_DEPENDENCIES))

    if "fedora" in os_families():
        install_yum_deps(deps_to_install)
    elif "debian" in os_families():
        install_apt_deps(deps_to_install)
    else:
        raise AssertionError("Invalid vendor")

    # For some platforms, there aren't published PGroonga
    # packages available, so we build them from source.
    if BUILD_GROONGA_FROM_SOURCE:
        run_as_root(["./scripts/lib/build-groonga"])
    if BUILD_PGROONGA_FROM_SOURCE:
        run_as_root(["./scripts/lib/build-pgroonga"])


def install_apt_deps(deps_to_install: list[str]) -> None:
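    """Install the given packages with apt, after setting up the needed apt repositories."""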
    # setup-apt-repo does an `apt-get update` if the sources.list files changed.
    run_as_root(["./scripts/lib/setup-apt-repo"])

    # But we still need to do our own to make sure we have up-to-date
    # data before installing new packages, as the system might not have
    # done an apt update in weeks otherwise, which could result in 404s
    # trying to download old versions that were already removed from mirrors.
    run_as_root(["apt-get", "update"])
    run_as_root(
        [
            "env",
            "DEBIAN_FRONTEND=noninteractive",
            "apt-get",
            "-y",
            "install",
            "--allow-downgrades",
            "--no-install-recommends",
            *deps_to_install,
        ]
    )


def install_yum_deps(deps_to_install: list[str]) -> None:
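    """Install the given packages with yum, and do the first-time PostgreSQL setup
    needed on RHEL-family platforms."""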
    print(WARNING + "RedHat support is still experimental." + ENDC)
    run_as_root(["./scripts/lib/setup-yum-repo"])

    # Hack specific to unregistered RHEL systems.  The moreutils
    # package requires a perl module package, which isn't available in
    # the unregistered RHEL repositories.
    #
    # Error: Package: moreutils-0.49-2.el7.x86_64 (epel)
    #        Requires: perl(IPC::Run)
    yum_extra_flags: list[str] = []
    if vendor == "rhel":
        proc = subprocess.run(
            ["sudo", "subscription-manager", "status"],
            stdout=subprocess.PIPE,
            text=True,
            check=False,
        )
        if proc.returncode == 1:
            # TODO: this might be overkill, since `subscription-manager` is already
            # called in setup-yum-repo
            if "Status" in proc.stdout:
                # The output is well-formed
                yum_extra_flags = ["--skip-broken"]
            else:
                print("Unrecognized output. `subscription-manager` might not be available")

    run_as_root(["yum", "install", "-y", *yum_extra_flags, *deps_to_install])
    if "rhel" in os_families():
        # This is how a pip3 is installed to /usr/bin in CentOS/RHEL
        # for python35 and later.
        run_as_root(["python36", "-m", "ensurepip"])
        # `python36` is not aliased to `python3` by default
        run_as_root(["ln", "-nsf", "/usr/bin/python36", "/usr/bin/python3"])
    postgresql_dir = f"pgsql-{POSTGRESQL_VERSION}"
    for cmd in ["pg_config", "pg_isready", "psql"]:
        # Our tooling expects these PostgreSQL scripts to be at
        # well-known paths.  There's an argument for eventually
        # making our tooling auto-detect, but this is simpler.
        run_as_root(["ln", "-nsf", f"/usr/{postgresql_dir}/bin/{cmd}", f"/usr/bin/{cmd}"])

    # From here, we do the first-time setup/initialization for the PostgreSQL database.
    pg_datadir = f"/var/lib/pgsql/{POSTGRESQL_VERSION}/data"
    pg_hba_conf = os.path.join(pg_datadir, "pg_hba.conf")

    # We can't just check if the file exists with os.path, since the
    # current user likely doesn't have permission to read the
    # pg_datadir directory.
    if subprocess.call(["sudo", "test", "-e", pg_hba_conf]) == 0:
        # Skip setup if it has been applied previously
        return

    run_as_root(
        [f"/usr/{postgresql_dir}/bin/postgresql-{POSTGRESQL_VERSION}-setup", "initdb"],
        sudo_args=["-H"],
    )
    # Use vendored pg_hba.conf, which enables password authentication.
    run_as_root(["cp", "-a", "puppet/zulip/files/postgresql/centos_pg_hba.conf", pg_hba_conf])
    # Later steps will ensure PostgreSQL is started

    # Link in tsearch data files
    if vendor == "fedora":
        # Since Fedora 36, dictionary files have been moved away from /usr/share/myspell
        tsearch_source_prefix = "/usr/share/hunspell"
    else:
        tsearch_source_prefix = "/usr/share/myspell"
    run_as_root(
        [
            "ln",
            "-nsf",
            os.path.join(tsearch_source_prefix, "en_US.dic"),
            f"/usr/pgsql-{POSTGRESQL_VERSION}/share/tsearch_data/en_us.dict",
        ]
    )
    run_as_root(
        [
            "ln",
            "-nsf",
            os.path.join(tsearch_source_prefix, "en_US.aff"),
            f"/usr/pgsql-{POSTGRESQL_VERSION}/share/tsearch_data/en_us.affix",
        ]
    )


def main(options: argparse.Namespace) -> NoReturn:
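    """Provision the development environment, then re-exec provision_inner inside
    the virtualenv to finish the job."""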
    # pnpm and management commands expect to be run from the root of the
    # project.
    os.chdir(ZULIP_PATH)

    # hash the apt dependencies
    sha_sum = hashlib.sha1()

    for apt_dependency in SYSTEM_DEPENDENCIES:
        sha_sum.update(apt_dependency.encode())
    if "debian" in os_families():
        with open("scripts/lib/setup-apt-repo", "rb") as fb:
            sha_sum.update(fb.read())
    else:
        # hash the content of setup-yum-repo*
        with open("scripts/lib/setup-yum-repo", "rb") as fb:
            sha_sum.update(fb.read())

    # hash the content of build-groonga if Groonga is built from source
    if BUILD_GROONGA_FROM_SOURCE:
        with open("scripts/lib/build-groonga", "rb") as fb:
            sha_sum.update(fb.read())

    # hash the content of build-pgroonga if PGroonga is built from source
    if BUILD_PGROONGA_FROM_SOURCE:
        with open("scripts/lib/build-pgroonga", "rb") as fb:
            sha_sum.update(fb.read())

    new_apt_dependencies_hash = sha_sum.hexdigest()
    last_apt_dependencies_hash = None
    apt_hash_file_path = os.path.join(UUID_VAR_PATH, "apt_dependencies_hash")
    with open(apt_hash_file_path, "a+") as hash_file:
        hash_file.seek(0)
        last_apt_dependencies_hash = hash_file.read()
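
    # Reinstall system packages only if the dependency list or the repo-setup
    # scripts changed since the last provision; otherwise skip the slow install step.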
    if new_apt_dependencies_hash != last_apt_dependencies_hash:
        try:
            install_system_deps()
        except subprocess.CalledProcessError:
            try:
                # Might be a failure due to network connection issues. Retrying...
                print(WARNING + "Installing system dependencies failed; retrying..." + ENDC)
                install_system_deps()
            except BaseException as e:
                # Suppress exception chaining
                raise e from None
        with open(apt_hash_file_path, "w") as hash_file:
            hash_file.write(new_apt_dependencies_hash)
    else:
        print("No changes to apt dependencies, so skipping apt operations.")

    # Here we install node.
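    # Forward any proxy settings from the caller's environment through sudo, so the
    # installer scripts below can download from behind a proxy.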
    proxy_env = [
        "env",
        "http_proxy=" + os.environ.get("http_proxy", ""),
        "https_proxy=" + os.environ.get("https_proxy", ""),
        "no_proxy=" + os.environ.get("no_proxy", ""),
    ]
    run_as_root([*proxy_env, "scripts/lib/install-node"], sudo_args=["-H"])

    try:
        setup_node_modules()
    except subprocess.CalledProcessError:
        print(WARNING + "`pnpm install` failed; retrying..." + ENDC)
        try:
            setup_node_modules()
        except subprocess.CalledProcessError:
            print(
                FAIL
                + "`pnpm install` is failing; check your network connection (and proxy settings)."
                + ENDC
            )
            sys.exit(1)

    # Install shellcheck.
    run_as_root([*proxy_env, "tools/setup/install-shellcheck"])
    # Install shfmt.
    run_as_root([*proxy_env, "tools/setup/install-shfmt"])

    # Install transifex-cli.
    run_as_root([*proxy_env, "tools/setup/install-transifex-cli"])

    setup_venvs.main()

    run_as_root(["cp", REPO_STOPWORDS_PATH, TSEARCH_STOPWORDS_PATH])
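
    # In CI, the backing services are presumably installed but not yet running, so
    # start them explicitly before provisioning continues.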
    if CONTINUOUS_INTEGRATION and not options.is_build_release_tarball_only:
        run_as_root(["service", "redis-server", "start"])
        run_as_root(["service", "memcached", "start"])
        run_as_root(["service", "rabbitmq-server", "start"])
        run_as_root(["service", "postgresql", "start"])
    elif "fedora" in os_families():
        # These platforms don't enable and start services on
        # installing their package, so we do that here.
        for service in [
            f"postgresql-{POSTGRESQL_VERSION}",
            "rabbitmq-server",
            "memcached",
            "redis",
        ]:
            run_as_root(["systemctl", "enable", service], sudo_args=["-H"])
            run_as_root(["systemctl", "start", service], sudo_args=["-H"])

    # If we imported modules after activating the virtualenv in this
    # Python process, they could end up mismatching with modules we’ve
    # already imported from outside the virtualenv.  That seems like a
    # bad idea, and empirically it can cause Python to segfault on
    # certain cffi-related imports.  Instead, start a new Python
    # process inside the virtualenv.
    activate_this = "/srv/zulip-py3-venv/bin/activate_this.py"
    provision_inner = os.path.join(ZULIP_PATH, "tools", "lib", "provision_inner.py")
    with open(activate_this) as f:
        exec(f.read(), dict(__file__=activate_this))  # noqa: S102
    os.execvp(
        provision_inner,
        [
            provision_inner,
            *(["--force"] if options.is_force else []),
            *(["--build-release-tarball-only"] if options.is_build_release_tarball_only else []),
            *(["--skip-dev-db-build"] if options.skip_dev_db_build else []),
        ],
    )


if __name__ == "__main__":
    description = "Provision script to install Zulip"
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument(
        "--force",
        action="store_true",
        dest="is_force",
        help="Ignore all provisioning optimizations.",
    )

    parser.add_argument(
        "--build-release-tarball-only",
        action="store_true",
        dest="is_build_release_tarball_only",
        help="Provision needed to build release tarball.",
    )

    parser.add_argument(
        "--skip-dev-db-build", action="store_true", help="Don't run migrations on dev database."
    )

    options = parser.parse_args()
    main(options)
|