2017-09-25 23:52:59 +02:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
import difflib
|
|
|
|
import filecmp
|
2017-10-28 19:12:45 +02:00
|
|
|
import glob
|
|
|
|
import hashlib
|
2017-09-25 23:52:59 +02:00
|
|
|
import os
|
2018-07-18 23:50:16 +02:00
|
|
|
import shutil
|
2017-09-25 23:52:59 +02:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2019-01-15 02:59:43 +01:00
|
|
|
import tempfile
|
2017-09-25 23:52:59 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2017-09-25 23:52:59 +02:00
|
|
|
|
2017-10-28 19:12:45 +02:00
|
|
|
# Directory containing this script (the repository's tools/ directory).
TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
# Repository root (parent of tools/).
ZULIP_PATH = os.path.dirname(TOOLS_DIR)
# Directory holding the *.in requirement sources and *.txt lock files.
REQS_DIR = os.path.join(ZULIP_PATH, "requirements")
# Cache location for requirement-set hashes that were already verified,
# so repeated runs with unchanged requirements can skip the expensive check.
CACHE_DIR = os.path.join(ZULIP_PATH, "var", "tmp")
CACHE_FILE = os.path.join(CACHE_DIR, "requirements_hashes")
|
2017-09-25 23:52:59 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def print_diff(path_file1: str, path_file2: str) -> None:
    """Write a unified diff between the two files to stdout."""
    with open(path_file1) as file1, open(path_file2) as file2:
        old_lines = file1.readlines()
        new_lines = file2.readlines()
    sys.stdout.writelines(
        difflib.unified_diff(
            old_lines,
            new_lines,
            fromfile=path_file1,
            tofile=path_file2,
        )
    )
|
2017-09-25 23:52:59 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def test_locked_requirements(tmp_dir: str) -> bool:
    """Regenerate the lock files into tmp_dir and report whether every
    regenerated file matches its committed counterpart in requirements/.
    """
    # `pip-compile` tries to avoid unnecessarily updating recursive dependencies
    # if lock files are present already. If we don't copy these files to the tmp
    # dir then recursive dependencies will get updated to their latest version
    # without any change in the input requirements file and the test will not pass.
    for locked_file in glob.glob(os.path.join(REQS_DIR, "*.txt")):
        destination = os.path.join(tmp_dir, os.path.basename(locked_file))
        shutil.copyfile(locked_file, destination)

    subprocess.check_call(
        [os.path.join(TOOLS_DIR, "update-locked-requirements"), "--output-dir", tmp_dir]
    )

    # Every regenerated lock file must be byte-identical to the committed one.
    return all(
        filecmp.cmp(
            regenerated,
            os.path.join(REQS_DIR, os.path.basename(regenerated)),
            shallow=False,
        )
        for regenerated in glob.glob(os.path.join(tmp_dir, "*.txt"))
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-13 01:57:21 +02:00
|
|
|
def get_requirements_hash(tmp_dir: str, use_test_lock_files: bool = False) -> str:
    """Return a SHA-1 digest over the *.in requirement sources plus the
    *.txt lock files (from tmp_dir instead of requirements/ when
    use_test_lock_files is True).
    """
    lock_files_dir = tmp_dir if use_test_lock_files else REQS_DIR
    paths = sorted(glob.glob(os.path.join(REQS_DIR, "*.in"))) + sorted(
        glob.glob(os.path.join(lock_files_dir, "*.txt"))
    )

    digest = hashlib.sha1()
    for path in paths:
        # Hash raw bytes so the digest is independent of platform text decoding.
        with open(path, "rb") as requirements_file:
            digest.update(requirements_file.read())
    return digest.hexdigest()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-01-12 21:53:04 +01:00
|
|
|
def maybe_set_up_cache() -> None:
    """Create the cache directory and an empty hash-list file if missing."""
    os.makedirs(CACHE_DIR, exist_ok=True)
    try:
        # "x" mode creates the file only when it does not already exist,
        # so an existing hash list is never clobbered.
        with open(CACHE_FILE, "xb") as fp:
            fp.write(orjson.dumps([]))
    except FileExistsError:
        pass
|
2017-10-28 19:12:45 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def load_cache() -> list[str]:
    """Read and return the list of previously verified requirement hashes."""
    with open(CACHE_FILE, "rb") as fp:
        return orjson.loads(fp.read())
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def update_cache(hash_list: list[str]) -> None:
    """Persist the verified-hash list to the cache file.

    Only the most recent 100 entries are kept; aggressive caching is
    not a problem as it is cheap to do.
    """
    # Slicing with [-100:] returns the whole list when it has <= 100 entries,
    # matching the original length check exactly.
    with open(CACHE_FILE, "wb") as fp:
        fp.write(orjson.dumps(hash_list[-100:]))
|
2017-10-28 19:12:45 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
python: Convert function type annotations to Python 3 style.
Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:
- def __init__(self, token: Token, parent: Optional[Node]) -> None:
+ def __init__(self, token: Token, parent: "Optional[Node]") -> None:
-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":
-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
- method_kwarg_pairs: List[FuncKwargPair],
+ method_kwarg_pairs: "List[FuncKwargPair]",
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-19 03:48:37 +02:00
|
|
|
def main() -> None:
    """Verify that the committed lock files are consistent with the *.in
    requirement sources, using a hash cache to skip already-verified sets.

    Raises:
        Exception: if the regenerated lock files differ from the committed
            ones (the diffs are printed to stdout first).
    """
    maybe_set_up_cache()
    hash_list = load_cache()

    # Use a context manager so the temporary directory is removed
    # deterministically — including when we raise below — instead of relying
    # on the TemporaryDirectory finalizer running at interpreter shutdown.
    with tempfile.TemporaryDirectory() as tmp_dir:
        curr_hash = get_requirements_hash(tmp_dir)

        if curr_hash in hash_list:
            # We have already checked this set of requirements and they
            # were consistent so no need to check again.
            return

        requirements_are_consistent = test_locked_requirements(tmp_dir)

        # Cache the hash so that we need not to run the `update_locked_requirements`
        # tool again for checking this set of requirements.
        valid_hash = get_requirements_hash(tmp_dir, use_test_lock_files=True)
        update_cache([*(h for h in hash_list if h != valid_hash), valid_hash])

        if not requirements_are_consistent:
            for test_locked_file in glob.glob(os.path.join(tmp_dir, "*.txt")):
                fn = os.path.basename(test_locked_file)
                locked_file = os.path.join(REQS_DIR, fn)
                print_diff(locked_file, test_locked_file)
            # Flush the output to ensure we print the error at the end.
            sys.stdout.flush()
            raise Exception(
                "It looks like you have updated some python dependencies but haven't "
                "updated locked requirements files. Please update them by running "
                "`tools/update-locked-requirements`. For more information please "
                "refer to `requirements/README.md`."
            )
|
|
|
|
|
2017-09-25 23:52:59 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Entry point: run the lock-file consistency check when executed as a script.
if __name__ == "__main__":
    main()
|