zulip/tools/test-documentation

#!/usr/bin/env python3
import argparse
import os
import subprocess
import sys
from collections.abc import Sequence
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.chdir(ZULIP_PATH)
sys.path.insert(0, ZULIP_PATH)
# check for the venv
from tools.lib import sanity_check
sanity_check.check_venv(__file__)
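
# Print a message to stderr wrapped in the given ANSI color code
# (e.g. "91" = bright red, "92" = bright green, "94" = bright blue).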
def color_message(color_code: str, message: str) -> None:
    print(f"\033[0;{color_code}m{message}\033[0m", file=sys.stderr)

parser = argparse.ArgumentParser()
parser.add_argument(
    "-L",
    "--loglevel",
    dest="loglevel",
    nargs=1,
    help="Log level (default: ERROR)",
)
parser.add_argument(
    "--skip-check-links",
    dest="skip_check_links",
    action="store_true",
    help="Skip checking of links.",
)
parser.add_argument(
    "--skip-external-links",
    dest="skip_external_link_check",
    # default = false
    action="store_true",
    help="Skip checking of external links.",
)
options = parser.parse_args()
# loglevel should be passed through to scrapy as an extra command-line option
# in the form ["-L", "LOGLEVEL"].
# If loglevel is defined, insert "-L" ahead of it.
if options.loglevel:
    options.loglevel.insert(0, "-L")
# if loglevel is undefined, make loglevel an empty list
else:
    options.loglevel = []
os.chdir("docs")
# collapse_navigation is set to False in conf.py to improve sidebar navigation for users.
# However, we must change its value to True before we begin testing links.
# Otherwise, sphinx would generate a large number of links we don't need to test.
# The crawler would take a very long time to finish and GitHub Actions would fail as a result.
subprocess.check_call(
    ["make", "clean", "html", "O='-D html_theme_options.collapse_navigation=True'"]
)
err = 0
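
# Run the given command; report Passed!/Failed! and record any failure in the
# global exit status.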
def check(args: Sequence[str]) -> None:
    global err
    if subprocess.call(args) == 0:
        color_message("92", "Passed!")
    else:
        color_message("91", "Failed!")
        err = 1

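# Validate the generated HTML with the Nu Html Checker (vnu-jar); the filter file
# lists messages to ignore.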
color_message("94", "Validating HTML...")
check(
    [
        "java",
        "-jar",
        "../node_modules/vnu-jar/build/dist/vnu.jar",
        "--filterfile",
        "../tools/documentation.vnufilter",
        "--skip-non-html",
        "_build/html",
    ]
)
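
# Link checking runs the documentation_crawler Scrapy project; check() relies on
# the crawl command's exit status to detect broken links.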
if options.skip_check_links:
    color_message("94", "Skipped testing links in documentation.")
else:
    os.chdir("../tools/documentation_crawler")
    if options.skip_external_link_check:
        color_message("94", "Testing only internal links in documentation...")
        check(
            [
                "scrapy",
                "crawl_with_status",
                "documentation_crawler",
                "-a",
                "skip_external=set",
                *options.loglevel,
            ]
        )
        # The skip_external parameter is passed directly to the crawler via "-a".
    else:
        color_message("94", "Testing links in documentation...")
        check(["scrapy", "crawl_with_status", "documentation_crawler", *options.loglevel])
sys.exit(err)