#!/usr/bin/env bash
# Build the documentation and crawl it for broken links.
# Exit immediately if any command fails (selectively relaxed with `set +e`
# around the crawler so we can inspect its exit status ourselves).
set -e
# Print a message to stderr wrapped in an ANSI SGR color code.
# $1 - numeric SGR code (e.g. 91 = red, 92 = green, 94 = blue)
# $2 - message text
# Output goes to stderr so it is not captured by callers reading stdout.
color_message () {
    local color_code="$1" message="$2"
    printf '\e[%sm%s\e[0m\n' "$color_code" "$message" >&2
}
# Extra arguments forwarded to scrapy (filled with "-L LEVEL" or
# "--loglevel LEVEL" by the option parser below); empty means scrapy's
# default log level (ERROR, per the usage text).
loglevel=()
|
# Print the command-line help text to stdout.
usage () {
    cat <<EOF
usage:
    --help, -h                  show this help message and exit
    --loglevel=LEVEL, -L LEVEL  log level (default: ERROR)
    --skip-check-links          skip checking of links
    --skip-external-links       skip checking of external links
EOF
}
|
|
|
|
# Parse command-line options with util-linux enhanced getopt (supports long
# options; handles quoting so values with spaces survive the eval below).
args="$(getopt -o hL: --long help,loglevel:,skip-check-links,skip-external-links -- "$@")" ||
    { usage >&2; exit 1; }
eval "set -- $args"
while true; do
    case "$1" in
        -h|--help) usage; exit 0;;
        # Keep the flag itself ("$1") so it can be forwarded verbatim to scrapy.
        -L|--loglevel) loglevel=("$1" "$2"); shift 2;;
        --skip-check-links) skip_check_links=1; shift;;
        --skip-external-links) skip_external_links=1; shift;;
        --) shift; break;;
        # getopt already rejected unknown options above; this is a defensive
        # fallback — show usage so the failure is not silent.
        *) usage >&2; exit 1;;
    esac
done
|
# Build the HTML docs from a clean slate so the crawler sees current output.
cd "$(dirname "$0")"/../docs
rm -rf _build
# collapse_navigation is set to False in conf.py to improve sidebar navigation for users.
# However, we must change its value to True before we begin testing links.
# Otherwise, sphinx would generate a large number of links we don't need to test.
# The crawler would take a very long time to finish and TravisCI would fail as a result.
sphinx-build -j8 -b html -d _build/doctrees -D html_theme_options.collapse_navigation=True . _build/html
# Honor --skip-check-links: the docs were built above, so stop here
# successfully without running the crawler at all.
if [ -n "$skip_check_links" ]; then
    color_message 94 "Skipped testing links in documentation."
    exit 0
fi
|
cd ../tools/documentation_crawler
# Disable errexit so a failing crawl doesn't abort the script; we capture
# its exit status below and report Passed/Failed ourselves.
set +e
if [ -n "$skip_external_links" ]; then
    color_message 94 "Testing only internal links in documentation..."
    # calling crawl directly as parameter needs to be passed
    scrapy crawl_with_status documentation_crawler -a skip_external=set "${loglevel[@]}"
else
    color_message 94 "Testing links in documentation..."
    scrapy crawl_with_status documentation_crawler "${loglevel[@]}"
fi
# $? holds the exit status of whichever scrapy invocation ran above
# (errexit is still off here, so a failure reached this point).
result=$?
if [ "$result" -ne 0 ]; then
    color_message 91 "Failed!"
    exit 1
else
    color_message 92 "Passed!"
    exit 0
fi