test-documentation: Simplify --skip-external-links code path.

The crawler used to be invoked directly (scrapy crawl) when external links
were skipped, because that was the only way to pass the skip_external
parameter, and the script had to duplicate its pass/fail handling for that
case.  The previous commit modified the crawl_with_status wrapper to forward
this parameter, so the scrapy command can now use the wrapper on both paths
and share a single result check.
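For reference, the change in invocation for the internal-links-only case, taken
from the diff below; the wrapper accepts the same -a spider argument, and the
script reads its exit status via result=$?, which is what lets the duplicated
pass/fail branch go away:

    # Before: the spider was run directly so that -a skip_external=set reached
    # it, and the script handled pass/fail for this case separately.
    scrapy crawl documentation_crawler -a skip_external=set "${loglevel[@]}"

    # After: crawl_with_status forwards -a skip_external=set to the spider and
    # reports failure through its exit status, so both paths share the single
    # result=$? check at the end of the script.
    scrapy crawl_with_status documentation_crawler -a skip_external=set "${loglevel[@]}"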
Authored by Sumanth V Rao on 2018-12-29 20:08:20 +05:30; committed by Tim Abbott
parent 776b981dde
commit ccd0badeed
1 changed file with 9 additions and 19 deletions


@@ -41,27 +41,17 @@ if [ -n "$skip_check_links" ]; then
     exit 0
 fi
-if [ -n "$skip_external_links" ]; then
-    color_message 94 "Testing only internal links in documentation..."
-    cd ../tools/documentation_crawler
-    set +e
-    scrapy crawl documentation_crawler -a skip_external=set "${loglevel[@]}"
-    # calling crawl directly as parameter needs to be passed
-    result=$?
-    if [ "$result" = 1 ]; then
-        color_message 91 "Failed!"
-        exit 1
-    else
-        color_message 92 "Passed!"
-        exit 0
-    fi
-fi
-color_message 94 "Testing links in documentation..."
 cd ../tools/documentation_crawler
 set +e
-scrapy crawl_with_status documentation_crawler "${loglevel[@]}"
+if [ -n "$skip_external_links" ]; then
+    color_message 94 "Testing only internal links in documentation..."
+    scrapy crawl_with_status documentation_crawler -a skip_external=set "${loglevel[@]}"
+    # calling crawl directly as parameter needs to be passed
+else
+    color_message 94 "Testing links in documentation..."
+    scrapy crawl_with_status documentation_crawler "${loglevel[@]}"
+fi
 result=$?
 if [ "$result" = 1 ]; then
     color_message 91 "Failed!"