tools/test-help-documentation: Verify api/ docs.

Authored by Eeshan Garg on 2017-11-11 22:12:34 -03:30, committed by Tim Abbott
parent 9dd5d6e999
commit e85d0747d4
2 changed files with 31 additions and 18 deletions

tools/documentation_crawler/documentation_crawler/spiders/check_help_documentation.py

@@ -8,37 +8,48 @@ from .common.spiders import BaseDocumentationSpider
 from typing import Any, List, Set
 
 
-def get_help_images_dir(help_images_path: str) -> str:
+def get_images_dir(images_path: str) -> str:
     # Get index html file as start url and convert it to file uri
     dir_path = os.path.dirname(os.path.realpath(__file__))
-    target_path = os.path.join(dir_path, os.path.join(*[os.pardir] * 4), help_images_path)
+    target_path = os.path.join(dir_path, os.path.join(*[os.pardir] * 4), images_path)
     return os.path.realpath(target_path)
 
 
-class HelpDocumentationSpider(BaseDocumentationSpider):
-    name = "help_documentation_crawler"
-    start_urls = ['http://localhost:9981/help']
-    deny_domains = []  # type: List[str]
-    deny = ['/privacy']
-    help_images_path = "static/images/help"
-    help_images_static_dir = get_help_images_dir(help_images_path)
+class UnusedImagesLinterSpider(BaseDocumentationSpider):
+    images_path = ""
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
         self.static_images = set()  # type: Set[str]
+        self.images_static_dir = get_images_dir(self.images_path)  # type: str
 
     def _is_external_url(self, url: str) -> bool:
-        is_external = url.startswith('http') and 'localhost:9981/help' not in url
-        if self._has_extension(url) and 'localhost:9981/static/images/help' in url:
+        is_external = url.startswith('http') and self.start_urls[0] not in url
+        if self._has_extension(url) and 'localhost:9981/{}'.format(self.images_path) in url:
             self.static_images.add(basename(urlparse(url).path))
         return is_external or self._has_extension(url)
 
     def closed(self, *args: Any, **kwargs: Any) -> None:
-        unused_images = set(os.listdir(self.help_images_static_dir)) - self.static_images
+        unused_images = set(os.listdir(self.images_static_dir)) - self.static_images
         if unused_images:
-            exception_message = "The following images are not used in help documentation " \
+            exception_message = "The following images are not used in documentation " \
                                 "and can be removed: {}"
             self._set_error_state()
             unused_images_relatedpath = [
-                os.path.join(self.help_images_path, img) for img in unused_images]
+                os.path.join(self.images_path, img) for img in unused_images]
             raise Exception(exception_message.format(', '.join(unused_images_relatedpath)))
+
+
+class HelpDocumentationSpider(UnusedImagesLinterSpider):
+    name = "help_documentation_crawler"
+    start_urls = ['http://localhost:9981/help']
+    deny_domains = []  # type: List[str]
+    deny = ['/privacy']
+    images_path = "static/images/help"
+
+
+class APIDocumentationSpider(UnusedImagesLinterSpider):
+    name = 'api_documentation_crawler'
+    start_urls = ['http://localhost:9981/api']
+    deny_domains = []  # type: List[str]
+    images_path = "static/images/api"

tools/test-help-documentation

@@ -26,10 +26,12 @@ LOG_FILE = 'var/help-documentation/server.log'
 external_host = "localhost:9981"
 
 with test_server_running(options.force, external_host, log_file=LOG_FILE, dots=True, use_db=False):
-    ret = subprocess.call(('scrapy', 'crawl_with_status', 'help_documentation_crawler'),
-                          cwd='tools/documentation_crawler')
+    ret_help_doc = subprocess.call(('scrapy', 'crawl_with_status', 'help_documentation_crawler'),
+                                   cwd='tools/documentation_crawler')
+    ret_api_doc = subprocess.call(('scrapy', 'crawl_with_status', 'api_documentation_crawler'),
+                                  cwd='tools/documentation_crawler')
 
-if ret != 0:
+if ret_help_doc != 0 or ret_api_doc != 0:
     print("\033[0;91m")
     print("Failed")
     print("\033[0m")
@@ -39,4 +41,4 @@ else:
     print("\033[0m")
 
-sys.exit(ret)
+sys.exit(ret_help_doc or ret_api_doc)
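A subtlety worth noting in the last hunk: sys.exit(ret_help_doc or ret_api_doc) works because Python's `or` returns its first truthy operand rather than a boolean, so a nonzero return code from either crawl propagates as the script's exit status. A quick illustration:

    # `or` yields the first truthy operand, so a failing crawl's
    # nonzero return code becomes the script's exit status:
    assert (0 or 0) == 0  # both crawlers passed -> exit 0
    assert (1 or 0) == 1  # help crawler failed -> exit 1
    assert (0 or 2) == 2  # only the api crawler failed -> exit 2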