zulip/tools/test-js-with-puppeteer

101 lines
3.6 KiB
Plaintext
Raw Normal View History

#!/usr/bin/env python3
import argparse
import os
import shlex
import subprocess
import sys
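
# Absolute path to the root of the Zulip checkout (this script lives in tools/).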
ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Request the special webpack setup for frontend integration tests,
# where webpack assets are compiled up front rather than running in
# watch mode.  We want the same behavior for the Puppeteer tests as for
# the Casper ones, so for now they share this environment variable.
#
# TODO: Eventually, we'll want to rename this parameter.
os.environ["CASPER_TESTS"] = "1"
os.environ["CHROMIUM_EXECUTABLE"] = os.path.join(ZULIP_PATH, "node_modules/.bin/chromium")
os.environ.pop("http_proxy", "")
os.environ.pop("https_proxy", "")
usage = """test-js-with-puppeteer [options]
    test-js-with-puppeteer                           # Run all test files
    test-js-with-puppeteer 09-navigation.js          # Run a single test file
    test-js-with-puppeteer 09                        # Run a single test file, 09-navigation.js
    test-js-with-puppeteer 01-login.js 03-narrow.js  # Run a few test files
    test-js-with-puppeteer 01 03                     # Run a few test files, 01-login.js and 03-narrow.js"""
parser = argparse.ArgumentParser(usage=usage)
parser.add_argument('--interactive', dest='interactive',
                    action="store_true",
                    default=False, help='Run tests interactively')
parser.add_argument('--force', dest='force',
                    action="store_true",
                    default=False, help='Run tests despite possible problems.')
parser.add_argument('tests', nargs=argparse.REMAINDER,
                    help='Specific tests to run; by default, runs all tests')
options = parser.parse_args()
sys.path.insert(0, ZULIP_PATH)
# check for the venv
from tools.lib import sanity_check
sanity_check.check_venv(__file__)
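
# The imports below may depend on packages that are only available inside the
# venv, which is why the check above runs first.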
from typing import Iterable
from tools.lib.test_script import (
    assert_provisioning_status_ok,
    find_js_test_files,
    prepare_puppeteer_run,
)
from tools.lib.test_server import test_server_running

def run_tests(files: Iterable[str], external_host: str) -> None:
    test_dir = os.path.join(ZULIP_PATH, 'frontend_tests/puppeteer_tests')
    test_files = find_js_test_files(test_dir, files)
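
    # Run each selected test file under node, stopping at the first failure.
    # Note that this nested helper shadows the enclosing run_tests(), so the
    # calls inside the `with` block below refer to it.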
    def run_tests() -> int:
        ret = 1
        for test_file in test_files:
            test_name = os.path.basename(test_file)
            cmd = ["node", test_file]
            print("\n\n===================== {}\nRunning {}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True)
            ret = subprocess.call(cmd)
            if ret != 0:
                return ret
        return 0
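
    # test_server_running (from tools.lib.test_server) brings up a Zulip test
    # server for the duration of this block.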
    with test_server_running(False, external_host):
        # Important: do this next call inside the `with` block, when Django
        # will be pointing at the test database.
        subprocess.check_call('tools/setup/generate-test-credentials')
        if options.interactive:
            response = input('Press Enter to run tests, "q" to quit: ')
            ret = 1
            while response != 'q':
                ret = run_tests()
                if ret != 0:
                    response = input('Tests failed. Press Enter to re-run tests, "q" to quit: ')
        else:
            ret = run_tests()
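
    # Report failures after the server context has exited; in CI, point the
    # developer at the saved screenshots.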
    if ret != 0:
        print("""
The Puppeteer frontend tests failed! Please report and ask for help in chat.zulip.org""", file=sys.stderr)
        if os.environ.get("CIRCLECI"):
            print("", file=sys.stderr)
            print("In CircleCI, the Artifacts tab contains screenshots of the failure.", file=sys.stderr)
            print("", file=sys.stderr)
        sys.exit(ret)
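
# Host:port at which the Puppeteer tests expect to reach the test server.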
external_host = "zulipdev.com:9981"
assert_provisioning_status_ok(options.force)
prepare_puppeteer_run()
run_tests(options.tests, external_host)
sys.exit(0)