#!/usr/bin/env python
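#
# Run the Zulip frontend (Casper) test suite against a local test server.
# See the usage string below for example invocations.
#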
from __future__ import print_function
import subprocess
import optparse
import time
import sys
import os
import glob
try:
    # We don't actually need typing, but it's a good guard for being
    # outside a Zulip virtualenv.
    from typing import Iterable
    import requests
    import django
except ImportError as e:
    print("ImportError: {}".format(e))
    print("You need to run the Zulip tests inside a Zulip dev environment.")
    print("If you are using Vagrant, you can `vagrant ssh` to enter the Vagrant guest.")
    sys.exit(1)

#
# In order to use remote casperjs debugging, pass the --remote-debug flag
# This will start a remote debugging session listening on port 7777
#
# See https://wiki.zulip.net/wiki/Testing_the_app for more information
# on how to use remote debugging
#
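# Example (illustrative invocation, run from the root of a Zulip checkout):
#     tools/test-js-with-casper --remote-debug 09-navigation.js
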
os.environ["TORNADO_SERVER"] = "http://127.0.0.1:9983"
|
2016-12-15 07:02:42 +01:00
|
|
|
os.environ["CASPER_TESTS"] = "1"
|
Upgrade caspersjs to version 1.1.3. (w/acrefoot)
(Most of this work was done by acrefoot in an earlier branch.
I took over the branch to fix casper tests that were broken during
the upgrade (which were fixed in a different commit). I also
made most of the changes to run-casper.)
This also upgrades phantomjs to 2.1.7.
The huge structural change here is that we no longer vendor casperjs
or download phantomjs with our own script. Instead, we just use
casperjs and phantomjs from npm, via package.json.
Another thing that we do now is run casperjs tests individually, so
that we don't get strange test flakes from test interactions. (Tests
can still influence each other in terms of changing data, since we
don't yet have code to clear the test database in between tests.)
A lot of this diff is just removing files and obsolete configurations.
The main new piece is in package.json, which causes npm to install the
new version.
Also, run-casper now runs files individually, as mentioned above.
We had vendored casperjs in the past. I didn't bring over any of our
changes. Some of the changes were performance-related (primarily
5fd58cf24927359dce26588d59690c40c6ce6d4c), so the upgraded version may
be slower in some instances. (I didn't do much measurement of that,
since most of our slowness when running tests is about the setup
environment, not casper itself.) Any bug fixes that we may have
implemented in the past were either magically fixed by changes to
casper itself or by improvements we have made in the tests themselves
over the years.
Tim tested the Casper suite on his machine and running the full Casper
test suite is faster than it was before this change (1m30 vs. 1m50),
so we're at least not regressing overall performance.
2016-10-07 18:20:59 +02:00
|
|
|
os.environ["PHANTOMJS_EXECUTABLE"] = os.path.join(os.path.dirname(__file__), "../node_modules/.bin/phantomjs")
usage = """%prog [options]
   test-js-with-casper # Run all test files
   test-js-with-casper 09-navigation.js # Run a single test file
   test-js-with-casper 09 # Run a single test file 09-navigation.js
   test-js-with-casper 01-login.js 03-narrow.js # Run a few test files
   test-js-with-casper 01 03 # Run a few test files, 01-login.js and 03-narrow.js here"""
parser = optparse.OptionParser(usage)

parser.add_option('--force', dest='force',
                  action="store_true",
                  default=False, help='Run tests despite possible problems.')
parser.add_option('--remote-debug',
                  help='Whether or not to enable remote debugging on port 7777',
                  action="store_true",
                  default=False)
(options, args) = parser.parse_args()

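# Make the repository root importable so that the tools.lib and zerver
# imports below resolve.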
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(TOOLS_DIR))

from tools.lib.test_script import (
    get_provisioning_status,
)

if not options.force:
    ok, msg = get_provisioning_status()
    if not ok:
        print(msg)
        print('If you really know what you are doing, use --force to run anyway.')
        sys.exit(1)

from zerver.lib.test_fixtures import is_template_database_current

os.environ['DJANGO_SETTINGS_MODULE'] = 'zproject.test_settings'
django.setup()
os.environ['PYTHONUNBUFFERED'] = 'y'

os.chdir(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..'))

subprocess.check_call('tools/setup/generate-test-credentials')

subprocess.check_call(['mkdir', '-p', 'var/casper'])

subprocess.check_call(['rm', '-f'] + glob.glob('var/casper/casper-failure*.png'))

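# Append to the existing server log while it is still reasonably small;
# otherwise start a fresh one.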
LOG_FILE = 'var/casper/server.log'
if os.path.exists(LOG_FILE) and os.path.getsize(LOG_FILE) < 100000:
    log = open(LOG_FILE, 'a')
    log.write('\n\n')
else:
    log = open(LOG_FILE, 'w')

def assert_server_running(server):
    # type: (subprocess.Popen) -> None
    """Raise an error if the development server process has died."""
    if server.poll() is not None:
        raise RuntimeError('Server died unexpectedly! Check %s' % (LOG_FILE,))

def server_is_up(server):
    # type: (subprocess.Popen) -> bool
    assert_server_running(server)
    try:
        # We could get a 501 error if the reverse proxy is up but the Django app isn't.
        return requests.get('http://127.0.0.1:9981/accounts/home').status_code == 200
    except Exception:
        return False

def run_tests(realms_have_subdomains, files):
    # type: (bool, Iterable[str]) -> None
    test_dir = os.path.join(os.path.dirname(__file__), '../frontend_tests/casper_tests')
    test_files = []
    for file in files:
        for file_name in os.listdir(test_dir):
            if file_name.startswith(file):
                file = file_name
                break
        if not os.path.exists(file):
            file = os.path.join(test_dir, file)
        test_files.append(os.path.abspath(file))

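    # Regenerate the test database fixtures, forcing a rebuild if the
    # template database is out of date.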
    generate_fixtures_command = ['tools/setup/generate-fixtures']
    if not is_template_database_current():
        generate_fixtures_command.append('--force')

    subprocess.check_call(generate_fixtures_command)

    if not test_files:
        test_files = sorted(glob.glob(os.path.join(test_dir, '*.js')))

    remote_debug = ""
    if options.remote_debug:
        remote_debug = "--remote-debugger-port=7777 --remote-debugger-autorun=yes"

    # Run this not through the shell, so that we have the actual PID.
    server = subprocess.Popen(('tools/run-dev.py', '--test'),
                              stdout=log, stderr=log)

    ret = 1

    try:
        # Wait for the server to start up.
        sys.stdout.write('Waiting for test server')
        while not server_is_up(server):
            sys.stdout.write('.')
            sys.stdout.flush()
            time.sleep(0.1)
        sys.stdout.write('\n')

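        # Run each test file in its own casperjs process so that one test's
        # flakiness doesn't interfere with the others (tests can still affect
        # each other through changes to the test data), and stop at the first
        # failing file.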
        for test_file in test_files:
            cmd = "node_modules/.bin/casperjs %s test --subdomains=%s %s" % (
                remote_debug, realms_have_subdomains, test_file)
            print("\n\nRunning %s" % (cmd,))
            ret = subprocess.call(cmd, shell=True)
            if ret != 0:
                break
    finally:
        assert_server_running(server)
        server.terminate()

    if ret != 0:
        print("""
Oops, the frontend tests failed. Tips for debugging:
 * Check the frontend test server logs at %s
 * Check the screenshots of failed tests at var/casper/casper-failure*.png
 * Try remote debugging the test web browser as described in docs/testing-with-casper.md
""" % (LOG_FILE,), file=sys.stderr)

        sys.exit(ret)

os.environ["EXTERNAL_HOST"] = "localhost:9981"
# First, run all tests with REALMS_HAVE_SUBDOMAINS set to False
run_tests(False, args)

# Now run a subset of the tests with REALMS_HAVE_SUBDOMAINS set to True
os.environ["REALMS_HAVE_SUBDOMAINS"] = "True"
os.environ["EXTERNAL_HOST"] = "zulipdev.com:9981"
if len(args) == 0:
    run_tests(True, ["00-realm-creation.js", "01-login.js", "02-site.js"])
else:
    run_tests(True, args)
sys.exit(0)