py3: Switch almost all shebang lines to use `python3`.
This causes `upgrade-zulip-from-git`, as well as a no-option run of
`tools/build-release-tarball`, to produce a Zulip install running
Python 3, rather than Python 2. In particular this means that the
virtualenv we create, in which all application code runs, is Python 3.
One shebang line, on `zulip-ec2-configure-interfaces`, explicitly
keeps Python 2, and at least one external ops script, `wal-e`, also
still runs on Python 2. See discussion on the respective previous
commits that made those explicit. There may also be some other
third-party scripts we use, outside of this source tree and running
outside our virtualenv, that still run on Python 2.
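
A quick, hypothetical way to confirm which interpreter a converted script ends
up running under (not part of this commit) is to add a check like:

    import sys
    assert sys.version_info[0] == 3, "expected to be running under Python 3"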

#!/usr/bin/env python3
from __future__ import print_function
from __future__ import absolute_import

import optparse
import os
import pwd
import signal
import subprocess
import sys
import time
import traceback

from six.moves.urllib.parse import urlunparse

# check for the venv
from lib import sanity_check
sanity_check.check_venv(__file__)

from tornado import httpclient
from tornado import httputil
from tornado import gen
from tornado import web
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, websocket_connect

if False:
    from typing import Any, Callable, Generator, List, Optional
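
# The `if False:` guard keeps the typing import from executing at runtime; the
# import is only needed by the type checker, and the comment-style annotations
# used throughout this file keep the code valid under both Python 2 and Python 3.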

if 'posix' in os.name and os.geteuid() == 0:
    raise RuntimeError("run-dev.py should not be run as root.")

parser = optparse.OptionParser(r"""

Starts the app listening on localhost, for local development.

This script launches the Django and Tornado servers, then runs a reverse proxy
that forwards requests to both of them. After it's all up and running, browse to

    http://localhost:9991/

Note that, while runserver and runtornado have the usual auto-restarting
behavior, the reverse proxy itself does *not* automatically restart on changes
to this file.
""")

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(TOOLS_DIR))
from tools.lib.test_script import (
    get_provisioning_status,
)

parser.add_option('--test',
                  action='store_true', dest='test',
                  help='Use the testing database and ports')
parser.add_option('--minify',
                  action='store_true', dest='minify',
                  help='Minifies assets for testing in dev')
parser.add_option('--interface',
                  action='store', dest='interface',
                  default=None, help='Set the IP or hostname for the proxy to listen on')
parser.add_option('--no-clear-memcached',
                  action='store_false', dest='clear_memcached',
                  default=True, help='Do not clear memcached')
parser.add_option('--force', dest='force',
                  action="store_true",
                  default=False, help='Run command despite possible problems.')
parser.add_option('--enable-tornado-logging', dest='enable_tornado_logging',
                  action="store_true",
                  default=False, help='Enable access logs from tornado proxy server.')

(options, arguments) = parser.parse_args()
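
# Example invocations (illustrative):
#   ./tools/run-dev.py                 # proxy on 127.0.0.1:9991
#   ./tools/run-dev.py --test          # test settings, ports starting at 9981
#   ./tools/run-dev.py --interface=''  # listen on all interfaces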

if not options.force:
    ok, msg = get_provisioning_status()
    if not ok:
        print(msg)
        print('If you really know what you are doing, use --force to run anyway.')
        sys.exit(1)

if options.interface is None:
    user_id = os.getuid()
    user_name = pwd.getpwuid(user_id).pw_name
    if user_name in ["vagrant", "zulipdev"]:
        # In the Vagrant development environment, we need to listen on
        # all ports, and it's safe to do so, because Vagrant is only
        # exposing certain guest ports (by default just 9991) to the
        # host.  The same argument applies to the remote development
        # servers using username "zulipdev".
        options.interface = None
    else:
        # Otherwise, only listen to requests on localhost for security.
        options.interface = "127.0.0.1"
elif options.interface == "":
    options.interface = None

runserver_args = []  # type: List[str]
base_port = 9991
if options.test:
    base_port = 9981
    settings_module = "zproject.test_settings"
    # Don't auto-reload when running casper tests
    runserver_args = ['--noreload']
else:
    settings_module = "zproject.settings"

manage_args = ['--settings=%s' % (settings_module,)]
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from scripts.lib.zulip_tools import WARNING, ENDC
from django.conf import settings

proxy_port = base_port
django_port = base_port + 1
tornado_port = base_port + 2
webpack_port = base_port + 3
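
# Resulting port layout (base_port is 9991 by default, 9981 with --test):
#   proxy: base_port, Django: base_port + 1, Tornado: base_port + 2,
#   webpack: base_port + 3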

os.chdir(os.path.join(os.path.dirname(__file__), '..'))

# Clean up stale .pyc files, etc.
subprocess.check_call('./tools/clean-repo')

if options.clear_memcached:
    print("Clearing memcached ...")
    subprocess.check_call('./scripts/setup/flush-memcached')

# Set up a new process group, so that we can later kill run{server,tornado}
# and all of the processes they spawn.
os.setpgrp()

# Save the pid of the parent process to the pid file. It can be used later by
# tools/stop-run-dev to kill the server without having to find the
# terminal in question.

if options.test:
    pid_file_path = os.path.join(os.path.join(os.getcwd(), 'var/casper/run_dev.pid'))
else:
    pid_file_path = os.path.join(os.path.join(os.getcwd(), 'var/run/run_dev.pid'))

# Required for compatibility with older Python versions.
if not os.path.exists(os.path.dirname(pid_file_path)):
    os.makedirs(os.path.dirname(pid_file_path))
pid_file = open(pid_file_path, 'w+')
pid_file.write(str(os.getpgrp()) + "\n")
pid_file.close()
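
# A stopper script can consume this file with something like the following
# hypothetical sketch (see tools/stop-run-dev for the real implementation):
#     with open(pid_file_path) as f:
#         os.killpg(int(f.read().strip()), signal.SIGTERM)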

# Pass --nostatic because we configure static serving ourselves in
# zulip/urls.py.
cmds = [['./tools/compile-handlebars-templates', 'forever'],
        ['./manage.py', 'runserver'] +
        manage_args + runserver_args + ['127.0.0.1:%d' % (django_port,)],
        ['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado'] +
        manage_args + ['127.0.0.1:%d' % (tornado_port,)],
        ['./tools/run-dev-queue-processors'] + manage_args,
        ['env', 'PGHOST=127.0.0.1',  # Force password authentication using .pgpass
         './puppet/zulip/files/postgresql/process_fts_updates']]
if options.test:
    # Webpack doesn't support 2 copies running on the same system, so
    # in order to support running the Casper tests while a Zulip
    # development server is running, we use webpack in production mode
    # for the Casper tests.
    subprocess.check_call('./tools/webpack')
else:
    webpack_cmd = ['./tools/webpack', '--watch', '--port', str(webpack_port)]
    if options.minify:
        webpack_cmd.append('--minify')
    if options.interface:
        webpack_cmd += ["--host", options.interface]
    else:
        webpack_cmd += ["--host", "0.0.0.0"]
    cmds.append(webpack_cmd)
for cmd in cmds:
    subprocess.Popen(cmd)
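
# Note that the Popen handles are intentionally not waited on; the children
# share this script's process group, so they are terminated by the os.killpg()
# call in the `finally:` block at the bottom of the file.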


def transform_url(protocol, path, query, target_port, target_host):
    # type: (str, str, str, int, str) -> str
    # Generate a URL pointing at the target host and port.
    host = ":".join((target_host, str(target_port)))
    newpath = urlunparse((protocol, host, path, '', query, ''))
    return newpath
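
# Example (illustrative):
#   transform_url('http', '/json/messages', 'anchor=1', 9992, '127.0.0.1')
#   returns 'http://127.0.0.1:9992/json/messages?anchor=1'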


@gen.engine
def fetch_request(url, callback, **kwargs):
    # type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
    # use large timeouts to handle polling requests
    req = httpclient.HTTPRequest(url, connect_timeout=240.0, request_timeout=240.0, **kwargs)
    client = httpclient.AsyncHTTPClient()
    # wait for response
    response = yield gen.Task(client.fetch, req)
    callback(response)
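
# fetch_request uses Tornado's older callback-style generator interface:
# @gen.engine rather than @gen.coroutine, with gen.Task adapting the callback
# argument of AsyncHTTPClient.fetch into something that can be yielded.  The
# result is delivered through `callback` instead of a return value.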


class BaseWebsocketHandler(WebSocketHandler):
    # target server ip
    target_host = '127.0.0.1'  # type: str
    # target server port
    target_port = None  # type: int
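
    # Subclasses set target_port to pick a backend.  Websocket frames are
    # relayed in both directions: on_message forwards browser frames to the
    # backend, and on_client_message forwards backend frames to the browser.
    # Plain HTTP requests are handled separately by CombineHandler.prepare().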

    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        super(BaseWebsocketHandler, self).__init__(*args, **kwargs)
        # client for the target websocket server
        self.client = None  # type: Any

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Callable]
        # use the get method from WebSocketHandler
        return super(BaseWebsocketHandler, self).get(*args, **kwargs)

    def open(self):
        # type: () -> None
        # set up the connection with the target websocket server
        websocket_url = "ws://{host}:{port}{uri}".format(
            host=self.target_host,
            port=self.target_port,
            uri=self.request.uri
        )
        request = httpclient.HTTPRequest(websocket_url)
        request.headers = self._add_request_headers(['sec-websocket-extensions'])
        websocket_connect(request, callback=self.open_callback,
                          on_message_callback=self.on_client_message)

    def open_callback(self, future):
        # type: (Any) -> None
        # callback run once connected to the target websocket server
        self.client = future.result()

    def on_client_message(self, message):
        # type: (str) -> None
        if not message:
            # an empty message means the target websocket server closed the connection
            return self.close()
        if self.ws_connection:
            # forward the message to the client if the connection still exists
            self.write_message(message, False)

    def on_message(self, message, binary=False):
        # type: (str, bool) -> Optional[Callable]
        if not self.client:
            # close the proxy connection if there is no connection to the target websocket server
            return self.close()
        self.client.write_message(message, binary)
        return None

    def check_origin(self, origin):
        # type: (str) -> bool
        return True

    def _add_request_headers(self, exclude_lower_headers_list=None):
        # type: (Optional[List[str]]) -> httputil.HTTPHeaders
        exclude_lower_headers_list = exclude_lower_headers_list or []
        headers = httputil.HTTPHeaders()
        for header, v in self.request.headers.get_all():
            if header.lower() not in exclude_lower_headers_list:
                headers.add(header, v)
        return headers


class CombineHandler(BaseWebsocketHandler):

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Callable]
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super(CombineHandler, self).get(*args, **kwargs)
        return None

    def head(self):
        # type: () -> None
        pass

    def post(self):
        # type: () -> None
        pass

    def put(self):
        # type: () -> None
        pass

    def patch(self):
        # type: () -> None
        pass

    def options(self):
        # type: () -> None
        pass

    def delete(self):
        # type: () -> None
        pass
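
    # The empty verb methods above exist so Tornado will accept any HTTP
    # method instead of replying 405 Method Not Allowed; the actual proxying
    # happens in prepare(), which forwards the request before a verb method
    # would run.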

    def handle_response(self, response):
        # type: (Any) -> None
        if response.error and not isinstance(response.error, httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = httputil.HTTPHeaders()  # clear the default Tornado headers

            for header, v in response.headers.get_all():
                if header != 'Content-Length':
                    # some headers appear multiple times, e.g. 'Set-Cookie'
                    self.add_header(header, v)
            if response.body:
                # set the Content-Length header from the actual response body
                self.set_header('Content-Length', len(response.body))
                self.write(response.body)
        self.finish()

    @web.asynchronous
    def prepare(self):
        # type: () -> None
        if 'X-REAL-IP' not in self.request.headers:
            self.request.headers['X-REAL-IP'] = self.request.remote_ip
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super(CombineHandler, self).prepare()
        url = transform_url(
            self.request.protocol,
            self.request.path,
            self.request.query,
            self.target_port,
            self.target_host,
        )
        try:
            fetch_request(
                url=url,
                callback=self.handle_response,
                method=self.request.method,
                headers=self._add_request_headers(["upgrade-insecure-requests"]),
                follow_redirects=False,
                body=getattr(self.request, 'body'),
                allow_nonstandard_methods=True
            )
        except httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()
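
    # Because of @web.asynchronous, Tornado does not auto-finish the request
    # when prepare() returns; the proxied request is completed explicitly in
    # handle_response() once the backend responds.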


class WebPackHandler(CombineHandler):
    target_port = webpack_port


class DjangoHandler(CombineHandler):
    target_port = django_port


class TornadoHandler(CombineHandler):
    target_port = tornado_port


class Application(web.Application):
    def __init__(self, enable_logging=False):
        # type: (bool) -> None
        handlers = [
            (r"/json/events.*", TornadoHandler),
            (r"/api/v1/events.*", TornadoHandler),
            (r"/webpack.*", WebPackHandler),
            (r"/sockjs.*", TornadoHandler),
            (r"/.*", DjangoHandler)
        ]
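        # Routing is first-match: the real-time endpoints (/json/events,
        # /api/v1/events, and /sockjs) go to the Tornado server, /webpack to
        # the webpack dev server, and everything else to Django.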
        super(Application, self).__init__(handlers, enable_logging=enable_logging)

    def log_request(self, handler):
        # type: (BaseWebsocketHandler) -> None
        if self.settings['enable_logging']:
            super(Application, self).log_request(handler)


def on_shutdown():
    # type: () -> None
    IOLoop.instance().stop()


def shutdown_handler(*args, **kwargs):
    # type: (*Any, **Any) -> None
    io_loop = IOLoop.instance()
    if io_loop._callbacks:
        io_loop.add_timeout(time.time() + 1, shutdown_handler)
    else:
        io_loop.stop()
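
# shutdown_handler is registered for SIGINT and SIGTERM below.  While the
# IOLoop still has pending callbacks (checked via the private _callbacks
# attribute), it re-schedules itself one second later; once the loop has
# drained, it stops the loop.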

# Log which services and ports will be started.
print("Starting Zulip services on ports: web proxy: {},".format(proxy_port),
      "Django: {}, Tornado: {}".format(django_port, tornado_port), end='')
if options.test:
    print("")  # no webpack for --test
else:
    print(", webpack: {}".format(webpack_port))

print("".join((WARNING,
               "Note: only port {} is exposed to the host in a Vagrant environment.".format(
                   proxy_port), ENDC)))

try:
    app = Application(enable_logging=options.enable_tornado_logging)
    app.listen(proxy_port, address=options.interface)
    ioloop = IOLoop.instance()
    for s in (signal.SIGINT, signal.SIGTERM):
        signal.signal(s, shutdown_handler)
    ioloop.start()
except Exception:
    # Print the traceback before we get SIGTERM and die.
    traceback.print_exc()
    raise
finally:
    # Kill everything in our process group.
    os.killpg(0, signal.SIGTERM)
    # Remove the pid file when the development server shuts down cleanly.
    os.remove(pid_file_path)