#!/usr/bin/env python3

import argparse
import os
import pwd
import signal
import subprocess
import sys
import traceback

from urllib.parse import urlunparse

# check for the venv
from lib import sanity_check
sanity_check.check_venv(__file__)
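
# The venv check above needs to run before any third-party imports (like the
# Tornado imports below), so that a missing or incomplete virtualenv produces
# a friendly error instead of an ImportError.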
from tornado import httpclient
from tornado import httputil
from tornado import gen
from tornado import web
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, websocket_connect

from typing import Any, Callable, Generator, List, Optional

if 'posix' in os.name and os.geteuid() == 0:
    raise RuntimeError("run-dev.py should not be run as root.")

parser = argparse.ArgumentParser(description=r"""
Starts the app listening on localhost, for local development.

This script launches the Django and Tornado servers, then runs a reverse proxy
that forwards requests to both of them.  After it's all up and running, browse to

    http://localhost:9991/

Note that, while runserver and runtornado have the usual auto-restarting
behavior, the reverse proxy itself does *not* automatically restart on changes
to this file.
""",
                                 formatter_class=argparse.RawTextHelpFormatter)

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, os.path.dirname(TOOLS_DIR))
from tools.lib.test_script import (
    assert_provisioning_status_ok,
)
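
# The sys.path.insert() above puts the repository root on the import path,
# which is what makes the `tools.lib.test_script` import work when this
# script is run directly.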

parser.add_argument('--test',
                    action='store_true',
                    help='Use the testing database and ports')
parser.add_argument('--minify',
                    action='store_true',
                    help='Minifies assets for testing in dev')
parser.add_argument('--interface',
                    action='store',
                    default=None, help='Set the IP or hostname for the proxy to listen on')
parser.add_argument('--no-clear-memcached',
                    action='store_false', dest='clear_memcached',
                    default=True, help='Do not clear memcached')
parser.add_argument('--force',
                    action="store_true",
                    default=False, help='Run command despite possible problems.')
parser.add_argument('--enable-tornado-logging',
                    action="store_true",
                    default=False, help='Enable access logs from tornado proxy server.')
options = parser.parse_args()

assert_provisioning_status_ok(options.force)

if options.interface is None:
    user_id = os.getuid()
    user_name = pwd.getpwuid(user_id).pw_name
    if user_name in ["vagrant", "zulipdev"]:
        # In the Vagrant development environment, we need to listen on
        # all network interfaces, and it's safe to do so, because Vagrant
        # only exposes certain guest ports (by default just 9991) to the
        # host.  The same argument applies to the remote development
        # servers using username "zulipdev".
        options.interface = None
    else:
        # Otherwise, only listen to requests on localhost for security.
        options.interface = "127.0.0.1"
elif options.interface == "":
    options.interface = None

runserver_args = []  # type: List[str]
base_port = 9991
if options.test:
    base_port = 9981
    settings_module = "zproject.test_settings"
    # Don't auto-reload when running casper tests
    runserver_args = ['--noreload']
else:
    settings_module = "zproject.settings"

manage_args = ['--settings=%s' % (settings_module,)]
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from scripts.lib.zulip_tools import WARNING, ENDC
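
# The proxy and the services behind it are assigned consecutive ports
# starting at base_port (9991 normally, 9981 with --test), so --test shifts
# the whole port block at once.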
proxy_port = base_port
django_port = base_port + 1
tornado_port = base_port + 2
webpack_port = base_port + 3
thumbor_port = base_port + 4

os.chdir(os.path.join(os.path.dirname(__file__), '..'))

# Clean up stale .pyc files etc.
subprocess.check_call('./tools/clean-repo')

if options.clear_memcached:
    print("Clearing memcached ...")
    subprocess.check_call('./scripts/setup/flush-memcached')

# Set up a new process group, so that we can later kill run{server,tornado}
# and all of the processes they spawn.
os.setpgrp()

# Save the pid of the parent process to the pid file.  It can be used
# later by tools/stop-run-dev to kill the server without having to find
# the terminal in question.
if options.test:
    pid_file_path = os.path.join(os.getcwd(), 'var/casper/run_dev.pid')
else:
    pid_file_path = os.path.join(os.getcwd(), 'var/run/run_dev.pid')

# Required for compatibility with older Python versions.
if not os.path.exists(os.path.dirname(pid_file_path)):
    os.makedirs(os.path.dirname(pid_file_path))
with open(pid_file_path, 'w+') as f:
    f.write(str(os.getpgrp()) + "\n")

# Pass --nostatic because we configure static serving ourselves in
# zulip/urls.py.
cmds = [['./manage.py', 'runserver'] +
        manage_args + runserver_args + ['127.0.0.1:%d' % (django_port,)],
        ['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado'] +
        manage_args + ['127.0.0.1:%d' % (tornado_port,)],
        ['./manage.py', 'process_queue', '--all'] + manage_args,
        ['env', 'PGHOST=127.0.0.1',  # Force password authentication using .pgpass
         './puppet/zulip/files/postgresql/process_fts_updates'],
        ['./manage.py', 'deliver_scheduled_messages'],
        ['/srv/zulip-thumbor-venv/bin/thumbor', '-c', './zthumbor/thumbor.conf',
         '-p', '%s' % (thumbor_port,)]]
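
# cmds now holds one command line per development service: the Django web
# server, the Tornado event server, the queue workers, the postgres
# full-text-search updater, the scheduled-message delivery command, and
# thumbor.  A webpack command is appended below (except with --test), and
# then everything is launched with subprocess.Popen.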
if options.test:
    # In test mode, we just need to compile the webpack assets once at
    # startup rather than run a daemon.  Additionally, webpack-dev-server
    # doesn't support running 2 copies on the same system, so this model
    # lets us run the casper tests with a running development server.
    subprocess.check_call(['./tools/webpack', '--quiet', '--test'])
else:
    webpack_cmd = ['./tools/webpack', '--watch', '--port', str(webpack_port)]
    if options.minify:
        webpack_cmd.append('--minify')
    if options.interface is None:
        # If interface is None, we're listening on all addresses, so we also
        # need to disable the webpack host check so that webpack will serve
        # assets.
        webpack_cmd.append('--disable-host-check')
    if options.interface:
        webpack_cmd += ["--host", options.interface]
    else:
        webpack_cmd += ["--host", "0.0.0.0"]
    cmds.append(webpack_cmd)
for cmd in cmds:
    subprocess.Popen(cmd)
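

# Everything below implements the development reverse proxy: a small Tornado
# application listening on proxy_port that forwards each request, HTTP or
# websocket, to the Django, Tornado, webpack, or thumbor server based on the
# URL path.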
def transform_url(protocol, path, query, target_port, target_host):
    # type: (str, str, str, int, str) -> str
    # Generate a URL pointing at the target host and port.
    host = ":".join((target_host, str(target_port)))
    # Rewrite the path a bit so that it matches what we will have in
    # production.
    if path.startswith('/thumbor'):
        path = path[len('/thumbor'):]
    newpath = urlunparse((protocol, host, path, '', query, ''))
    return newpath


@gen.engine
def fetch_request(url, callback, **kwargs):
    # type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
    # Use large timeouts to handle long-polling requests.
    req = httpclient.HTTPRequest(
        url,
        connect_timeout=240.0,
        request_timeout=240.0,
        decompress_response=False,
        **kwargs
    )
    client = httpclient.AsyncHTTPClient()
    # Wait for the response.
    response = yield gen.Task(client.fetch, req)
    callback(response)
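

# BaseWebsocketHandler proxies websocket connections: for each browser
# websocket that connects to this proxy, it opens a second websocket to the
# target server and relays messages in both directions.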
class BaseWebsocketHandler(WebSocketHandler):
    # target server ip
    target_host = '127.0.0.1'  # type: str
    # target server port
    target_port = None  # type: int

    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        super().__init__(*args, **kwargs)
        # define client for target websocket server
        self.client = None  # type: Any

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Callable[..., Any]]
        # use get method from WebsocketHandler
        return super().get(*args, **kwargs)

    def open(self):
        # type: () -> None
        # set up the connection with the target websocket server
        websocket_url = "ws://{host}:{port}{uri}".format(
            host=self.target_host,
            port=self.target_port,
            uri=self.request.uri
        )
        request = httpclient.HTTPRequest(websocket_url)
        request.headers = self._add_request_headers(['sec-websocket-extensions'])
        websocket_connect(request, callback=self.open_callback,
                          on_message_callback=self.on_client_message)

    def open_callback(self, future):
        # type: (Any) -> None
        # callback run once we are connected to the target websocket server
        self.client = future.result()

    def on_client_message(self, message):
        # type: (str) -> None
        if not message:
            # An empty message means the target websocket server closed the
            # connection, so close the proxied connection too.
            return self.close()
        if self.ws_connection:
            # Forward the message to the browser if the connection still exists.
            self.write_message(message, False)

    def on_message(self, message, binary=False):
        # type: (str, bool) -> Optional[Callable[..., Any]]
        if not self.client:
            # Close the proxy connection if we have no connection to the
            # target websocket server.
            return self.close()
        self.client.write_message(message, binary)
        return None

    def check_origin(self, origin):
        # type: (str) -> bool
        return True

    def _add_request_headers(self, exclude_lower_headers_list=None):
        # type: (Optional[List[str]]) -> httputil.HTTPHeaders
        exclude_lower_headers_list = exclude_lower_headers_list or []
        headers = httputil.HTTPHeaders()
        for header, v in self.request.headers.get_all():
            if header.lower() not in exclude_lower_headers_list:
                headers.add(header, v)
        return headers
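

# CombineHandler handles both ordinary HTTP requests and websocket upgrades:
# websocket traffic is delegated to BaseWebsocketHandler via get(), while
# plain HTTP requests are forwarded to the target server in prepare().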
class CombineHandler(BaseWebsocketHandler):

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Callable[..., Any]]
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super().get(*args, **kwargs)
        return None
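
    # Declare no-op handlers for the other HTTP verbs so that Tornado does
    # not reject them with 405 Method Not Allowed; the actual proxying for
    # every verb happens in prepare() below.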
    def head(self):
        # type: () -> None
        pass

    def post(self):
        # type: () -> None
        pass

    def put(self):
        # type: () -> None
        pass

    def patch(self):
        # type: () -> None
        pass

    def options(self):
        # type: () -> None
        pass

    def delete(self):
        # type: () -> None
        pass

    def handle_response(self, response):
        # type: (Any) -> None
        if response.error and not isinstance(response.error, httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = httputil.HTTPHeaders()  # clear tornado default header

            for header, v in response.headers.get_all():
                # Some headers appear multiple times, e.g. 'Set-Cookie'.
                self.add_header(header, v)

            if response.body:
                self.write(response.body)
        self.finish()
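
    # The @web.asynchronous decorator keeps Tornado from finishing the
    # request as soon as prepare() returns; handle_response() above calls
    # finish() once the proxied fetch completes.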
    @web.asynchronous
    def prepare(self):
        # type: () -> None
        if 'X-REAL-IP' not in self.request.headers:
            self.request.headers['X-REAL-IP'] = self.request.remote_ip
        if 'X-FORWARDED-PORT' not in self.request.headers:
            self.request.headers['X-FORWARDED-PORT'] = str(proxy_port)
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super().prepare()
        url = transform_url(
            self.request.protocol,
            self.request.path,
            self.request.query,
            self.target_port,
            self.target_host,
        )
        try:
            fetch_request(
                url=url,
                callback=self.handle_response,
                method=self.request.method,
                headers=self._add_request_headers(["upgrade-insecure-requests"]),
                follow_redirects=False,
                body=getattr(self.request, 'body'),
                allow_nonstandard_methods=True
            )
        except httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()
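

# Each handler below just binds CombineHandler to the port of the backend it
# proxies to; the URL routing between them lives in Application.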
class WebPackHandler(CombineHandler):
    target_port = webpack_port


class DjangoHandler(CombineHandler):
    target_port = django_port


class TornadoHandler(CombineHandler):
    target_port = tornado_port


class ThumborHandler(CombineHandler):
    target_port = thumbor_port


class Application(web.Application):
    def __init__(self, enable_logging=False):
        # type: (bool) -> None
        handlers = [
            (r"/json/events.*", TornadoHandler),
            (r"/api/v1/events.*", TornadoHandler),
            (r"/webpack.*", WebPackHandler),
            (r"/sockjs.*", TornadoHandler),
            (r"/thumbor.*", ThumborHandler),
            (r"/.*", DjangoHandler)
        ]
        super().__init__(handlers, enable_logging=enable_logging)

    def log_request(self, handler):
        # type: (BaseWebsocketHandler) -> None
        if self.settings['enable_logging']:
            super().log_request(handler)


def on_shutdown():
    # type: () -> None
    IOLoop.instance().stop()
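

# shutdown_handler is registered as a signal handler (called with a signal
# number and stack frame) and also reschedules itself via call_later() with
# no arguments, which is why it accepts and ignores *args/**kwargs.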
def shutdown_handler(*args, **kwargs):
    # type: (*Any, **Any) -> None
    io_loop = IOLoop.instance()
    if io_loop._callbacks:
        io_loop.call_later(1, shutdown_handler)
    else:
        io_loop.stop()

# log which services/ports will be started
print("Starting Zulip services on ports: web proxy: {},".format(proxy_port),
      "Django: {}, Tornado: {}, Thumbor: {}".format(django_port, tornado_port, thumbor_port),
      end='')
if options.test:
    print("")  # no webpack for --test
else:
    print(", webpack: {}".format(webpack_port))

print("".join((WARNING,
               "Note: only port {} is exposed to the host in a Vagrant environment.".format(
                   proxy_port), ENDC)))

try:
    app = Application(enable_logging=options.enable_tornado_logging)
    try:
        app.listen(proxy_port, address=options.interface)
    except OSError as e:
        if e.errno == 98:  # errno 98 is EADDRINUSE: the proxy port is already taken
            print('\n\nERROR: You probably have another server running!!!\n\n')
        raise
    ioloop = IOLoop.instance()
    for s in (signal.SIGINT, signal.SIGTERM):
        signal.signal(s, shutdown_handler)
    ioloop.start()
except Exception:
    # Print the traceback before we get SIGTERM and die.
    traceback.print_exc()
    raise
finally:
    # Kill everything in our process group.
    os.killpg(0, signal.SIGTERM)
    # Remove the pid file when the development server exits correctly.
    os.remove(pid_file_path)