#!/usr/bin/env python
from __future__ import print_function
import optparse
import os
import pwd
import signal
import subprocess
import sys
import time
import traceback

from six.moves.urllib.parse import urlunparse

from tornado import httpclient
from tornado import httputil
from tornado import gen
from tornado import web
from tornado.ioloop import IOLoop
from tornado.websocket import WebSocketHandler, websocket_connect

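# These imports are only needed for the type comments below; the `if False:`
# guard keeps them from being executed at runtime.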
if False: from typing import Any, Callable, Generator, List, Optional

if 'posix' in os.name and os.geteuid() == 0:
    raise RuntimeError("run-dev.py should not be run as root.")

parser = optparse.OptionParser(r"""
Starts the app listening on localhost, for local development.

This script launches the Django and Tornado servers, then runs a reverse proxy
which forwards requests to both of them.  After it's all up and running, browse to

    http://localhost:9991/

Note that, while runserver and runtornado have the usual auto-restarting
behavior, the reverse proxy itself does *not* automatically restart on changes
to this file.
""")

parser.add_option('--test',
                  action='store_true', dest='test',
                  help='Use the testing database and ports')
parser.add_option('--interface',
                  action='store', dest='interface',
                  default=None, help='Set the IP or hostname for the proxy to listen on')
parser.add_option('--no-clear-memcached',
                  action='store_false', dest='clear_memcached',
                  default=True, help='Do not clear memcached')

(options, arguments) = parser.parse_args()

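# Pick a default listen address for the proxy when --interface was not given.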
if options.interface is None:
    user_id = os.getuid()
    user_name = pwd.getpwuid(user_id).pw_name
    if user_name == "vagrant":
        # In the Vagrant development environment, we need to listen on
        # all ports, and it's safe to do so, because Vagrant is only
        # exposing certain guest ports (by default just 9991) to the host.
        options.interface = ""
    else:
        # Otherwise, only listen to requests on localhost for security.
        options.interface = "127.0.0.1"

base_port = 9991
if options.test:
    base_port = 9981
    settings_module = "zproject.test_settings"
else:
    settings_module = "zproject.settings"

manage_args = ['--settings=%s' % (settings_module,)]
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))

from scripts.lib.zulip_tools import WARNING, ENDC

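# The reverse proxy listens on base_port itself; Django, Tornado, and webpack
# each get one of the next three ports behind it.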
proxy_port = base_port
django_port = base_port + 1
tornado_port = base_port + 2
webpack_port = base_port + 3

os.chdir(os.path.join(os.path.dirname(__file__), '..'))

# Clean up stale .pyc files etc.
subprocess.check_call('./tools/clean-repo')

# HACK to fix up node_modules/.bin/handlebars deletion issue
if not os.path.exists("node_modules/.bin/handlebars") and os.path.exists("node_modules/handlebars"):
    print("Handlebars binary missing due to rebase past .gitignore fixup; fixing...")
    subprocess.check_call(["rm", "-rf", "node_modules/handlebars"])
    subprocess.check_call(["npm", "install"])

if options.clear_memcached:
    print("Clearing memcached ...")
    subprocess.check_call('./scripts/setup/flush-memcached')

# Set up a new process group, so that we can later kill run{server,tornado}
# and all of the processes they spawn.
os.setpgrp()

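# Commands for the processes this script launches: Handlebars template
# compilation, the Django and Tornado servers, the queue processors, and the
# postgres full-text-search updater (webpack is appended below unless --test).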
# Pass --nostatic because we configure static serving ourselves in
# zulip/urls.py.
cmds = [['./tools/compile-handlebars-templates', 'forever'],
        ['python', 'manage.py', 'rundjango'] +
        manage_args + ['127.0.0.1:%d' % (django_port,)],
        ['python', '-u', 'manage.py', 'runtornado'] +
        manage_args + ['127.0.0.1:%d' % (tornado_port,)],
        ['./tools/run-dev-queue-processors'] + manage_args,
        ['env', 'PGHOST=127.0.0.1',  # Force password authentication using .pgpass
         './puppet/zulip/files/postgresql/process_fts_updates']]
if options.test:
    # Webpack doesn't support 2 copies running on the same system, so
    # in order to support running the Casper tests while a Zulip
    # development server is running, we use webpack in production mode
    # for the Casper tests.
    subprocess.check_call('./tools/webpack')
else:
    cmds += [['./tools/webpack', '--watch', '--port', str(webpack_port)]]
for cmd in cmds:
    subprocess.Popen(cmd)

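
# transform_url() rewrites an incoming request URL so that it points at one of
# the backend servers.  A worked example with illustrative values:
#   transform_url('http', '/json/messages', 'anchor=1', 9992, '127.0.0.1')
#   -> 'http://127.0.0.1:9992/json/messages?anchor=1'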
def transform_url(protocol, path, query, target_port, target_host):
    # type: (str, str, str, int, str) -> str
    # generate url with target host
    host = ":".join((target_host, str(target_port)))
    newpath = urlunparse((protocol, host, path, '', query, ''))
    return newpath

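
# fetch_request() is an old-style Tornado coroutine (@gen.engine): it yields a
# gen.Task wrapping the asynchronous fetch, resumes once the backend responds,
# and then hands the response to `callback`.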
@gen.engine
def fetch_request(url, callback, **kwargs):
    # type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
    # use large timeouts to handle polling requests
    req = httpclient.HTTPRequest(url, connect_timeout=240.0, request_timeout=240.0, **kwargs)
    client = httpclient.AsyncHTTPClient()
    # wait for response
    response = yield gen.Task(client.fetch, req)
    callback(response)

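
# BaseWebsocketHandler proxies a browser websocket connection to the matching
# backend server: messages from the browser are forwarded to the backend, and
# messages from the backend are written back to the browser.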
class BaseWebsocketHandler(WebSocketHandler):
    # target server ip
    target_host = '127.0.0.1'  # type: str
    # target server port
    target_port = None  # type: int

    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        super(BaseWebsocketHandler, self).__init__(*args, **kwargs)
        # client connection to the target websocket server
        self.client = None  # type: Any

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Callable
        # delegate to WebSocketHandler's get method
        return super(BaseWebsocketHandler, self).get(*args, **kwargs)

    def open(self):
        # type: () -> None
        # open a connection to the target websocket server
        websocket_url = "ws://{host}:{port}{uri}".format(
            host=self.target_host,
            port=self.target_port,
            uri=self.request.uri
        )
        request = httpclient.HTTPRequest(websocket_url)
        request.headers = self._add_request_headers(['sec-websocket-extensions'])
        websocket_connect(request, callback=self.open_callback,
                          on_message_callback=self.on_client_message)

    def open_callback(self, future):
        # type: (Any) -> None
        # called once the connection to the target websocket server is established
        self.client = future.result()

    def on_client_message(self, message):
        # type: (str) -> None
        if not message:
            # an empty message means the target websocket server closed the connection
            return self.close()
        if self.ws_connection:
            # forward the message to the browser if its connection is still open
            self.write_message(message, False)

    def on_message(self, message, binary=False):
        # type: (str, bool) -> Optional[Callable]
        if not self.client:
            # close the proxy connection if we have no connection to the target websocket server
            return self.close()
        self.client.write_message(message, binary)

    def check_origin(self, origin):
        # type: (str) -> bool
        return True

    def _add_request_headers(self, exclude_lower_headers_list=None):
        # type: (Optional[List[str]]) -> httputil.HTTPHeaders
        exclude_lower_headers_list = exclude_lower_headers_list or []
        headers = httputil.HTTPHeaders()
        for header, v in self.request.headers.get_all():
            if header.lower() not in exclude_lower_headers_list:
                headers.add(header, v)
        return headers

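
# CombineHandler serves both plain HTTP and websocket traffic on the same
# route: websocket upgrade requests are handed off to BaseWebsocketHandler,
# and every other request is proxied to the target server via fetch_request().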
class CombineHandler(BaseWebsocketHandler):

    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> Optional[Callable]
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super(CombineHandler, self).get(*args, **kwargs)

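    # The HTTP verb handlers below are intentionally empty: the actual
    # proxying for every method happens in prepare(), so these stubs exist
    # only so Tornado accepts the corresponding request methods.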
    def head(self):
        # type: () -> None
        pass

    def post(self):
        # type: () -> None
        pass

    def put(self):
        # type: () -> None
        pass

    def patch(self):
        # type: () -> None
        pass

    def options(self):
        # type: () -> None
        pass

    def delete(self):
        # type: () -> None
        pass

    def handle_response(self, response):
        # type: (Any) -> None
        if response.error and not isinstance(response.error, httpclient.HTTPError):
            self.set_status(500)
            self.write('Internal server error:\n' + str(response.error))
        else:
            self.set_status(response.code, response.reason)
            self._headers = httputil.HTTPHeaders()  # clear tornado default header

            for header, v in response.headers.get_all():
                if header != 'Content-Length':
                    # some headers appear multiple times, eg 'Set-Cookie'
                    self.add_header(header, v)
            if response.body:
                # rewrite the Content-Length header to match the proxied response body
                self.set_header('Content-Length', len(response.body))
                self.write(response.body)
        self.finish()

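    # prepare() runs before the verb-specific handlers and does the real work:
    # plain HTTP requests are forwarded asynchronously to the target server,
    # while websocket upgrades fall through to BaseWebsocketHandler.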
    @web.asynchronous
    def prepare(self):
        # type: () -> None
        if self.request.headers.get("Upgrade", "").lower() == 'websocket':
            return super(CombineHandler, self).prepare()
        url = transform_url(
            self.request.protocol,
            self.request.path,
            self.request.query,
            self.target_port,
            self.target_host,
        )
        try:
            fetch_request(
                url=url,
                callback=self.handle_response,
                method=self.request.method,
                headers=self._add_request_headers(["upgrade-insecure-requests"]),
                follow_redirects=False,
                body=getattr(self.request, 'body'),
                allow_nonstandard_methods=True
            )
        except httpclient.HTTPError as e:
            if hasattr(e, 'response') and e.response:
                self.handle_response(e.response)
            else:
                self.set_status(500)
                self.write('Internal server error:\n' + str(e))
                self.finish()

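
# Each concrete handler just binds the shared proxy logic in CombineHandler to
# one backend port.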
class WebPackHandler(CombineHandler):
    target_port = webpack_port


class DjangoHandler(CombineHandler):
    target_port = django_port


class TornadoHandler(CombineHandler):
    target_port = tornado_port

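
# URL routing for the proxy: event-polling and sockjs traffic goes to Tornado,
# webpack assets and socket.io requests go to the webpack dev server, and
# everything else goes to Django.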
class Application(web.Application):
    def __init__(self):
        # type: () -> None
        handlers = [
            (r"/json/events.*", TornadoHandler),
            (r"/api/v1/events.*", TornadoHandler),
            (r"/webpack.*", WebPackHandler),
            (r"/sockjs.*", TornadoHandler),
            (r"/socket.io.*", WebPackHandler),
            (r"/.*", DjangoHandler)
        ]
        super(Application, self).__init__(handlers)

def on_shutdown():
    # type: () -> None
    IOLoop.instance().stop()

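
# shutdown_handler() is installed for SIGINT/SIGTERM; it keeps re-scheduling
# itself in one-second steps while the IOLoop still has pending callbacks, and
# stops the loop once they have drained.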
def shutdown_handler(*args, **kwargs):
    # type: (*Any, **Any) -> None
    io_loop = IOLoop.instance()
    if io_loop._callbacks:
        io_loop.add_timeout(time.time() + 1, shutdown_handler)
    else:
        io_loop.stop()

# log which services/ports will be started
print("Starting Zulip services on ports: web proxy: {},".format(proxy_port),
      "Django: {}, Tornado: {}".format(django_port, tornado_port), end='')
if options.test:
    print("")  # no webpack for --test
else:
    print(", webpack: {}".format(webpack_port))

print("".join((WARNING,
               "Note: only port {} is exposed to the host in a Vagrant environment.".format(
                   proxy_port), ENDC)))

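# Run the proxy until a signal arrives; the finally block then kills our whole
# process group, taking the servers started above down with us.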
try:
    app = Application()
    app.listen(proxy_port, address=options.interface)
    ioloop = IOLoop.instance()
    for s in (signal.SIGINT, signal.SIGTERM):
        signal.signal(s, shutdown_handler)
    ioloop.start()
except:
    # Print the traceback before we get SIGTERM and die.
    traceback.print_exc()
    raise
finally:
    # Kill everything in our process group.
    os.killpg(0, signal.SIGTERM)