# Django settings for zulip project.

########################################################################
# Here's how settings for the Zulip project work:
#
# * settings.py contains non-site-specific settings and configuration
#   for the Zulip Django app.
# * settings.py imports prod_settings.py, and any site-specific configuration
#   belongs there.  The template for prod_settings.py is prod_settings_template.py
#
# See https://zulip.readthedocs.io/en/latest/subsystems/settings.html for more information
#
########################################################################
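
# As an illustrative sketch only (the setting names come from
# REQUIRED_SETTINGS below; the values are hypothetical), a minimal
# site-specific override in /etc/zulip/settings.py might look like:
#
#   EXTERNAL_HOST = "chat.example.com"
#   ZULIP_ADMINISTRATOR = "admin@example.com"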

from copy import deepcopy
import os
import time
import sys
from typing import Any, Dict, List, Optional, Tuple, Union

from zerver.lib.db import TimeTrackingConnection
import zerver.lib.logging_util

########################################################################
# INITIAL SETTINGS
########################################################################

from .config import DEPLOY_ROOT, PRODUCTION, DEVELOPMENT, get_secret, get_config, get_from_file_if_exists

# Make this unique, and don't share it with anybody.
SECRET_KEY = get_secret("secret_key")

# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = get_secret("shared_secret")

# We use this salt to hash a user's email into a filename for their user-uploaded
# avatar.  If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world.  Don't use the salt where there is more security exposure.
AVATAR_SALT = get_secret("avatar_salt")

# SERVER_GENERATION is used to track whether the server has been
# restarted for triggering browser clients to reload.
SERVER_GENERATION = int(time.time())

# Key to authenticate this server to zulip.org for push notifications, etc.
ZULIP_ORG_KEY = get_secret("zulip_org_key")
ZULIP_ORG_ID = get_secret("zulip_org_id")

if 'DEBUG' not in globals():
    # Uncomment end of next line to test CSS minification.
    # For webpack JS minification use tools/run_dev.py --minify
    DEBUG = DEVELOPMENT  # and platform.node() != 'your-machine'

if DEBUG:
    INTERNAL_IPS = ('127.0.0.1',)

# Detect whether we're running as a queue worker; this impacts the logging configuration.
if len(sys.argv) > 2 and sys.argv[0].endswith('manage.py') and sys.argv[1] == 'process_queue':
    IS_WORKER = True
else:
    IS_WORKER = False

# This is overridden in test_settings.py for the test suites
TEST_SUITE = False
# The new user tutorial is enabled by default, but disabled for client tests.
TUTORIAL_ENABLED = True
# This is overridden in test_settings.py for the test suites
CASPER_TESTS = False
# This is overridden in test_settings.py for the test suites
RUNNING_OPENAPI_CURL_TEST = False

# Google Compute Engine has an /etc/boto.cfg that is "nicely
# configured" to work with GCE's storage service.  However, their
# configuration is aggressively broken, in that importing
# boto in a virtualenv that doesn't contain the GCE tools crashes.
#
# By using our own path for BOTO_CONFIG, we can cause boto to not
# process /etc/boto.cfg.
os.environ['BOTO_CONFIG'] = '/etc/zulip/boto.cfg'

########################################################################
# DEFAULT VALUES FOR SETTINGS
########################################################################

# For any settings that are not set in the site-specific configuration file
# (/etc/zulip/settings.py in production, or dev_settings.py or test_settings.py
# in dev and test), we want to initialize them to sane defaults.
from .default_settings import *

# Import variables like secrets from the prod_settings file
# Import prod_settings after determining the deployment/machine type
if PRODUCTION:
    from .prod_settings import *
else:
    from .dev_settings import *

# These are the settings that we will check that the user has filled in for
# production deployments before starting the app.  It consists of a series
# of pairs of (setting name, default value that it must be changed from)
REQUIRED_SETTINGS = [("EXTERNAL_HOST", "zulip.example.com"),
                     ("ZULIP_ADMINISTRATOR", "zulip-admin@example.com"),
                     # SECRET_KEY doesn't really need to be here, in
                     # that we set it automatically, but just in
                     # case, it seems worth having in this list
                     ("SECRET_KEY", ""),
                     ("AUTHENTICATION_BACKENDS", ()),
                     ]

MANAGERS = ADMINS

########################################################################
# STANDARD DJANGO SETTINGS
########################################################################

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# This directory will be used to store logs for the development environment.
DEVELOPMENT_LOG_DIRECTORY = os.path.join(DEPLOY_ROOT, 'var', 'log')

# Make redirects work properly behind a reverse proxy
USE_X_FORWARDED_HOST = True

# Extend ALLOWED_HOSTS with localhost (needed to RPC to Tornado),
ALLOWED_HOSTS += ['127.0.0.1', 'localhost']
# ... with hosts corresponding to EXTERNAL_HOST,
ALLOWED_HOSTS += [EXTERNAL_HOST.split(":")[0],
                  '.' + EXTERNAL_HOST.split(":")[0]]
# ... and with the hosts in REALM_HOSTS.
ALLOWED_HOSTS += REALM_HOSTS.values()
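
# Illustrative sketch only (the subdomain and hostname below are
# hypothetical): REALM_HOSTS is expected to map realm subdomains to the
# hostnames they are served on, e.g.
#
#   REALM_HOSTS = {'engineering': 'chat.example.com'}
#
# which is why each of its values is also added to ALLOWED_HOSTS above.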

from django.template.loaders import app_directories

class TwoFactorLoader(app_directories.Loader):
    def get_dirs(self) -> List[str]:
        dirs = super().get_dirs()
        return [d for d in dirs if 'two_factor' in d]

MIDDLEWARE = (
    # With the exception of its dependencies,
    # our logging middleware should be the top middleware item.
    'zerver.middleware.TagRequests',
    'zerver.middleware.SetRemoteAddrFromForwardedFor',
    'zerver.middleware.LogRequests',
    'zerver.middleware.JsonErrorHandler',
    'zerver.middleware.RateLimitMiddleware',
    'zerver.middleware.FlushDisplayRecipientCache',
    'django_cookies_samesite.middleware.CookiesSameSite',
    'django.middleware.common.CommonMiddleware',
    'zerver.middleware.SessionHostDomainMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # Make sure 2FA middlewares come after authentication middleware.
    'django_otp.middleware.OTPMiddleware',  # Required by Two Factor auth.
    'two_factor.middleware.threadlocals.ThreadLocals',  # Required by Twilio
    # Needs to be after CommonMiddleware, which sets Content-Length
    'zerver.middleware.FinalizeOpenGraphDescription',
)

ANONYMOUS_USER_ID = None

AUTH_USER_MODEL = "zerver.UserProfile"

TEST_RUNNER = 'zerver.lib.test_runner.Runner'

ROOT_URLCONF = 'zproject.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'zproject.wsgi.application'

# A site can include additional installed apps via the
# EXTRA_INSTALLED_APPS setting
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'confirmation',
    'webpack_loader',
    'zerver',
    'social_django',
    # 2FA related apps.
    'django_otp',
    'django_otp.plugins.otp_static',
    'django_otp.plugins.otp_totp',
    'two_factor',
]
if USING_PGROONGA:
    INSTALLED_APPS += ['pgroonga']
INSTALLED_APPS += EXTRA_INSTALLED_APPS

ZILENCER_ENABLED = 'zilencer' in INSTALLED_APPS
CORPORATE_ENABLED = 'corporate' in INSTALLED_APPS

# Base URL of the Tornado server
# We set it to None when running backend tests or populate_db.
# We override the port number when running frontend tests.
TORNADO_PROCESSES = int(get_config('application_server', 'tornado_processes', '1'))
TORNADO_SERVER = 'http://127.0.0.1:9993'  # type: Optional[str]
RUNNING_INSIDE_TORNADO = False
AUTORELOAD = DEBUG

SILENCED_SYSTEM_CHECKS = [
    # auth.W004 checks that the UserProfile field named by USERNAME_FIELD has
    # `unique=True`.  For us this is `email`, and it's unique only per-realm.
    # Per Django docs, this is perfectly fine so long as our authentication
    # backends support the username not being unique; and they do.
    # See: https://docs.djangoproject.com/en/1.11/topics/auth/customizing/#django.contrib.auth.models.CustomUser.USERNAME_FIELD
    "auth.W004",
    # urls.W003 warns against using colons in the name in url(..., name) because colons are used
    # for namespaces.  We need to override a url entry in the social: namespace, so we use
    # the colon in this way intentionally.
    "urls.W003",
]

########################################################################
# DATABASE CONFIGURATION
########################################################################

# Zulip's Django configuration supports 4 different ways to do
# postgres authentication:
#
# * The development environment uses the `local_database_password`
#   secret from `zulip-secrets.conf` to authenticate with a local
#   database.  The password is automatically generated and managed by
#   `generate_secrets.py` during provisioning.
#
# The remaining 3 options are for production use:
#
# * Using postgres' "peer" authentication to authenticate to a
#   database on the local system using one's user ID (processes
#   running as user `zulip` on the system are automatically
#   authenticated as database user `zulip`).  This is the default in
#   production.  We don't use this in the development environment,
#   because it requires the developer's user to be called `zulip`.
#
# * Using password authentication with a remote postgres server using
#   the `REMOTE_POSTGRES_HOST` setting and the password from the
#   `postgres_password` secret.
#
# * Using passwordless authentication with a remote postgres server
#   using the `REMOTE_POSTGRES_HOST` setting and a client certificate
#   under `/home/zulip/.postgresql/`.
#
# We implement these options with a default DATABASES configuration
# supporting peer authentication, with logic to override it as
# appropriate if DEVELOPMENT or REMOTE_POSTGRES_HOST is set.
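#
# As a sketch of the remote-database case (the hostname and port values are
# hypothetical; the setting names are the ones used in the override logic
# below), a production /etc/zulip/settings.py might set:
#
#   REMOTE_POSTGRES_HOST = 'postgres.example.com'
#   REMOTE_POSTGRES_PORT = '5432'
#   REMOTE_POSTGRES_SSLMODE = 'verify-full'
#
# with the database password, if password authentication is used, stored as
# the `postgres_password` secret.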

DATABASES = {"default": {
    'ENGINE': 'django.db.backends.postgresql',
    'NAME': 'zulip',
    'USER': 'zulip',
    # Password = '' => peer/certificate authentication (no password)
    'PASSWORD': '',
    # Host = '' => connect to localhost by default
    'HOST': '',
    'SCHEMA': 'zulip',
    'CONN_MAX_AGE': 600,
    'OPTIONS': {
        'connection_factory': TimeTrackingConnection
    },
}}  # type: Dict[str, Dict[str, Any]]

if DEVELOPMENT:
    LOCAL_DATABASE_PASSWORD = get_secret("local_database_password")
    DATABASES["default"].update({
        'PASSWORD': LOCAL_DATABASE_PASSWORD,
        'HOST': 'localhost'
    })
elif REMOTE_POSTGRES_HOST != '':
    DATABASES['default'].update({
        'HOST': REMOTE_POSTGRES_HOST,
        'PORT': REMOTE_POSTGRES_PORT
    })
    if get_secret("postgres_password") is not None:
        DATABASES['default'].update({
            'PASSWORD': get_secret("postgres_password"),
        })
    if REMOTE_POSTGRES_SSLMODE != '':
        DATABASES['default']['OPTIONS']['sslmode'] = REMOTE_POSTGRES_SSLMODE
    else:
        DATABASES['default']['OPTIONS']['sslmode'] = 'verify-full'

POSTGRES_MISSING_DICTIONARIES = bool(get_config('postgresql', 'missing_dictionaries', None))

########################################################################
# RABBITMQ CONFIGURATION
########################################################################

USING_RABBITMQ = True
RABBITMQ_PASSWORD = get_secret("rabbitmq_password")

########################################################################
# CACHING CONFIGURATION
########################################################################

SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"

# Compress large values being stored in memcached; this is important
# for at least the realm_users cache.
PYLIBMC_MIN_COMPRESS_LEN = 100 * 1024
PYLIBMC_COMPRESS_LEVEL = 1

MEMCACHED_PASSWORD = get_secret("memcached_password")

CACHES = {
    'default': {
        'BACKEND': 'django_pylibmc.memcached.PyLibMCCache',
        'LOCATION': MEMCACHED_LOCATION,
        'TIMEOUT': 3600,
        'BINARY': True,
        'USERNAME': MEMCACHED_USERNAME,
        'PASSWORD': MEMCACHED_PASSWORD,
        'OPTIONS': {
            'tcp_nodelay': True,
            'retry_timeout': 1,
        }
    },
    'database': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'third_party_api_results',
        # This cache shouldn't timeout; we're really just using the
        # cache API to store the results of requests to third-party
        # APIs like the Twitter API permanently.
        'TIMEOUT': None,
        'OPTIONS': {
            'MAX_ENTRIES': 100000000,
            'CULL_FREQUENCY': 10,
        }
    },
    'in-memory': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}

########################################################################
# REDIS-BASED RATE LIMITING CONFIGURATION
########################################################################

RATE_LIMITING_RULES = {
    'api_by_user': [
        (60, 200),  # 200 requests max every minute
    ],
    'authenticate_by_username': [
        (1800, 5),  # 5 login attempts within 30 minutes
    ],
    'password_reset_form_by_email': [
        (3600, 2),  # 2 reset emails per hour
        (86400, 5),  # 5 per day
    ],
}

RATE_LIMITING_MIRROR_REALM_RULES = [
    (60, 50),  # 50 emails per minute
    (300, 120),  # 120 emails per 5 minutes
    (3600, 600),  # 600 emails per hour
]
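
# Note (illustration only): each rule above is a (window_in_seconds,
# max_requests) pair, so a hypothetical extra cap of 1000 API requests per
# hour under 'api_by_user' would be written as (3600, 1000).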

DEBUG_RATE_LIMITING = DEBUG
REDIS_PASSWORD = get_secret('redis_password')

########################################################################
# SECURITY SETTINGS
########################################################################

# Tell the browser to never send our cookies without encryption, e.g.
# when executing the initial http -> https redirect.
#
# Turn it off for local testing because we don't have SSL.
if PRODUCTION:
    SESSION_COOKIE_SECURE = True
    CSRF_COOKIE_SECURE = True

# For get_updates hostname sharding.
domain = get_config('django', 'cookie_domain', None)
if domain is not None:
    CSRF_COOKIE_DOMAIN = '.' + domain

# Enable SameSite cookies (default in Django 2.1)
SESSION_COOKIE_SAMESITE = 'Lax'

# Prevent Javascript from reading the CSRF token from cookies.  Our code gets
# the token from the DOM, which means malicious code could too.  But hiding the
# cookie will slow down some attackers.
CSRF_COOKIE_PATH = '/;HttpOnly'
CSRF_FAILURE_VIEW = 'zerver.middleware.csrf_failure'

if DEVELOPMENT:
    # Use fast password hashing for creating testing users when not
    # PRODUCTION.  Saves a bunch of time.
    PASSWORD_HASHERS = (
        'django.contrib.auth.hashers.SHA1PasswordHasher',
        'django.contrib.auth.hashers.PBKDF2PasswordHasher'
    )
    # Also we auto-generate passwords for the default users which you
    # can query using ./manage.py print_initial_password
    INITIAL_PASSWORD_SALT = get_secret("initial_password_salt")
else:
    # For production, use the best password hashing algorithm: Argon2
    # Zulip was originally on PBKDF2 so we need it for compatibility
    PASSWORD_HASHERS = ('django.contrib.auth.hashers.Argon2PasswordHasher',
                        'django.contrib.auth.hashers.PBKDF2PasswordHasher')

########################################################################
# API/BOT SETTINGS
########################################################################

ROOT_DOMAIN_URI = EXTERNAL_URI_SCHEME + EXTERNAL_HOST

if "NAGIOS_BOT_HOST" not in vars():
    NAGIOS_BOT_HOST = EXTERNAL_HOST

S3_KEY = get_secret("s3_key")
S3_SECRET_KEY = get_secret("s3_secret_key")

if LOCAL_UPLOADS_DIR is not None:
    if SENDFILE_BACKEND is None:
        SENDFILE_BACKEND = 'django_sendfile.backends.nginx'
    SENDFILE_ROOT = os.path.join(LOCAL_UPLOADS_DIR, "files")
    SENDFILE_URL = '/serve_uploads'

# GCM tokens are IP-whitelisted; if we deploy to additional
# servers you will need to explicitly add their IPs here:
# https://cloud.google.com/console/project/apps~zulip-android/apiui/credential
ANDROID_GCM_API_KEY = get_secret("android_gcm_api_key")

DROPBOX_APP_KEY = get_secret("dropbox_app_key")

MAILCHIMP_API_KEY = get_secret("mailchimp_api_key")

# Twitter API credentials
# Secrecy not required because it's only used for R/O requests.
# Please don't make us go over our rate limit.
TWITTER_CONSUMER_KEY = get_secret("twitter_consumer_key")
TWITTER_CONSUMER_SECRET = get_secret("twitter_consumer_secret")
TWITTER_ACCESS_TOKEN_KEY = get_secret("twitter_access_token_key")
TWITTER_ACCESS_TOKEN_SECRET = get_secret("twitter_access_token_secret")

# These are the bots that Zulip sends automated messages as.
INTERNAL_BOTS = [{'var_name': 'NOTIFICATION_BOT',
                  'email_template': 'notification-bot@%s',
                  'name': 'Notification Bot'},
                 {'var_name': 'EMAIL_GATEWAY_BOT',
                  'email_template': 'emailgateway@%s',
                  'name': 'Email Gateway'},
                 {'var_name': 'NAGIOS_SEND_BOT',
                  'email_template': 'nagios-send-bot@%s',
                  'name': 'Nagios Send Bot'},
                 {'var_name': 'NAGIOS_RECEIVE_BOT',
                  'email_template': 'nagios-receive-bot@%s',
                  'name': 'Nagios Receive Bot'},
                 {'var_name': 'WELCOME_BOT',
                  'email_template': 'welcome-bot@%s',
                  'name': 'Welcome Bot'}]

# Bots that are created for each realm, like the reminder-bot, go here.
REALM_INTERNAL_BOTS = []  # type: List[Dict[str, str]]
# These are realm-internal bots that may exist in some organizations, so we
# configure the setting for them, but they should not be auto-created at this time.
DISABLED_REALM_INTERNAL_BOTS = [
    {'var_name': 'REMINDER_BOT',
     'email_template': 'reminder-bot@%s',
     'name': 'Reminder Bot'}
]

if PRODUCTION:
    INTERNAL_BOTS += [
        {'var_name': 'NAGIOS_STAGING_SEND_BOT',
         'email_template': 'nagios-staging-send-bot@%s',
         'name': 'Nagios Staging Send Bot'},
        {'var_name': 'NAGIOS_STAGING_RECEIVE_BOT',
         'email_template': 'nagios-staging-receive-bot@%s',
         'name': 'Nagios Staging Receive Bot'},
    ]

INTERNAL_BOT_DOMAIN = "zulip.com"

# Set the realm-specific bot names
for bot in INTERNAL_BOTS + REALM_INTERNAL_BOTS + DISABLED_REALM_INTERNAL_BOTS:
    if vars().get(bot['var_name']) is None:
        bot_email = bot['email_template'] % (INTERNAL_BOT_DOMAIN,)
        vars()[bot['var_name']] = bot_email

########################################################################
# STATSD CONFIGURATION
########################################################################

# Statsd is not super well supported; if you want to use it you'll need
# to set STATSD_HOST and STATSD_PREFIX.
if STATSD_HOST != '':
    INSTALLED_APPS += ['django_statsd']
    STATSD_PORT = 8125
    STATSD_CLIENT = 'django_statsd.clients.normal'
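
# For example (hypothetical hostname; STATSD_HOST and STATSD_PREFIX are the
# settings named above), enabling statsd from a site-specific settings file
# might look like:
#
#   STATSD_HOST = 'stats.example.com'
#   STATSD_PREFIX = 'zulip'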

########################################################################
# CAMO HTTPS CACHE CONFIGURATION
########################################################################

if CAMO_URI != '':
    # This needs to be synced with the Camo installation
    CAMO_KEY = get_secret("camo_key")

########################################################################
# STATIC CONTENT AND MINIFICATION SETTINGS
########################################################################

STATIC_URL = '/static/'

# ZulipStorage is a modified version of ManifestStaticFilesStorage,
# and, like that class, it inserts a file hash into filenames
# to prevent the browser from using stale files from cache.
#
# Unlike PipelineStorage, it requires the files to exist in
# STATIC_ROOT even for dev servers.  So we only use
# ZulipStorage when not DEBUG.

if not DEBUG:
    STATICFILES_STORAGE = 'zerver.lib.storage.ZulipStorage'

if PRODUCTION:
    STATIC_ROOT = '/home/zulip/prod-static'
else:
    STATIC_ROOT = os.path.abspath(os.path.join(DEPLOY_ROOT, 'prod-static/serve'))

# If changing this, you also need to update the hack modifications to this in
# our compilemessages management command.
LOCALE_PATHS = (os.path.join(DEPLOY_ROOT, 'locale'),)

# We want all temporary uploaded files to be stored on disk.
FILE_UPLOAD_MAX_MEMORY_SIZE = 0

STATICFILES_DIRS = ['static/']

if DEBUG:
    WEBPACK_STATS_FILE = os.path.join('var', 'webpack-stats-dev.json')
else:
    WEBPACK_STATS_FILE = 'webpack-stats-production.json'
WEBPACK_LOADER = {
    'DEFAULT': {
        'CACHE': not DEBUG,
        'BUNDLE_DIR_NAME': 'webpack-bundles/',
        'STATS_FILE': os.path.join(DEPLOY_ROOT, WEBPACK_STATS_FILE),
    }
}

########################################################################
# TEMPLATES SETTINGS
########################################################################

# List of callables that know how to import templates from various sources.
LOADERS = [
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
]  # type: List[Union[str, Tuple[object, ...]]]
if PRODUCTION:
    # Template caching is a significant performance win in production.
    LOADERS = [('django.template.loaders.cached.Loader', LOADERS)]

base_template_engine_settings = {
    'BACKEND': 'django.template.backends.jinja2.Jinja2',
    'OPTIONS': {
        'environment': 'zproject.jinja2.environment',
        'extensions': [
            'jinja2.ext.i18n',
            'jinja2.ext.autoescape',
            'webpack_loader.contrib.jinja2ext.WebpackExtension',
        ],
        'context_processors': [
            'zerver.context_processors.zulip_default_context',
            'django.template.context_processors.i18n',
        ],
    },
}  # type: Dict[str, Any]

default_template_engine_settings = deepcopy(base_template_engine_settings)
default_template_engine_settings.update({
    'NAME': 'Jinja2',
    'DIRS': [
        # The main templates directory
        os.path.join(DEPLOY_ROOT, 'templates'),
        # The webhook integration templates
        os.path.join(DEPLOY_ROOT, 'zerver', 'webhooks'),
        # The python-zulip-api:zulip_bots package templates
        os.path.join('static' if DEBUG else STATIC_ROOT, 'generated', 'bots'),
    ],
    'APP_DIRS': True,
})

non_html_template_engine_settings = deepcopy(base_template_engine_settings)
non_html_template_engine_settings.update({
    'NAME': 'Jinja2_plaintext',
    'DIRS': [os.path.join(DEPLOY_ROOT, 'templates')],
    'APP_DIRS': False,
})
non_html_template_engine_settings['OPTIONS'].update({
    'autoescape': False,
    'trim_blocks': True,
    'lstrip_blocks': True,
})

# django-two-factor uses the default Django template engine (not Jinja2), so we
# need to add config for it here.
two_factor_template_options = deepcopy(default_template_engine_settings['OPTIONS'])
del two_factor_template_options['environment']
del two_factor_template_options['extensions']
two_factor_template_options['loaders'] = ['zproject.settings.TwoFactorLoader']

two_factor_template_engine_settings = {
    'NAME': 'Two_Factor',
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [],
    'APP_DIRS': False,
    'OPTIONS': two_factor_template_options,
}

# The order here is important; get_template and related/parent functions try
# the template engines in order until one succeeds.
TEMPLATES = [
    default_template_engine_settings,
    non_html_template_engine_settings,
    two_factor_template_engine_settings,
]

########################################################################
# LOGGING SETTINGS
########################################################################

def zulip_path(path: str) -> str:
    if DEVELOPMENT:
        # if DEVELOPMENT, store these files in the Zulip checkout
        if path.startswith("/var/log"):
            path = os.path.join(DEVELOPMENT_LOG_DIRECTORY, os.path.basename(path))
        else:
            path = os.path.join(os.path.join(DEPLOY_ROOT, 'var'), os.path.basename(path))
    return path

SERVER_LOG_PATH = zulip_path("/var/log/zulip/server.log")
ERROR_FILE_LOG_PATH = zulip_path("/var/log/zulip/errors.log")
MANAGEMENT_LOG_PATH = zulip_path("/var/log/zulip/manage.log")
WORKER_LOG_PATH = zulip_path("/var/log/zulip/workers.log")
JSON_PERSISTENT_QUEUE_FILENAME_PATTERN = zulip_path("/home/zulip/tornado/event_queues%s.json")
EMAIL_LOG_PATH = zulip_path("/var/log/zulip/send_email.log")
EMAIL_MIRROR_LOG_PATH = zulip_path("/var/log/zulip/email_mirror.log")
EMAIL_DELIVERER_LOG_PATH = zulip_path("/var/log/zulip/email-deliverer.log")
EMAIL_CONTENT_LOG_PATH = zulip_path("/var/log/zulip/email_content.log")
LDAP_LOG_PATH = zulip_path("/var/log/zulip/ldap.log")
LDAP_SYNC_LOG_PATH = zulip_path("/var/log/zulip/sync_ldap_user_data.log")
QUEUE_ERROR_DIR = zulip_path("/var/log/zulip/queue_error")
DIGEST_LOG_PATH = zulip_path("/var/log/zulip/digest.log")
ANALYTICS_LOG_PATH = zulip_path("/var/log/zulip/analytics.log")
ANALYTICS_LOCK_DIR = zulip_path("/home/zulip/deployments/analytics-lock-dir")
API_KEY_ONLY_WEBHOOK_LOG_PATH = zulip_path("/var/log/zulip/webhooks_errors.log")
WEBHOOK_UNEXPECTED_EVENTS_LOG_PATH = zulip_path("/var/log/zulip/webhooks_unexpected_events.log")
SOFT_DEACTIVATION_LOG_PATH = zulip_path("/var/log/zulip/soft_deactivation.log")
TRACEMALLOC_DUMP_DIR = zulip_path("/var/log/zulip/tracemalloc")
SCHEDULED_MESSAGE_DELIVERER_LOG_PATH = zulip_path("/var/log/zulip/scheduled_message_deliverer.log")
RETENTION_LOG_PATH = zulip_path("/var/log/zulip/message_retention.log")

# The EVENT_LOGS feature is an ultra-legacy piece of code, which
# originally logged all significant database changes for debugging.
# We plan to replace it with RealmAuditLog, stored in the database,
# everywhere that code mentioning it appears.
if EVENT_LOGS_ENABLED:
    EVENT_LOG_DIR = zulip_path("/home/zulip/logs/event_log")  # type: Optional[str]
else:
    EVENT_LOG_DIR = None

ZULIP_WORKER_TEST_FILE = '/tmp/zulip-worker-test-file'

if IS_WORKER:
    FILE_LOG_PATH = WORKER_LOG_PATH
else:
    FILE_LOG_PATH = SERVER_LOG_PATH

# This is disabled in a few tests.
LOGGING_ENABLED = True

DEFAULT_ZULIP_HANDLERS = (
    (['zulip_admins'] if ERROR_REPORTING else []) +
    ['console', 'file', 'errors_file']
)

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'default': {
            '()': 'zerver.lib.logging_util.ZulipFormatter',
        }
    },
    'filters': {
        'ZulipLimiter': {
            '()': 'zerver.lib.logging_util.ZulipLimiter',
        },
        'EmailLimiter': {
            '()': 'zerver.lib.logging_util.EmailLimiter',
        },
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
        'nop': {
            '()': 'zerver.lib.logging_util.ReturnTrue',
        },
        'require_logging_enabled': {
            '()': 'zerver.lib.logging_util.ReturnEnabled',
        },
        'require_really_deployed': {
            '()': 'zerver.lib.logging_util.RequireReallyDeployed',
        },
        'skip_200_and_304': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': zerver.lib.logging_util.skip_200_and_304,
        },
        'skip_boring_404s': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': zerver.lib.logging_util.skip_boring_404s,
        },
        'skip_site_packages_logs': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': zerver.lib.logging_util.skip_site_packages_logs,
        },
    },
    'handlers': {
        'zulip_admins': {
            'level': 'ERROR',
            'class': 'zerver.logging_handlers.AdminNotifyHandler',
            'filters': (['ZulipLimiter', 'require_debug_false', 'require_really_deployed']
                        if not DEBUG_ERROR_REPORTING else []),
            'formatter': 'default'
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'default'
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': FILE_LOG_PATH,
        },
        'errors_file': {
            'level': 'WARNING',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': ERROR_FILE_LOG_PATH,
        },
        'ldap_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': LDAP_LOG_PATH,
        },
    },
    'loggers': {
        # The Python logging module uses a hierarchy of logger names for config:
        # "foo.bar" has parent "foo" has parent "", the root.  But the semantics
        # are subtle: it walks this hierarchy once to find the log level to
        # decide whether to log the record at all, then a separate time to find
        # handlers to emit the record.
        #
        # For `level`, the most specific ancestor that has a `level` counts.
        # For `handlers`, the most specific ancestor that has a `handlers`
        # counts (assuming we set `propagate=False`, which we always do.)
        # These are independent -- they might come at the same layer, or
        # either one could come before the other.
        #
        # For `filters`, no ancestors count at all -- only the exact logger name
        # the record was logged at.
        #
        # Upstream docs: https://docs.python.org/3/library/logging
        #
        # Style rules:
        #  * Always set `propagate=False` if setting `handlers`.
        #  * Setting `level` equal to the parent is redundant; don't.
        #  * Setting `handlers` equal to the parent is redundant; don't.
        #  * Always write in order: level, filters, handlers, propagate.
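        #
        # As an illustrative template only (the logger name below is
        # hypothetical; the filter and handler names are defined above), an
        # entry following these rules would be written as:
        #
        #   'zulip.example': {
        #       'level': 'DEBUG',
        #       'filters': ['require_debug_true'],
        #       'handlers': ['console'],
        #       'propagate': False,
        #   },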

        # root logger
        '': {
            'level': 'INFO',
            'filters': ['require_logging_enabled'],
            'handlers': DEFAULT_ZULIP_HANDLERS,
        },

        # Django, alphabetized
        'django': {
            # Django's default logging config has already set some
            # things on this logger.  Just mentioning it here causes
            # `logging.config` to reset it to defaults, as if never
            # configured; which is what we want for it.
        },
        'django.request': {
            'level': 'WARNING',
            'filters': ['skip_boring_404s'],
        },
        'django.security.DisallowedHost': {
            'handlers': ['file'],
            'propagate': False,
        },
        'django.server': {
            'filters': ['skip_200_and_304'],
            'handlers': ['console', 'file'],
            'propagate': False,
        },
        'django.template': {
            'level': 'DEBUG',
            'filters': ['require_debug_true', 'skip_site_packages_logs'],
            'handlers': ['console'],
            'propagate': False,
        },

        ## Uncomment the following to get all database queries logged to the console
        # 'django.db': {
        #     'level': 'DEBUG',
        #     'handlers': ['console'],
        #     'propagate': False,
        # },

        # other libraries, alphabetized
        'django_auth_ldap': {
            'level': 'DEBUG',
            'handlers': ['console', 'ldap_file', 'errors_file'],
            'propagate': False,
        },
        'pika.adapters': {
            # pika is super chatty on INFO.
            'level': 'WARNING',
            # pika spews a lot of ERROR logs when a connection fails.
            # We reconnect automatically, so those should be treated as WARNING --
            # write to the log for use in debugging, but no error emails/Zulips.
            'handlers': ['console', 'file', 'errors_file'],
            'propagate': False,
        },
        'pika.connection': {
            # Leave `zulip_admins` out of the handlers.  See pika.adapters above.
            'handlers': ['console', 'file', 'errors_file'],
            'propagate': False,
        },
        'requests': {
            'level': 'WARNING',
        },

        # our own loggers, alphabetized
        'zerver.lib.digest': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.deliver_email': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.enqueue_digest_emails': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.deliver_scheduled_messages': {
            'level': 'DEBUG',
        },
        'zulip.ldap': {
            'level': 'DEBUG',
            'handlers': ['console', 'ldap_file', 'errors_file'],
            'propagate': False,
        },
        'zulip.management': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.queue': {
            'level': 'WARNING',
        },
        'zulip.retention': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.soft_deactivation': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.zerver.lib.webhooks.common': {
            'level': 'DEBUG',
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.zerver.webhooks': {
            'level': 'DEBUG',
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
    }
}  # type: Dict[str, Any]

LOGIN_REDIRECT_URL = '/'

# Client-side polling timeout for get_events, in milliseconds.
# We configure this here so that the client test suite can override it.
# We already kill the connection server-side with heartbeat events,
# but it's good to have a safety.  This value should be greater than
# (HEARTBEAT_MIN_FREQ_SECS + 10)
POLL_TIMEOUT = 90 * 1000

########################################################################
# SSO AND LDAP SETTINGS
########################################################################

USING_APACHE_SSO = ('zproject.backends.ZulipRemoteUserBackend' in AUTHENTICATION_BACKENDS)

if 'LDAP_DEACTIVATE_NON_MATCHING_USERS' not in vars():
    LDAP_DEACTIVATE_NON_MATCHING_USERS = (
        len(AUTHENTICATION_BACKENDS) == 1 and (AUTHENTICATION_BACKENDS[0] ==
                                               "zproject.backends.ZulipLDAPAuthBackend"))

if len(AUTHENTICATION_BACKENDS) == 1 and (AUTHENTICATION_BACKENDS[0] ==
                                          "zproject.backends.ZulipRemoteUserBackend"):
    HOME_NOT_LOGGED_IN = "/accounts/login/sso/"
    ONLY_SSO = True
else:
    HOME_NOT_LOGGED_IN = '/login/'
    ONLY_SSO = False
AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipDummyBackend',)

# Redirect to /devlogin/ by default in dev mode
if DEVELOPMENT:
    HOME_NOT_LOGGED_IN = '/devlogin/'
    LOGIN_URL = '/devlogin/'

POPULATE_PROFILE_VIA_LDAP = bool(AUTH_LDAP_SERVER_URI)

if POPULATE_PROFILE_VIA_LDAP and \
        'zproject.backends.ZulipLDAPAuthBackend' not in AUTHENTICATION_BACKENDS:
    AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipLDAPUserPopulator',)
else:
    POPULATE_PROFILE_VIA_LDAP = (
        'zproject.backends.ZulipLDAPAuthBackend' in AUTHENTICATION_BACKENDS or
        POPULATE_PROFILE_VIA_LDAP)

if POPULATE_PROFILE_VIA_LDAP:
    import ldap
    if (AUTH_LDAP_BIND_DN and ldap.OPT_REFERRALS not in AUTH_LDAP_CONNECTION_OPTIONS):
        # The default behavior of python-ldap (without setting option
        # `ldap.OPT_REFERRALS`) is to follow referrals, but anonymously.
        # If our original query was non-anonymous, that's unlikely to
        # work; skip the referral.
        #
        # The common case of this is that the server is Active Directory,
        # it's already given us the answer we need, and the referral is
        # just speculation about someplace else that has data our query
        # could in principle match.
        AUTH_LDAP_CONNECTION_OPTIONS[ldap.OPT_REFERRALS] = 0
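        #
        # Admins can also set this explicitly in /etc/zulip/settings.py,
        # which has the same effect as the default applied here:
        #
        #   import ldap
        #   AUTH_LDAP_CONNECTION_OPTIONS = {
        #       ldap.OPT_REFERRALS: 0,
        #   }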

if REGISTER_LINK_DISABLED is None:
    # The default for REGISTER_LINK_DISABLED is a bit more
    # complicated: we want it to be disabled by default for people
    # using the LDAP backend that auto-creates users on login.
    if (len(AUTHENTICATION_BACKENDS) == 2 and
            ('zproject.backends.ZulipLDAPAuthBackend' in AUTHENTICATION_BACKENDS)):
        REGISTER_LINK_DISABLED = True
    else:
        REGISTER_LINK_DISABLED = False

########################################################################
# SOCIAL AUTHENTICATION SETTINGS
########################################################################

SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = ['subdomain', 'is_signup', 'mobile_flow_otp', 'desktop_flow_otp',
                                        'multiuse_object_key']
SOCIAL_AUTH_LOGIN_ERROR_URL = '/login/'

SOCIAL_AUTH_GITHUB_SECRET = get_secret('social_auth_github_secret')
SOCIAL_AUTH_GITHUB_SCOPE = ['user:email']
SOCIAL_AUTH_GITHUB_ORG_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_ORG_SECRET = SOCIAL_AUTH_GITHUB_SECRET
SOCIAL_AUTH_GITHUB_TEAM_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_TEAM_SECRET = SOCIAL_AUTH_GITHUB_SECRET
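
# A minimal sketch of enabling GitHub auth (placeholder values): the OAuth
# client ID is configured as SOCIAL_AUTH_GITHUB_KEY in /etc/zulip/settings.py,
# while the matching client secret is read above via
# get_secret('social_auth_github_secret') from the secrets file (typically
# /etc/zulip/zulip-secrets.conf):
#
#   # /etc/zulip/settings.py
#   SOCIAL_AUTH_GITHUB_KEY = "0123456789abcdef0123"
#
#   # /etc/zulip/zulip-secrets.conf
#   social_auth_github_secret = <GitHub OAuth application secret>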

SOCIAL_AUTH_GOOGLE_SECRET = get_secret('social_auth_google_secret')
# Fallback to google-oauth settings in case social auth settings for
# google are missing; this is for backwards-compatibility with older
# Zulip versions where /etc/zulip/settings.py has not been migrated yet.
GOOGLE_OAUTH2_CLIENT_SECRET = get_secret('google_oauth2_client_secret')
SOCIAL_AUTH_GOOGLE_KEY = SOCIAL_AUTH_GOOGLE_KEY or GOOGLE_OAUTH2_CLIENT_ID
SOCIAL_AUTH_GOOGLE_SECRET = SOCIAL_AUTH_GOOGLE_SECRET or GOOGLE_OAUTH2_CLIENT_SECRET
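# For example, an older /etc/zulip/settings.py may still define
# GOOGLE_OAUTH2_CLIENT_ID (with google_oauth2_client_secret in the secrets
# file); the fallback above keeps such installs working, while newer configs
# set SOCIAL_AUTH_GOOGLE_KEY and social_auth_google_secret instead.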

if PRODUCTION:
    SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = get_from_file_if_exists("/etc/zulip/saml/zulip-cert.crt")
    SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = get_from_file_if_exists("/etc/zulip/saml/zulip-private-key.key")
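
# A sketch (placeholder values) of what an entry in SOCIAL_AUTH_SAML_ENABLED_IDPS,
# configured in /etc/zulip/settings.py, might look like; the loop below only
# relies on the 'entity_id', 'url', 'x509cert', and 'x509cert_path' keys:
#
#   SOCIAL_AUTH_SAML_ENABLED_IDPS = {
#       "example_idp": {
#           "entity_id": "https://idp.example.com/saml/metadata",
#           "url": "https://idp.example.com/saml/sso",
#           # Optional; otherwise /etc/zulip/saml/idps/example_idp.crt is read.
#           "x509cert_path": "/etc/zulip/saml/idps/example_idp.crt",
#       },
#   }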
for idp_name, idp_dict in SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
    if DEVELOPMENT:
        idp_dict['entity_id'] = get_secret('saml_entity_id', '')
        idp_dict['url'] = get_secret('saml_url', '')
        idp_dict['x509cert_path'] = 'zproject/dev_saml.cert'

    # Set `x509cert` if not specified already; also support an override path.
    if 'x509cert' in idp_dict:
        continue

    if 'x509cert_path' in idp_dict:
        path = idp_dict['x509cert_path']
    else:
        path = "/etc/zulip/saml/idps/{}.crt".format(idp_name)
    idp_dict['x509cert'] = get_from_file_if_exists(path)
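
# python-social-auth runs these pipeline functions, in order, to complete a
# social login; the last two steps are Zulip-specific and are defined in
# zproject/backends.py.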
SOCIAL_AUTH_PIPELINE = [
    'social_core.pipeline.social_auth.social_details',
    'zproject.backends.social_auth_associate_user',
    'zproject.backends.social_auth_finish',
]

########################################################################
# EMAIL SETTINGS
########################################################################

# Django setting. Not used in the Zulip codebase.
DEFAULT_FROM_EMAIL = ZULIP_ADMINISTRATOR

if EMAIL_BACKEND is not None:
    # If the server admin specified a custom email backend, use that.
    pass
elif DEVELOPMENT:
    # In the dev environment, emails are printed to the run-dev.py console.
    EMAIL_BACKEND = 'zproject.email_backends.EmailLogBackEnd'
elif not EMAIL_HOST:
    # If an email host is not specified, fail gracefully
    WARN_NO_EMAIL = True
    EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
else:
    EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
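
# A minimal outgoing-SMTP sketch (placeholder values): EMAIL_HOST and the
# other standard Django SMTP settings are configured in /etc/zulip/settings.py,
# while the password is read below via get_secret('email_password'):
#
#   EMAIL_HOST = "smtp.example.com"
#   EMAIL_HOST_USER = "zulip@example.com"
#   EMAIL_PORT = 587
#   EMAIL_USE_TLS = True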

EMAIL_HOST_PASSWORD = get_secret('email_password')
EMAIL_GATEWAY_PASSWORD = get_secret('email_gateway_password')
AUTH_LDAP_BIND_PASSWORD = get_secret('auth_ldap_bind_password', '')

########################################################################
# MISC SETTINGS
########################################################################

if PRODUCTION:
    # Filter user data out of exception/error reports.
    DEFAULT_EXCEPTION_REPORTER_FILTER = 'zerver.filters.ZulipExceptionReporterFilter'

# This is a debugging option only
PROFILE_ALL_REQUESTS = False

CROSS_REALM_BOT_EMAILS = {
    'notification-bot@zulip.com',
    'welcome-bot@zulip.com',
    'emailgateway@zulip.com',
}

THUMBOR_KEY = get_secret('thumbor_key')

TWO_FACTOR_PATCH_ADMIN = False