import os
import sys
import time
from copy import deepcopy
from typing import Any, Dict, List, Tuple, Union
from urllib.parse import urljoin

from django.template.loaders import app_directories

import zerver.lib.logging_util
from scripts.lib.zulip_tools import get_tornado_ports
from zerver.lib.db import TimeTrackingConnection

from .config import (
    DEPLOY_ROOT,
    DEVELOPMENT,
    PRODUCTION,
    config_file,
    get_config,
    get_from_file_if_exists,
    get_secret,
)
from .configured_settings import (
    ADMINS,
    ALLOWED_HOSTS,
    AUTH_LDAP_BIND_DN,
    AUTH_LDAP_CONNECTION_OPTIONS,
    AUTH_LDAP_SERVER_URI,
    AUTHENTICATION_BACKENDS,
    CAMO_URI,
    DEBUG,
    DEBUG_ERROR_REPORTING,
    EMAIL_BACKEND,
    EMAIL_HOST,
    ERROR_REPORTING,
    EXTERNAL_HOST,
    EXTERNAL_HOST_WITHOUT_PORT,
    EXTERNAL_URI_SCHEME,
    EXTRA_INSTALLED_APPS,
    GOOGLE_OAUTH2_CLIENT_ID,
    IS_DEV_DROPLET,
    LOCAL_UPLOADS_DIR,
    MEMCACHED_LOCATION,
    MEMCACHED_USERNAME,
    REALM_HOSTS,
    REGISTER_LINK_DISABLED,
    REMOTE_POSTGRES_HOST,
    REMOTE_POSTGRES_PORT,
    REMOTE_POSTGRES_SSLMODE,
    SENDFILE_BACKEND,
    SENTRY_DSN,
    SOCIAL_AUTH_APPLE_APP_ID,
    SOCIAL_AUTH_APPLE_SERVICES_ID,
    SOCIAL_AUTH_GITHUB_KEY,
    SOCIAL_AUTH_GITHUB_ORG_NAME,
    SOCIAL_AUTH_GITHUB_TEAM_ID,
    SOCIAL_AUTH_GOOGLE_KEY,
    SOCIAL_AUTH_SAML_ENABLED_IDPS,
    SOCIAL_AUTH_SAML_SECURITY_CONFIG,
    STATSD_HOST,
    USING_PGROONGA,
    ZULIP_ADMINISTRATOR,
)

########################################################################
# INITIAL SETTINGS
########################################################################

# Make this unique, and don't share it with anybody.
SECRET_KEY = get_secret("secret_key")

# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = get_secret("shared_secret")

# We use this salt to hash a user's email into a filename for their user-uploaded
# avatar.  If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world.  Don't use the salt where there is more security exposure.
AVATAR_SALT = get_secret("avatar_salt")

# SERVER_GENERATION is used to track whether the server has been
# restarted for triggering browser clients to reload.
SERVER_GENERATION = int(time.time())

# Key to authenticate this server to zulip.org for push notifications, etc.
ZULIP_ORG_KEY = get_secret("zulip_org_key")
ZULIP_ORG_ID = get_secret("zulip_org_id")

if DEBUG:
    INTERNAL_IPS = ('127.0.0.1',)

# Detect whether we're running as a queue worker; this impacts the logging configuration.
if len(sys.argv) > 2 and sys.argv[0].endswith('manage.py') and sys.argv[1] == 'process_queue':
    IS_WORKER = True
else:
    IS_WORKER = False
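
# For example, a queue worker started via something like
#   ./manage.py process_queue --queue_name=<queue>
# (illustrative invocation; exact flags depend on the version) matches the
# check above, so IS_WORKER is True and its logs go to the worker log file
# configured further below.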

# This is overridden in test_settings.py for the test suites
TEST_SUITE = False
# The new user tutorial is enabled by default, but disabled for client tests.
TUTORIAL_ENABLED = True
# This is overridden in test_settings.py for the test suites
PUPPETEER_TESTS = False
# This is overridden in test_settings.py for the test suites
RUNNING_OPENAPI_CURL_TEST = False
# This is overridden in test_settings.py for the test suites
GENERATE_STRIPE_FIXTURES = False
# This is overridden in test_settings.py for the test suites
BAN_CONSOLE_OUTPUT = False

# Google Compute Engine has an /etc/boto.cfg that is "nicely
# configured" to work with GCE's storage service.  However, their
# configuration is aggressively broken: importing boto in a
# virtualenv that doesn't contain the GCE tools crashes.
#
# By using our own path for BOTO_CONFIG, we can cause boto to not
# process /etc/boto.cfg.
os.environ['BOTO_CONFIG'] = '/etc/zulip/boto.cfg'

# These are the settings that we will check that the user has filled in for
# production deployments before starting the app.  It consists of a series
# of pairs of (setting name, default value that it must be changed from).
REQUIRED_SETTINGS = [("EXTERNAL_HOST", "zulip.example.com"),
                     ("ZULIP_ADMINISTRATOR", "zulip-admin@example.com"),
                     # SECRET_KEY doesn't really need to be here, in
                     # that we set it automatically, but just in
                     # case, it seems worth having in this list
                     ("SECRET_KEY", ""),
                     ("AUTHENTICATION_BACKENDS", ()),
                     ]
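
# A rough sketch of how these get used (the real check lives in the server
# startup / management code, not here): refuse to start if any of these
# settings still has its placeholder value, e.g.
#   for setting_name, default in REQUIRED_SETTINGS:
#       if globals().get(setting_name, None) == default:
#           ...error out, telling the admin to edit /etc/zulip/settings.py...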

MANAGERS = ADMINS

########################################################################
# STANDARD DJANGO SETTINGS
########################################################################

# Local time zone for this installation. Choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'UTC'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True

# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True

# This directory will be used to store logs for the development environment.
DEVELOPMENT_LOG_DIRECTORY = os.path.join(DEPLOY_ROOT, 'var', 'log')
# Make redirects work properly behind a reverse proxy
USE_X_FORWARDED_HOST = True

# Extend ALLOWED_HOSTS with localhost (needed to RPC to Tornado),
ALLOWED_HOSTS += ['127.0.0.1', 'localhost']
# ... with hosts corresponding to EXTERNAL_HOST,
ALLOWED_HOSTS += [EXTERNAL_HOST_WITHOUT_PORT, "." + EXTERNAL_HOST_WITHOUT_PORT]
# ... and with the hosts in REALM_HOSTS.
ALLOWED_HOSTS += REALM_HOSTS.values()
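
# For example, with EXTERNAL_HOST = "zulip.example.com" and no REALM_HOSTS
# entries, ALLOWED_HOSTS now contains (illustrative):
#   ['127.0.0.1', 'localhost', 'zulip.example.com', '.zulip.example.com']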


class TwoFactorLoader(app_directories.Loader):
    def get_dirs(self) -> List[str]:
        dirs = super().get_dirs()
        return [d for d in dirs if 'two_factor' in d]
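
# In other words: of all the app template directories that
# app_directories.Loader would normally search, this loader keeps only the
# ones belonging to the two_factor package (e.g. a path like
# ".../site-packages/two_factor/templates" -- illustrative, not exact).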

MIDDLEWARE = (
    # With the exception of its dependencies,
    # our logging middleware should be the top middleware item.
    'zerver.middleware.TagRequests',
    'zerver.middleware.SetRemoteAddrFromForwardedFor',
    'zerver.middleware.RequestContext',
    'zerver.middleware.LogRequests',
    'zerver.middleware.JsonErrorHandler',
    'zerver.middleware.RateLimitMiddleware',
    'zerver.middleware.FlushDisplayRecipientCache',
    'zerver.middleware.ZulipCommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'zerver.middleware.LocaleMiddleware',
    'zerver.middleware.HostDomainMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # Make sure 2FA middlewares come after authentication middleware.
    'django_otp.middleware.OTPMiddleware',  # Required by Two Factor auth.
    'two_factor.middleware.threadlocals.ThreadLocals',  # Required by Twilio
    # Needs to be after CommonMiddleware, which sets Content-Length
    'zerver.middleware.FinalizeOpenGraphDescription',
)

ANONYMOUS_USER_ID = None

AUTH_USER_MODEL = "zerver.UserProfile"

TEST_RUNNER = 'zerver.lib.test_runner.Runner'

ROOT_URLCONF = 'zproject.urls'

# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'zproject.wsgi.application'

# A site can include additional installed apps via the
# EXTRA_INSTALLED_APPS setting
INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.staticfiles',
    'confirmation',
    'webpack_loader',
    'zerver',
    'social_django',
    # 2FA related apps.
    'django_otp',
    'django_otp.plugins.otp_static',
    'django_otp.plugins.otp_totp',
    'two_factor',
]
if USING_PGROONGA:
    INSTALLED_APPS += ['pgroonga']
INSTALLED_APPS += EXTRA_INSTALLED_APPS

ZILENCER_ENABLED = 'zilencer' in INSTALLED_APPS
CORPORATE_ENABLED = 'corporate' in INSTALLED_APPS

TORNADO_PORTS = get_tornado_ports(config_file)
TORNADO_PROCESSES = len(TORNADO_PORTS)

RUNNING_INSIDE_TORNADO = False
AUTORELOAD = DEBUG

SILENCED_SYSTEM_CHECKS = [
    # auth.W004 checks that the UserProfile field named by USERNAME_FIELD has
    # `unique=True`.  For us this is `email`, and it's unique only per-realm.
    # Per Django docs, this is perfectly fine so long as our authentication
    # backends support the username not being unique; and they do.
    # See: https://docs.djangoproject.com/en/2.2/topics/auth/customizing/#django.contrib.auth.models.CustomUser.USERNAME_FIELD
    "auth.W004",
]

########################################################################
# DATABASE CONFIGURATION
########################################################################

# Zulip's Django configuration supports 4 different ways to do
# postgres authentication:
#
# * The development environment uses the `local_database_password`
#   secret from `zulip-secrets.conf` to authenticate with a local
#   database.  The password is automatically generated and managed by
#   `generate_secrets.py` during provisioning.
#
# The remaining 3 options are for production use:
#
# * Using postgres' "peer" authentication to authenticate to a
#   database on the local system using one's user ID (processes
#   running as user `zulip` on the system are automatically
#   authenticated as database user `zulip`).  This is the default in
#   production.  We don't use this in the development environment,
#   because it requires the developer's user to be called `zulip`.
#
# * Using password authentication with a remote postgres server using
#   the `REMOTE_POSTGRES_HOST` setting and the password from the
#   `postgres_password` secret.
#
# * Using passwordless authentication with a remote postgres server
#   using the `REMOTE_POSTGRES_HOST` setting and a client certificate
#   under `/home/zulip/.postgresql/`.
#
# We implement these options with a default DATABASES configuration
# supporting peer authentication, with logic to override it as
# appropriate if DEVELOPMENT or REMOTE_POSTGRES_HOST is set.
DATABASES: Dict[str, Dict[str, Any]] = {"default": {
    'ENGINE': 'django.db.backends.postgresql',
    'NAME': 'zulip',
    'USER': 'zulip',
    # Password = '' => peer/certificate authentication (no password)
    'PASSWORD': '',
    # Host = '' => connect to localhost by default
    'HOST': '',
    'SCHEMA': 'zulip',
    'CONN_MAX_AGE': 600,
    'OPTIONS': {
        'connection_factory': TimeTrackingConnection,
    },
}}

if DEVELOPMENT:
    LOCAL_DATABASE_PASSWORD = get_secret("local_database_password")
    DATABASES["default"].update(
        PASSWORD=LOCAL_DATABASE_PASSWORD,
        HOST='localhost',
    )
elif REMOTE_POSTGRES_HOST != '':
    DATABASES['default'].update(
        HOST=REMOTE_POSTGRES_HOST,
        PORT=REMOTE_POSTGRES_PORT,
    )
    if get_secret("postgres_password") is not None:
        DATABASES['default'].update(
            PASSWORD=get_secret("postgres_password"),
        )
    if REMOTE_POSTGRES_SSLMODE != '':
        DATABASES['default']['OPTIONS']['sslmode'] = REMOTE_POSTGRES_SSLMODE
    else:
        DATABASES['default']['OPTIONS']['sslmode'] = 'verify-full'

POSTGRES_MISSING_DICTIONARIES = bool(get_config('postgresql', 'missing_dictionaries', None))
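
# For example (illustrative values), a deployment with
# REMOTE_POSTGRES_HOST = 'db.example.com' and a `postgres_password` secret set
# ends up with roughly:
#   DATABASES['default']['HOST'] == 'db.example.com'
#   DATABASES['default']['PASSWORD'] == '<postgres_password secret>'
#   DATABASES['default']['OPTIONS']['sslmode'] == 'verify-full'
# while the default production install keeps HOST = '' and PASSWORD = '' and
# relies on postgres peer authentication.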

########################################################################
# RABBITMQ CONFIGURATION
########################################################################

USING_RABBITMQ = True
RABBITMQ_PASSWORD = get_secret("rabbitmq_password")

########################################################################
# CACHING CONFIGURATION
########################################################################

SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"

MEMCACHED_PASSWORD = get_secret("memcached_password")

CACHES = {
    'default': {
        'BACKEND': 'django_bmemcached.memcached.BMemcached',
        'LOCATION': MEMCACHED_LOCATION,
        'OPTIONS': {
            'socket_timeout': 3600,
            'username': MEMCACHED_USERNAME,
            'password': MEMCACHED_PASSWORD,
            'pickle_protocol': 4,
        },
    },
    'database': {
        'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
        'LOCATION': 'third_party_api_results',
        # This cache shouldn't timeout; we're really just using the
        # cache API to store the results of requests to third-party
        # APIs like the Twitter API permanently.
        'TIMEOUT': None,
        'OPTIONS': {
            'MAX_ENTRIES': 100000000,
            'CULL_FREQUENCY': 10,
        },
    },
    'in-memory': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
    },
}
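
# These named caches are available through Django's standard cache API, e.g.
# (illustrative sketch; the key name is hypothetical and the real call sites
# live in zerver/):
#   from django.core.cache import caches
#   caches['database'].set('twitter:1234567890', rendered_tweet, timeout=None)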

########################################################################
# REDIS-BASED RATE LIMITING CONFIGURATION
########################################################################

RATE_LIMITING_RULES = {
    'api_by_user': [
        (60, 200),  # 200 requests max every minute
    ],
    'authenticate_by_username': [
        (1800, 5),  # 5 login attempts within 30 minutes
    ],
    'password_reset_form_by_email': [
        (3600, 2),  # 2 reset emails per hour
        (86400, 5),  # 5 per day
    ],
}
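
# Each entry above is a (window_seconds, max_requests) pair, as the inline
# comments indicate: e.g. (60, 200) allows at most 200 requests from a given
# user in any 60-second window for the 'api_by_user' domain.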

# List of domains that, when applied to a request in a Tornado process,
# will be handled with the separate in-memory rate limiting backend for Tornado,
# which has its own buckets separate from the default backend.
# In principle, it should be impossible to make requests to tornado that fall into
# other domains, but we use this list as an extra precaution.
RATE_LIMITING_DOMAINS_FOR_TORNADO = ['api_by_user']

RATE_LIMITING_MIRROR_REALM_RULES = [
    (60, 50),  # 50 emails per minute
    (300, 120),  # 120 emails per 5 minutes
    (3600, 600),  # 600 emails per hour
]

DEBUG_RATE_LIMITING = DEBUG
REDIS_PASSWORD = get_secret('redis_password')

########################################################################
# SECURITY SETTINGS
########################################################################

# Tell the browser to never send our cookies without encryption, e.g.
# when executing the initial http -> https redirect.
#
# Turn it off for local testing because we don't have SSL.
if PRODUCTION:
    SESSION_COOKIE_SECURE = True
    CSRF_COOKIE_SECURE = True

    # https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-05#section-4.1.3.2
    SESSION_COOKIE_NAME = "__Host-sessionid"
    CSRF_COOKIE_NAME = "__Host-csrftoken"
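
    # Per the draft referenced above, browsers accept "__Host-"-prefixed
    # cookies only when they are marked Secure, are set with Path=/, and have
    # no Domain attribute, which pins them to exactly this host.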

# Prevent Javascript from reading the CSRF token from cookies.  Our code gets
# the token from the DOM, which means malicious code could too.  But hiding the
# cookie will slow down some attackers.
CSRF_COOKIE_HTTPONLY = True
CSRF_FAILURE_VIEW = 'zerver.middleware.csrf_failure'

if DEVELOPMENT:
    # Use fast password hashing for creating testing users when not
    # PRODUCTION.  Saves a bunch of time.
    PASSWORD_HASHERS = (
        'django.contrib.auth.hashers.SHA1PasswordHasher',
        'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    )
    # Also we auto-generate passwords for the default users which you
    # can query using ./manage.py print_initial_password
    INITIAL_PASSWORD_SALT = get_secret("initial_password_salt")
else:
    # For production, use the best password hashing algorithm: Argon2
    # Zulip was originally on PBKDF2 so we need it for compatibility
    PASSWORD_HASHERS = ('django.contrib.auth.hashers.Argon2PasswordHasher',
                        'django.contrib.auth.hashers.PBKDF2PasswordHasher')

########################################################################
# API/BOT SETTINGS
########################################################################

ROOT_DOMAIN_URI = EXTERNAL_URI_SCHEME + EXTERNAL_HOST

S3_KEY = get_secret("s3_key")
S3_SECRET_KEY = get_secret("s3_secret_key")

if LOCAL_UPLOADS_DIR is not None:
    if SENDFILE_BACKEND is None:
        SENDFILE_BACKEND = 'django_sendfile.backends.nginx'
    SENDFILE_ROOT = os.path.join(LOCAL_UPLOADS_DIR, "files")
    SENDFILE_URL = '/serve_uploads'

# GCM tokens are IP-whitelisted; if we deploy to additional
# servers you will need to explicitly add their IPs here:
# https://cloud.google.com/console/project/apps~zulip-android/apiui/credential
ANDROID_GCM_API_KEY = get_secret("android_gcm_api_key")

DROPBOX_APP_KEY = get_secret("dropbox_app_key")

BIG_BLUE_BUTTON_SECRET = get_secret('big_blue_button_secret')

MAILCHIMP_API_KEY = get_secret("mailchimp_api_key")

# Twitter API credentials
# Secrecy not required because it's only used for R/O requests.
# Please don't make us go over our rate limit.
TWITTER_CONSUMER_KEY = get_secret("twitter_consumer_key")
TWITTER_CONSUMER_SECRET = get_secret("twitter_consumer_secret")
TWITTER_ACCESS_TOKEN_KEY = get_secret("twitter_access_token_key")
TWITTER_ACCESS_TOKEN_SECRET = get_secret("twitter_access_token_secret")

# These are the bots that Zulip sends automated messages as.
INTERNAL_BOTS = [{'var_name': 'NOTIFICATION_BOT',
                  'email_template': 'notification-bot@%s',
                  'name': 'Notification Bot',
                  },
                 {'var_name': 'EMAIL_GATEWAY_BOT',
                  'email_template': 'emailgateway@%s',
                  'name': 'Email Gateway',
                  },
                 {'var_name': 'NAGIOS_SEND_BOT',
                  'email_template': 'nagios-send-bot@%s',
                  'name': 'Nagios Send Bot',
                  },
                 {'var_name': 'NAGIOS_RECEIVE_BOT',
                  'email_template': 'nagios-receive-bot@%s',
                  'name': 'Nagios Receive Bot',
                  },
                 {'var_name': 'WELCOME_BOT',
                  'email_template': 'welcome-bot@%s',
                  'name': 'Welcome Bot',
                  }]
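
# The 'email_template' values are filled in with a bot domain -- for example,
# 'notification-bot@%s' % INTERNAL_BOT_DOMAIN (defined below) yields
# 'notification-bot@zulip.com', which matches the CROSS_REALM_BOT_EMAILS set
# near the end of this file.  (Sketch of the intent; the actual substitution
# happens in the code that creates these bots.)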

# Bots that are created for each realm, like the reminder-bot, go here.
REALM_INTERNAL_BOTS: List[Dict[str, str]] = []
# These are realm-internal bots that may exist in some organizations,
# so we configure the setting for them, but they should not be
# auto-created at this time.
DISABLED_REALM_INTERNAL_BOTS = [
    {'var_name': 'REMINDER_BOT',
     'email_template': 'reminder-bot@%s',
     'name': 'Reminder Bot',
     },
]

if PRODUCTION:
    INTERNAL_BOTS += [
        {'var_name': 'NAGIOS_STAGING_SEND_BOT',
         'email_template': 'nagios-staging-send-bot@%s',
         'name': 'Nagios Staging Send Bot',
         },
        {'var_name': 'NAGIOS_STAGING_RECEIVE_BOT',
         'email_template': 'nagios-staging-receive-bot@%s',
         'name': 'Nagios Staging Receive Bot',
         },
    ]

INTERNAL_BOT_DOMAIN = "zulip.com"

########################################################################
# STATSD CONFIGURATION
########################################################################

# Statsd is not super well supported; if you want to use it you'll need
# to set STATSD_HOST and STATSD_PREFIX.
if STATSD_HOST != '':
    INSTALLED_APPS += ['django_statsd']
    STATSD_PORT = 8125
    STATSD_CLIENT = 'django_statsd.clients.normal'

########################################################################
# CAMO HTTPS CACHE CONFIGURATION
########################################################################

if CAMO_URI != '':
    # This needs to be synced with the Camo installation
    CAMO_KEY = get_secret("camo_key")

########################################################################
# STATIC CONTENT AND MINIFICATION SETTINGS
########################################################################

if PRODUCTION or IS_DEV_DROPLET or os.getenv('EXTERNAL_HOST') is not None:
    STATIC_URL = urljoin(ROOT_DOMAIN_URI, '/static/')
else:
    STATIC_URL = 'http://localhost:9991/static/'

# ZulipStorage is a modified version of ManifestStaticFilesStorage,
# and, like that class, it inserts a file hash into filenames
# to prevent the browser from using stale files from cache.
#
# Unlike PipelineStorage, it requires the files to exist in
# STATIC_ROOT even for dev servers.  So we only use
# ZulipStorage when not DEBUG.

if not DEBUG:
    STATICFILES_STORAGE = 'zerver.lib.storage.ZulipStorage'
    if PRODUCTION:
        STATIC_ROOT = '/home/zulip/prod-static'
    else:
        STATIC_ROOT = os.path.abspath(os.path.join(DEPLOY_ROOT, 'prod-static/serve'))

# If changing this, you also need to update the hack that modifies this
# in our compilemessages management command.
LOCALE_PATHS = (os.path.join(DEPLOY_ROOT, 'locale'),)

# We want all temporary uploaded files to be stored on disk.
FILE_UPLOAD_MAX_MEMORY_SIZE = 0

STATICFILES_DIRS = ['static/']

if DEBUG:
    WEBPACK_STATS_FILE = os.path.join('var', 'webpack-stats-dev.json')
else:
    WEBPACK_STATS_FILE = 'webpack-stats-production.json'
WEBPACK_LOADER = {
    'DEFAULT': {
        'CACHE': not DEBUG,
        'BUNDLE_DIR_NAME': '../webpack/' if DEBUG else 'webpack-bundles/',
        'STATS_FILE': os.path.join(DEPLOY_ROOT, WEBPACK_STATS_FILE),
    },
}

########################################################################
# TEMPLATES SETTINGS
########################################################################

# List of callables that know how to import templates from various sources.
LOADERS: List[Union[str, Tuple[object, ...]]] = [
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
]
if PRODUCTION:
    # Template caching is a significant performance win in production.
    LOADERS = [('django.template.loaders.cached.Loader', LOADERS)]

base_template_engine_settings: Dict[str, Any] = {
    'BACKEND': 'django.template.backends.jinja2.Jinja2',
    'OPTIONS': {
        'environment': 'zproject.jinja2.environment',
        'extensions': [
            'jinja2.ext.i18n',
            'jinja2.ext.autoescape',
            'webpack_loader.contrib.jinja2ext.WebpackExtension',
        ],
        'context_processors': [
            'zerver.context_processors.zulip_default_context',
            'django.template.context_processors.i18n',
        ],
    },
}

default_template_engine_settings = deepcopy(base_template_engine_settings)
default_template_engine_settings.update(
    NAME='Jinja2',
    DIRS=[
        # The main templates directory
        os.path.join(DEPLOY_ROOT, 'templates'),
        # The webhook integration templates
        os.path.join(DEPLOY_ROOT, 'zerver', 'webhooks'),
        # The python-zulip-api:zulip_bots package templates
        os.path.join('static' if DEBUG else STATIC_ROOT, 'generated', 'bots'),
    ],
    APP_DIRS=True,
)

non_html_template_engine_settings = deepcopy(base_template_engine_settings)
non_html_template_engine_settings.update(
    NAME='Jinja2_plaintext',
    DIRS=[os.path.join(DEPLOY_ROOT, 'templates')],
    APP_DIRS=False,
)
non_html_template_engine_settings['OPTIONS'].update(
    autoescape=False,
    trim_blocks=True,
    lstrip_blocks=True,
)

# django-two-factor uses the default Django template engine (not Jinja2), so we
# need to add config for it here.
two_factor_template_options = deepcopy(default_template_engine_settings['OPTIONS'])
del two_factor_template_options['environment']
del two_factor_template_options['extensions']
two_factor_template_options['loaders'] = ['zproject.settings.TwoFactorLoader']

two_factor_template_engine_settings = {
    'NAME': 'Two_Factor',
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'DIRS': [],
    'APP_DIRS': False,
    'OPTIONS': two_factor_template_options,
}

# The order here is important; get_template and related/parent functions try
# the template engines in order until one succeeds.
TEMPLATES = [
    default_template_engine_settings,
    non_html_template_engine_settings,
    two_factor_template_engine_settings,
]
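
# A caller can also pin a specific engine by name instead of relying on the
# search order above -- e.g. (illustrative; the template path is hypothetical):
#   from django.template.loader import get_template
#   get_template('zerver/emails/invitation.txt', using='Jinja2_plaintext')
# where `using=` takes one of the NAME values configured above.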

########################################################################
# LOGGING SETTINGS
########################################################################

def zulip_path(path: str) -> str:
    if DEVELOPMENT:
        # if DEVELOPMENT, store these files in the Zulip checkout
        if path.startswith("/var/log"):
            path = os.path.join(DEVELOPMENT_LOG_DIRECTORY, os.path.basename(path))
        else:
            path = os.path.join(os.path.join(DEPLOY_ROOT, 'var'), os.path.basename(path))
    return path
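
# For example, in a development checkout zulip_path("/var/log/zulip/server.log")
# returns "<DEPLOY_ROOT>/var/log/server.log", and any other absolute path is
# mapped into "<DEPLOY_ROOT>/var/"; in production the path is returned unchanged.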

SERVER_LOG_PATH = zulip_path("/var/log/zulip/server.log")
ERROR_FILE_LOG_PATH = zulip_path("/var/log/zulip/errors.log")
MANAGEMENT_LOG_PATH = zulip_path("/var/log/zulip/manage.log")
WORKER_LOG_PATH = zulip_path("/var/log/zulip/workers.log")
SLOW_QUERIES_LOG_PATH = zulip_path("/var/log/zulip/slow_queries.log")
JSON_PERSISTENT_QUEUE_FILENAME_PATTERN = zulip_path("/home/zulip/tornado/event_queues%s.json")
EMAIL_LOG_PATH = zulip_path("/var/log/zulip/send_email.log")
EMAIL_MIRROR_LOG_PATH = zulip_path("/var/log/zulip/email_mirror.log")
EMAIL_DELIVERER_LOG_PATH = zulip_path("/var/log/zulip/email-deliverer.log")
EMAIL_CONTENT_LOG_PATH = zulip_path("/var/log/zulip/email_content.log")
LDAP_LOG_PATH = zulip_path("/var/log/zulip/ldap.log")
LDAP_SYNC_LOG_PATH = zulip_path("/var/log/zulip/sync_ldap_user_data.log")
QUEUE_ERROR_DIR = zulip_path("/var/log/zulip/queue_error")
QUEUE_STATS_DIR = zulip_path("/var/log/zulip/queue_stats")
DIGEST_LOG_PATH = zulip_path("/var/log/zulip/digest.log")
ANALYTICS_LOG_PATH = zulip_path("/var/log/zulip/analytics.log")
ANALYTICS_LOCK_DIR = zulip_path("/home/zulip/deployments/analytics-lock-dir")
WEBHOOK_LOG_PATH = zulip_path("/var/log/zulip/webhooks_errors.log")
WEBHOOK_UNSUPPORTED_EVENTS_LOG_PATH = zulip_path("/var/log/zulip/webhooks_unsupported_events.log")
SOFT_DEACTIVATION_LOG_PATH = zulip_path("/var/log/zulip/soft_deactivation.log")
TRACEMALLOC_DUMP_DIR = zulip_path("/var/log/zulip/tracemalloc")
SCHEDULED_MESSAGE_DELIVERER_LOG_PATH = zulip_path("/var/log/zulip/scheduled_message_deliverer.log")
RETENTION_LOG_PATH = zulip_path("/var/log/zulip/message_retention.log")
AUTH_LOG_PATH = zulip_path("/var/log/zulip/auth.log")

ZULIP_WORKER_TEST_FILE = '/tmp/zulip-worker-test-file'


if IS_WORKER:
    FILE_LOG_PATH = WORKER_LOG_PATH
else:
    FILE_LOG_PATH = SERVER_LOG_PATH

# This is disabled in a few tests.
LOGGING_ENABLED = True

DEFAULT_ZULIP_HANDLERS = [
    *(['zulip_admins'] if ERROR_REPORTING else []),
    'console', 'file', 'errors_file',
]

LOGGING: Dict[str, Any] = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'default': {
            '()': 'zerver.lib.logging_util.ZulipFormatter',
        },
        'webhook_request_data': {
            '()': 'zerver.lib.logging_util.ZulipWebhookFormatter',
        },
    },
    'filters': {
        'ZulipLimiter': {
            '()': 'zerver.lib.logging_util.ZulipLimiter',
        },
        'EmailLimiter': {
            '()': 'zerver.lib.logging_util.EmailLimiter',
        },
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse',
        },
        'require_debug_true': {
            '()': 'django.utils.log.RequireDebugTrue',
        },
        'nop': {
            '()': 'zerver.lib.logging_util.ReturnTrue',
        },
        'require_logging_enabled': {
            '()': 'zerver.lib.logging_util.ReturnEnabled',
        },
        'require_really_deployed': {
            '()': 'zerver.lib.logging_util.RequireReallyDeployed',
        },
        'skip_200_and_304': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': zerver.lib.logging_util.skip_200_and_304,
        },
        'skip_site_packages_logs': {
            '()': 'django.utils.log.CallbackFilter',
            'callback': zerver.lib.logging_util.skip_site_packages_logs,
        },
    },
    'handlers': {
        'zulip_admins': {
            'level': 'ERROR',
            'class': 'zerver.logging_handlers.AdminNotifyHandler',
            'filters': (['ZulipLimiter', 'require_debug_false', 'require_really_deployed']
                        if not DEBUG_ERROR_REPORTING else []),
            'formatter': 'default',
        },
        'auth_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': AUTH_LOG_PATH,
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'default',
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': FILE_LOG_PATH,
        },
        'errors_file': {
            'level': 'WARNING',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': ERROR_FILE_LOG_PATH,
        },
        'ldap_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': LDAP_LOG_PATH,
        },
        'slow_queries_file': {
            'level': 'INFO',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'default',
            'filename': SLOW_QUERIES_LOG_PATH,
        },
        'webhook_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'webhook_request_data',
            'filename': WEBHOOK_LOG_PATH,
        },
        'webhook_unsupported_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.WatchedFileHandler',
            'formatter': 'webhook_request_data',
            'filename': WEBHOOK_UNSUPPORTED_EVENTS_LOG_PATH,
        },
    },
    'loggers': {
        # The Python logging module uses a hierarchy of logger names for config:
        # "foo.bar" has parent "foo" has parent "", the root.  But the semantics
        # are subtle: it walks this hierarchy once to find the log level to
        # decide whether to log the record at all, then a separate time to find
        # handlers to emit the record.
        #
        # For `level`, the most specific ancestor that has a `level` counts.
        # For `handlers`, the most specific ancestor that has a `handlers`
        # counts (assuming we set `propagate=False`, which we always do.)
        # These are independent -- they might come at the same layer, or
        # either one could come before the other.
        #
        # For `filters`, no ancestors count at all -- only the exact logger name
        # the record was logged at.
        #
        # Upstream docs: https://docs.python.org/3/library/logging
        #
        # Style rules:
        # * Always set `propagate=False` if setting `handlers`.
        # * Setting `level` equal to the parent is redundant; don't.
        # * Setting `handlers` equal to the parent is redundant; don't.
        # * Always write in order: level, filters, handlers, propagate.

        # root logger
        '': {
            'level': 'INFO',
            'filters': ['require_logging_enabled'],
            'handlers': DEFAULT_ZULIP_HANDLERS,
        },
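
        # To make the hierarchy concrete: with the config below, a DEBUG record
        # logged on 'zulip.auth' uses that logger's own level (DEBUG) and its
        # own handlers (it sets handlers and propagate=False), while a record
        # on 'zerver.lib.digest' gets its level from that entry but, having no
        # handlers of its own, is emitted by the root logger's handlers.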

        # Django, alphabetized
        'django': {
            # Django's default logging config has already set some
            # things on this logger.  Just mentioning it here causes
            # `logging.config` to reset it to defaults, as if never
            # configured; which is what we want for it.
        },
        'django.request': {
            # We set this to ERROR to prevent Django's default
            # low-value logs with lines like "Not Found: /robots.txt"
            # from being logged for every HTTP 4xx error at WARNING
            # level, which would otherwise end up spamming our
            # errors.log.  We'll still get logs in errors.log
            # including tracebacks for 5xx errors (i.e. Python
            # exceptions).
            'level': 'ERROR',
        },
        'django.security.DisallowedHost': {
            'handlers': ['file'],
            'propagate': False,
        },
        'django.server': {
            'filters': ['skip_200_and_304'],
            'handlers': ['console', 'file'],
            'propagate': False,
        },
        'django.utils.autoreload': {
            # We don't want logging spam from the autoreloaders in development.
            'level': 'WARNING',
        },
        'django.template': {
            'level': 'DEBUG',
            'filters': ['require_debug_true', 'skip_site_packages_logs'],
            'handlers': ['console'],
            'propagate': False,
        },

        ## Uncomment the following to get all database queries logged to the console
        # 'django.db': {
        #     'level': 'DEBUG',
        #     'handlers': ['console'],
        #     'propagate': False,
        # },

        # other libraries, alphabetized
        'django_auth_ldap': {
            'level': 'DEBUG',
            'handlers': ['console', 'ldap_file', 'errors_file'],
            'propagate': False,
        },
        'pika': {
            # pika is super chatty on INFO.
            'level': 'WARNING',
            # pika spews a lot of ERROR logs when a connection fails.
            # We reconnect automatically, so those should be treated as WARNING --
            # write to the log for use in debugging, but no error emails/Zulips.
            'handlers': ['console', 'file', 'errors_file'],
            'propagate': False,
        },
        'requests': {
            'level': 'WARNING',
        },

        # our own loggers, alphabetized
        'zerver.lib.digest': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.deliver_email': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.enqueue_digest_emails': {
            'level': 'DEBUG',
        },
        'zerver.management.commands.deliver_scheduled_messages': {
            'level': 'DEBUG',
        },
        'zulip.auth': {
            'level': 'DEBUG',
            'handlers': [*DEFAULT_ZULIP_HANDLERS, 'auth_file'],
            'propagate': False,
        },
        'zulip.ldap': {
            'level': 'DEBUG',
            'handlers': ['console', 'ldap_file', 'errors_file'],
            'propagate': False,
        },
        'zulip.management': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.queue': {
            'level': 'WARNING',
        },
        'zulip.retention': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        'zulip.slow_queries': {
            'level': 'INFO',
            'handlers': ['slow_queries_file'],
            'propagate': False,
        },
        'zulip.soft_deactivation': {
            'handlers': ['file', 'errors_file'],
            'propagate': False,
        },
        # This logger is used only for automated tests validating the
        # error-handling behavior of the zulip_admins handler.
        'zulip.test_zulip_admins_handler': {
            'handlers': ['zulip_admins'],
            'propagate': False,
        },
        'zulip.zerver.webhooks': {
            'level': 'DEBUG',
            'handlers': ['file', 'errors_file', 'webhook_file'],
            'propagate': False,
        },
        'zulip.zerver.webhooks.unsupported': {
            'level': 'DEBUG',
            'handlers': ['webhook_unsupported_file'],
            'propagate': False,
        },
    },
}

if DEVELOPMENT:
    CONTRIBUTOR_DATA_FILE_PATH = os.path.join(DEPLOY_ROOT, 'var/github-contributors.json')
else:
    CONTRIBUTOR_DATA_FILE_PATH = '/var/lib/zulip/github-contributors.json'

LOGIN_REDIRECT_URL = '/'

# Client-side polling timeout for get_events, in milliseconds.
# We configure this here so that the client test suite can override it.
# We already kill the connection server-side with heartbeat events,
# but it's good to have a safety.  This value should be greater than
# (HEARTBEAT_MIN_FREQ_SECS + 10)
POLL_TIMEOUT = 90 * 1000

########################################################################
# SSO AND LDAP SETTINGS
########################################################################

USING_LDAP = "zproject.backends.ZulipLDAPAuthBackend" in AUTHENTICATION_BACKENDS
ONLY_LDAP = AUTHENTICATION_BACKENDS == ("zproject.backends.ZulipLDAPAuthBackend",)
USING_APACHE_SSO = ('zproject.backends.ZulipRemoteUserBackend' in AUTHENTICATION_BACKENDS)
ONLY_SSO = AUTHENTICATION_BACKENDS == ("zproject.backends.ZulipRemoteUserBackend",)

if ONLY_SSO:
    HOME_NOT_LOGGED_IN = "/accounts/login/sso/"
else:
    HOME_NOT_LOGGED_IN = '/login/'

AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipDummyBackend',)

# Redirect to /devlogin/ by default in dev mode
if DEVELOPMENT:
    HOME_NOT_LOGGED_IN = '/devlogin/'
    LOGIN_URL = '/devlogin/'

POPULATE_PROFILE_VIA_LDAP = bool(AUTH_LDAP_SERVER_URI)

if POPULATE_PROFILE_VIA_LDAP and not USING_LDAP:
    AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipLDAPUserPopulator',)
else:
    POPULATE_PROFILE_VIA_LDAP = USING_LDAP or POPULATE_PROFILE_VIA_LDAP

if POPULATE_PROFILE_VIA_LDAP:
    import ldap
    if (AUTH_LDAP_BIND_DN and ldap.OPT_REFERRALS not in AUTH_LDAP_CONNECTION_OPTIONS):
        # The default behavior of python-ldap (without setting option
        # `ldap.OPT_REFERRALS`) is to follow referrals, but anonymously.
        # If our original query was non-anonymous, that's unlikely to
        # work; skip the referral.
        #
        # The common case of this is that the server is Active Directory,
        # it's already given us the answer we need, and the referral is
        # just speculation about someplace else that has data our query
        # could in principle match.
        AUTH_LDAP_CONNECTION_OPTIONS[ldap.OPT_REFERRALS] = 0

if REGISTER_LINK_DISABLED is None:
    # The default for REGISTER_LINK_DISABLED is a bit more
    # complicated: we want it to be disabled by default for people
    # using the LDAP backend that auto-creates users on login.
    REGISTER_LINK_DISABLED = ONLY_LDAP

########################################################################
# SOCIAL AUTHENTICATION SETTINGS
########################################################################

SOCIAL_AUTH_FIELDS_STORED_IN_SESSION = ['subdomain', 'is_signup', 'mobile_flow_otp', 'desktop_flow_otp',
                                        'multiuse_object_key']
SOCIAL_AUTH_LOGIN_ERROR_URL = '/login/'

# CLIENT is required by PSA's internal implementation.  We name it
# SERVICES_ID to make things more readable in the configuration
# and our own custom backend code.
SOCIAL_AUTH_APPLE_CLIENT = SOCIAL_AUTH_APPLE_SERVICES_ID
SOCIAL_AUTH_APPLE_AUDIENCE = [id for id in [SOCIAL_AUTH_APPLE_CLIENT, SOCIAL_AUTH_APPLE_APP_ID] if id is not None]

if PRODUCTION:
    SOCIAL_AUTH_APPLE_SECRET = get_from_file_if_exists("/etc/zulip/apple-auth-key.p8")
else:
    SOCIAL_AUTH_APPLE_SECRET = get_from_file_if_exists("zproject/dev_apple.key")

SOCIAL_AUTH_GITHUB_SECRET = get_secret('social_auth_github_secret')
SOCIAL_AUTH_GITLAB_SECRET = get_secret('social_auth_gitlab_secret')
SOCIAL_AUTH_GITHUB_SCOPE = ['user:email']
if SOCIAL_AUTH_GITHUB_ORG_NAME or SOCIAL_AUTH_GITHUB_TEAM_ID:
    SOCIAL_AUTH_GITHUB_SCOPE.append("read:org")
SOCIAL_AUTH_GITHUB_ORG_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_ORG_SECRET = SOCIAL_AUTH_GITHUB_SECRET
SOCIAL_AUTH_GITHUB_TEAM_KEY = SOCIAL_AUTH_GITHUB_KEY
SOCIAL_AUTH_GITHUB_TEAM_SECRET = SOCIAL_AUTH_GITHUB_SECRET

SOCIAL_AUTH_GOOGLE_SECRET = get_secret('social_auth_google_secret')
# Fall back to google-oauth settings in case the social auth settings for
# Google are missing; this is for backwards-compatibility with older
# Zulip versions where /etc/zulip/settings.py has not been migrated yet.
GOOGLE_OAUTH2_CLIENT_SECRET = get_secret('google_oauth2_client_secret')
SOCIAL_AUTH_GOOGLE_KEY = SOCIAL_AUTH_GOOGLE_KEY or GOOGLE_OAUTH2_CLIENT_ID
SOCIAL_AUTH_GOOGLE_SECRET = SOCIAL_AUTH_GOOGLE_SECRET or GOOGLE_OAUTH2_CLIENT_SECRET

if PRODUCTION:
    SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = get_from_file_if_exists("/etc/zulip/saml/zulip-cert.crt")
    SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = get_from_file_if_exists("/etc/zulip/saml/zulip-private-key.key")

if "signatureAlgorithm" not in SOCIAL_AUTH_SAML_SECURITY_CONFIG:
    # If the configuration doesn't explicitly specify the algorithm,
    # we set RSA with SHA256 to override the python3-saml default, which uses
    # insecure SHA1.
    default_signature_alg = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"
    SOCIAL_AUTH_SAML_SECURITY_CONFIG["signatureAlgorithm"] = default_signature_alg

for idp_name, idp_dict in SOCIAL_AUTH_SAML_ENABLED_IDPS.items():
    if DEVELOPMENT:
        idp_dict['entity_id'] = get_secret('saml_entity_id', '')
        idp_dict['url'] = get_secret('saml_url', '')
        idp_dict['x509cert_path'] = 'zproject/dev_saml.cert'

    # Set `x509cert` if not specified already; also support an override path.
    if 'x509cert' in idp_dict:
        continue

    if 'x509cert_path' in idp_dict:
        path = idp_dict['x509cert_path']
    else:
        path = f"/etc/zulip/saml/idps/{idp_name}.crt"
    idp_dict['x509cert'] = get_from_file_if_exists(path)

SOCIAL_AUTH_PIPELINE = [
    'social_core.pipeline.social_auth.social_details',
    'zproject.backends.social_auth_associate_user',
    'zproject.backends.social_auth_finish',
]

########################################################################
# EMAIL SETTINGS
########################################################################

# Django setting.  Not used in the Zulip codebase.
DEFAULT_FROM_EMAIL = ZULIP_ADMINISTRATOR

if EMAIL_BACKEND is not None:
    # If the server admin specified a custom email backend, use that.
    pass
elif DEVELOPMENT:
    # In the dev environment, emails are printed to the run-dev.py console.
    EMAIL_BACKEND = 'zproject.email_backends.EmailLogBackEnd'
elif not EMAIL_HOST:
    # If an email host is not specified, fail gracefully
    WARN_NO_EMAIL = True
    EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
else:
    EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'

EMAIL_TIMEOUT = 15

EMAIL_HOST_PASSWORD = get_secret('email_password')
EMAIL_GATEWAY_PASSWORD = get_secret('email_gateway_password')
AUTH_LDAP_BIND_PASSWORD = get_secret('auth_ldap_bind_password', '')

########################################################################
# MISC SETTINGS
########################################################################

if PRODUCTION:
    # Filter out user data
    DEFAULT_EXCEPTION_REPORTER_FILTER = 'zerver.filters.ZulipExceptionReporterFilter'

# This is a debugging option only
PROFILE_ALL_REQUESTS = False

CROSS_REALM_BOT_EMAILS = {
    'notification-bot@zulip.com',
    'welcome-bot@zulip.com',
    'emailgateway@zulip.com',
}

THUMBOR_KEY = get_secret('thumbor_key')

TWO_FACTOR_PATCH_ADMIN = False

# Allow the environment to override the default DSN
SENTRY_DSN = os.environ.get("SENTRY_DSN", SENTRY_DSN)
if SENTRY_DSN:
    from .sentry import setup_sentry
    setup_sentry(SENTRY_DSN)