uploads: Support non-AWS S3-compatible server.

Boto3 does not allow setting the endpoint url from
the config file. Thus we create a Django setting
variable (`S3_ENDPOINT_URL`) which is passed to
service clients and resources of `boto3.Session`.

We also update the uploads-backend documentation
and remove the config environment variable, as
AWS now supports the SIGv4 signature format by default.
And the region name is passed as a parameter instead
of creating a config file for just this value.

Fixes #16246.
This commit is contained in:
ryanreh99 2020-10-23 03:02:45 +05:30 committed by Tim Abbott
parent 1c370a975c
commit dfa7ce5637
6 changed files with 19 additions and 29 deletions

View File

@@ -31,25 +31,17 @@ as world-readable, whereas the "uploaded files" one is not.
1. Set the `S3_AUTH_UPLOADS_BUCKET` and `S3_AVATAR_BUCKET` settings in
`/etc/zulip/settings.py` to be the names of the S3 buckets you
created (e.g. `exampleinc-zulip-uploads`).
created (e.g. `"exampleinc-zulip-uploads"`).
1. Comment out the `LOCAL_UPLOADS_DIR` setting in
`/etc/zulip/settings.py` (add a `#` at the start of the line).
1. If you are using a non-AWS block storage provider, or certain AWS
regions, you may need to explicitly
[configure boto](http://boto.cloudhackers.com/en/latest/boto_config_tut.html).
For AWS, you may need to use AWS's SIGv4 signature format (because AWS has stopped
supporting the older v3 format in those regions); for other
providers, you may just need to set the hostname. You can do this
by adding an `/etc/zulip/boto.cfg` containing the following:
```
[s3]
use-sigv4 = True
# Edit to provide your bucket's AWS region or hostname here.
host = s3.eu-central-1.amazonaws.com
```
1. If you are using a non-AWS block storage provider,
you need to set the `S3_ENDPOINT_URL` setting to your
endpoint url (e.g. `"https://s3.eu-central-1.amazonaws.com"`).
For certain AWS regions, you may need to set the `S3_REGION`
setting to your default AWS region's code (e.g. `"eu-central-1"`).
1. You will need to configure `nginx` to direct requests for uploaded
files to the Zulip server (which will then serve a redirect to the

View File

@@ -272,7 +272,8 @@ def get_bucket(bucket_name: str, session: Optional[Session]=None) -> ServiceReso
# for why this return type is a `ServiceResource`.
if session is None:
session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = session.resource('s3').Bucket(bucket_name)
bucket = session.resource('s3', region_name=settings.S3_REGION,
endpoint_url=settings.S3_ENDPOINT_URL).Bucket(bucket_name)
return bucket
def upload_image_to_s3(
@@ -326,7 +327,9 @@ def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Opti
def get_signed_upload_url(path: str) -> str:
client = boto3.client('s3', aws_access_key_id=settings.S3_KEY,
aws_secret_access_key=settings.S3_SECRET_KEY)
aws_secret_access_key=settings.S3_SECRET_KEY,
region_name=settings.S3_REGION,
endpoint_url=settings.S3_ENDPOINT_URL)
return client.generate_presigned_url(ClientMethod='get_object',
Params={
'Bucket': settings.S3_AUTH_UPLOADS_BUCKET,
@@ -601,7 +604,9 @@ class S3UploadBackend(ZulipUploadBackend):
session = botocore.session.get_session()
config = Config(signature_version=botocore.UNSIGNED)
public_url = session.create_client('s3', config=config).generate_presigned_url(
public_url = session.create_client('s3', region_name=settings.S3_REGION,
endpoint_url=settings.S3_ENDPOINT_URL,
config=config).generate_presigned_url(
'get_object',
Params={
'Bucket': self.avatar_bucket.name,

View File

@@ -55,7 +55,7 @@ class S3Uploader(Uploader):
super().__init__()
session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
self.bucket_name = settings.S3_AVATAR_BUCKET
self.bucket = session.resource('s3').Bucket(self.bucket_name)
self.bucket = session.resource('s3', region_name=settings.S3_REGION, endpoint_url=settings.S3_ENDPOINT_URL).Bucket(self.bucket_name)
def copy_files(self, src_key: str, dst_key: str) -> None:
source = dict(Bucket=self.bucket_name, Key=src_key)

View File

@@ -110,15 +110,6 @@ GENERATE_STRIPE_FIXTURES = False
# This is overridden in test_settings.py for the test suites
BAN_CONSOLE_OUTPUT = False
# Google Compute Engine has an /etc/boto.cfg that is "nicely
# configured" to work with GCE's storage service. However, their
# configuration is super aggressive broken, in that it means importing
# boto in a virtualenv that doesn't contain the GCE tools crashes.
#
# By using our own path for BOTO_CONFIG, we can cause boto to not
# process /etc/boto.cfg.
os.environ['BOTO_CONFIG'] = '/etc/zulip/boto.cfg'
# These are the settings that we will check that the user has filled in for
# production deployments before starting the app. It consists of a series
# of pairs of (setting name, default value that it must be changed from)

View File

@@ -116,7 +116,8 @@ DEFAULT_AVATAR_URI = '/static/images/default-avatar.png'
DEFAULT_LOGO_URI = '/static/images/logo/zulip-org-logo.svg'
S3_AVATAR_BUCKET = ''
S3_AUTH_UPLOADS_BUCKET = ''
S3_REGION = ''
S3_REGION = None
S3_ENDPOINT_URL = None
LOCAL_UPLOADS_DIR: Optional[str] = None
MAX_FILE_UPLOAD_SIZE = 25

View File

@@ -406,7 +406,8 @@ ENABLE_FILE_LINKS = False
LOCAL_UPLOADS_DIR = "/home/zulip/uploads"
#S3_AUTH_UPLOADS_BUCKET = ""
#S3_AVATAR_BUCKET = ""
#S3_REGION = ""
#S3_REGION = None
#S3_ENDPOINT_URL = None
# Maximum allowed size of uploaded files, in megabytes. DO NOT SET
# ABOVE 80MB. The file upload implementation doesn't support chunked