diff --git a/docs/production/upload-backends.md b/docs/production/upload-backends.md index fb11b1cde6..7ce9bdea88 100644 --- a/docs/production/upload-backends.md +++ b/docs/production/upload-backends.md @@ -31,25 +31,17 @@ as world-readable, whereas the "uploaded files" one is not. 1. Set the `S3_AUTH_UPLOADS_BUCKET` and `S3_AVATAR_BUCKET` settings in `/etc/zulip/settings.py` to be the names of the S3 buckets you - created (e.g. `exampleinc-zulip-uploads`). + created (e.g. `"exampleinc-zulip-uploads"`). 1. Comment out the `LOCAL_UPLOADS_DIR` setting in `/etc/zulip/settings.py` (add a `#` at the start of the line). -1. If you are using a non-AWS block storage provider, or certain AWS - regions, you may need to explicitly - [configure boto](http://boto.cloudhackers.com/en/latest/boto_config_tut.html). - For AWS, you may need to use AWS's SIGv4 signature format (because AWS has stopped - supporting the older v3 format in those regions); for other - providers, you may just need to set the hostname. You can do this - by adding an `/etc/zulip/boto.cfg` containing the following: - ``` - [s3] - use-sigv4 = True - # Edit to provide your bucket's AWS region or hostname here. - host = s3.eu-central-1.amazonaws.com - ``` +1. If you are using a non-AWS block storage provider, + you need to set the `S3_ENDPOINT_URL` setting to your + endpoint URL (e.g. `"https://s3.eu-central-1.amazonaws.com"`). + For certain AWS regions, you may need to set the `S3_REGION` + setting to your default AWS region's code (e.g. `"eu-central-1"`). 1. You will need to configure `nginx` to direct requests for uploaded files to the Zulip server (which will then serve a redirect to the diff --git a/zerver/lib/upload.py b/zerver/lib/upload.py index 8333955d2f..1e88fd2a64 100644 --- a/zerver/lib/upload.py +++ b/zerver/lib/upload.py @@ -272,7 +272,8 @@ def get_bucket(bucket_name: str, session: Optional[Session]=None) -> ServiceReso # for why this return type is a `ServiceResource`. 
if session is None: session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY) - bucket = session.resource('s3').Bucket(bucket_name) + bucket = session.resource('s3', region_name=settings.S3_REGION, + endpoint_url=settings.S3_ENDPOINT_URL).Bucket(bucket_name) return bucket def upload_image_to_s3( @@ -326,7 +327,9 @@ def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Opti def get_signed_upload_url(path: str) -> str: client = boto3.client('s3', aws_access_key_id=settings.S3_KEY, - aws_secret_access_key=settings.S3_SECRET_KEY) + aws_secret_access_key=settings.S3_SECRET_KEY, + region_name=settings.S3_REGION, + endpoint_url=settings.S3_ENDPOINT_URL) return client.generate_presigned_url(ClientMethod='get_object', Params={ 'Bucket': settings.S3_AUTH_UPLOADS_BUCKET, @@ -601,7 +604,9 @@ class S3UploadBackend(ZulipUploadBackend): session = botocore.session.get_session() config = Config(signature_version=botocore.UNSIGNED) - public_url = session.create_client('s3', config=config).generate_presigned_url( + public_url = session.create_client('s3', region_name=settings.S3_REGION, + endpoint_url=settings.S3_ENDPOINT_URL, + config=config).generate_presigned_url( 'get_object', Params={ 'Bucket': self.avatar_bucket.name, diff --git a/zerver/migrations/0149_realm_emoji_drop_unique_constraint.py b/zerver/migrations/0149_realm_emoji_drop_unique_constraint.py index 9ada418e03..6fddbcf1c0 100644 --- a/zerver/migrations/0149_realm_emoji_drop_unique_constraint.py +++ b/zerver/migrations/0149_realm_emoji_drop_unique_constraint.py @@ -55,7 +55,7 @@ class S3Uploader(Uploader): super().__init__() session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY) self.bucket_name = settings.S3_AVATAR_BUCKET - self.bucket = session.resource('s3').Bucket(self.bucket_name) + self.bucket = session.resource('s3', region_name=settings.S3_REGION, endpoint_url=settings.S3_ENDPOINT_URL).Bucket(self.bucket_name) def copy_files(self, src_key: str, dst_key: str) -> None: source 
= dict(Bucket=self.bucket_name, Key=src_key) diff --git a/zproject/computed_settings.py b/zproject/computed_settings.py index aede21e437..6c91f8bdd7 100644 --- a/zproject/computed_settings.py +++ b/zproject/computed_settings.py @@ -110,15 +110,6 @@ GENERATE_STRIPE_FIXTURES = False # This is overridden in test_settings.py for the test suites BAN_CONSOLE_OUTPUT = False -# Google Compute Engine has an /etc/boto.cfg that is "nicely -# configured" to work with GCE's storage service. However, their -# configuration is super aggressive broken, in that it means importing -# boto in a virtualenv that doesn't contain the GCE tools crashes. -# -# By using our own path for BOTO_CONFIG, we can cause boto to not -# process /etc/boto.cfg. -os.environ['BOTO_CONFIG'] = '/etc/zulip/boto.cfg' - # These are the settings that we will check that the user has filled in for # production deployments before starting the app. It consists of a series # of pairs of (setting name, default value that it must be changed from) diff --git a/zproject/default_settings.py b/zproject/default_settings.py index 16c6ac37e9..640fcf8637 100644 --- a/zproject/default_settings.py +++ b/zproject/default_settings.py @@ -116,7 +116,8 @@ DEFAULT_AVATAR_URI = '/static/images/default-avatar.png' DEFAULT_LOGO_URI = '/static/images/logo/zulip-org-logo.svg' S3_AVATAR_BUCKET = '' S3_AUTH_UPLOADS_BUCKET = '' -S3_REGION = '' +S3_REGION = None +S3_ENDPOINT_URL = None LOCAL_UPLOADS_DIR: Optional[str] = None MAX_FILE_UPLOAD_SIZE = 25 diff --git a/zproject/prod_settings_template.py b/zproject/prod_settings_template.py index 9dcabdeee2..0c47928cd9 100644 --- a/zproject/prod_settings_template.py +++ b/zproject/prod_settings_template.py @@ -406,7 +406,8 @@ ENABLE_FILE_LINKS = False LOCAL_UPLOADS_DIR = "/home/zulip/uploads" #S3_AUTH_UPLOADS_BUCKET = "" #S3_AVATAR_BUCKET = "" -#S3_REGION = "" +#S3_REGION = None +#S3_ENDPOINT_URL = None # Maximum allowed size of uploaded files, in megabytes. DO NOT SET # ABOVE 80MB. 
The file upload implementation doesn't support chunked