import logging
import os
import secrets
from collections.abc import Callable, Iterator
from datetime import datetime
from typing import IO, TYPE_CHECKING, Any, Literal
from urllib.parse import urljoin, urlsplit, urlunsplit

import botocore
import pyvips
from botocore.client import Config
from botocore.response import StreamingBody
from django.conf import settings
from django.utils.http import content_disposition_header
from typing_extensions import override

from zerver.lib.partial import partial
from zerver.lib.thumbnail import resize_logo, resize_realm_icon
from zerver.lib.upload.base import INLINE_MIME_TYPES, StreamingSourceWithSize, ZulipUploadBackend
from zerver.models import Realm, RealmEmoji, UserProfile

if TYPE_CHECKING:
    from mypy_boto3_s3.client import S3Client
    from mypy_boto3_s3.service_resource import Bucket, Object

# Duration that the signed upload URLs that we redirect to when
# accessing uploaded files are available for clients to fetch before
# they expire.
SIGNED_UPLOAD_URL_DURATION = 60

# Performance note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.

# To come up with an S3 key, we randomly generate a "directory". The
# "file name" is the original filename provided by the user, run
# through a sanitization function.

# https://github.com/boto/botocore/issues/2644 means that the IMDS
# request _always_ pulls from the environment. Monkey-patch the
# `should_bypass_proxies` function if we need to skip them, based
# on S3_SKIP_PROXY.
if settings.S3_SKIP_PROXY is True:  # nocoverage
    botocore.utils.should_bypass_proxies = lambda url: True


def get_bucket(bucket_name: str, authed: bool = True) -> "Bucket":
    import boto3

    return boto3.resource(
        "s3",
        aws_access_key_id=settings.S3_KEY if authed else None,
        aws_secret_access_key=settings.S3_SECRET_KEY if authed else None,
        region_name=settings.S3_REGION,
        endpoint_url=settings.S3_ENDPOINT_URL,
        config=Config(
            signature_version=None if authed else botocore.UNSIGNED,
            s3={"addressing_style": settings.S3_ADDRESSING_STYLE},
        ),
    ).Bucket(bucket_name)
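

# A minimal usage sketch for the helper above (the bucket name and key
# are illustrative, not values this module defines). The boto3 resource
# API returned here exposes per-key `Object` handles as well as the
# lower-level `meta.client`:
#
#     bucket = get_bucket("example-zulip-uploads")
#     data = bucket.Object("2/abc123/example.png").get()["Body"].read()
#     client = bucket.meta.client  # for client-only operations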


def upload_content_to_s3(
    bucket: "Bucket",
    path: str,
    content_type: str | None,
    user_profile: UserProfile | None,
    contents: bytes,
    *,
    storage_class: Literal[
        "GLACIER_IR",
        "INTELLIGENT_TIERING",
        "ONEZONE_IA",
        "REDUCED_REDUNDANCY",
        "STANDARD",
        "STANDARD_IA",
    ] = "STANDARD",
    cache_control: str | None = None,
    extra_metadata: dict[str, str] | None = None,
    filename: str | None = None,
) -> None:
    # Note that these steps are also replicated in
    # handle_upload_pre_finish_hook in zerver.views.tus, to update
    # properties for files uploaded via TUS.

    key = bucket.Object(path)
    metadata: dict[str, str] = {}
    if user_profile:
        metadata["user_profile_id"] = str(user_profile.id)
        metadata["realm_id"] = str(user_profile.realm_id)
    if extra_metadata is not None:
        metadata.update(extra_metadata)

    extras = {}
    if content_type is None:  # nocoverage
        content_type = ""
    is_attachment = content_type not in INLINE_MIME_TYPES
    if filename is not None:
        extras["ContentDisposition"] = content_disposition_header(is_attachment, filename)
    elif is_attachment:
        extras["ContentDisposition"] = "attachment"
    if cache_control is not None:
        extras["CacheControl"] = cache_control

    key.put(
        Body=contents,
        Metadata=metadata,
        ContentType=content_type,
        StorageClass=storage_class,
        **extras,  # type: ignore[arg-type]  # The dynamic kwargs here confuse mypy.
    )
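

# A usage sketch, assuming a hypothetical caller holding a bucket from
# get_bucket() above; keyword-only arguments are all optional:
#
#     upload_content_to_s3(
#         bucket,
#         "2/abc123/notes.txt",
#         "text/plain",
#         user_profile,
#         b"hello world",
#         cache_control="private",
#         filename="notes.txt",
#     )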


BOTO_CLIENT: "S3Client | None" = None


def get_boto_client() -> "S3Client":
    """
    Creating the client takes a long time, so we need to cache it.
    """
    global BOTO_CLIENT
    if BOTO_CLIENT is None:
        BOTO_CLIENT = get_bucket(settings.S3_AUTH_UPLOADS_BUCKET).meta.client
    return BOTO_CLIENT


def get_signed_upload_url(path: str, filename: str, force_download: bool = False) -> str:
    # See CVE-2023-22735: S3 provides the Content-Disposition header from
    # the stored object, and we use ResponseContentDisposition to override
    # it to "attachment" when the client requests a forced download.
    params = {
        "Bucket": settings.S3_AUTH_UPLOADS_BUCKET,
        "Key": path,
    }
    if force_download:
        params["ResponseContentDisposition"] = (
            content_disposition_header(True, filename) or "attachment"
        )

    return get_boto_client().generate_presigned_url(
        ClientMethod="get_object",
        Params=params,
        ExpiresIn=SIGNED_UPLOAD_URL_DURATION,
        HttpMethod="GET",
    )
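

# For illustration: the presigned URL returned above is an ordinary HTTPS
# GET URL carrying time-limited signature query parameters, roughly of
# the shape below (hostname, key, and signature fields are made up):
#
#     https://example-bucket.s3.amazonaws.com/2/abc123/example.png
#         ?X-Amz-Algorithm=AWS4-HMAC-SHA256&X-Amz-Expires=60&...
#
# Anyone holding the URL can fetch the object until
# SIGNED_UPLOAD_URL_DURATION seconds elapse, after which S3 rejects it.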


class S3UploadBackend(ZulipUploadBackend):
    def __init__(self) -> None:
        from mypy_boto3_s3.service_resource import Bucket

        self.avatar_bucket = get_bucket(settings.S3_AVATAR_BUCKET)
        self.uploads_bucket = get_bucket(settings.S3_AUTH_UPLOADS_BUCKET)
        self.export_bucket: Bucket | None = None
        if settings.S3_EXPORT_BUCKET:
            self.export_bucket = get_bucket(settings.S3_EXPORT_BUCKET)

        self.public_upload_url_base = self.construct_public_upload_url_base()

    def delete_file_from_s3(self, path_id: str, bucket: "Bucket") -> bool:
        key = bucket.Object(path_id)

        try:
            key.load()
        except botocore.exceptions.ClientError:
            file_name = path_id.split("/")[-1]
            logging.warning(
                "%s does not exist. Its entry in the database will be removed.", file_name
            )
            return False
        key.delete()
        return True

    def construct_public_upload_url_base(self) -> str:
        # Return the pattern for the public URL for a key in the S3
        # avatar bucket. For Amazon S3 itself, this will return:
        #     f"https://{self.avatar_bucket.name}.{network_location}/{key}"
        #
        # However, we need this function to properly handle S3-style
        # file upload backends that Zulip supports, which can have a
        # different URL format. Configuring no signature and providing
        # no access key makes `generate_presigned_url` just return the
        # normal public URL for a key.
        #
        # It unfortunately takes 2ms per query to call
        # generate_presigned_url. Since we need to potentially compute
        # hundreds of avatar URLs in a single `GET /messages` request,
        # we instead back-compute the URL pattern here.

        # The S3_AVATAR_PUBLIC_URL_PREFIX setting is used to override
        # this prefix, for instance if a CloudFront distribution is
        # used.
        if settings.S3_AVATAR_PUBLIC_URL_PREFIX is not None:
            prefix = settings.S3_AVATAR_PUBLIC_URL_PREFIX
            if not prefix.endswith("/"):
                prefix += "/"
            return prefix

        DUMMY_KEY = "dummy_key_ignored"

        # We do not access self.avatar_bucket.meta.client directly,
        # since that client is auth'd, and we want only the direct
        # unauthed endpoint here.
        client = get_bucket(self.avatar_bucket.name, authed=False).meta.client
        dummy_signed_url = client.generate_presigned_url(
            ClientMethod="get_object",
            Params={
                "Bucket": self.avatar_bucket.name,
                "Key": DUMMY_KEY,
            },
            ExpiresIn=0,
        )
        split_url = urlsplit(dummy_signed_url)
        assert split_url.path.endswith(f"/{DUMMY_KEY}")

        return urlunsplit(
            (split_url.scheme, split_url.netloc, split_url.path.removesuffix(DUMMY_KEY), "", "")
        )
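
    # For illustration, against Amazon S3 with virtual-hosted-style
    # addressing, the back-computed base above comes out roughly as
    # (bucket name made up):
    #
    #     https://example-avatars.s3.amazonaws.com/
    #
    # and get_public_upload_url() below just urljoin()s a key onto it.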

    @override
    def get_public_upload_root_url(self) -> str:
        return self.public_upload_url_base

    def get_public_upload_url(
        self,
        key: str,
    ) -> str:
        assert not key.startswith("/")
        return urljoin(self.public_upload_url_base, key)

    @override
    def generate_message_upload_path(self, realm_id: str, sanitized_file_name: str) -> str:
        return "/".join(
            [
                realm_id,
                secrets.token_urlsafe(18),
                sanitized_file_name,
            ]
        )
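
    # A sketch of the resulting path shape (realm ID and random token
    # made up): the middle component comes from secrets.token_urlsafe(18),
    # so paths are unguessable even when the filename is known:
    #
    #     "2/FoQqtTFDyKTbT0tEIDUW51wL/screenshot.png"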

    @override
    def upload_message_attachment(
        self,
        path_id: str,
        filename: str,
        content_type: str,
        file_data: bytes,
        user_profile: UserProfile | None,
    ) -> None:
        upload_content_to_s3(
            self.uploads_bucket,
            path_id,
            content_type,
            user_profile,
            file_data,
            storage_class=settings.S3_UPLOADS_STORAGE_CLASS,
            filename=filename,
        )

    @override
    def save_attachment_contents(self, path_id: str, filehandle: IO[bytes]) -> None:
        for chunk in self.uploads_bucket.Object(path_id).get()["Body"]:
            filehandle.write(chunk)

    @override
    def attachment_vips_source(self, path_id: str) -> StreamingSourceWithSize:
        metadata = self.uploads_bucket.Object(path_id).get()

        def s3_read(streamingbody: StreamingBody, size: int) -> bytes:
            return streamingbody.read(amt=size)

        source: pyvips.Source = pyvips.SourceCustom()
        source.on_read(partial(s3_read, metadata["Body"]))
        return StreamingSourceWithSize(size=metadata["ContentLength"], source=source)
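
    # A consumption sketch (hypothetical caller): pyvips can decode
    # directly from the custom source, so image processing never buffers
    # the whole S3 object in memory:
    #
    #     streaming = backend.attachment_vips_source("2/abc123/photo.jpg")
    #     image = pyvips.Image.new_from_source(
    #         streaming.source, "", access="sequential"
    #     )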

    @override
    def delete_message_attachment(self, path_id: str) -> bool:
        return self.delete_file_from_s3(path_id, self.uploads_bucket)

    @override
    def delete_message_attachments(self, path_ids: list[str]) -> None:
        self.uploads_bucket.delete_objects(
            Delete={"Objects": [{"Key": path_id} for path_id in path_ids]}
        )

    @override
    def all_message_attachments(
        self,
        include_thumbnails: bool = False,
        prefix: str = "",
    ) -> Iterator[tuple[str, datetime]]:
        client = self.uploads_bucket.meta.client
        paginator = client.get_paginator("list_objects_v2")
        page_iterator = paginator.paginate(Bucket=self.uploads_bucket.name, Prefix=prefix)

        for page in page_iterator:
            if page["KeyCount"] > 0:
                for item in page["Contents"]:
                    if not include_thumbnails and item["Key"].startswith("thumbnail/"):
                        continue
                    yield (
                        item["Key"],
                        item["LastModified"],
                    )
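
    # An iteration sketch (hypothetical caller): the paginator follows S3
    # continuation tokens transparently, so this generator scales to
    # buckets far larger than the 1,000-key-per-page listing limit:
    #
    #     for key, last_modified in backend.all_message_attachments(prefix="2/"):
    #         print(key, last_modified.isoformat())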

    @override
    def get_avatar_url(self, hash_key: str, medium: bool = False) -> str:
        return self.get_public_upload_url(self.get_avatar_path(hash_key, medium))

    @override
    def get_avatar_contents(self, file_path: str) -> tuple[bytes, str]:
        key = self.avatar_bucket.Object(file_path + ".original")
        image_data = key.get()["Body"].read()
        content_type = key.content_type
        return image_data, content_type

    @override
    def upload_single_avatar_image(
        self,
        file_path: str,
        *,
        user_profile: UserProfile,
        image_data: bytes,
        content_type: str | None,
        future: bool = True,
    ) -> None:
        extra_metadata = {"avatar_version": str(user_profile.avatar_version + (1 if future else 0))}
        upload_content_to_s3(
            self.avatar_bucket,
            file_path,
            content_type,
            user_profile,
            image_data,
            extra_metadata=extra_metadata,
            cache_control="public, max-age=31536000, immutable",
        )

    @override
    def delete_avatar_image(self, path_id: str) -> None:
        self.delete_file_from_s3(path_id + ".original", self.avatar_bucket)
        self.delete_file_from_s3(self.get_avatar_path(path_id, True), self.avatar_bucket)
        self.delete_file_from_s3(self.get_avatar_path(path_id, False), self.avatar_bucket)

    @override
    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        public_url = self.get_public_upload_url(f"{realm_id}/realm/icon.png")
        return public_url + f"?version={version}"

    @override
    def upload_realm_icon_image(
        self, icon_file: IO[bytes], user_profile: UserProfile, content_type: str
    ) -> None:
        s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), "icon")

        image_data = icon_file.read()
        upload_content_to_s3(
            self.avatar_bucket,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_realm_icon(image_data)
        upload_content_to_s3(
            self.avatar_bucket,
            s3_file_name + ".png",
            "image/png",
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for the URL. (That code also handles
        # the case where users use Gravatar.)

    @override
    def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
        if not night:
            file_name = "logo.png"
        else:
            file_name = "night_logo.png"
        public_url = self.get_public_upload_url(f"{realm_id}/realm/{file_name}")
        return public_url + f"?version={version}"

    @override
    def upload_realm_logo_image(
        self, logo_file: IO[bytes], user_profile: UserProfile, night: bool, content_type: str
    ) -> None:
        if night:
            basename = "night_logo"
        else:
            basename = "logo"
        s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), basename)

        image_data = logo_file.read()
        upload_content_to_s3(
            self.avatar_bucket,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_logo(image_data)
        upload_content_to_s3(
            self.avatar_bucket,
            s3_file_name + ".png",
            "image/png",
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for the URL. (That code also handles
        # the case where users use Gravatar.)

    @override
    def get_emoji_url(self, emoji_file_name: str, realm_id: int, still: bool = False) -> str:
        if still:
            emoji_path = RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
                realm_id=realm_id,
                emoji_filename_without_extension=os.path.splitext(emoji_file_name)[0],
            )
            return self.get_public_upload_url(emoji_path)
        else:
            emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
                realm_id=realm_id, emoji_file_name=emoji_file_name
            )
            return self.get_public_upload_url(emoji_path)

    @override
    def upload_single_emoji_image(
        self, path: str, content_type: str | None, user_profile: UserProfile, image_data: bytes
    ) -> None:
        upload_content_to_s3(
            self.avatar_bucket,
            path,
            content_type,
            user_profile,
            image_data,
            cache_control="public, max-age=31536000, immutable",
        )

    @override
    def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
        export_path = export_path.removeprefix("/")
        if self.export_bucket:
            # Fix old data if the row was created when an export bucket was not in use.
            export_path = export_path.removeprefix("exports/")
            client = self.export_bucket.meta.client
            return client.generate_presigned_url(
                ClientMethod="get_object",
                Params={
                    "Bucket": self.export_bucket.name,
                    "Key": export_path,
                },
                # Expires in one week, the longest allowed by AWS
                ExpiresIn=60 * 60 * 24 * 7,
            )
        else:
            if not export_path.startswith("exports/"):
                export_path = "exports/" + export_path
            client = self.avatar_bucket.meta.client
            signed_url = client.generate_presigned_url(
                ClientMethod="get_object",
                Params={
                    "Bucket": self.avatar_bucket.name,
                    "Key": export_path,
                },
                ExpiresIn=0,
            )
            # Strip off the signing query parameters, since this URL is public
            return urlsplit(signed_url)._replace(query="").geturl()

    def export_object(self, tarball_path: str) -> "Object":
        if self.export_bucket:
            return self.export_bucket.Object(
                os.path.join(secrets.token_hex(16), os.path.basename(tarball_path))
            )
        else:
            # We fall back to the avatar bucket, because it's world-readable.
            return self.avatar_bucket.Object(
                os.path.join("exports", secrets.token_hex(16), os.path.basename(tarball_path))
            )

    @override
    def upload_export_tarball(
        self,
        realm: Realm,
        tarball_path: str,
        percent_callback: Callable[[Any], None] | None = None,
    ) -> str:
        key = self.export_object(tarball_path)

        if percent_callback is None:
            key.upload_file(Filename=tarball_path)
        else:
            key.upload_file(Filename=tarball_path, Callback=percent_callback)

        return self.get_export_tarball_url(realm, key.key)

    @override
    def delete_export_tarball(self, export_path: str) -> str | None:
        assert export_path.startswith("/")
        path_id = export_path.removeprefix("/")
        bucket = self.export_bucket or self.avatar_bucket
        if self.delete_file_from_s3(path_id, bucket):
            return export_path
        return None