upload: Lazily import boto3.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
This commit is contained in:
Anders Kaseorg 2024-09-24 16:00:18 -07:00 committed by Tim Abbott
parent af915523a7
commit 184c0203f3
2 changed files with 21 additions and 14 deletions

View File

@@ -16,7 +16,7 @@ from collections.abc import Callable, Iterable, Mapping
from contextlib import suppress
from datetime import datetime
from functools import cache
from typing import Any, Optional, TypeAlias, TypedDict
from typing import TYPE_CHECKING, Any, Optional, TypeAlias, TypedDict
import orjson
from django.apps import apps
@@ -24,7 +24,6 @@ from django.conf import settings
from django.db.models import Exists, OuterRef, Q
from django.forms.models import model_to_dict
from django.utils.timezone import is_naive as timezone_is_naive
from mypy_boto3_s3.service_resource import Object
import zerver.lib.upload
from analytics.models import RealmCount, StreamCount, UserCount
@@ -77,6 +76,9 @@ from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.realms import EXPORT_FULL_WITH_CONSENT, EXPORT_PUBLIC, get_realm
from zerver.models.users import get_system_bot, get_user_profile_by_id
if TYPE_CHECKING:
from mypy_boto3_s3.service_resource import Object
# Custom mypy types follow:
Record: TypeAlias = dict[str, Any]
TableName = str
@@ -1624,7 +1626,7 @@ def export_uploads_and_avatars(
def _get_exported_s3_record(
bucket_name: str, key: Object, processing_emoji: bool
bucket_name: str, key: "Object", processing_emoji: bool
) -> dict[str, Any]:
# Helper function for export_files_from_s3
record: dict[str, Any] = dict(
@@ -1673,7 +1675,7 @@ def _get_exported_s3_record(
def _save_s3_object_to_file(
key: Object,
key: "Object",
output_dir: str,
processing_uploads: bool,
) -> None:

View File

@@ -3,18 +3,15 @@ import os
import secrets
from collections.abc import Callable, Iterator
from datetime import datetime
from typing import IO, Any, BinaryIO, Literal
from typing import IO, TYPE_CHECKING, Any, BinaryIO, Literal
from urllib.parse import urljoin, urlsplit, urlunsplit
import boto3
import botocore
import pyvips
from botocore.client import Config
from botocore.response import StreamingBody
from django.conf import settings
from django.utils.http import content_disposition_header
from mypy_boto3_s3.client import S3Client
from mypy_boto3_s3.service_resource import Bucket, Object
from typing_extensions import override
from zerver.lib.partial import partial
@@ -22,6 +19,10 @@ from zerver.lib.thumbnail import resize_avatar, resize_logo
from zerver.lib.upload.base import INLINE_MIME_TYPES, StreamingSourceWithSize, ZulipUploadBackend
from zerver.models import Realm, RealmEmoji, UserProfile
if TYPE_CHECKING:
from mypy_boto3_s3.client import S3Client
from mypy_boto3_s3.service_resource import Bucket, Object
# Duration that the signed upload URLs that we redirect to when
# accessing uploaded files are available for clients to fetch before
# they expire.
@@ -51,7 +52,9 @@ if settings.S3_SKIP_PROXY is True:  # nocoverage
botocore.utils.should_bypass_proxies = lambda url: True
def get_bucket(bucket_name: str, authed: bool = True) -> Bucket:
def get_bucket(bucket_name: str, authed: bool = True) -> "Bucket":
import boto3
return boto3.resource(
"s3",
aws_access_key_id=settings.S3_KEY if authed else None,
@@ -66,7 +69,7 @@ def get_bucket(bucket_name: str, authed: bool = True) -> Bucket:
def upload_content_to_s3(
bucket: Bucket,
bucket: "Bucket",
path: str,
content_type: str | None,
user_profile: UserProfile | None,
@@ -112,10 +115,10 @@ def upload_content_to_s3(
)
BOTO_CLIENT: S3Client | None = None
BOTO_CLIENT: "S3Client | None" = None
def get_boto_client() -> S3Client:
def get_boto_client() -> "S3Client":
"""
Creating the client takes a long time so we need to cache it.
"""
@@ -143,6 +146,8 @@ def get_signed_upload_url(path: str, force_download: bool = False) -> str:
class S3UploadBackend(ZulipUploadBackend):
def __init__(self) -> None:
from mypy_boto3_s3.service_resource import Bucket
self.avatar_bucket = get_bucket(settings.S3_AVATAR_BUCKET)
self.uploads_bucket = get_bucket(settings.S3_AUTH_UPLOADS_BUCKET)
self.export_bucket: Bucket | None = None
@@ -151,7 +156,7 @@ class S3UploadBackend(ZulipUploadBackend):
self.public_upload_url_base = self.construct_public_upload_url_base()
def delete_file_from_s3(self, path_id: str, bucket: Bucket) -> bool:
def delete_file_from_s3(self, path_id: str, bucket: "Bucket") -> bool:
key = bucket.Object(path_id)
try:
@@ -461,7 +466,7 @@ class S3UploadBackend(ZulipUploadBackend):
# Strip off the signing query parameters, since this URL is public
return urlsplit(signed_url)._replace(query="").geturl()
def export_object(self, tarball_path: str) -> Object:
def export_object(self, tarball_path: str) -> "Object":
if self.export_bucket:
return self.export_bucket.Object(
os.path.join(secrets.token_hex(16), os.path.basename(tarball_path))