Mirror of https://github.com/zulip/zulip.git

requirements: Upgrade boto to boto3.

Fixes: #3490

Contributors include:
Author: whoodes <hoodesw@hawaii.edu>
Author: zhoufeng1989 <zhoufengloop@gmail.com>
Author: rht <rhtbot@protonmail.com>

This commit is contained in:
parent 23f0b3bc45
commit cea7d713cd
@@ -30,7 +30,7 @@ SQLAlchemy
 argon2-cffi
 
 # Needed for S3 file uploads
-boto
+boto3
 
 # Needed for integrations
 defusedxml

@@ -72,11 +72,11 @@ beautifulsoup4==4.9.0 \
 boto3==1.12.41 \
     --hash=sha256:c2c1ee703cb0fa03c5df84b7f00eaa462c808be477dc9014c1e8eef269122770 \
     --hash=sha256:ef16d7dc5f357faf1b081411d2faf62a01793f79a9d664c8b6b1b3ff37aa5e44 \
-    # via aws-sam-translator, moto
+    # via -r requirements/common.in, aws-sam-translator, moto
 boto==2.49.0 \
     --hash=sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8 \
     --hash=sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a \
-    # via -r requirements/common.in, moto
+    # via moto
 botocore==1.15.41 \
     --hash=sha256:a45a65ba036bc980decfc3ce6c2688a2d5fffd76e4b02ea4d59e63ff0f6896d4 \
     --hash=sha256:b12a5b642aa210a72d84204da18618276eeae052fbff58958f57d28ef3193034 \

@@ -46,10 +46,14 @@ beautifulsoup4==4.9.0 \
     --hash=sha256:a4bbe77fd30670455c5296242967a123ec28c37e9702a8a81bd2f20a4baf0368 \
     --hash=sha256:d4e96ac9b0c3a6d3f0caae2e4124e6055c5dcafde8e2f831ff194c104f0775a0 \
     # via -r requirements/common.in, pyoembed, zulip-bots
-boto==2.49.0 \
-    --hash=sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8 \
-    --hash=sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a \
+boto3==1.12.41 \
+    --hash=sha256:c2c1ee703cb0fa03c5df84b7f00eaa462c808be477dc9014c1e8eef269122770 \
+    --hash=sha256:ef16d7dc5f357faf1b081411d2faf62a01793f79a9d664c8b6b1b3ff37aa5e44 \
+    # via -r requirements/common.in
+botocore==1.15.41 \
+    --hash=sha256:a45a65ba036bc980decfc3ce6c2688a2d5fffd76e4b02ea4d59e63ff0f6896d4 \
+    --hash=sha256:b12a5b642aa210a72d84204da18618276eeae052fbff58958f57d28ef3193034 \
+    # via boto3, s3transfer
 cachetools==4.1.0 \
     --hash=sha256:1d057645db16ca7fe1f3bd953558897603d6f0b9c51ed9d11eb4d071ec4e2aab \
     --hash=sha256:de5d88f87781602201cde465d3afe837546663b168e8b39df67411b0bf10cefc \

@@ -213,6 +217,11 @@ django==2.2.12 \
     --hash=sha256:69897097095f336d5aeef45b4103dceae51c00afa6d3ae198a2a18e519791b7a \
     --hash=sha256:6ecd229e1815d4fc5240fc98f1cca78c41e7a8cd3e3f2eefadc4735031077916 \
     # via -r requirements/common.in, django-auth-ldap, django-bitfield, django-formtools, django-otp, django-phonenumber-field, django-sendfile2, django-two-factor-auth
+docutils==0.15.2 \
+    --hash=sha256:6c4f696463b79f1fb8ba0c594b63840ebd41f059e92b31957c46b74a4599b6d0 \
+    --hash=sha256:9e4d7ecfc600058e07ba661411a2b7de2fd0fafa17d1a7f7361cd47b1175c827 \
+    --hash=sha256:a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99 \
+    # via botocore
 future==0.18.2 \
     --hash=sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d \
     # via python-twitter

@@ -296,6 +305,10 @@ jinja2==2.11.2 \
     --hash=sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0 \
     --hash=sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035 \
     # via -r requirements/common.in
+jmespath==0.9.5 \
+    --hash=sha256:695cb76fa78a10663425d5b73ddc5714eb711157e52704d69be03b1a02ba4fec \
+    --hash=sha256:cca55c8d153173e21baa59983015ad0daf603f9cb799904ff057bfb8ff8dc2d9 \
+    # via boto3, botocore
 jsx-lexer==0.0.8 \
     --hash=sha256:1cb35102b78525aa3f587dc327f3208c0e1c76d5cdea64d4f9c3ced05d10c017 \
     --hash=sha256:b879c7fafe974440a1dd9f9544dfb8629fa22078ada7f769c8fbb06149eac5d1 \

@@ -494,7 +507,7 @@ pyopenssl==19.1.0 \
 python-dateutil==2.8.1 \
     --hash=sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c \
     --hash=sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a \
-    # via -r requirements/common.in, hypchat
+    # via -r requirements/common.in, botocore, hypchat
 python-gcm==0.4 \
     --hash=sha256:511c35fc5ae829f7fc3cbdb45c4ec3fda02f85e4fae039864efe82682ccb9c18 \
     # via -r requirements/common.in

@@ -574,6 +587,10 @@ requests[security]==2.23.0 \
     --hash=sha256:43999036bfa82904b6af1d99e4882b560e5e2c68e5c4b0aa03b655f3d7d73fee \
     --hash=sha256:b3f43d496c6daba4493e7c431722aeb7dbc6288f52a6e04e7b6023b0247817e6 \
     # via hypchat, matrix-client, premailer, pyoembed, python-gcm, python-twitter, requests-oauthlib, social-auth-core, stripe, twilio, zulip
+s3transfer==0.3.3 \
+    --hash=sha256:2482b4259524933a022d59da830f51bd746db62f047d6eb213f2f8855dcb8a13 \
+    --hash=sha256:921a37e2aefc64145e7b73d50c71bb4f26f46e4c9f414dc648c6245ff92cf7db \
+    # via boto3
 six==1.14.0 \
     --hash=sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a \
     --hash=sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c \

@@ -657,7 +674,7 @@ https://github.com/zulip/ultrajson/archive/70ac02becc3e11174cd5072650f885b30daab
 urllib3==1.25.9 \
     --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
     --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115 \
-    # via requests
+    # via botocore, requests
 uwsgi==2.0.18 \
     --hash=sha256:4972ac538800fb2d421027f49b4a1869b66048839507ccf0aa2fda792d99f583 \
     # via -r requirements/prod.in

@@ -7,8 +7,8 @@
 # (2) if it doesn't belong in EXCLUDED_TABLES, add a Config object for
 # it to get_realm_config.
 import datetime
-from boto.s3.connection import S3Connection
-from boto.s3.key import Key  # for mypy
+import boto3
+from boto3.resources.base import ServiceResource
 from django.apps import apps
 from django.conf import settings
 from django.forms.models import model_to_dict

@@ -1172,14 +1172,14 @@ def export_uploads_and_avatars(realm: Realm, output_dir: Path) -> None:
                                 processing_realm_icon_and_logo=True)
 
 def _check_key_metadata(email_gateway_bot: Optional[UserProfile],
-                        key: Key, processing_avatars: bool,
+                        key: ServiceResource, processing_avatars: bool,
                         realm: Realm, user_ids: Set[int]) -> None:
     # Helper function for export_files_from_s3
     if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
         if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
             raise AssertionError("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
         # Email gateway bot sends messages, potentially including attachments, cross-realm.
-        print("File uploaded by email gateway bot: %s / %s" % (key.name, key.metadata))
+        print("File uploaded by email gateway bot: %s / %s" % (key.key, key.metadata))
     elif processing_avatars:
         if 'user_profile_id' not in key.metadata:
             raise AssertionError("Missing user_profile_id in key metadata: %s" % (key.metadata,))

@@ -1190,16 +1190,16 @@ def _check_key_metadata(email_gateway_bot: Optional[UserProfile],
 
 def _get_exported_s3_record(
         bucket_name: str,
-        key: Key,
+        key: ServiceResource,
         processing_emoji: bool) -> Dict[str, Union[str, int]]:
     # Helper function for export_files_from_s3
-    record = dict(s3_path=key.name, bucket=bucket_name,
-                  size=key.size, last_modified=key.last_modified,
-                  content_type=key.content_type, md5=key.md5)
+    record = dict(s3_path=key.key, bucket=bucket_name,
+                  size=key.content_length, last_modified=key.last_modified,
+                  content_type=key.content_type, md5=key.e_tag)
     record.update(key.metadata)
 
     if processing_emoji:
-        record['file_name'] = os.path.basename(key.name)
+        record['file_name'] = os.path.basename(key.key)
 
     if "user_profile_id" in record:
         user_profile = get_user_profile_by_id(record['user_profile_id'])

@@ -1225,16 +1225,16 @@ def _get_exported_s3_record(
 
     return record
 
-def _save_s3_object_to_file(key: Key, output_dir: str, processing_avatars: bool,
+def _save_s3_object_to_file(key: ServiceResource, output_dir: str, processing_avatars: bool,
                             processing_emoji: bool, processing_realm_icon_and_logo: bool) -> None:
     # Helper function for export_files_from_s3
     if processing_avatars or processing_emoji or processing_realm_icon_and_logo:
-        filename = os.path.join(output_dir, key.name)
+        filename = os.path.join(output_dir, key.key)
     else:
-        fields = key.name.split('/')
+        fields = key.key.split('/')
         if len(fields) != 3:
-            raise AssertionError("Suspicious key with invalid format %s" % (key.name,))
-        filename = os.path.join(output_dir, key.name)
+            raise AssertionError("Suspicious key with invalid format %s" % (key.key,))
+        filename = os.path.join(output_dir, key.key)
 
     if "../" in filename:
         raise AssertionError("Suspicious file with invalid format %s" % (filename,))

@@ -1242,13 +1242,14 @@ def _save_s3_object_to_file(key: Key, output_dir: str, processing_avatars: bool,
     dirname = os.path.dirname(filename)
     if not os.path.exists(dirname):
        os.makedirs(dirname)
-    key.get_contents_to_filename(filename)
+    key.download_file(filename)
 
 def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
                          processing_avatars: bool=False, processing_emoji: bool=False,
                          processing_realm_icon_and_logo: bool=False) -> None:
-    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
-    bucket = conn.get_bucket(bucket_name, validate=True)
+    session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
+    s3 = session.resource('s3')
+    bucket = s3.Bucket(bucket_name)
     records = []
 
     logging.info("Downloading uploaded files from %s", bucket_name)

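For readers porting similar code, the hunk above is the commit's core connection-handling change, repeated throughout the rest of the diff. A minimal standalone sketch of the old and new idioms follows; the bucket name and credentials are illustrative, not taken from the commit:

    import boto3

    # boto (removed):
    #     conn = S3Connection(key, secret)
    #     bucket = conn.get_bucket('example-bucket', validate=True)  # issues a request
    #
    # boto3 (added): a Session only holds credentials and configuration; Bucket()
    # builds a local wrapper and performs no network I/O until an operation runs.
    session = boto3.Session(aws_access_key_id='example-key',
                            aws_secret_access_key='example-secret',
                            region_name='us-east-1')  # region added for the sketch
    s3 = session.resource('s3')
    bucket = s3.Bucket('example-bucket')
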
@@ -1256,7 +1257,6 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
     avatar_hash_values = set()
     user_ids = set()
     if processing_avatars:
-        bucket_list = bucket.list()
         for user_profile in UserProfile.objects.filter(realm=realm):
             avatar_path = user_avatar_path_from_ids(user_profile.id, realm.id)
             avatar_hash_values.add(avatar_path)

@@ -1264,11 +1264,11 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
             user_ids.add(user_profile.id)
 
     if processing_realm_icon_and_logo:
-        bucket_list = bucket.list(prefix="%s/realm/" % (realm.id,))
+        object_prefix = "%s/realm/" % (realm.id,)
     elif processing_emoji:
-        bucket_list = bucket.list(prefix="%s/emoji/images/" % (realm.id,))
+        object_prefix = "%s/emoji/images/" % (realm.id,)
     else:
-        bucket_list = bucket.list(prefix="%s/" % (realm.id,))
+        object_prefix = "%s/" % (realm.id,)
 
     if settings.EMAIL_GATEWAY_BOT is not None:
         email_gateway_bot: Optional[UserProfile] = get_system_bot(settings.EMAIL_GATEWAY_BOT)

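Where boto fetched listings via bucket.list(prefix=...), boto3 exposes a lazy collection that pages through results as you iterate, yielding ObjectSummary instances. A minimal sketch of the equivalence, with a hypothetical bucket and prefix:

    import boto3

    bucket = boto3.resource('s3', region_name='us-east-1').Bucket('example-bucket')

    # Equivalent of the removed bucket.list(prefix="2/realm/"):
    for summary in bucket.objects.filter(Prefix='2/realm/'):
        # ObjectSummary carries .key and .size; summary.Object() returns the
        # full Object resource when metadata or content is needed.
        print(summary.key)
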
@@ -1276,16 +1276,16 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
         email_gateway_bot = None
 
     count = 0
-    for bkey in bucket_list:
-        if processing_avatars and bkey.name not in avatar_hash_values:
+    for bkey in bucket.objects.filter(Prefix=object_prefix):
+        if processing_avatars and bkey.Object().key not in avatar_hash_values:
             continue
-        key = bucket.get_key(bkey.name)
 
+        key = bucket.Object(bkey.key)
         # This can happen if an email address has moved realms
         _check_key_metadata(email_gateway_bot, key, processing_avatars, realm, user_ids)
         record = _get_exported_s3_record(bucket_name, key, processing_emoji)
 
-        record['path'] = key.name
+        record['path'] = key.key
         _save_s3_object_to_file(key, output_dir, processing_avatars, processing_emoji,
                                 processing_realm_icon_and_logo)

@@ -4,8 +4,7 @@ import os
 import ujson
 import shutil
 
-from boto.s3.connection import S3Connection
-from boto.s3.key import Key
+import boto3
 from bs4 import BeautifulSoup
 from django.conf import settings
 from django.db import connection

|
|||
bucket_name = settings.S3_AVATAR_BUCKET
|
||||
else:
|
||||
bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
|
||||
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
bucket = conn.get_bucket(bucket_name, validate=True)
|
||||
session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
bucket = session.resource('s3').Bucket(bucket_name)
|
||||
|
||||
count = 0
|
||||
for record in records:
|
||||
|
@ -657,8 +656,8 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av
|
|||
path_maps['attachment_path'][record['s3_path']] = relative_path
|
||||
|
||||
if s3_uploads:
|
||||
key = Key(bucket)
|
||||
key.key = relative_path
|
||||
key = bucket.Object(relative_path)
|
||||
metadata = {}
|
||||
if processing_emojis and "user_profile_id" not in record:
|
||||
# Exported custom emoji from tools like Slack don't have
|
||||
# the data for what user uploaded them in `user_profile_id`.
|
||||
|
@ -674,11 +673,11 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av
|
|||
logging.info("Uploaded by ID mapped user: %s!", user_profile_id)
|
||||
user_profile_id = ID_MAP["user_profile"][user_profile_id]
|
||||
user_profile = get_user_profile_by_id(user_profile_id)
|
||||
key.set_metadata("user_profile_id", str(user_profile.id))
|
||||
metadata["user_profile_id"] = str(user_profile.id)
|
||||
|
||||
if 'last_modified' in record:
|
||||
key.set_metadata("orig_last_modified", str(record['last_modified']))
|
||||
key.set_metadata("realm_id", str(record['realm_id']))
|
||||
metadata["orig_last_modified"] = str(record['last_modified'])
|
||||
metadata["realm_id"] = str(record['realm_id'])
|
||||
|
||||
# Zulip exports will always have a content-type, but third-party exports might not.
|
||||
content_type = record.get("content_type")
|
||||
|
@@ -690,9 +689,11 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av
                 # set; that is OK, because those are never served
                 # directly anyway.
                 content_type = 'application/octet-stream'
-            headers: Dict[str, Any] = {'Content-Type': content_type}
 
-            key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)
+            key.upload_file(os.path.join(import_dir, record['path']),
+                            ExtraArgs={
+                                'ContentType': content_type,
+                                'Metadata': metadata})
         else:
             if processing_avatars or processing_emojis or processing_realm_icons:
                 file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", relative_path)

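The metadata handling above is the general boto-to-boto3 upload pattern: instead of mutating a Key with set_metadata() calls before set_contents_from_filename(), boto3 collects user metadata in a plain dict and sends it with one managed transfer. A sketch with hypothetical names and paths:

    import boto3

    bucket = boto3.resource('s3', region_name='us-east-1').Bucket('example-bucket')
    key = bucket.Object('2/uploads/attachment.txt')

    # User metadata is stored by S3 as x-amz-meta-* headers on the object.
    metadata = {'user_profile_id': '42', 'realm_id': '2'}
    key.upload_file('/tmp/attachment.txt',
                    ExtraArgs={'ContentType': 'text/plain',
                               'Metadata': metadata})
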
@@ -9,8 +9,8 @@ from django.conf import settings
 from django.test import override_settings
 from django.http import HttpResponse, HttpResponseRedirect
 from django.db.migrations.state import StateApps
-from boto.s3.connection import S3Connection
-from boto.s3.bucket import Bucket
+import boto3
+from boto3.resources.base import ServiceResource
 
 import zerver.lib.upload
 from zerver.lib.actions import do_set_realm_property

@@ -48,7 +48,7 @@ import re
 import sys
 import time
 import ujson
-from moto import mock_s3_deprecated
+from moto import mock_s3
 
 import fakeldap
 import ldap

@@ -456,7 +456,7 @@ def load_subdomain_token(response: HttpResponse) -> ExternalAuthDataDict:
 FuncT = TypeVar('FuncT', bound=Callable[..., None])
 
 def use_s3_backend(method: FuncT) -> FuncT:
-    @mock_s3_deprecated
+    @mock_s3
     @override_settings(LOCAL_UPLOADS_DIR=None)
     def new_method(*args: Any, **kwargs: Any) -> Any:
         zerver.lib.upload.upload_backend = S3UploadBackend()

@@ -466,9 +466,10 @@ def use_s3_backend(method: FuncT) -> FuncT:
         zerver.lib.upload.upload_backend = LocalUploadBackend()
     return new_method
 
-def create_s3_buckets(*bucket_names: Tuple[str]) -> List[Bucket]:
-    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
-    buckets = [conn.create_bucket(name) for name in bucket_names]
+def create_s3_buckets(*bucket_names: Tuple[str]) -> List[ServiceResource]:
+    session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
+    s3 = session.resource('s3')
+    buckets = [s3.create_bucket(Bucket=name) for name in bucket_names]
     return buckets
 
 def use_db_models(method: Callable[..., None]) -> Callable[..., None]:  # nocoverage

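Because moto's mock_s3 patches botocore itself, the rewritten helper exercises the same code path as production. A self-contained sketch of how the new helper behaves under the mock, with an illustrative bucket name; note that boto3's create_bucket takes the name as the Bucket= keyword where boto took it positionally:

    import boto3
    from moto import mock_s3

    @mock_s3
    def demo() -> None:
        session = boto3.Session(aws_access_key_id='fake',
                                aws_secret_access_key='fake',
                                region_name='us-east-1')
        s3 = session.resource('s3')
        bucket = s3.create_bucket(Bucket='test-avatars')
        bucket.Object('hello.txt').put(Body=b'zulip!')
        assert bucket.Object('hello.txt').get()['Body'].read() == b'zulip!'

    demo()
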
@@ -1,4 +1,4 @@
-from typing import Optional, Tuple, Any
+from typing import Any, Optional, Tuple
 
 from datetime import timedelta
 

@@ -15,9 +15,12 @@ from zerver.lib.avatar_hash import user_avatar_path
 from zerver.lib.exceptions import JsonableError, ErrorCode
 from zerver.lib.utils import generate_random_token
 
-from boto.s3.bucket import Bucket
-from boto.s3.key import Key
-from boto.s3.connection import S3Connection
+import boto3
+import botocore
+from botocore.client import Config
+from boto3.resources.base import ServiceResource
+from boto3.session import Session
 
 from mimetypes import guess_type, guess_extension
 
 from zerver.models import get_user_profile_by_id

@@ -269,16 +272,10 @@ class ZulipUploadBackend:
 
 ### S3
 
-def get_bucket(conn: S3Connection, bucket_name: str) -> Bucket:
-    # Calling get_bucket() with validate=True can apparently lead
-    # to expensive S3 bills:
-    # https://www.appneta.com/blog/s3-list-get-bucket-default/
-    # The benefits of validation aren't completely clear to us, and
-    # we want to save on our bills, so we set the validate flag to False.
-    # (We think setting validate to True would cause us to fail faster
-    # in situations where buckets don't exist, but that shouldn't be
-    # an issue for us.)
-    bucket = conn.get_bucket(bucket_name, validate=False)
+def get_bucket(session: Session, bucket_name: str) -> ServiceResource:
+    # See https://github.com/python/typeshed/issues/2706
+    # for why this return type is a `ServiceResource`.
+    bucket = session.resource('s3').Bucket(bucket_name)
     return bucket
 
 def upload_image_to_s3(

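The ServiceResource annotations here (and in test_helpers.py) exist because boto3 builds its resource classes dynamically at runtime, so no precise static type is importable; the typeshed issue linked in the comment discusses this. A quick demonstration of why only the base class can be named, with an illustrative bucket:

    import boto3
    from boto3.resources.base import ServiceResource

    bucket = boto3.resource('s3', region_name='us-east-1').Bucket('example-bucket')
    # The concrete class is synthesized by a factory and cannot be imported,
    # so annotations fall back to the common base class.
    print(type(bucket))                         # <class 'boto3.resources.factory.s3.Bucket'>
    print(isinstance(bucket, ServiceResource))  # True
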
@@ -288,20 +285,22 @@ def upload_image_to_s3(
         user_profile: UserProfile,
         contents: bytes) -> None:
 
-    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
-    bucket = get_bucket(conn, bucket_name)
-    key = Key(bucket)
-    key.key = file_name
-    key.set_metadata("user_profile_id", str(user_profile.id))
-    key.set_metadata("realm_id", str(user_profile.realm_id))
+    session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
+    bucket = get_bucket(session, bucket_name)
+    key = bucket.Object(file_name)
+    metadata = {
+        "user_profile_id": str(user_profile.id),
+        "realm_id": str(user_profile.realm_id)
+    }
 
-    headers = {}
-    if content_type is not None:
-        headers["Content-Type"] = content_type
+    content_disposition = ''
+    if content_type is None:
+        content_type = ''
     if content_type not in INLINE_MIME_TYPES:
-        headers["Content-Disposition"] = "attachment"
+        content_disposition = "attachment"
 
-    key.set_contents_from_string(contents, headers=headers)
+    key.put(Body=contents, Metadata=metadata, ContentType=content_type,
+            ContentDisposition=content_disposition)
 
 def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
     upload_quota = realm.upload_quota_bytes()

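Where boto smuggled Content-Type and Content-Disposition through an HTTP headers dict, boto3's Object.put() takes them as named parameters, with user metadata split out into Metadata. A sketch, with hypothetical bucket and key names:

    import boto3

    key = (boto3.resource('s3', region_name='us-east-1')
           .Bucket('example-bucket').Object('avatars/42.png'))

    key.put(Body=b'<image bytes>',
            Metadata={'user_profile_id': '42', 'realm_id': '2'},
            ContentType='image/png',
            ContentDisposition='attachment')
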
@ -331,34 +330,42 @@ def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Opti
|
|||
|
||||
|
||||
def get_signed_upload_url(path: str) -> str:
|
||||
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
return conn.generate_url(SIGNED_UPLOAD_URL_DURATION, 'GET',
|
||||
bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)
|
||||
client = boto3.client('s3', aws_access_key_id=settings.S3_KEY,
|
||||
aws_secret_access_key=settings.S3_SECRET_KEY)
|
||||
return client.generate_presigned_url(ClientMethod='get_object',
|
||||
Params={
|
||||
'Bucket': settings.S3_AUTH_UPLOADS_BUCKET,
|
||||
'Key': path},
|
||||
ExpiresIn=SIGNED_UPLOAD_URL_DURATION,
|
||||
HttpMethod='GET')
|
||||
|
||||
def get_realm_for_filename(path: str) -> Optional[int]:
|
||||
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
key: Optional[Key] = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
|
||||
if key is None:
|
||||
# This happens if the key does not exist.
|
||||
session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
bucket = get_bucket(session, settings.S3_AUTH_UPLOADS_BUCKET)
|
||||
key = bucket.Object(path)
|
||||
|
||||
try:
|
||||
user_profile_id = key.metadata['user_profile_id']
|
||||
except botocore.exceptions.ClientError:
|
||||
return None
|
||||
return get_user_profile_by_id(key.metadata["user_profile_id"]).realm_id
|
||||
return get_user_profile_by_id(user_profile_id).realm_id
|
||||
|
||||
class S3UploadBackend(ZulipUploadBackend):
|
||||
def __init__(self) -> None:
|
||||
self.connection = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
self.session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
|
||||
|
||||
def delete_file_from_s3(self, path_id: str, bucket_name: str) -> bool:
|
||||
bucket = get_bucket(self.connection, bucket_name)
|
||||
|
||||
# check if file exists
|
||||
key: Optional[Key] = bucket.get_key(path_id)
|
||||
if key is not None:
|
||||
bucket.delete_key(key)
|
||||
return True
|
||||
bucket = get_bucket(self.session, bucket_name)
|
||||
key = bucket.Object(path_id)
|
||||
|
||||
try:
|
||||
key.load()
|
||||
except botocore.exceptions.ClientError:
|
||||
file_name = path_id.split("/")[-1]
|
||||
logging.warning("%s does not exist. Its entry in the database will be removed.", file_name)
|
||||
return False
|
||||
key.delete()
|
||||
return True
|
||||
|
||||
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
|
||||
content_type: Optional[str], file_data: bytes,
|
||||
|
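Two idioms from this hunk recur in the rest of the commit. First, boto's generate_url() becomes generate_presigned_url() on a low-level client. Second, boto returned None for missing keys, while boto3 raises botocore.exceptions.ClientError, so existence checks turn into try/except around a HEAD-style load(). A sketch of both, with illustrative names:

    import boto3
    import botocore.exceptions

    client = boto3.client('s3', aws_access_key_id='example-key',
                          aws_secret_access_key='example-secret',
                          region_name='us-east-1')
    url = client.generate_presigned_url(ClientMethod='get_object',
                                        Params={'Bucket': 'example-bucket',
                                                'Key': 'some/path'},
                                        ExpiresIn=60)

    obj = (boto3.resource('s3', region_name='us-east-1')
           .Bucket('example-bucket').Object('some/path'))
    try:
        obj.load()  # HEAD request; raises ClientError if the key is absent
    except botocore.exceptions.ClientError:
        print('object does not exist')
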
@@ -440,10 +447,12 @@ class S3UploadBackend(ZulipUploadBackend):
         self.delete_file_from_s3(path_id + "-medium.png", bucket_name)
         self.delete_file_from_s3(path_id, bucket_name)
 
-    def get_avatar_key(self, file_name: str) -> Key:
-        bucket = get_bucket(self.connection, settings.S3_AVATAR_BUCKET)
+    def get_avatar_key(self, file_name: str) -> ServiceResource:
+        # See https://github.com/python/typeshed/issues/2706
+        # for why this return type is a `ServiceResource`.
+        bucket = get_bucket(self.session, settings.S3_AVATAR_BUCKET)
 
-        key = bucket.get_key(file_name)
+        key = bucket.Object(file_name)
         return key
 
     def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:

@@ -451,7 +460,7 @@ class S3UploadBackend(ZulipUploadBackend):
         s3_target_file_name = user_avatar_path(target_profile)
 
         key = self.get_avatar_key(s3_source_file_name + ".original")
-        image_data = key.get_contents_as_string()
+        image_data = key.get()['Body'].read()
         content_type = key.content_type
 
         self.write_avatar_images(s3_target_file_name, target_profile, image_data, content_type)

@@ -460,8 +469,7 @@ class S3UploadBackend(ZulipUploadBackend):
         bucket = settings.S3_AVATAR_BUCKET
         medium_suffix = "-medium.png" if medium else ""
         # ?x=x allows templates to append additional parameters with &s
-        return "https://%s.%s/%s%s?x=x" % (bucket, self.connection.DefaultHost,
-                                           hash_key, medium_suffix)
+        return "https://%s.s3.amazonaws.com/%s%s?x=x" % (bucket, hash_key, medium_suffix)
 
     def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
         bucket = settings.S3_AVATAR_BUCKET

@@ -499,8 +507,8 @@ class S3UploadBackend(ZulipUploadBackend):
     def get_realm_icon_url(self, realm_id: int, version: int) -> str:
         bucket = settings.S3_AVATAR_BUCKET
         # ?x=x allows templates to append additional parameters with &s
-        return "https://%s.%s/%s/realm/icon.png?version=%s" % (
-            bucket, self.connection.DefaultHost, realm_id, version)
+        return "https://%s.s3.amazonaws.com/%s/realm/icon.png?version=%s" % (
+            bucket, realm_id, version)
 
     def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
                                 night: bool) -> None:

@@ -539,17 +547,17 @@ class S3UploadBackend(ZulipUploadBackend):
             file_name = 'logo.png'
         else:
             file_name = 'night_logo.png'
-        return "https://%s.%s/%s/realm/%s?version=%s" % (
-            bucket, self.connection.DefaultHost, realm_id, file_name, version)
+        return "https://%s.s3.amazonaws.com/%s/realm/%s?version=%s" % (
+            bucket, realm_id, file_name, version)
 
     def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
         file_path = user_avatar_path(user_profile)
         s3_file_name = file_path
 
         bucket_name = settings.S3_AVATAR_BUCKET
-        bucket = get_bucket(self.connection, bucket_name)
-        key = bucket.get_key(file_path + ".original")
-        image_data = key.get_contents_as_string()
+        bucket = get_bucket(self.session, bucket_name)
+        key = bucket.Object(file_path + ".original")
+        image_data = key.get()['Body'].read()
 
         resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
         upload_image_to_s3(

@@ -567,9 +575,9 @@ class S3UploadBackend(ZulipUploadBackend):
         s3_file_name = file_path
 
         bucket_name = settings.S3_AVATAR_BUCKET
-        bucket = get_bucket(self.connection, bucket_name)
-        key = bucket.get_key(file_path + ".original")
-        image_data = key.get_contents_as_string()
+        bucket = get_bucket(self.session, bucket_name)
+        key = bucket.Object(file_path + ".original")
+        image_data = key.get()['Body'].read()
 
         resized_avatar = resize_avatar(image_data)
         upload_image_to_s3(

@@ -610,24 +618,32 @@ class S3UploadBackend(ZulipUploadBackend):
         bucket = settings.S3_AVATAR_BUCKET
         emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id,
                                                         emoji_file_name=emoji_file_name)
-        return "https://%s.%s/%s" % (bucket, self.connection.DefaultHost, emoji_path)
+        return "https://%s.s3.amazonaws.com/%s" % (bucket, emoji_path)
 
     def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
-        def percent_callback(complete: Any, total: Any) -> None:
+        def percent_callback(bytes_transferred: Any) -> None:
             sys.stdout.write('.')
             sys.stdout.flush()
 
-        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
+        session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
         # We use the avatar bucket, because it's world-readable.
-        bucket = get_bucket(conn, settings.S3_AVATAR_BUCKET)
-        key = Key(bucket)
-        key.key = os.path.join("exports", generate_random_token(32), os.path.basename(tarball_path))
-        key.set_contents_from_filename(tarball_path, cb=percent_callback, num_cb=40)
+        bucket = get_bucket(session, settings.S3_AVATAR_BUCKET)
+        key = bucket.Object(os.path.join("exports", generate_random_token(32),
+                                         os.path.basename(tarball_path)))
 
-        public_url = 'https://{bucket}.{host}/{key}'.format(
-            host=conn.server_name(),
-            bucket=bucket.name,
-            key=key.key)
+        key.upload_file(tarball_path, Callback=percent_callback)
+
+        session = botocore.session.get_session()
+        config = Config(signature_version=botocore.UNSIGNED)
+
+        public_url = session.create_client('s3', config=config).generate_presigned_url(
+            'get_object',
+            Params={
+                'Bucket': bucket.name,
+                'Key': key.key
+            },
+            ExpiresIn=0
+        )
         return public_url
 
     def delete_export_tarball(self, path_id: str) -> Optional[str]:

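The public_url construction deserves a note: boto3 has no direct replacement for building a URL out of boto's conn.server_name(), so the commit asks an unsigned botocore client for a "presigned" URL with ExpiresIn=0. With signing disabled, the result is a plain, stable public URL with no signature query parameters. The trick in isolation, with an illustrative bucket and key:

    import botocore
    import botocore.session
    from botocore.client import Config

    session = botocore.session.get_session()
    config = Config(signature_version=botocore.UNSIGNED)  # skip SigV4 signing entirely
    client = session.create_client('s3', region_name='us-east-1', config=config)
    url = client.generate_presigned_url(
        'get_object',
        Params={'Bucket': 'example-bucket', 'Key': 'exports/abc123/export.tar.gz'},
        ExpiresIn=0)  # unsigned, so no expiry parameters are appended
    # e.g. https://example-bucket.s3.amazonaws.com/exports/abc123/export.tar.gz
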
@@ -1,7 +1,7 @@
 import os
 import shutil
 
-from boto.s3.connection import S3Connection
+import boto3
 from django.conf import settings
 from django.db import migrations, models
 from django.db.backends.postgresql.schema import DatabaseSchemaEditor

@@ -53,12 +53,13 @@ class LocalUploader(Uploader):
 class S3Uploader(Uploader):
     def __init__(self) -> None:
         super().__init__()
-        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
+        session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
         self.bucket_name = settings.S3_AVATAR_BUCKET
-        self.bucket = conn.get_bucket(self.bucket_name, validate=False)
+        self.bucket = session.resource('s3').Bucket(self.bucket_name)
 
     def copy_files(self, src_key: str, dst_key: str) -> None:
-        self.bucket.copy_key(dst_key, self.bucket_name, src_key)
+        source = dict(Bucket=self.bucket_name, Key=src_key)
+        self.bucket.copy(source, dst_key)
 
 def get_uploader() -> Uploader:
     if settings.LOCAL_UPLOADS_DIR is None:

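boto's Bucket.copy_key(dst, src_bucket, src) maps onto boto3's managed Bucket.copy(CopySource, Key), with the source expressed as a dict. A sketch with hypothetical names:

    import boto3

    bucket = boto3.resource('s3', region_name='us-east-1').Bucket('example-avatars')
    source = {'Bucket': 'example-avatars', 'Key': 'old/path.png'}
    # Managed copy: handles multipart copies transparently for large objects.
    bucket.copy(source, 'new/path.png')
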
@@ -4086,14 +4086,14 @@ class TestZulipLDAPUserPopulator(ZulipLDAPTestCase):
         original_image_path_id = path_id + ".original"
         medium_path_id = path_id + "-medium.png"
 
-        original_image_key = bucket.get_key(original_image_path_id)
-        medium_image_key = bucket.get_key(medium_path_id)
+        original_image_key = bucket.Object(original_image_path_id)
+        medium_image_key = bucket.Object(medium_path_id)
 
-        image_data = original_image_key.get_contents_as_string()
+        image_data = original_image_key.get()['Body'].read()
         self.assertEqual(image_data, test_image_data)
 
         test_medium_image_data = resize_avatar(test_image_data, MEDIUM_AVATAR_SIZE)
-        medium_image_data = medium_image_key.get_contents_as_string()
+        medium_image_data = medium_image_key.get()['Body'].read()
         self.assertEqual(medium_image_data, test_medium_image_data)
 
         # Try to use invalid data as the image:

@@ -36,6 +36,7 @@ from zerver.lib.test_classes import (
     ZulipTestCase,
 )
 from zerver.lib.test_helpers import (
     get_test_image_file,
     use_s3_backend,
+    create_s3_buckets,
 )

@@ -89,10 +90,6 @@ from zerver.models import (
     get_huddle_hash,
 )
 
-from zerver.lib.test_helpers import (
-    get_test_image_file,
-)
-
 class QueryUtilTest(ZulipTestCase):
     def _create_messages(self) -> None:
         for name in ['cordelia', 'hamlet', 'iago']:

@@ -1096,7 +1093,7 @@ class ImportExportTest(ZulipTestCase):
         uploaded_file = Attachment.objects.get(realm=imported_realm)
         self.assertEqual(len(b'zulip!'), uploaded_file.size)
 
-        attachment_content = uploads_bucket.get_key(uploaded_file.path_id).get_contents_as_string()
+        attachment_content = uploads_bucket.Object(uploaded_file.path_id).get()['Body'].read()
         self.assertEqual(b"zulip!", attachment_content)
 
         # Test emojis

@@ -1105,43 +1102,43 @@ class ImportExportTest(ZulipTestCase):
             realm_id=imported_realm.id,
             emoji_file_name=realm_emoji.file_name,
         )
-        emoji_key = avatar_bucket.get_key(emoji_path)
-        self.assertIsNotNone(emoji_key)
+        emoji_key = avatar_bucket.Object(emoji_path)
+        self.assertIsNotNone(emoji_key.get()['Body'].read())
         self.assertEqual(emoji_key.key, emoji_path)
 
         # Test avatars
         user_email = Message.objects.all()[0].sender.email
         user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
         avatar_path_id = user_avatar_path(user_profile) + ".original"
-        original_image_key = avatar_bucket.get_key(avatar_path_id)
+        original_image_key = avatar_bucket.Object(avatar_path_id)
         self.assertEqual(original_image_key.key, avatar_path_id)
-        image_data = original_image_key.get_contents_as_string()
+        image_data = avatar_bucket.Object(avatar_path_id).get()['Body'].read()
         self.assertEqual(image_data, test_image_data)
 
         # Test realm icon and logo
         upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)
 
         original_icon_path_id = os.path.join(upload_path, "icon.original")
-        original_icon_key = avatar_bucket.get_key(original_icon_path_id)
-        self.assertEqual(original_icon_key.get_contents_as_string(), test_image_data)
+        original_icon_key = avatar_bucket.Object(original_icon_path_id)
+        self.assertEqual(original_icon_key.get()['Body'].read(), test_image_data)
         resized_icon_path_id = os.path.join(upload_path, "icon.png")
-        resized_icon_key = avatar_bucket.get_key(resized_icon_path_id)
+        resized_icon_key = avatar_bucket.Object(resized_icon_path_id)
         self.assertEqual(resized_icon_key.key, resized_icon_path_id)
         self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)
 
         original_logo_path_id = os.path.join(upload_path, "logo.original")
-        original_logo_key = avatar_bucket.get_key(original_logo_path_id)
-        self.assertEqual(original_logo_key.get_contents_as_string(), test_image_data)
+        original_logo_key = avatar_bucket.Object(original_logo_path_id)
+        self.assertEqual(original_logo_key.get()['Body'].read(), test_image_data)
         resized_logo_path_id = os.path.join(upload_path, "logo.png")
-        resized_logo_key = avatar_bucket.get_key(resized_logo_path_id)
+        resized_logo_key = avatar_bucket.Object(resized_logo_path_id)
         self.assertEqual(resized_logo_key.key, resized_logo_path_id)
         self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)
 
         night_logo_original_path_id = os.path.join(upload_path, "night_logo.original")
-        night_logo_original_key = avatar_bucket.get_key(night_logo_original_path_id)
-        self.assertEqual(night_logo_original_key.get_contents_as_string(), test_image_data)
+        night_logo_original_key = avatar_bucket.Object(night_logo_original_path_id)
+        self.assertEqual(night_logo_original_key.get()['Body'].read(), test_image_data)
         resized_night_logo_path_id = os.path.join(upload_path, "night_logo.png")
-        resized_night_logo_key = avatar_bucket.get_key(resized_night_logo_path_id)
+        resized_night_logo_key = avatar_bucket.Object(resized_night_logo_path_id)
         self.assertEqual(resized_night_logo_key.key, resized_night_logo_path_id)
         self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

@@ -15,6 +15,7 @@ from zerver.views.realm_export import export_realm
 
 import os
 import ujson
+import botocore.exceptions
 
 class RealmExportTest(ZulipTestCase):
     """

@@ -60,7 +61,7 @@ class RealmExportTest(ZulipTestCase):
         # Test that the file is hosted, and the contents are as expected.
         path_id = ujson.loads(audit_log_entry.extra_data).get('export_path')
         self.assertIsNotNone(path_id)
-        self.assertEqual(bucket.get_key(path_id).get_contents_as_string(), b'zulip!')
+        self.assertEqual(bucket.Object(path_id).get()['Body'].read(), b'zulip!')
 
         result = self.client_get('/json/export/realm')
         self.assert_json_success(result)

@@ -79,7 +80,8 @@ class RealmExportTest(ZulipTestCase):
         # Finally, delete the file.
         result = self.client_delete('/json/export/realm/{id}'.format(id=audit_log_entry.id))
         self.assert_json_success(result)
-        self.assertIsNone(bucket.get_key(path_id))
+        with self.assertRaises(botocore.exceptions.ClientError):
+            bucket.Object(path_id).load()
 
         # Try to delete an export with a `deleted_timestamp` key.
         audit_log_entry.refresh_from_db()

@@ -1,6 +1,6 @@
 from django.conf import settings
 
-from moto import mock_s3_deprecated
+from moto import mock_s3
 from unittest.mock import Mock, patch
 import logging
 

@@ -28,7 +28,7 @@ class TransferUploadsToS3Test(ZulipTestCase):
         m2.assert_called_with(4)
         m3.assert_called_with(4)
 
-    @mock_s3_deprecated
+    @mock_s3
     def test_transfer_avatars_to_s3(self) -> None:
         bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
 

@@ -41,16 +41,16 @@ class TransferUploadsToS3Test(ZulipTestCase):
         transfer_avatars_to_s3(1)
 
         path_id = user_avatar_path(user)
-        image_key = bucket.get_key(path_id)
-        original_image_key = bucket.get_key(path_id + ".original")
-        medium_image_key = bucket.get_key(path_id + "-medium.png")
+        image_key = bucket.Object(path_id)
+        original_image_key = bucket.Object(path_id + ".original")
+        medium_image_key = bucket.Object(path_id + "-medium.png")
 
-        self.assertEqual(len(bucket.get_all_keys()), 3)
-        self.assertEqual(image_key.get_contents_as_string(), open(avatar_disk_path(user), "rb").read())
-        self.assertEqual(original_image_key.get_contents_as_string(), open(avatar_disk_path(user, original=True), "rb").read())
-        self.assertEqual(medium_image_key.get_contents_as_string(), open(avatar_disk_path(user, medium=True), "rb").read())
+        self.assertEqual(len(list(bucket.objects.all())), 3)
+        self.assertEqual(image_key.get()['Body'].read(), open(avatar_disk_path(user), "rb").read())
+        self.assertEqual(original_image_key.get()['Body'].read(), open(avatar_disk_path(user, original=True), "rb").read())
+        self.assertEqual(medium_image_key.get()['Body'].read(), open(avatar_disk_path(user, medium=True), "rb").read())
 
-    @mock_s3_deprecated
+    @mock_s3
     def test_transfer_message_files(self) -> None:
         bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
         hamlet = self.example_user('hamlet')

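get_all_keys() has no single-call boto3 equivalent, so the rewritten tests materialize the lazy objects collection with list() before counting. Under moto this is cheap; against real S3 it pages through the whole bucket. A sketch under assumed moto usage:

    import boto3
    from moto import mock_s3

    @mock_s3
    def demo() -> None:
        bucket = (boto3.resource('s3', region_name='us-east-1')
                  .create_bucket(Bucket='test-bucket'))
        bucket.Object('a.txt').put(Body=b'1')
        bucket.Object('b.txt').put(Body=b'2')
        # boto: len(bucket.get_all_keys()); boto3's collection is lazy,
        # so wrap it in list() to count.
        assert len(list(bucket.objects.all())) == 2

    demo()
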
@@ -63,11 +63,11 @@ class TransferUploadsToS3Test(ZulipTestCase):
 
         attachments = Attachment.objects.all()
 
-        self.assertEqual(len(bucket.get_all_keys()), 2)
-        self.assertEqual(bucket.get_key(attachments[0].path_id).get_contents_as_string(), b'zulip1!')
-        self.assertEqual(bucket.get_key(attachments[1].path_id).get_contents_as_string(), b'zulip2!')
+        self.assertEqual(len(list(bucket.objects.all())), 2)
+        self.assertEqual(bucket.Object(attachments[0].path_id).get()['Body'].read(), b'zulip1!')
+        self.assertEqual(bucket.Object(attachments[1].path_id).get()['Body'].read(), b'zulip2!')
 
-    @mock_s3_deprecated
+    @mock_s3
     def test_transfer_emoji_to_s3(self) -> None:
         bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
         othello = self.example_user('othello')

@@ -87,13 +87,13 @@ class TransferUploadsToS3Test(ZulipTestCase):
 
         transfer_emoji_to_s3(1)
 
-        self.assertEqual(len(bucket.get_all_keys()), 2)
-        original_key = bucket.get_key(emoji_path + ".original")
-        resized_key = bucket.get_key(emoji_path)
+        self.assertEqual(len(list(bucket.objects.all())), 2)
+        original_key = bucket.Object(emoji_path + ".original")
+        resized_key = bucket.Object(emoji_path)
 
         image_file.seek(0)
         image_data = image_file.read()
         resized_image_data = resize_emoji(image_data)
 
-        self.assertEqual(image_data, original_key.get_contents_as_string())
-        self.assertEqual(resized_image_data, resized_key.get_contents_as_string())
+        self.assertEqual(image_data, original_key.get()['Body'].read())
+        self.assertEqual(resized_image_data, resized_key.get()['Body'].read())

@@ -46,6 +46,7 @@ from scripts.lib.zulip_tools import get_dev_uuid_var_path
 import urllib
 import ujson
 from PIL import Image
+import botocore.exceptions
 
 from io import StringIO
 from unittest import mock

@@ -1577,7 +1578,7 @@ class S3Test(ZulipTestCase):
         base = '/user_uploads/'
         self.assertEqual(base, uri[:len(base)])
         path_id = re.sub('/user_uploads/', '', uri)
-        content = bucket.get_key(path_id).get_contents_as_string()
+        content = bucket.Object(path_id).get()['Body'].read()
         self.assertEqual(b"zulip!", content)
 
         uploaded_file = Attachment.objects.get(owner=user_profile, path_id=path_id)

@@ -1595,7 +1596,7 @@ class S3Test(ZulipTestCase):
         uri = upload_message_file('dummy.txt', len(b'zulip!'), None, b'zulip!', user_profile)
 
         path_id = re.sub('/user_uploads/', '', uri)
-        self.assertEqual(b"zulip!", bucket.get_key(path_id).get_contents_as_string())
+        self.assertEqual(b"zulip!", bucket.Object(path_id).get()['Body'].read())
         uploaded_file = Attachment.objects.get(owner=user_profile, path_id=path_id)
         self.assertEqual(len(b"zulip!"), uploaded_file.size)
 

@@ -1618,7 +1619,7 @@ class S3Test(ZulipTestCase):
         """
         A call to /json/user_uploads should return a uri and actually create an object.
         """
-        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)
+        bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
 
         self.login('hamlet')
         fp = StringIO("zulip!")

@@ -1632,9 +1633,9 @@ class S3Test(ZulipTestCase):
         self.assertEqual(base, uri[:len(base)])
 
         response = self.client_get(uri)
-        self.assertEqual(response.status_code, 302)
         redirect_url = response['Location']
-        self.assertEqual(b"zulip!", urllib.request.urlopen(redirect_url).read().strip())
+        key = urllib.parse.urlparse(redirect_url).path
+        self.assertEqual(b"zulip!", bucket.Object(key).get()['Body'].read())
 
         # Now try the endpoint that's supposed to return a temporary url for access
         # to the file.

@@ -1642,7 +1643,8 @@ class S3Test(ZulipTestCase):
         self.assert_json_success(result)
         data = result.json()
         url_only_url = data['url']
-        self.assertEqual(b"zulip!", urllib.request.urlopen(url_only_url).read().strip())
+        key = urllib.parse.urlparse(url_only_url).path
+        self.assertEqual(b"zulip!", bucket.Object(key).get()['Body'].read())
 
         # Note: Depending on whether the calls happened in the same
         # second (resulting in the same timestamp+signature),

@@ -1667,19 +1669,19 @@ class S3Test(ZulipTestCase):
             test_image_data = f.read()
         test_medium_image_data = resize_avatar(test_image_data, MEDIUM_AVATAR_SIZE)
 
-        original_image_key = bucket.get_key(original_image_path_id)
+        original_image_key = bucket.Object(original_image_path_id)
         self.assertEqual(original_image_key.key, original_image_path_id)
-        image_data = original_image_key.get_contents_as_string()
+        image_data = original_image_key.get()['Body'].read()
         self.assertEqual(image_data, test_image_data)
 
-        medium_image_key = bucket.get_key(medium_path_id)
+        medium_image_key = bucket.Object(medium_path_id)
         self.assertEqual(medium_image_key.key, medium_path_id)
-        medium_image_data = medium_image_key.get_contents_as_string()
+        medium_image_data = medium_image_key.get()['Body'].read()
         self.assertEqual(medium_image_data, test_medium_image_data)
-        bucket.delete_key(medium_image_key)
 
+        bucket.Object(medium_image_key.key).delete()
         zerver.lib.upload.upload_backend.ensure_medium_avatar_image(user_profile)
-        medium_image_key = bucket.get_key(medium_path_id)
+        medium_image_key = bucket.Object(medium_path_id)
         self.assertEqual(medium_image_key.key, medium_path_id)
 
     @use_s3_backend

@@ -1699,32 +1701,31 @@ class S3Test(ZulipTestCase):
         target_path_id = user_avatar_path(target_user_profile)
         self.assertNotEqual(source_path_id, target_path_id)
 
-        source_image_key = bucket.get_key(source_path_id)
-        target_image_key = bucket.get_key(target_path_id)
+        source_image_key = bucket.Object(source_path_id)
+        target_image_key = bucket.Object(target_path_id)
         self.assertEqual(target_image_key.key, target_path_id)
         self.assertEqual(source_image_key.content_type, target_image_key.content_type)
-        source_image_data = source_image_key.get_contents_as_string()
-        target_image_data = target_image_key.get_contents_as_string()
-        self.assertEqual(source_image_data, target_image_data)
+        source_image_data = source_image_key.get()['Body'].read()
+        target_image_data = target_image_key.get()['Body'].read()
 
         source_original_image_path_id = source_path_id + ".original"
         target_original_image_path_id = target_path_id + ".original"
-        target_original_image_key = bucket.get_key(target_original_image_path_id)
+        target_original_image_key = bucket.Object(target_original_image_path_id)
         self.assertEqual(target_original_image_key.key, target_original_image_path_id)
-        source_original_image_key = bucket.get_key(source_original_image_path_id)
+        source_original_image_key = bucket.Object(source_original_image_path_id)
         self.assertEqual(source_original_image_key.content_type, target_original_image_key.content_type)
-        source_image_data = source_original_image_key.get_contents_as_string()
-        target_image_data = target_original_image_key.get_contents_as_string()
+        source_image_data = source_original_image_key.get()['Body'].read()
+        target_image_data = target_original_image_key.get()['Body'].read()
         self.assertEqual(source_image_data, target_image_data)
 
         target_medium_path_id = target_path_id + "-medium.png"
         source_medium_path_id = source_path_id + "-medium.png"
-        source_medium_image_key = bucket.get_key(source_medium_path_id)
-        target_medium_image_key = bucket.get_key(target_medium_path_id)
+        source_medium_image_key = bucket.Object(source_medium_path_id)
+        target_medium_image_key = bucket.Object(target_medium_path_id)
         self.assertEqual(target_medium_image_key.key, target_medium_path_id)
         self.assertEqual(source_medium_image_key.content_type, target_medium_image_key.content_type)
-        source_medium_image_data = source_medium_image_key.get_contents_as_string()
-        target_medium_image_data = target_medium_image_key.get_contents_as_string()
+        source_medium_image_data = source_medium_image_key.get()['Body'].read()
+        target_medium_image_data = target_medium_image_key.get()['Body'].read()
         self.assertEqual(source_medium_image_data, target_medium_image_data)
 
     @use_s3_backend

@@ -1742,16 +1743,21 @@ class S3Test(ZulipTestCase):
         avatar_medium_path_id = avatar_path_id + "-medium.png"
 
         self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_USER)
-        self.assertIsNotNone(bucket.get_key(avatar_path_id))
-        self.assertIsNotNone(bucket.get_key(avatar_original_image_path_id))
-        self.assertIsNotNone(bucket.get_key(avatar_medium_path_id))
+        self.assertIsNotNone(bucket.Object(avatar_path_id))
+        self.assertIsNotNone(bucket.Object(avatar_original_image_path_id))
+        self.assertIsNotNone(bucket.Object(avatar_medium_path_id))
 
         zerver.lib.actions.do_delete_avatar_image(user)
 
         self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)
-        self.assertIsNone(bucket.get_key(avatar_path_id))
-        self.assertIsNone(bucket.get_key(avatar_original_image_path_id))
-        self.assertIsNone(bucket.get_key(avatar_medium_path_id))
+
+        # Confirm that the avatar files no longer exist in S3.
+        with self.assertRaises(botocore.exceptions.ClientError):
+            bucket.Object(avatar_path_id).load()
+        with self.assertRaises(botocore.exceptions.ClientError):
+            bucket.Object(avatar_original_image_path_id).load()
+        with self.assertRaises(botocore.exceptions.ClientError):
+            bucket.Object(avatar_medium_path_id).load()
 
     @use_s3_backend
     def test_get_realm_for_filename(self) -> None:

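The assertion change above is forced by the API: bucket.Object() never touches the network, so it is always non-None even for deleted keys. Verifying absence requires forcing a HEAD request with load() and asserting on the resulting ClientError, which moto reproduces. A sketch under assumed moto usage:

    import boto3
    import botocore.exceptions
    from moto import mock_s3

    @mock_s3
    def demo() -> None:
        bucket = (boto3.resource('s3', region_name='us-east-1')
                  .create_bucket(Bucket='test-bucket'))
        obj = bucket.Object('never/uploaded.png')  # constructing this always "succeeds"
        try:
            obj.load()
            raise AssertionError('expected a ClientError for the missing key')
        except botocore.exceptions.ClientError as err:
            assert err.response['Error']['Code'] == '404'

    demo()
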
@@ -1764,7 +1770,7 @@ class S3Test(ZulipTestCase):
 
     @use_s3_backend
     def test_get_realm_for_filename_when_key_doesnt_exist(self) -> None:
-        self.assertEqual(None, get_realm_for_filename('non-existent-file-path'))
+        self.assertIsNone(get_realm_for_filename('non-existent-file-path'))
 
     @use_s3_backend
     def test_upload_realm_icon_image(self) -> None:

@@ -1775,13 +1781,12 @@ class S3Test(ZulipTestCase):
         zerver.lib.upload.upload_backend.upload_realm_icon_image(image_file, user_profile)
 
         original_path_id = os.path.join(str(user_profile.realm.id), "realm", "icon.original")
-        original_key = bucket.get_key(original_path_id)
+        original_key = bucket.Object(original_path_id)
         image_file.seek(0)
-        self.assertEqual(image_file.read(), original_key.get_contents_as_string())
+        self.assertEqual(image_file.read(), original_key.get()['Body'].read())
 
         resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "icon.png")
-        resized_data = bucket.get_key(resized_path_id).read()
-        # resized image size should be 100 x 100 because thumbnail keeps aspect ratio
+        resized_data = bucket.Object(resized_path_id).get()['Body'].read()
         # while trying to fit in a 800 x 100 box without losing part of the image
         resized_image = Image.open(io.BytesIO(resized_data)).size
         self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE))

@@ -1795,12 +1800,12 @@ class S3Test(ZulipTestCase):
         zerver.lib.upload.upload_backend.upload_realm_logo_image(image_file, user_profile, night)
 
         original_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.original" % (file_name,))
-        original_key = bucket.get_key(original_path_id)
+        original_key = bucket.Object(original_path_id)
         image_file.seek(0)
-        self.assertEqual(image_file.read(), original_key.get_contents_as_string())
+        self.assertEqual(image_file.read(), original_key.get()['Body'].read())
 
         resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "%s.png" % (file_name,))
-        resized_data = bucket.get_key(resized_path_id).read()
+        resized_data = bucket.Object(resized_path_id).get()['Body'].read()
         resized_image = Image.open(io.BytesIO(resized_data)).size
         self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE))
 

@@ -1821,11 +1826,11 @@ class S3Test(ZulipTestCase):
             realm_id=user_profile.realm_id,
             emoji_file_name=emoji_name,
         )
-        original_key = bucket.get_key(emoji_path + ".original")
+        original_key = bucket.Object(emoji_path + ".original")
         image_file.seek(0)
-        self.assertEqual(image_file.read(), original_key.get_contents_as_string())
+        self.assertEqual(image_file.read(), original_key.get()['Body'].read())
 
-        resized_data = bucket.get_key(emoji_path).read()
+        resized_data = bucket.Object(emoji_path).get()['Body'].read()
         resized_image = Image.open(io.BytesIO(resized_data))
         self.assertEqual(resized_image.size, (DEFAULT_EMOJI_SIZE, DEFAULT_EMOJI_SIZE))
 

@@ -1858,7 +1863,7 @@ class S3Test(ZulipTestCase):
         result = re.search(re.compile(r"([0-9a-fA-F]{32})"), uri)
         if result is not None:
             hex_value = result.group(1)
-            expected_url = "https://{bucket}.s3.amazonaws.com:443/exports/{hex_value}/{path}".format(
+            expected_url = "https://{bucket}.s3.amazonaws.com/exports/{hex_value}/{path}".format(
                 bucket=bucket.name,
                 hex_value=hex_value,
                 path=os.path.basename(tarball_path))