# zulip/zerver/lib/upload.py

from typing import Optional, Tuple, Any
from datetime import timedelta
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core.files import File
from django.core.signing import TimestampSigner, BadSignature
from django.http import HttpRequest
from django.urls import reverse
from jinja2 import Markup as mark_safe
import unicodedata
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.exceptions import JsonableError, ErrorCode
from zerver.lib.utils import generate_random_token
from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from mimetypes import guess_type, guess_extension
from zerver.models import get_user_profile_by_id
from zerver.models import Attachment
from zerver.models import Realm, RealmEmoji, UserProfile, Message
import urllib
import base64
import binascii
import os
import re
from PIL import Image, ImageOps, ExifTags
from PIL.Image import DecompressionBombError
from PIL.GifImagePlugin import GifImageFile
import io
import random
import logging
import shutil
import sys
DEFAULT_AVATAR_SIZE = 100
MEDIUM_AVATAR_SIZE = 500
DEFAULT_EMOJI_SIZE = 64
# These sizes were selected based on looking at the maximum common
# sizes in a library of animated custom emoji, balanced against the
# network cost of very large emoji images.
MAX_EMOJI_GIF_SIZE = 128
MAX_EMOJI_GIF_FILE_SIZE_BYTES = 128 * 1024  # 128 KB
# Duration that the signed upload URLs that we redirect to when
# accessing uploaded files are available for clients to fetch before
# they expire.
SIGNED_UPLOAD_URL_DURATION = 60
INLINE_MIME_TYPES = [
"application/pdf",
"image/gif",
"image/jpeg",
"image/png",
"image/webp",
# To avoid cross-site scripting attacks, DO NOT add types such
# as application/xhtml+xml, application/x-shockwave-flash,
# image/svg+xml, text/html, or text/xml.
]
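# Anything whose type is not in this list is uploaded with a
# "Content-Disposition: attachment" header (see upload_image_to_s3 below), so
# browsers download it rather than try to render it inline.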
# Performance Note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.
# To come up with an S3 key, we randomly generate a "directory". The
# "file name" is the original filename provided by the user, run
# through a sanitization function.
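#
# For illustration only (made-up values), a message-upload S3 key ends up
# looking like:
#
#     "42/dJV8kQx0Lk3rNzW_Yp6aCg1u/My-photo.jpg"
#      realm id / random_name(18) / sanitize_name(original filename)
#
# (see S3UploadBackend.upload_message_file below).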
class RealmUploadQuotaError(JsonableError):
code = ErrorCode.REALM_UPLOAD_QUOTA
def sanitize_name(value: str) -> str:
"""
Sanitizes a value to be safe to store in a Linux filesystem, in
S3, and in a URL. So unicode is allowed, but not special
characters other than ".", "-", and "_".
This implementation is based on django.utils.text.slugify; it is
modified by:
* adding '.' and '_' to the list of allowed characters.
* preserving the case of the value.
"""
value = unicodedata.normalize('NFKC', value)
value = re.sub(r'[^\w\s._-]', '', value, flags=re.U).strip()
return mark_safe(re.sub(r'[-\s]+', '-', value, flags=re.U))
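# A couple of illustrative examples (made-up filenames):
#
#     sanitize_name("My file (final).txt")  ->  "My-file-final.txt"
#     sanitize_name("../../etc/passwd")     ->  "....etcpasswd"
#
# i.e. path separators and shell metacharacters are stripped, and runs of
# whitespace and dashes collapse into a single "-".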
def random_name(bytes: int=60) -> str:
return base64.urlsafe_b64encode(os.urandom(bytes)).decode('utf-8')
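# Note that base64 expands 3 bytes into 4 characters, so random_name(18), as
# used for upload paths below, produces a 24-character URL-safe string, and the
# default of 60 bytes produces an 80-character one.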
class BadImageError(JsonableError):
code = ErrorCode.BAD_IMAGE
name_to_tag_num = {name: num for num, name in ExifTags.TAGS.items()}
# https://stackoverflow.com/a/6218425
def exif_rotate(image: Image.Image) -> Image.Image:
if not hasattr(image, '_getexif'):
return image
exif_data = image._getexif()
if exif_data is None:
return image
exif_dict = dict(exif_data.items())
orientation = exif_dict.get(name_to_tag_num['Orientation'])
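    # Per the EXIF spec, orientation 3 means the image needs a 180° rotation to
    # display upright, 6 needs a 90° clockwise rotation, and 8 needs a 90°
    # counter-clockwise rotation; PIL's rotate() is counter-clockwise, so the
    # 90° clockwise fix becomes rotate(270, ...) below.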
if orientation == 3:
return image.rotate(180, expand=True)
elif orientation == 6:
return image.rotate(270, expand=True)
elif orientation == 8:
return image.rotate(90, expand=True)
return image
def resize_avatar(image_data: bytes, size: int=DEFAULT_AVATAR_SIZE) -> bytes:
try:
im = Image.open(io.BytesIO(image_data))
im = exif_rotate(im)
im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
except OSError:
raise BadImageError(_("Could not decode image; did you upload an image file?"))
except DecompressionBombError:
raise BadImageError(_("Image size exceeds limit."))
out = io.BytesIO()
if im.mode == 'CMYK':
im = im.convert('RGB')
im.save(out, format='png')
return out.getvalue()
def resize_logo(image_data: bytes) -> bytes:
try:
im = Image.open(io.BytesIO(image_data))
im = exif_rotate(im)
im.thumbnail((8*DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE), Image.ANTIALIAS)
except OSError:
raise BadImageError(_("Could not decode image; did you upload an image file?"))
except DecompressionBombError:
raise BadImageError(_("Image size exceeds limit."))
out = io.BytesIO()
if im.mode == 'CMYK':
im = im.convert('RGB')
im.save(out, format='png')
return out.getvalue()
def resize_gif(im: GifImageFile, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
frames = []
duration_info = []
    # If the 'loop' info is not set, default to 0, which makes the GIF loop forever.
loop = im.info.get("loop", 0)
for frame_num in range(0, im.n_frames):
im.seek(frame_num)
new_frame = Image.new("RGBA", im.size)
new_frame.paste(im, (0, 0), im.convert("RGBA"))
new_frame = ImageOps.fit(new_frame, (size, size), Image.ANTIALIAS)
frames.append(new_frame)
duration_info.append(im.info['duration'])
out = io.BytesIO()
frames[0].save(out, save_all=True, optimize=True,
format="GIF", append_images=frames[1:],
duration=duration_info,
loop=loop)
return out.getvalue()
def resize_emoji(image_data: bytes, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
try:
im = Image.open(io.BytesIO(image_data))
image_format = im.format
if image_format == "GIF":
            # There are a number of bugs in Pillow's GifImagePlugin which
            # result in resized GIFs being broken. To work around this, we
            # only resize under certain conditions to minimize the chance of
            # creating ugly GIFs.
should_resize = any((
im.size[0] != im.size[1], # not square
im.size[0] > MAX_EMOJI_GIF_SIZE, # dimensions too large
len(image_data) > MAX_EMOJI_GIF_FILE_SIZE_BYTES, # filesize too large
))
return resize_gif(im, size) if should_resize else image_data
else:
im = exif_rotate(im)
im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
out = io.BytesIO()
im.save(out, format=image_format)
return out.getvalue()
except OSError:
raise BadImageError(_("Could not decode image; did you upload an image file?"))
except DecompressionBombError:
raise BadImageError(_("Image size exceeds limit."))
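# For example, a square 64x64 animated GIF that is already small is returned
# unchanged above, while a 200x100 GIF (not square) is re-rendered frame by
# frame through resize_gif().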
### Common
class ZulipUploadBackend:
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile,
target_realm: Optional[Realm]=None) -> str:
raise NotImplementedError()
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile,
content_type: Optional[str]=None) -> None:
raise NotImplementedError()
def delete_avatar_image(self, user: UserProfile) -> None:
raise NotImplementedError()
def delete_message_image(self, path_id: str) -> bool:
raise NotImplementedError()
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
raise NotImplementedError()
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
raise NotImplementedError()
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
raise NotImplementedError()
def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None:
raise NotImplementedError()
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
raise NotImplementedError()
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
raise NotImplementedError()
def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
night: bool) -> None:
raise NotImplementedError()
def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
raise NotImplementedError()
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
raise NotImplementedError()
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
raise NotImplementedError()
def upload_export_tarball(self, realm: Realm, tarball_path: str) -> str:
raise NotImplementedError()
def delete_export_tarball(self, path_id: str) -> Optional[str]:
raise NotImplementedError()
def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
raise NotImplementedError()
def realm_avatar_and_logo_path(self, realm: Realm) -> str:
raise NotImplementedError()
### S3
def get_bucket(conn: S3Connection, bucket_name: str) -> Bucket:
# Calling get_bucket() with validate=True can apparently lead
# to expensive S3 bills:
# https://www.appneta.com/blog/s3-list-get-bucket-default/
# The benefits of validation aren't completely clear to us, and
# we want to save on our bills, so we set the validate flag to False.
# (We think setting validate to True would cause us to fail faster
# in situations where buckets don't exist, but that shouldn't be
# an issue for us.)
bucket = conn.get_bucket(bucket_name, validate=False)
return bucket
def upload_image_to_s3(
bucket_name: str,
file_name: str,
content_type: Optional[str],
user_profile: UserProfile,
contents: bytes) -> None:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = get_bucket(conn, bucket_name)
key = Key(bucket)
key.key = file_name
key.set_metadata("user_profile_id", str(user_profile.id))
key.set_metadata("realm_id", str(user_profile.realm_id))
headers = {}
if content_type is not None:
headers["Content-Type"] = content_type
if content_type not in INLINE_MIME_TYPES:
headers["Content-Disposition"] = "attachment"
key.set_contents_from_string(contents, headers=headers)
def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
upload_quota = realm.upload_quota_bytes()
if upload_quota is None:
return
used_space = realm.currently_used_upload_space_bytes()
if (used_space + uploaded_file_size) > upload_quota:
raise RealmUploadQuotaError(_("Upload would exceed your organization's upload quota."))
def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Optional[str]]:
uploaded_file_name = user_file.name
content_type = request.GET.get('mimetype')
if content_type is None:
guessed_type = guess_type(uploaded_file_name)[0]
if guessed_type is not None:
content_type = guessed_type
else:
extension = guess_extension(content_type)
if extension is not None:
uploaded_file_name = uploaded_file_name + extension
uploaded_file_name = urllib.parse.unquote(uploaded_file_name)
uploaded_file_size = user_file.size
return uploaded_file_name, uploaded_file_size, content_type
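# For example (illustrative values): if the client passes ?mimetype=image/png
# and uploads a file named "screenshot" with no extension, this returns
# ("screenshot.png", <size>, "image/png"); without the mimetype parameter, the
# content type is instead guessed from the uploaded filename.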
def get_signed_upload_url(path: str) -> str:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
return conn.generate_url(SIGNED_UPLOAD_URL_DURATION, 'GET',
bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)
def get_realm_for_filename(path: str) -> Optional[int]:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
key: Optional[Key] = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
if key is None:
# This happens if the key does not exist.
return None
return get_user_profile_by_id(key.metadata["user_profile_id"]).realm_id
class S3UploadBackend(ZulipUploadBackend):
def __init__(self) -> None:
self.connection = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
def delete_file_from_s3(self, path_id: str, bucket_name: str) -> bool:
bucket = get_bucket(self.connection, bucket_name)
# check if file exists
key: Optional[Key] = bucket.get_key(path_id)
if key is not None:
bucket.delete_key(key)
return True
file_name = path_id.split("/")[-1]
logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
return False
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
if target_realm is None:
target_realm = user_profile.realm
s3_file_name = "/".join([
str(target_realm.id),
random_name(18),
sanitize_name(uploaded_file_name)
])
url = "/user_uploads/%s" % (s3_file_name,)
upload_image_to_s3(
bucket_name,
s3_file_name,
content_type,
user_profile,
file_data
)
create_attachment(uploaded_file_name, s3_file_name, user_profile, uploaded_file_size)
return url
def delete_message_image(self, path_id: str) -> bool:
return self.delete_file_from_s3(path_id, settings.S3_AUTH_UPLOADS_BUCKET)
def write_avatar_images(self, s3_file_name: str, target_user_profile: UserProfile,
image_data: bytes, content_type: Optional[str]) -> None:
bucket_name = settings.S3_AVATAR_BUCKET
upload_image_to_s3(
bucket_name,
s3_file_name + ".original",
content_type,
target_user_profile,
image_data,
)
        # custom 500x500px medium version
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
upload_image_to_s3(
bucket_name,
s3_file_name + "-medium.png",
"image/png",
target_user_profile,
resized_medium
)
resized_data = resize_avatar(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name,
'image/png',
target_user_profile,
resized_data,
)
# See avatar_url in avatar.py for URL. (That code also handles the case
# that users use gravatar.)
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile,
content_type: Optional[str] = None) -> None:
if content_type is None:
content_type = guess_type(user_file.name)[0]
s3_file_name = user_avatar_path(target_user_profile)
image_data = user_file.read()
self.write_avatar_images(s3_file_name, target_user_profile,
image_data, content_type)
def delete_avatar_image(self, user: UserProfile) -> None:
path_id = user_avatar_path(user)
bucket_name = settings.S3_AVATAR_BUCKET
self.delete_file_from_s3(path_id + ".original", bucket_name)
self.delete_file_from_s3(path_id + "-medium.png", bucket_name)
self.delete_file_from_s3(path_id, bucket_name)
def get_avatar_key(self, file_name: str) -> Key:
bucket = get_bucket(self.connection, settings.S3_AVATAR_BUCKET)
key = bucket.get_key(file_name)
return key
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
s3_source_file_name = user_avatar_path(source_profile)
s3_target_file_name = user_avatar_path(target_profile)
key = self.get_avatar_key(s3_source_file_name + ".original")
image_data = key.get_contents_as_string()
content_type = key.content_type
self.write_avatar_images(s3_target_file_name, target_profile, image_data, content_type)
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
bucket = settings.S3_AVATAR_BUCKET
medium_suffix = "-medium.png" if medium else ""
# ?x=x allows templates to append additional parameters with &s
return "https://%s.%s/%s%s?x=x" % (bucket, self.connection.DefaultHost,
hash_key, medium_suffix)
def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
bucket = settings.S3_AVATAR_BUCKET
# export_path has a leading /
return "https://%s.s3.amazonaws.com%s" % (bucket, export_path)
def realm_avatar_and_logo_path(self, realm: Realm) -> str:
return os.path.join(str(realm.id), 'realm')
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
content_type = guess_type(icon_file.name)[0]
bucket_name = settings.S3_AVATAR_BUCKET
s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), 'icon')
image_data = icon_file.read()
upload_image_to_s3(
bucket_name,
s3_file_name + ".original",
content_type,
user_profile,
image_data,
)
resized_data = resize_avatar(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name + ".png",
'image/png',
user_profile,
resized_data,
)
# See avatar_url in avatar.py for URL. (That code also handles the case
# that users use gravatar.)
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
bucket = settings.S3_AVATAR_BUCKET
# ?x=x allows templates to append additional parameters with &s
return "https://%s.%s/%s/realm/icon.png?version=%s" % (
bucket, self.connection.DefaultHost, realm_id, version)
def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
night: bool) -> None:
content_type = guess_type(logo_file.name)[0]
bucket_name = settings.S3_AVATAR_BUCKET
if night:
basename = 'night_logo'
else:
basename = 'logo'
s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), basename)
image_data = logo_file.read()
upload_image_to_s3(
bucket_name,
s3_file_name + ".original",
content_type,
user_profile,
image_data,
)
resized_data = resize_logo(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name + ".png",
'image/png',
user_profile,
resized_data,
)
# See avatar_url in avatar.py for URL. (That code also handles the case
# that users use gravatar.)
def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
bucket = settings.S3_AVATAR_BUCKET
# ?x=x allows templates to append additional parameters with &s
if not night:
file_name = 'logo.png'
else:
file_name = 'night_logo.png'
return "https://%s.%s/%s/realm/%s?version=%s" % (
bucket, self.connection.DefaultHost, realm_id, file_name, version)
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
file_path = user_avatar_path(user_profile)
s3_file_name = file_path
bucket_name = settings.S3_AVATAR_BUCKET
bucket = get_bucket(self.connection, bucket_name)
key = bucket.get_key(file_path + ".original")
image_data = key.get_contents_as_string()
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
upload_image_to_s3(
bucket_name,
s3_file_name + "-medium.png",
"image/png",
user_profile,
resized_medium
)
def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None: # nocoverage
# TODO: Refactor this to share code with ensure_medium_avatar_image
file_path = user_avatar_path(user_profile)
# Also TODO: Migrate to user_avatar_path(user_profile) + ".png".
s3_file_name = file_path
bucket_name = settings.S3_AVATAR_BUCKET
bucket = get_bucket(self.connection, bucket_name)
key = bucket.get_key(file_path + ".original")
image_data = key.get_contents_as_string()
resized_avatar = resize_avatar(image_data)
upload_image_to_s3(
bucket_name,
s3_file_name,
"image/png",
user_profile,
resized_avatar
)
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
user_profile: UserProfile) -> None:
content_type = guess_type(emoji_file.name)[0]
bucket_name = settings.S3_AVATAR_BUCKET
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
realm_id=user_profile.realm_id,
emoji_file_name=emoji_file_name
)
image_data = emoji_file.read()
resized_image_data = resize_emoji(image_data)
upload_image_to_s3(
bucket_name,
".".join((emoji_path, "original")),
content_type,
user_profile,
image_data,
)
upload_image_to_s3(
bucket_name,
emoji_path,
content_type,
user_profile,
resized_image_data,
)
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
bucket = settings.S3_AVATAR_BUCKET
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id,
emoji_file_name=emoji_file_name)
return "https://%s.%s/%s" % (bucket, self.connection.DefaultHost, emoji_path)
def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
def percent_callback(complete: Any, total: Any) -> None:
sys.stdout.write('.')
sys.stdout.flush()
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
# We use the avatar bucket, because it's world-readable.
bucket = get_bucket(conn, settings.S3_AVATAR_BUCKET)
key = Key(bucket)
key.key = os.path.join("exports", generate_random_token(32), os.path.basename(tarball_path))
key.set_contents_from_filename(tarball_path, cb=percent_callback, num_cb=40)
public_url = 'https://{bucket}.{host}/{key}'.format(
host=conn.server_name(),
bucket=bucket.name,
key=key.key)
return public_url
def delete_export_tarball(self, path_id: str) -> Optional[str]:
if self.delete_file_from_s3(path_id, settings.S3_AVATAR_BUCKET):
return path_id
return None
### Local
def write_local_file(type: str, path: str, file_data: bytes) -> None:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
os.makedirs(os.path.dirname(file_path), exist_ok=True)
with open(file_path, 'wb') as f:
f.write(file_data)
def read_local_file(type: str, path: str) -> bytes:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
with open(file_path, 'rb') as f:
return f.read()
def delete_local_file(type: str, path: str) -> bool:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
if os.path.isfile(file_path):
# This removes the file but the empty folders still remain.
os.remove(file_path)
return True
file_name = path.split("/")[-1]
logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
return False
def get_local_file_path(path_id: str) -> Optional[str]:
local_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
if os.path.isfile(local_path):
return local_path
else:
return None
LOCAL_FILE_ACCESS_TOKEN_SALT = "local_file_"
def generate_unauthed_file_access_url(path_id: str) -> str:
signed_data = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT).sign(path_id)
token = base64.b16encode(signed_data.encode('utf-8')).decode('utf-8')
filename = path_id.split('/')[-1]
return reverse('zerver.views.upload.serve_local_file_unauthed', args=[token, filename])
def get_local_file_path_id_from_token(token: str) -> Optional[str]:
signer = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT)
try:
signed_data = base64.b16decode(token).decode('utf-8')
path_id = signer.unsign(signed_data, max_age=timedelta(seconds=60))
except (BadSignature, binascii.Error):
return None
return path_id
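# Illustrative round trip (made-up path id): generate_unauthed_file_access_url("2/ab/x.txt")
# returns a URL embedding a signed, base16-encoded token; feeding that token
# back into get_local_file_path_id_from_token() within 60 seconds yields
# "2/ab/x.txt", and None once the signature has expired or been tampered with.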
class LocalUploadBackend(ZulipUploadBackend):
def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
# Split into 256 subdirectories to prevent directories from getting too big
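        # For illustration (made-up values), the resulting path looks like
        # "42/a3/dJV8kQx0Lk3rNzW_Yp6aCg1u/My-photo.jpg" and is written under
        # LOCAL_UPLOADS_DIR/files/.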
path = "/".join([
str(user_profile.realm_id),
format(random.randint(0, 255), 'x'),
random_name(18),
sanitize_name(uploaded_file_name)
])
write_local_file('files', path, file_data)
create_attachment(uploaded_file_name, path, user_profile, uploaded_file_size)
return '/user_uploads/' + path
def delete_message_image(self, path_id: str) -> bool:
return delete_local_file('files', path_id)
def write_avatar_images(self, file_path: str, image_data: bytes) -> None:
write_local_file('avatars', file_path + '.original', image_data)
resized_data = resize_avatar(image_data)
write_local_file('avatars', file_path + '.png', resized_data)
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
write_local_file('avatars', file_path + '-medium.png', resized_medium)
def upload_avatar_image(self, user_file: File,
acting_user_profile: UserProfile,
target_user_profile: UserProfile,
content_type: Optional[str] = None) -> None:
file_path = user_avatar_path(target_user_profile)
image_data = user_file.read()
self.write_avatar_images(file_path, image_data)
def delete_avatar_image(self, user: UserProfile) -> None:
path_id = user_avatar_path(user)
delete_local_file("avatars", path_id + ".original")
delete_local_file("avatars", path_id + ".png")
delete_local_file("avatars", path_id + "-medium.png")
def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
# ?x=x allows templates to append additional parameters with &s
medium_suffix = "-medium" if medium else ""
return "/user_avatars/%s%s.png?x=x" % (hash_key, medium_suffix)
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
source_file_path = user_avatar_path(source_profile)
target_file_path = user_avatar_path(target_profile)
image_data = read_local_file('avatars', source_file_path + '.original')
self.write_avatar_images(target_file_path, image_data)
def realm_avatar_and_logo_path(self, realm: Realm) -> str:
return os.path.join('avatars', str(realm.id), 'realm')
def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
upload_path = self.realm_avatar_and_logo_path(user_profile.realm)
image_data = icon_file.read()
write_local_file(
upload_path,
'icon.original',
image_data)
resized_data = resize_avatar(image_data)
write_local_file(upload_path, 'icon.png', resized_data)
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
# ?x=x allows templates to append additional parameters with &s
return "/user_avatars/%s/realm/icon.png?version=%s" % (realm_id, version)
def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
night: bool) -> None:
upload_path = self.realm_avatar_and_logo_path(user_profile.realm)
if night:
original_file = 'night_logo.original'
resized_file = 'night_logo.png'
else:
original_file = 'logo.original'
resized_file = 'logo.png'
image_data = logo_file.read()
write_local_file(
upload_path,
original_file,
image_data)
resized_data = resize_logo(image_data)
write_local_file(upload_path, resized_file, resized_data)
def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
# ?x=x allows templates to append additional parameters with &s
if night:
file_name = 'night_logo.png'
else:
file_name = 'logo.png'
return "/user_avatars/%s/realm/%s?version=%s" % (realm_id, file_name, version)
def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
file_path = user_avatar_path(user_profile)
output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + "-medium.png")
if os.path.isfile(output_path):
return
image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
with open(image_path, "rb") as f:
image_data = f.read()
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
write_local_file('avatars', file_path + '-medium.png', resized_medium)
def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None: # nocoverage
# TODO: Refactor this to share code with ensure_medium_avatar_image
file_path = user_avatar_path(user_profile)
output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".png")
if os.path.isfile(output_path):
return
image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
with open(image_path, "rb") as f:
image_data = f.read()
resized_avatar = resize_avatar(image_data)
write_local_file('avatars', file_path + '.png', resized_avatar)
def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
user_profile: UserProfile) -> None:
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
emoji_file_name=emoji_file_name
)
image_data = emoji_file.read()
resized_image_data = resize_emoji(image_data)
write_local_file(
'avatars',
".".join((emoji_path, "original")),
image_data)
write_local_file(
'avatars',
emoji_path,
resized_image_data)
def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
return os.path.join(
"/user_avatars",
RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id, emoji_file_name=emoji_file_name))
def upload_export_tarball(self, realm: Realm, tarball_path: str) -> str:
path = os.path.join(
'exports',
str(realm.id),
random_name(18),
os.path.basename(tarball_path),
)
abs_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'avatars', path)
os.makedirs(os.path.dirname(abs_path), exist_ok=True)
shutil.copy(tarball_path, abs_path)
public_url = realm.uri + '/user_avatars/' + path
return public_url
def delete_export_tarball(self, path_id: str) -> Optional[str]:
# Get the last element of a list in the form ['user_avatars', '<file_path>']
file_path = path_id.strip('/').split('/', 1)[-1]
if delete_local_file('avatars', file_path):
return path_id
return None
def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
# export_path has a leading `/`
return realm.uri + export_path
# Common and wrappers
if settings.LOCAL_UPLOADS_DIR is not None:
upload_backend: ZulipUploadBackend = LocalUploadBackend()
else:
upload_backend = S3UploadBackend() # nocoverage
def delete_message_image(path_id: str) -> bool:
return upload_backend.delete_message_image(path_id)
def upload_avatar_image(user_file: File, acting_user_profile: UserProfile,
target_user_profile: UserProfile,
content_type: Optional[str]=None) -> None:
upload_backend.upload_avatar_image(user_file, acting_user_profile,
target_user_profile, content_type=content_type)
def delete_avatar_image(user_profile: UserProfile) -> None:
upload_backend.delete_avatar_image(user_profile)
def copy_avatar(source_profile: UserProfile, target_profile: UserProfile) -> None:
upload_backend.copy_avatar(source_profile, target_profile)
def upload_icon_image(user_file: File, user_profile: UserProfile) -> None:
upload_backend.upload_realm_icon_image(user_file, user_profile)
def upload_logo_image(user_file: File, user_profile: UserProfile, night: bool) -> None:
upload_backend.upload_realm_logo_image(user_file, user_profile, night)
def upload_emoji_image(emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
upload_backend.upload_emoji_image(emoji_file, emoji_file_name, user_profile)
def upload_message_file(uploaded_file_name: str, uploaded_file_size: int,
content_type: Optional[str], file_data: bytes,
user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
return upload_backend.upload_message_file(uploaded_file_name, uploaded_file_size,
content_type, file_data, user_profile,
target_realm=target_realm)
def claim_attachment(user_profile: UserProfile,
path_id: str,
message: Message,
is_message_realm_public: bool) -> Attachment:
attachment = Attachment.objects.get(path_id=path_id)
attachment.messages.add(message)
attachment.is_realm_public = attachment.is_realm_public or is_message_realm_public
attachment.save()
return attachment
def create_attachment(file_name: str, path_id: str, user_profile: UserProfile,
file_size: int) -> bool:
attachment = Attachment.objects.create(file_name=file_name, path_id=path_id, owner=user_profile,
realm=user_profile.realm, size=file_size)
from zerver.lib.actions import notify_attachment_update
notify_attachment_update(user_profile, 'add', attachment.to_dict())
return True
def upload_message_image_from_request(request: HttpRequest, user_file: File,
user_profile: UserProfile) -> str:
uploaded_file_name, uploaded_file_size, content_type = get_file_info(request, user_file)
return upload_message_file(uploaded_file_name, uploaded_file_size,
content_type, user_file.read(), user_profile)
def upload_export_tarball(realm: Realm, tarball_path: str) -> str:
return upload_backend.upload_export_tarball(realm, tarball_path)
def delete_export_tarball(path_id: str) -> Optional[str]:
return upload_backend.delete_export_tarball(path_id)