2013-06-18 20:47:37 +02:00
|
|
|
from __future__ import absolute_import
|
2016-06-08 10:35:16 +02:00
|
|
|
from typing import Optional, Tuple, Mapping, Any
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2016-05-25 15:02:02 +02:00
|
|
|
from django.utils.translation import ugettext as _
|
2013-06-18 20:47:37 +02:00
|
|
|
from django.conf import settings
|
|
|
|
from django.template.defaultfilters import slugify
|
2016-03-13 10:29:33 +01:00
|
|
|
from django.utils.encoding import force_text
|
2016-06-05 03:54:32 +02:00
|
|
|
from django.core.files import File
|
|
|
|
from django.http import HttpRequest
|
2016-04-21 08:48:33 +02:00
|
|
|
from jinja2 import Markup as mark_safe
|
2016-03-13 10:29:33 +01:00
|
|
|
import unicodedata
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.avatar import user_avatar_hash
|
2016-05-29 16:52:55 +02:00
|
|
|
from zerver.lib.request import JsonableError
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2016-06-05 03:54:32 +02:00
|
|
|
from boto.s3.bucket import Bucket
|
2013-06-18 20:47:37 +02:00
|
|
|
from boto.s3.key import Key
|
|
|
|
from boto.s3.connection import S3Connection
|
|
|
|
from mimetypes import guess_type, guess_extension
|
|
|
|
|
2014-05-06 03:48:23 +02:00
|
|
|
from zerver.models import get_user_profile_by_id
|
2016-03-24 20:24:01 +01:00
|
|
|
from zerver.models import Attachment
|
2016-06-08 10:35:16 +02:00
|
|
|
from zerver.models import Realm, UserProfile
|
2014-05-06 03:48:23 +02:00
|
|
|
|
2016-06-13 13:24:19 +02:00
|
|
|
from six.moves import urllib
|
2013-06-18 20:47:37 +02:00
|
|
|
import base64
|
|
|
|
import os
|
2016-03-13 10:29:33 +01:00
|
|
|
import re
|
2016-06-05 03:54:32 +02:00
|
|
|
import six
|
2013-10-28 17:44:09 +01:00
|
|
|
from PIL import Image, ImageOps
|
2016-06-12 14:22:20 +02:00
|
|
|
from six import binary_type, text_type
|
2016-01-24 04:22:35 +01:00
|
|
|
from six.moves import cStringIO as StringIO
|
2013-10-28 16:13:53 +01:00
|
|
|
import random
|
2016-03-24 20:24:01 +01:00
|
|
|
import logging
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
# Performance Note:
|
|
|
|
#
|
|
|
|
# For writing files to S3, the file could either be stored in RAM
|
|
|
|
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
|
|
|
|
#
|
|
|
|
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
|
|
|
|
# should occur in practice.
|
|
|
|
#
|
|
|
|
# This is great, because passing the pseudofile object that Django gives
|
|
|
|
# you to boto would be a pain.
|
|
|
|
|
2016-03-13 10:29:33 +01:00
|
|
|
# To come up with a s3 key we randomly generate a "directory". The
|
|
|
|
# "file name" is the original filename provided by the user run
|
|
|
|
# through a sanitization function.
|
|
|
|
|
|
|
|
def sanitize_name(value):
    # type: (six.text_type) -> str
    """
    Sanitizes a value to be safe to store in a Linux filesystem, in
    S3, and in a URL. So unicode is allowed, but not special
    characters other than ".", "-", and "_".

    This implementation is based on django.utils.text.slugify; it is
    modified by:
    * hardcoding allow_unicode=True.
    * adding '.' and '_' to the list of allowed characters.
    * preserving the case of the value.
    """
    value = force_text(value)
    value = unicodedata.normalize('NFKC', value)
    # Raw strings so the \w/\s regex escapes are not (mis)treated as
    # Python string escapes; the compiled patterns are unchanged.
    value = re.sub(r'[^\w\s._-]', '', value, flags=re.U).strip()
    return mark_safe(re.sub(r'[-\s]+', '-', value, flags=re.U))
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2013-10-23 19:41:13 +02:00
|
|
|
def random_name(bytes=60):
    # type: (int) -> str
    """Return a URL-safe random token derived from `bytes` random bytes."""
    raw = os.urandom(bytes)
    return base64.urlsafe_b64encode(raw)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2016-04-03 08:13:36 +02:00
|
|
|
class BadImageError(JsonableError):
    # Raised when uploaded avatar data cannot be decoded as an image;
    # inherits JsonableError so it surfaces as a JSON API error.
    pass
|
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
def resize_avatar(image_data, size=100):
    # type: (str, int) -> str
    """Crop/scale `image_data` to a size x size avatar and return it
    encoded as PNG bytes.

    `size` defaults to the historical hard-coded avatar dimension of
    100px, so existing callers are unaffected.

    Raises BadImageError if the data cannot be decoded as an image.
    """
    try:
        im = Image.open(StringIO(image_data))
        im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
    except IOError:
        raise BadImageError("Could not decode avatar image; did you upload an image file?")
    out = StringIO()
    im.save(out, format='png')
    return out.getvalue()
|
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
### Common
|
|
|
|
|
|
|
|
class ZulipUploadBackend(object):
    """Abstract interface for file-upload storage backends.

    Concrete implementations (S3UploadBackend, LocalUploadBackend)
    must provide all three methods below.
    """

    def upload_message_image(self, uploaded_file_name, content_type, file_data, user_profile, target_realm=None):
        # type: (str, str, str, UserProfile, Optional[Realm]) -> str
        """Store a file attached to a message; return its /user_uploads URL."""
        raise NotImplementedError()

    def upload_avatar_image(self, user_file, user_profile, email):
        # type: (File, UserProfile, text_type) -> None
        """Store both the original and a resized avatar for `email`."""
        raise NotImplementedError()

    def delete_message_image(self, path_id):
        # type: (text_type) -> bool
        """Delete the stored file for `path_id`; return whether it existed."""
        raise NotImplementedError()
|
2013-10-28 16:13:53 +01:00
|
|
|
|
|
|
|
### S3
|
|
|
|
|
2014-02-06 18:03:40 +01:00
|
|
|
def get_bucket(conn, bucket_name):
    # type: (S3Connection, text_type) -> Bucket
    """Return a handle to `bucket_name` without validating its existence.

    Calling get_bucket() with validate=True can apparently lead
    to expensive S3 bills:
    http://www.appneta.com/blog/s3-list-get-bucket-default/
    The benefits of validation aren't completely clear to us, and
    we want to save on our bills, so we set the validate flag to False.
    (We think setting validate to True would cause us to fail faster
    in situations where buckets don't exist, but that shouldn't be
    an issue for us.)
    """
    return conn.get_bucket(bucket_name, validate=False)
|
|
|
|
|
2013-06-18 20:47:37 +02:00
|
|
|
def upload_image_to_s3(
        bucket_name,
        file_name,
        content_type,
        user_profile,
        contents,
    ):
    # type: (text_type, text_type, text_type, UserProfile, text_type) -> None
    """Write `contents` to S3 at bucket_name/file_name, tagging the key
    with the uploading user's id and realm id as metadata."""
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = Key(get_bucket(conn, bucket_name))
    key.key = file_name
    # Record who uploaded the file, for later permission/realm lookups.
    key.set_metadata("user_profile_id", str(user_profile.id))
    key.set_metadata("realm_id", str(user_profile.realm.id))

    headers = {'Content-Type': content_type} if content_type else None
    key.set_contents_from_string(contents, headers=headers)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
def get_file_info(request, user_file):
    # type: (HttpRequest, File) -> Tuple[str, str]
    """Return (uploaded_file_name, content_type) for an uploaded file.

    The content type comes from the request's `mimetype` parameter
    when present, otherwise it is guessed from the file name (and may
    be None if unguessable).
    """
    # `user_file.name` is a unicode whereas it should be an ascii
    # so convert it into an ascii.
    uploaded_file_name = user_file.name.encode('ascii')
    content_type = request.GET.get('mimetype')
    if content_type is None:
        content_type = guess_type(uploaded_file_name)[0]
    else:
        # guess_extension() returns None for unknown content types;
        # previously that crashed the string concatenation below.
        extension = guess_extension(content_type)
        if extension is not None:
            uploaded_file_name = uploaded_file_name + extension

    uploaded_file_name = urllib.parse.unquote(uploaded_file_name).decode('utf-8')
    return uploaded_file_name, content_type
|
|
|
|
|
|
|
|
|
2013-10-23 16:46:18 +02:00
|
|
|
def get_signed_upload_url(path):
    # type: (str) -> str
    """Return a short-lived (15s) signed GET URL for `path` in the
    authenticated-uploads bucket."""
    s3 = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    return s3.generate_url(15, 'GET',
                           bucket=settings.S3_AUTH_UPLOADS_BUCKET,
                           key=path)
|
2013-09-16 20:59:54 +02:00
|
|
|
|
2014-05-06 03:48:23 +02:00
|
|
|
def get_realm_for_filename(path):
    # type: (str) -> Optional[int]
    """Return the realm id of the user who uploaded the S3 object at
    `path`, or None if no such object exists.

    (The annotation was previously `-> int`, but the missing-key
    branch below returns None.)
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
    if key is None:
        # This happens if the key does not exist.
        return None
    # user_profile_id was stored as key metadata by upload_image_to_s3.
    return get_user_profile_by_id(key.metadata["user_profile_id"]).realm.id
|
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
class S3UploadBackend(ZulipUploadBackend):
    """Upload backend storing files in Amazon S3 (via boto)."""

    def upload_message_image(self, uploaded_file_name, content_type, file_data, user_profile, target_realm=None):
        # type: (str, str, str, UserProfile, Optional[Realm]) -> str
        """Store a message attachment in the auth-uploads bucket and
        record an Attachment row; return its /user_uploads URL.

        The S3 key is <realm_id>/<random 18-byte name>/<sanitized name>;
        target_realm overrides the uploader's realm when given.
        """
        bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
        s3_file_name = "/".join([
            str(target_realm.id if target_realm is not None else user_profile.realm.id),
            random_name(18),
            sanitize_name(uploaded_file_name)
        ])
        url = "/user_uploads/%s" % (s3_file_name)

        upload_image_to_s3(
            bucket_name,
            s3_file_name,
            content_type,
            user_profile,
            file_data
        )

        create_attachment(uploaded_file_name, s3_file_name, user_profile)
        return url

    def delete_message_image(self, path_id):
        # type: (text_type) -> bool
        """Delete the S3 object for `path_id`; return whether it existed."""
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET)

        # check if file exists
        key = bucket.get_key(path_id)
        if key is not None:
            bucket.delete_key(key)
            return True

        file_name = path_id.split("/")[-1]
        logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
        return False

    def upload_avatar_image(self, user_file, user_profile, email):
        # type: (File, UserProfile, text_type) -> None
        """Store the original upload and a resized PNG avatar for
        `email` in the avatar bucket, keyed by the avatar hash."""
        content_type = guess_type(user_file.name)[0]
        bucket_name = settings.S3_AVATAR_BUCKET
        s3_file_name = user_avatar_hash(email)

        image_data = user_file.read()
        # Keep the original bytes under a ".original" suffix so the
        # full-resolution image is never lost.
        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_avatar(image_data)
        upload_image_to_s3(
            bucket_name,
            s3_file_name,
            'image/png',
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for URL.  (That code also handles the case
        # that users use gravatar.)
|
2013-10-28 16:13:53 +01:00
|
|
|
|
|
|
|
### Local
|
|
|
|
|
|
|
|
def mkdirs(path):
    # type: (str) -> None
    """Ensure the parent directory of `path` exists.

    The previous isdir-then-makedirs sequence raced with concurrent
    uploads creating the same directory; instead we attempt the
    creation and tolerate EEXIST.
    """
    import errno
    dirname = os.path.dirname(path)
    try:
        os.makedirs(dirname)
    except OSError as e:
        # Another process may have created the directory concurrently.
        if e.errno != errno.EEXIST:
            raise
|
|
|
|
|
|
|
|
def write_local_file(type, path, file_data):
    # type: (text_type, text_type, binary_type) -> None
    """Write `file_data` under LOCAL_UPLOADS_DIR/<type>/<path>,
    creating any missing parent directories."""
    destination = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
    mkdirs(destination)
    with open(destination, 'wb') as out:
        out.write(file_data)
|
|
|
|
|
2016-06-09 12:19:56 +02:00
|
|
|
def get_local_file_path(path_id):
    # type: (text_type) -> Optional[text_type]
    """Return the on-disk path for `path_id`, or None if there is no
    such locally stored file."""
    local_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
    if not os.path.isfile(local_path):
        return None
    return local_path
|
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
class LocalUploadBackend(ZulipUploadBackend):
    """Upload backend storing files under settings.LOCAL_UPLOADS_DIR."""

    def upload_message_image(self, uploaded_file_name, content_type, file_data, user_profile, target_realm=None):
        # type: (str, str, str, UserProfile, Optional[Realm]) -> str
        """Store a message attachment on local disk and record an
        Attachment row; return its /user_uploads URL.

        NOTE(review): unlike S3UploadBackend, target_realm is accepted
        but unused here — the uploader's realm is always used.
        """
        # Split into 256 subdirectories to prevent directories from getting too big
        path = "/".join([
            str(user_profile.realm.id),
            format(random.randint(0, 255), 'x'),
            random_name(18),
            sanitize_name(uploaded_file_name)
        ])

        write_local_file('files', path, file_data)
        create_attachment(uploaded_file_name, path, user_profile)
        return '/user_uploads/' + path

    def delete_message_image(self, path_id):
        # type: (text_type) -> bool
        """Delete the local file for `path_id`; return whether it existed."""
        file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
        if os.path.isfile(file_path):
            # This removes the file but the empty folders still remain.
            os.remove(file_path)
            return True

        file_name = path_id.split("/")[-1]
        logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
        return False

    def upload_avatar_image(self, user_file, user_profile, email):
        # type: (File, UserProfile, text_type) -> None
        """Store both the original upload and a resized PNG avatar for
        `email` under the local 'avatars' directory."""
        email_hash = user_avatar_hash(email)

        image_data = user_file.read()
        # Keep the original bytes so the full-resolution image is never lost.
        write_local_file('avatars', email_hash+'.original', image_data)

        resized_data = resize_avatar(image_data)
        write_local_file('avatars', email_hash+'.png', resized_data)
|
|
|
|
|
|
|
|
# Common and wrappers

# Select the active storage backend once at import time: local-disk
# storage when LOCAL_UPLOADS_DIR is configured, S3 otherwise.
if settings.LOCAL_UPLOADS_DIR is not None:
    upload_backend = LocalUploadBackend() # type: ZulipUploadBackend
else:
    upload_backend = S3UploadBackend()
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
def delete_message_image(path_id):
    # type: (text_type) -> bool
    """Module-level convenience wrapper delegating to the active backend."""
    return upload_backend.delete_message_image(path_id)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
def upload_avatar_image(user_file, user_profile, email):
    # type: (File, UserProfile, text_type) -> None
    """Module-level convenience wrapper delegating to the active backend."""
    upload_backend.upload_avatar_image(user_file, user_profile, email)
|
2013-10-28 16:13:53 +01:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
def upload_message_image(uploaded_file_name, content_type, file_data, user_profile, target_realm=None):
    # type: (str, str, str, UserProfile, Optional[Realm]) -> str
    """Module-level convenience wrapper delegating to the active backend."""
    return upload_backend.upload_message_image(uploaded_file_name, content_type, file_data,
                                               user_profile, target_realm=target_realm)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
|
|
|
def claim_attachment(path_id, message):
    # type: (text_type, Mapping[str, Any]) -> bool
    """Associate `message` with the Attachment for `path_id`.

    Returns True on success; raises JsonableError if no Attachment
    exists for the path (e.g. the upload never completed).

    The old `return False` after the raising except branch was
    unreachable and has been removed; the try body is narrowed to the
    one statement that can raise DoesNotExist.
    """
    try:
        attachment = Attachment.objects.get(path_id=path_id)
    except Attachment.DoesNotExist:
        raise JsonableError(_("The upload was not successful. Please reupload the file again in a new message."))
    attachment.messages.add(message)
    attachment.save()
    return True
|
|
|
|
|
|
|
|
def create_attachment(file_name, path_id, user_profile):
    # type: (str, str, UserProfile) -> bool
    """Record an Attachment row for a newly uploaded file; always True."""
    Attachment.objects.create(file_name=file_name,
                              path_id=path_id,
                              owner=user_profile)
    return True
|
2013-10-28 16:13:53 +01:00
|
|
|
|
2016-06-09 12:16:38 +02:00
|
|
|
def upload_message_image_from_request(request, user_file, user_profile):
    # type: (HttpRequest, File, UserProfile) -> str
    """Extract name/mimetype from the request and store the uploaded
    file via the active backend; return its /user_uploads URL."""
    name, mimetype = get_file_info(request, user_file)
    return upload_message_image(name, mimetype, user_file.read(), user_profile)
|