2013-06-18 20:47:37 +02:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
from django.template.defaultfilters import slugify
|
|
|
|
|
2013-07-29 23:03:31 +02:00
|
|
|
from zerver.lib.avatar import user_avatar_hash
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
from boto.s3.key import Key
|
|
|
|
from boto.s3.connection import S3Connection
|
|
|
|
from mimetypes import guess_type, guess_extension
|
|
|
|
|
|
|
|
import base64
|
|
|
|
import os
|
2013-10-28 17:44:09 +01:00
|
|
|
from PIL import Image, ImageOps
|
|
|
|
from StringIO import StringIO
|
2013-10-28 16:13:53 +01:00
|
|
|
import random
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
# Performance Note:
|
|
|
|
#
|
|
|
|
# For writing files to S3, the file could either be stored in RAM
|
|
|
|
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
|
|
|
|
#
|
|
|
|
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
|
|
|
|
# should occur in practice.
|
|
|
|
#
|
|
|
|
# This is great, because passing the pseudofile object that Django gives
|
|
|
|
# you to boto would be a pain.
|
|
|
|
|
2013-10-23 19:41:13 +02:00
|
|
|
# To come up with a s3 key we randomly generate a "directory". The "file
|
|
|
|
# name" is the original filename provided by the user run through Django's
|
|
|
|
# slugify.
|
|
|
|
|
|
|
|
def sanitize_name(name):
    """Sanitize an uploaded filename for use in an S3 key or local path.

    The base name and the extension are slugified independently so the
    dot separating them survives: "My File.TXT" -> "my-file.txt".

    Previously a name with no dot produced a hidden-file-style result
    (e.g. "README" -> ".readme", since the base slug was empty); such
    names are now slugified whole.
    """
    split_name = name.split('.')
    if len(split_name) == 1:
        # No extension at all; don't emit a leading-dot filename.
        return slugify(name)
    base = ".".join(split_name[:-1])
    extension = split_name[-1]
    return slugify(base) + "." + slugify(extension)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2013-10-23 19:41:13 +02:00
|
|
|
def random_name(bytes=60):
    """Return a URL-safe random token built from `bytes` bytes of entropy.

    NOTE(review): the parameter name shadows the `bytes` builtin; it is
    kept as-is because renaming it could break keyword callers.
    """
    entropy = os.urandom(bytes)
    return base64.urlsafe_b64encode(entropy)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
def resize_avatar(image_data):
    """Center-crop/scale raw image bytes to a 100x100 PNG and return the PNG bytes."""
    AVATAR_SIZE = 100
    source = Image.open(StringIO(image_data))
    fitted = ImageOps.fit(source, (AVATAR_SIZE, AVATAR_SIZE), Image.ANTIALIAS)
    buf = StringIO()
    fitted.save(buf, format='png')
    return buf.getvalue()
|
|
|
|
|
|
|
|
|
|
|
|
### S3
|
|
|
|
|
2014-02-06 18:03:40 +01:00
|
|
|
def get_bucket(conn, bucket_name):
    """Fetch an S3 bucket handle without validating its existence.

    Calling get_bucket() with validate=True can apparently lead to
    expensive S3 bills:
      http://www.appneta.com/blog/s3-list-get-bucket-default/
    The benefits of validation aren't completely clear to us, and we
    want to save on our bills, so we pass validate=False.  (We think
    validate=True would make us fail faster when a bucket doesn't
    exist, but that shouldn't be an issue for us.)
    """
    return conn.get_bucket(bucket_name, validate=False)
|
|
|
|
|
2013-06-18 20:47:37 +02:00
|
|
|
def upload_image_to_s3(
        bucket_name,
        file_name,
        content_type,
        user_profile,
        contents,
    ):
    """Store `contents` in the given S3 bucket under `file_name`.

    The uploading user's id and realm id are attached as S3 metadata,
    and a Content-Type header is set when one was supplied.
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = Key(get_bucket(conn, bucket_name))
    key.key = file_name
    key.set_metadata("user_profile_id", str(user_profile.id))
    key.set_metadata("realm_id", str(user_profile.realm.id))

    headers = {'Content-Type': content_type} if content_type else None
    key.set_contents_from_string(contents, headers=headers)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
def get_file_info(request, user_file):
    """Determine the filename and content type for an uploaded file.

    The client may override the guessed content type via a ``mimetype``
    query parameter; in that case we append the extension matching that
    type to the filename.

    Returns (uploaded_file_name, content_type); content_type may be
    None if it could not be guessed from the filename.
    """
    uploaded_file_name = user_file.name
    content_type = request.GET.get('mimetype')
    if content_type is None:
        content_type = guess_type(uploaded_file_name)[0]
    else:
        extension = guess_extension(content_type)
        # guess_extension() returns None for unregistered mimetypes; the
        # old code then crashed concatenating str + None.
        if extension is not None:
            uploaded_file_name = uploaded_file_name + extension
    return uploaded_file_name, content_type
|
|
|
|
|
2014-02-07 05:05:18 +01:00
|
|
|
def authed_upload_enabled(realm):
    """Return whether this realm's uploads go to the authenticated S3 bucket."""
    AUTHED_DOMAINS = ('zulip.com', 'squarespace.com', 'bargainbit.com')
    return realm.domain in AUTHED_DOMAINS
|
2013-10-23 16:46:18 +02:00
|
|
|
|
2014-02-07 05:37:23 +01:00
|
|
|
def upload_message_image_s3(uploaded_file_name, content_type, file_data, user_profile, private=None, target_realm=None):
    """Upload a message attachment to S3 and return its URL.

    Private uploads go to the authed-uploads bucket, namespaced by realm
    id, and are served through /user_uploads/; public uploads go to the
    public bucket and get a direct s3.amazonaws.com URL.  When `private`
    is not specified, the realm's setting decides.
    """
    realm = target_realm if target_realm is not None else user_profile.realm
    if private is None:
        private = authed_upload_enabled(realm)

    if private:
        bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
        s3_file_name = "/".join([
            str(realm.id),
            random_name(18),
            sanitize_name(uploaded_file_name),
        ])
        url = "/user_uploads/%s" % (s3_file_name)
    else:
        bucket_name = settings.S3_BUCKET
        s3_file_name = "/".join([random_name(18), sanitize_name(uploaded_file_name)])
        url = "https://%s.s3.amazonaws.com/%s" % (bucket_name, s3_file_name)

    upload_image_to_s3(
        bucket_name,
        s3_file_name,
        content_type,
        user_profile,
        file_data,
    )
    return url
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2013-10-23 16:46:18 +02:00
|
|
|
def get_signed_upload_url(path):
    """Return a short-lived (15 second) signed GET URL for an authed upload."""
    connection = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    return connection.generate_url(15, 'GET', bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)
|
2013-09-16 20:59:54 +02:00
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
def upload_avatar_image_s3(user_file, user_profile, email):
    """Upload a user's avatar to S3: the original bytes plus a resized PNG.

    The original is stored under "<hash>.original" and the 100x100 PNG
    under "<hash>", where <hash> is derived from the email address.
    """
    content_type = guess_type(user_file.name)[0]
    bucket_name = settings.S3_AVATAR_BUCKET
    s3_file_name = user_avatar_hash(email)

    image_data = user_file.read()
    upload_image_to_s3(
        bucket_name,
        s3_file_name + ".original",
        content_type,
        user_profile,
        image_data,
    )

    resized_data = resize_avatar(image_data)
    upload_image_to_s3(
        bucket_name,
        s3_file_name,
        'image/png',
        user_profile,
        resized_data,
    )
    # See avatar_url in avatar.py for URL.  (That code also handles the case
    # that users use gravatar.)
|
2013-10-28 16:13:53 +01:00
|
|
|
|
|
|
|
### Local
|
|
|
|
|
|
|
|
def mkdirs(path):
    """Ensure the parent directory of `path` exists.

    The old isdir()-then-makedirs() sequence had a race: another process
    could create the directory between the check and the call, making
    makedirs() raise.  We now treat EEXIST as success.
    """
    import errno
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError as e:
            # Lost the race to another creator; that's fine.
            if e.errno != errno.EEXIST:
                raise
|
|
|
|
|
|
|
|
def write_local_file(type, path, file_data):
    """Write `file_data` under LOCAL_UPLOADS_DIR/<type>/<path>, creating directories as needed."""
    destination = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
    mkdirs(destination)
    with open(destination, 'wb') as out:
        out.write(file_data)
|
|
|
|
|
2014-02-07 05:37:23 +01:00
|
|
|
def upload_message_image_local(uploaded_file_name, content_type, file_data, user_profile, private=None, target_realm=None):
    """Store a message attachment on local disk and return its URL path.

    NOTE(review): `private` and `target_realm` exist for signature
    parity with the S3 backend but are ignored here -- local uploads
    always use the uploader's realm and have no private bucket.
    """
    # Split into 256 subdirectories to prevent directories from getting too big
    shard = format(random.randint(0, 255), 'x')
    path = "/".join([
        str(user_profile.realm.id),
        shard,
        random_name(18),
        sanitize_name(uploaded_file_name),
    ])

    write_local_file('files', path, file_data)
    return '/user_uploads/' + path
|
|
|
|
|
|
|
|
def upload_avatar_image_local(user_file, user_profile, email):
    """Store a user's avatar on local disk: original bytes plus resized PNG.

    Mirrors upload_avatar_image_s3: "<hash>.original" holds the upload
    as-is, "<hash>.png" the 100x100 version.
    """
    email_hash = user_avatar_hash(email)

    image_data = user_file.read()
    write_local_file('avatars', email_hash+'.original', image_data)

    resized_data = resize_avatar(image_data)
    write_local_file('avatars', email_hash+'.png', resized_data)
|
|
|
|
|
|
|
|
### Common
|
|
|
|
|
|
|
|
# Select the storage backend at import time: S3 unless a local uploads
# directory has been configured.
if settings.LOCAL_UPLOADS_DIR is None:
    upload_message_image = upload_message_image_s3
    upload_avatar_image = upload_avatar_image_s3
else:
    upload_message_image = upload_message_image_local
    upload_avatar_image = upload_avatar_image_local
|
|
|
|
|
|
|
|
def upload_message_image_through_web_client(request, user_file, user_profile, private=None):
    """Browser-upload entry point: derive the name/content type from the request, then store the file."""
    file_name, mime_type = get_file_info(request, user_file)
    return upload_message_image(file_name, mime_type, user_file.read(), user_profile, private)
|