from __future__ import absolute_import

from django.conf import settings
from django.template.defaultfilters import slugify
from zerver.lib.avatar import user_avatar_hash

from boto.s3.key import Key
from boto.s3.connection import S3Connection
from mimetypes import guess_type, guess_extension

import base64
import os

# Performance Note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.
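#
# For reference, the Django setting the note above relies on would be
# configured in the project settings module roughly like this (illustrative
# sketch; the real value lives in the Zulip settings file, not here):
#
#     # Force Django to spool every upload to a temporary file on disk
#     # instead of keeping small uploads in memory.
#     FILE_UPLOAD_MAX_MEMORY_SIZE = 0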

# To come up with an S3 key we randomly generate a "directory". The "file
# name" is the original filename provided by the user, run through Django's
# slugify.

def sanitize_name(name):
    split_name = name.split('.')
    base = ".".join(split_name[:-1])
    extension = split_name[-1]
    return slugify(base) + "." + slugify(extension)

def random_name(bytes=60):
    return base64.urlsafe_b64encode(os.urandom(bytes))
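
# Illustrative example of the pieces these helpers produce (the random value
# below is made up; the real output of random_name(18) is 24 URL-safe base64
# characters):
#
#     sanitize_name("Holiday Photo!.png")  ->  "holiday-photo.png"
#     random_name(18)                      ->  "Ai5UmT4HdzKyiJ2REryWTQgt"
#
# which are joined with "/" below (optionally prefixed with the realm id) to
# form S3 keys such as "Ai5UmT4HdzKyiJ2REryWTQgt/holiday-photo.png".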

def upload_image_to_s3(
        bucket_name,
        file_name,
        content_type,
        user_profile,
        contents,
    ):

    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = Key(conn.get_bucket(bucket_name))
    key.key = file_name
    key.set_metadata("user_profile_id", str(user_profile.id))
    key.set_metadata("realm_id", str(user_profile.realm.id))

    if content_type:
        headers = {'Content-Type': content_type}
    else:
        headers = None

    key.set_contents_from_string(contents, headers=headers)
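
# Illustrative call (a sketch of how the helpers further down invoke this;
# the key and payload are made up, and file_data stands for the raw bytes of
# the upload):
#
#     upload_image_to_s3(settings.S3_BUCKET,
#                        "Ai5UmT4HdzKyiJ2REryWTQgt/holiday-photo.png",
#                        "image/png", user_profile, file_data)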

def get_file_info(request, user_file):
    uploaded_file_name = user_file.name
    content_type = request.GET.get('mimetype')
    if content_type is None:
        content_type = guess_type(uploaded_file_name)[0]
    else:
        uploaded_file_name = uploaded_file_name + guess_extension(content_type)
    return uploaded_file_name, content_type
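
# For example (illustrative values): with no "mimetype" query parameter, a
# file named "holiday-photo.png" is typed via guess_type() as "image/png";
# with "?mimetype=image/png" in the request, guess_extension("image/png")
# appends ".png" to whatever name the client supplied instead.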

def authed_upload_enabled(user_profile):
    return user_profile.realm.domain in ('zulip.com', 'squarespace.com')

def upload_message_image(uploaded_file_name, content_type, file_data, user_profile, private=None):
    if private is None:
        private = authed_upload_enabled(user_profile)
    if private:
        bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
        s3_file_name = "/".join([
            str(user_profile.realm.id),
            random_name(18),
            sanitize_name(uploaded_file_name)
        ])
        url = "/user_uploads/%s" % (s3_file_name)
    else:
        bucket_name = settings.S3_BUCKET
        s3_file_name = "/".join([random_name(60), sanitize_name(uploaded_file_name)])
        url = "https://%s.s3.amazonaws.com/%s" % (bucket_name, s3_file_name)

    upload_image_to_s3(
            bucket_name,
            s3_file_name,
            content_type,
            user_profile,
            file_data
    )
    return url
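
# Note on the two URL shapes above: the public-bucket case returns a direct
# S3 URL, while the authed-uploads case returns a relative "/user_uploads/..."
# path, which (presumably) is served by a Django view that authenticates the
# requester and then hands back a short-lived link from
# get_signed_upload_url() below.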

def upload_message_image_through_web_client(request, user_file, user_profile, private=None):
    uploaded_file_name, content_type = get_file_info(request, user_file)
    return upload_message_image(uploaded_file_name, content_type, user_file.read(), user_profile, private)

def get_signed_upload_url(path):
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    return conn.generate_url(15, 'GET', bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)
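
# In boto's S3Connection.generate_url(), the first argument is the expiry in
# seconds, so the signed GET link above should only remain valid for roughly
# 15 seconds after it is generated.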

def upload_avatar_image(user_file, user_profile, email):
    content_type = guess_type(user_file.name)[0]
    bucket_name = settings.S3_AVATAR_BUCKET
    s3_file_name = user_avatar_hash(email)

    upload_image_to_s3(
            bucket_name,
            s3_file_name,
            content_type,
            user_profile,
            user_file.read(),
    )
    # See avatar_url in avatar.py for URL. (That code also handles the case
    # that users use gravatar.)
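
# Note on upload_avatar_image() above: because the S3 key is just
# user_avatar_hash(email), re-uploading an avatar for the same email address
# overwrites the previous object rather than creating a new one.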