from typing import Dict, Optional, Tuple, Any

from django.utils.translation import ugettext as _
from django.conf import settings
from django.core.files import File
from django.http import HttpRequest
from jinja2 import Markup as mark_safe
import unicodedata

from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.exceptions import JsonableError, ErrorCode

from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from mimetypes import guess_type, guess_extension

from zerver.models import get_user_profile_by_id
from zerver.models import Attachment
from zerver.models import Realm, RealmEmoji, UserProfile, Message
from zerver.lib.utils import generate_random_token

import urllib.parse
import base64
import os
import re
from PIL import Image, ImageOps, ExifTags
from PIL.Image import DecompressionBombError
from PIL.GifImagePlugin import GifImageFile
import io
import random
import logging
import shutil
import sys

DEFAULT_AVATAR_SIZE = 100
MEDIUM_AVATAR_SIZE = 500
DEFAULT_EMOJI_SIZE = 64

# These sizes were selected based on looking at the maximum common
# sizes in a library of animated custom emoji, balanced against the
# network cost of very large emoji images.
MAX_EMOJI_GIF_SIZE = 128
MAX_EMOJI_GIF_FILE_SIZE_BYTES = 128 * 1024 * 1024  # 128 MiB

# Performance Note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.

# To come up with an S3 key, we randomly generate a "directory".  The
# "file name" is the original filename provided by the user, run
# through a sanitization function.

class RealmUploadQuotaError(JsonableError):
    code = ErrorCode.REALM_UPLOAD_QUOTA

attachment_url_re = re.compile(r'[/\-]user[\-_]uploads[/\.-].*?(?=[ )]|\Z)')

def attachment_url_to_path_id(attachment_url: str) -> str:
    path_id_raw = re.sub(r'[/\-]user[\-_]uploads[/\.-]', '', attachment_url)
    # Remove any extra '.' after the file extension.  These are probably added by the user.
    return re.sub('[.]+$', '', path_id_raw, flags=re.M)
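
# Illustrative example (hypothetical URL): for an attachment URL like
# "/user_uploads/2/ab/CdEf/photo.png.", the "/user_uploads/" prefix and any
# trailing dots are stripped, yielding the path_id "2/ab/CdEf/photo.png".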

def sanitize_name(value: str) -> str:
    """
    Sanitizes a value to be safe to store in a Linux filesystem, in
    S3, and in a URL.  So unicode is allowed, but not special
    characters other than ".", "-", and "_".

    This implementation is based on django.utils.text.slugify; it is
    modified by:
    * adding '.' and '_' to the list of allowed characters.
    * preserving the case of the value.
    """
    value = unicodedata.normalize('NFKC', value)
    value = re.sub(r'[^\w\s._-]', '', value, flags=re.U).strip()
    return mark_safe(re.sub(r'[-\s]+', '-', value, flags=re.U))
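
# Illustrative example (hypothetical filename): sanitize_name('My résumé (final).pdf')
# drops the parentheses and collapses runs of whitespace into '-', returning
# 'My-résumé-final.pdf'; unicode word characters are preserved unchanged.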

def random_name(bytes: int=60) -> str:
    return base64.urlsafe_b64encode(os.urandom(bytes)).decode('utf-8')
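
# Note: with the default of 60 random bytes (a multiple of 3), the URL-safe
# base64 encoding is exactly 80 characters long, with no '=' padding.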

class BadImageError(JsonableError):
    code = ErrorCode.BAD_IMAGE

name_to_tag_num = dict((name, num) for num, name in ExifTags.TAGS.items())

# https://stackoverflow.com/a/6218425
def exif_rotate(image: Image) -> Image:
    if not hasattr(image, '_getexif'):
        return image
    exif_data = image._getexif()
    if exif_data is None:
        return image

    exif_dict = dict(exif_data.items())
    orientation = exif_dict.get(name_to_tag_num['Orientation'])

    if orientation == 3:
        return image.rotate(180, expand=True)
    elif orientation == 6:
        return image.rotate(270, expand=True)
    elif orientation == 8:
        return image.rotate(90, expand=True)

    return image
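
# For reference: EXIF orientation 3 marks an image stored upside down,
# orientation 6 one needing a 90° clockwise turn to display upright, and
# orientation 8 one needing 90° counter-clockwise; since PIL's rotate() turns
# counter-clockwise, the corresponding arguments above are 180, 270, and 90.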

def resize_avatar(image_data: bytes, size: int=DEFAULT_AVATAR_SIZE) -> bytes:
    try:
        im = Image.open(io.BytesIO(image_data))
        im = exif_rotate(im)
        im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
    except IOError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))
    out = io.BytesIO()
    if im.mode == 'CMYK':
        im = im.convert('RGB')
    im.save(out, format='png')
    return out.getvalue()

def resize_logo(image_data: bytes) -> bytes:
    try:
        im = Image.open(io.BytesIO(image_data))
        im = exif_rotate(im)
        im.thumbnail((8*DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE), Image.ANTIALIAS)
    except IOError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))
    out = io.BytesIO()
    if im.mode == 'CMYK':
        im = im.convert('RGB')
    im.save(out, format='png')
    return out.getvalue()

def resize_gif(im: GifImageFile, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
    frames = []
    duration_info = []
    # If the 'loop' info is not set, default to 0, which makes the GIF
    # loop forever.
    loop = im.info.get("loop", 0)
    for frame_num in range(0, im.n_frames):
        im.seek(frame_num)
        new_frame = Image.new("RGBA", im.size)
        new_frame.paste(im, (0, 0), im.convert("RGBA"))
        new_frame = ImageOps.fit(new_frame, (size, size), Image.ANTIALIAS)
        frames.append(new_frame)
        duration_info.append(im.info['duration'])
    out = io.BytesIO()
    frames[0].save(out, save_all=True, optimize=True,
                   format="GIF", append_images=frames[1:],
                   duration=duration_info,
                   loop=loop)
    return out.getvalue()
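
# Note: Pillow's GIF writer accepts a list for the duration= parameter, so the
# per-frame durations collected above preserve variable frame timing.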

def resize_emoji(image_data: bytes, size: int=DEFAULT_EMOJI_SIZE) -> bytes:
    try:
        im = Image.open(io.BytesIO(image_data))
        image_format = im.format
        if image_format == "GIF":
            # There are a number of bugs in Pillow's GifImagePlugin which
            # result in resized GIFs being broken.  To work around this, we
            # only resize under certain conditions to minimize the chance of
            # creating ugly GIFs.
            should_resize = any((
                im.size[0] != im.size[1],  # not square
                im.size[0] > MAX_EMOJI_GIF_SIZE,  # dimensions too large
                len(image_data) > MAX_EMOJI_GIF_FILE_SIZE_BYTES,  # filesize too large
            ))
            return resize_gif(im, size) if should_resize else image_data
        else:
            im = exif_rotate(im)
            im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
            out = io.BytesIO()
            im.save(out, format=image_format)
            return out.getvalue()
    except IOError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))

### Common

class ZulipUploadBackend:
    def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
                            content_type: Optional[str], file_data: bytes,
                            user_profile: UserProfile,
                            target_realm: Optional[Realm]=None) -> str:
        raise NotImplementedError()

    def upload_avatar_image(self, user_file: File,
                            acting_user_profile: UserProfile,
                            target_user_profile: UserProfile,
                            content_type: Optional[str]=None) -> None:
        raise NotImplementedError()

    def delete_avatar_image(self, user: UserProfile) -> None:
        raise NotImplementedError()

    def delete_message_image(self, path_id: str) -> bool:
        raise NotImplementedError()

    def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
        raise NotImplementedError()

    def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
        raise NotImplementedError()

    def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
        raise NotImplementedError()

    def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None:
        raise NotImplementedError()

    def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
        raise NotImplementedError()

    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        raise NotImplementedError()

    def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
                                night: bool) -> None:
        raise NotImplementedError()

    def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
        raise NotImplementedError()

    def upload_emoji_image(self, emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
        raise NotImplementedError()

    def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
        raise NotImplementedError()

    def upload_export_tarball(self, realm: Realm, tarball_path: str) -> str:
        raise NotImplementedError()

    def delete_export_tarball(self, path_id: str) -> Optional[str]:
        raise NotImplementedError()

### S3

def get_bucket(conn: S3Connection, bucket_name: str) -> Bucket:
    # Calling get_bucket() with validate=True can apparently lead
    # to expensive S3 bills:
    #   http://www.appneta.com/blog/s3-list-get-bucket-default/
    # The benefits of validation aren't completely clear to us, and
    # we want to save on our bills, so we set the validate flag to False.
    # (We think setting validate to True would cause us to fail faster
    # in situations where buckets don't exist, but that shouldn't be
    # an issue for us.)
    bucket = conn.get_bucket(bucket_name, validate=False)
    return bucket

def upload_image_to_s3(
        bucket_name: str,
        file_name: str,
        content_type: Optional[str],
        user_profile: UserProfile,
        contents: bytes) -> None:

    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = get_bucket(conn, bucket_name)
    key = Key(bucket)
    key.key = file_name
    key.set_metadata("user_profile_id", str(user_profile.id))
    key.set_metadata("realm_id", str(user_profile.realm_id))

    if content_type is not None:
        headers = {'Content-Type': content_type}  # type: Optional[Dict[str, str]]
    else:
        headers = None

    key.set_contents_from_string(contents, headers=headers)  # type: ignore # https://github.com/python/typeshed/issues/1552

def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
    upload_quota = realm.upload_quota_bytes()
    if upload_quota is None:
        return
    used_space = realm.currently_used_upload_space_bytes()
    if (used_space + uploaded_file_size) > upload_quota:
        raise RealmUploadQuotaError(_("Upload would exceed your organization's upload quota."))
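
# For example (hypothetical numbers): with a 1 GiB quota and 900 MiB already
# used, a 200 MiB upload raises RealmUploadQuotaError; realms where
# upload_quota_bytes() returns None are never limited.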

def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Optional[str]]:

    uploaded_file_name = user_file.name
    content_type = request.GET.get('mimetype')
    if content_type is None:
        guessed_type = guess_type(uploaded_file_name)[0]
        if guessed_type is not None:
            content_type = guessed_type
    else:
        extension = guess_extension(content_type)
        if extension is not None:
            uploaded_file_name = uploaded_file_name + extension

    uploaded_file_name = urllib.parse.unquote(uploaded_file_name)
    uploaded_file_size = user_file.size

    return uploaded_file_name, uploaded_file_size, content_type
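
# Illustrative example (hypothetical request): uploading "notes.txt" without a
# ?mimetype= parameter typically yields ("notes.txt", <size>, "text/plain")
# via guess_type(); uploading "notes" with ?mimetype=text/plain typically has
# the guessed extension appended, yielding ("notes.txt", <size>, "text/plain").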

def get_signed_upload_url(path: str) -> str:
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    # Generate a presigned GET URL that expires after 15 seconds.
    return conn.generate_url(15, 'GET', bucket=settings.S3_AUTH_UPLOADS_BUCKET, key=path)

def get_realm_for_filename(path: str) -> Optional[int]:
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    key = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path)
    if key is None:
        # This happens if the key does not exist.
        return None
    return get_user_profile_by_id(key.metadata["user_profile_id"]).realm_id

class S3UploadBackend(ZulipUploadBackend):
    def delete_file_from_s3(self, path_id: str, bucket_name: str) -> bool:
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket = get_bucket(conn, bucket_name)

        # check if file exists
        key = bucket.get_key(path_id)
        if key is not None:
            bucket.delete_key(key)
            return True

        file_name = path_id.split("/")[-1]
        logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
        return False

    def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
                            content_type: Optional[str], file_data: bytes,
                            user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
        bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
        if target_realm is None:
            target_realm = user_profile.realm
        s3_file_name = "/".join([
            str(target_realm.id),
            random_name(18),
            sanitize_name(uploaded_file_name)
        ])
        url = "/user_uploads/%s" % (s3_file_name,)

        upload_image_to_s3(
            bucket_name,
            s3_file_name,
            content_type,
            user_profile,
            file_data
        )

        create_attachment(uploaded_file_name, s3_file_name, user_profile, uploaded_file_size)
        return url
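
    # Illustrative note (hypothetical values): for realm id 2 and an upload
    # named "statement.pdf", the generated S3 key looks like
    # "2/kgBOUOW0HkJS5wsiCAXmA8Ya/statement.pdf"; random_name(18) yields a
    # 24-character URL-safe token, making the key unguessable.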

    def delete_message_image(self, path_id: str) -> bool:
        return self.delete_file_from_s3(path_id, settings.S3_AUTH_UPLOADS_BUCKET)

    def write_avatar_images(self, s3_file_name: str, target_user_profile: UserProfile,
                            image_data: bytes, content_type: Optional[str]) -> None:
        bucket_name = settings.S3_AVATAR_BUCKET

        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".original",
            content_type,
            target_user_profile,
            image_data,
        )

        # custom 500px wide version
        resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        upload_image_to_s3(
            bucket_name,
            s3_file_name + "-medium.png",
            "image/png",
            target_user_profile,
            resized_medium
        )

        resized_data = resize_avatar(image_data)
        upload_image_to_s3(
            bucket_name,
            s3_file_name,
            'image/png',
            target_user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for URL.  (That code also handles the case
        # that users use gravatar.)

    def upload_avatar_image(self, user_file: File,
                            acting_user_profile: UserProfile,
                            target_user_profile: UserProfile,
                            content_type: Optional[str] = None) -> None:
        if content_type is None:
            content_type = guess_type(user_file.name)[0]
        s3_file_name = user_avatar_path(target_user_profile)

        image_data = user_file.read()
        self.write_avatar_images(s3_file_name, target_user_profile,
                                 image_data, content_type)

    def delete_avatar_image(self, user: UserProfile) -> None:
        path_id = user_avatar_path(user)
        bucket_name = settings.S3_AVATAR_BUCKET

        self.delete_file_from_s3(path_id + ".original", bucket_name)
        self.delete_file_from_s3(path_id + "-medium.png", bucket_name)
        self.delete_file_from_s3(path_id, bucket_name)

    def get_avatar_key(self, file_name: str) -> Key:
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket_name = settings.S3_AVATAR_BUCKET
        bucket = get_bucket(conn, bucket_name)

        key = bucket.get_key(file_name)
        return key

    def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
        s3_source_file_name = user_avatar_path(source_profile)
        s3_target_file_name = user_avatar_path(target_profile)

        key = self.get_avatar_key(s3_source_file_name + ".original")
        image_data = key.get_contents_as_string()  # type: ignore # https://github.com/python/typeshed/issues/1552
        content_type = key.content_type

        self.write_avatar_images(s3_target_file_name, target_profile, image_data, content_type)  # type: ignore # image_data is `bytes`, boto subs are wrong

    def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
        bucket = settings.S3_AVATAR_BUCKET
        medium_suffix = "-medium.png" if medium else ""
        # ?x=x allows templates to append additional parameters with &s
        return "https://%s.s3.amazonaws.com/%s%s?x=x" % (bucket, hash_key, medium_suffix)

    def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
        content_type = guess_type(icon_file.name)[0]
        bucket_name = settings.S3_AVATAR_BUCKET
        s3_file_name = os.path.join(str(user_profile.realm.id), 'realm', 'icon')

        image_data = icon_file.read()
        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_avatar(image_data)
        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".png",
            'image/png',
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for URL.  (That code also handles the case
        # that users use gravatar.)

    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        bucket = settings.S3_AVATAR_BUCKET
        # ?x=x allows templates to append additional parameters with &s
        return "https://%s.s3.amazonaws.com/%s/realm/icon.png?version=%s" % (bucket, realm_id, version)

    def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
                                night: bool) -> None:
        content_type = guess_type(logo_file.name)[0]
        bucket_name = settings.S3_AVATAR_BUCKET
        if night:
            basename = 'night_logo'
        else:
            basename = 'logo'
        s3_file_name = os.path.join(str(user_profile.realm.id), 'realm', basename)

        image_data = logo_file.read()
        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_logo(image_data)
        upload_image_to_s3(
            bucket_name,
            s3_file_name + ".png",
            'image/png',
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for URL.  (That code also handles the case
        # that users use gravatar.)

    def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
        bucket = settings.S3_AVATAR_BUCKET
        # ?x=x allows templates to append additional parameters with &s
        if not night:
            file_name = 'logo.png'
        else:
            file_name = 'night_logo.png'
        return "https://%s.s3.amazonaws.com/%s/realm/%s?version=%s" % (bucket, realm_id, file_name, version)

    def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
        file_path = user_avatar_path(user_profile)
        s3_file_name = file_path

        bucket_name = settings.S3_AVATAR_BUCKET
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket = get_bucket(conn, bucket_name)
        key = bucket.get_key(file_path + ".original")
        image_data = key.get_contents_as_string()

        resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)  # type: ignore # image_data is `bytes`, boto subs are wrong
        upload_image_to_s3(
            bucket_name,
            s3_file_name + "-medium.png",
            "image/png",
            user_profile,
            resized_medium
        )

    def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None:  # nocoverage
        # TODO: Refactor this to share code with ensure_medium_avatar_image
        file_path = user_avatar_path(user_profile)
        # Also TODO: Migrate to user_avatar_path(user_profile) + ".png".
        s3_file_name = file_path

        bucket_name = settings.S3_AVATAR_BUCKET
        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        bucket = get_bucket(conn, bucket_name)
        key = bucket.get_key(file_path + ".original")
        image_data = key.get_contents_as_string()

        resized_avatar = resize_avatar(image_data)  # type: ignore # image_data is `bytes`, boto subs are wrong
        upload_image_to_s3(
            bucket_name,
            s3_file_name,
            "image/png",
            user_profile,
            resized_avatar
        )

    def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
                           user_profile: UserProfile) -> None:
        content_type = guess_type(emoji_file.name)[0]
        bucket_name = settings.S3_AVATAR_BUCKET
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=emoji_file_name
        )

        image_data = emoji_file.read()
        resized_image_data = resize_emoji(image_data)
        upload_image_to_s3(
            bucket_name,
            ".".join((emoji_path, "original")),
            content_type,
            user_profile,
            image_data,
        )
        upload_image_to_s3(
            bucket_name,
            emoji_path,
            content_type,
            user_profile,
            resized_image_data,
        )

    def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
        bucket = settings.S3_AVATAR_BUCKET
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id,
                                                        emoji_file_name=emoji_file_name)
        return "https://%s.s3.amazonaws.com/%s" % (bucket, emoji_path)

    def upload_export_tarball(self, realm: Optional[Realm], tarball_path: str) -> str:
        def percent_callback(complete: Any, total: Any) -> None:
            sys.stdout.write('.')
            sys.stdout.flush()

        conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
        # We use the avatar bucket, because it's world-readable.
        bucket = get_bucket(conn, settings.S3_AVATAR_BUCKET)
        key = Key(bucket)
        key.key = os.path.join("exports", generate_random_token(32), os.path.basename(tarball_path))
        key.set_contents_from_filename(tarball_path, cb=percent_callback, num_cb=40)

        public_url = 'https://{bucket}.{host}/{key}'.format(
            host=conn.server_name(),
            bucket=bucket.name,
            key=key.key)
        return public_url

    def delete_export_tarball(self, path_id: str) -> Optional[str]:
        if self.delete_file_from_s3(path_id, settings.S3_AVATAR_BUCKET):
            return path_id
        return None

### Local

def write_local_file(type: str, path: str, file_data: bytes) -> None:
    file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, 'wb') as f:
        f.write(file_data)

def read_local_file(type: str, path: str) -> bytes:
    file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
    with open(file_path, 'rb') as f:
        return f.read()

def delete_local_file(type: str, path: str) -> bool:
    file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path)
    if os.path.isfile(file_path):
        # This removes the file but the empty folders still remain.
        os.remove(file_path)
        return True
    file_name = path.split("/")[-1]
    logging.warning("%s does not exist. Its entry in the database will be removed." % (file_name,))
    return False

def get_local_file_path(path_id: str) -> Optional[str]:
    local_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', path_id)
    if os.path.isfile(local_path):
        return local_path
    else:
        return None

class LocalUploadBackend(ZulipUploadBackend):
    def upload_message_file(self, uploaded_file_name: str, uploaded_file_size: int,
                            content_type: Optional[str], file_data: bytes,
                            user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
        # Split into 256 subdirectories to prevent directories from getting too big
        path = "/".join([
            str(user_profile.realm_id),
            format(random.randint(0, 255), 'x'),
            random_name(18),
            sanitize_name(uploaded_file_name)
        ])

        write_local_file('files', path, file_data)
        create_attachment(uploaded_file_name, path, user_profile, uploaded_file_size)
        return '/user_uploads/' + path

    def delete_message_image(self, path_id: str) -> bool:
        return delete_local_file('files', path_id)

    def write_avatar_images(self, file_path: str, image_data: bytes) -> None:
        write_local_file('avatars', file_path + '.original', image_data)

        resized_data = resize_avatar(image_data)
        write_local_file('avatars', file_path + '.png', resized_data)

        resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        write_local_file('avatars', file_path + '-medium.png', resized_medium)

    def upload_avatar_image(self, user_file: File,
                            acting_user_profile: UserProfile,
                            target_user_profile: UserProfile,
                            content_type: Optional[str] = None) -> None:
        file_path = user_avatar_path(target_user_profile)

        image_data = user_file.read()
        self.write_avatar_images(file_path, image_data)

    def delete_avatar_image(self, user: UserProfile) -> None:
        path_id = user_avatar_path(user)

        delete_local_file("avatars", path_id + ".original")
        delete_local_file("avatars", path_id + ".png")
        delete_local_file("avatars", path_id + "-medium.png")

    def get_avatar_url(self, hash_key: str, medium: bool=False) -> str:
        # ?x=x allows templates to append additional parameters with &s
        medium_suffix = "-medium" if medium else ""
        return "/user_avatars/%s%s.png?x=x" % (hash_key, medium_suffix)

    def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
        source_file_path = user_avatar_path(source_profile)
        target_file_path = user_avatar_path(target_profile)

        image_data = read_local_file('avatars', source_file_path + '.original')
        self.write_avatar_images(target_file_path, image_data)

    def upload_realm_icon_image(self, icon_file: File, user_profile: UserProfile) -> None:
        upload_path = os.path.join('avatars', str(user_profile.realm.id), 'realm')

        image_data = icon_file.read()
        write_local_file(
            upload_path,
            'icon.original',
            image_data)

        resized_data = resize_avatar(image_data)
        write_local_file(upload_path, 'icon.png', resized_data)

    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        # ?x=x allows templates to append additional parameters with &s
        return "/user_avatars/%s/realm/icon.png?version=%s" % (realm_id, version)

    def upload_realm_logo_image(self, logo_file: File, user_profile: UserProfile,
                                night: bool) -> None:
        upload_path = os.path.join('avatars', str(user_profile.realm.id), 'realm')
        if night:
            original_file = 'night_logo.original'
            resized_file = 'night_logo.png'
        else:
            original_file = 'logo.original'
            resized_file = 'logo.png'
        image_data = logo_file.read()
        write_local_file(
            upload_path,
            original_file,
            image_data)

        resized_data = resize_logo(image_data)
        write_local_file(upload_path, resized_file, resized_data)

    def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
        # ?x=x allows templates to append additional parameters with &s
        if night:
            file_name = 'night_logo.png'
        else:
            file_name = 'logo.png'
        return "/user_avatars/%s/realm/%s?version=%s" % (realm_id, file_name, version)

    def ensure_medium_avatar_image(self, user_profile: UserProfile) -> None:
        file_path = user_avatar_path(user_profile)

        output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + "-medium.png")
        if os.path.isfile(output_path):
            return

        image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
        with open(image_path, "rb") as f:
            image_data = f.read()
        resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        write_local_file('avatars', file_path + '-medium.png', resized_medium)

    def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None:  # nocoverage
        # TODO: Refactor this to share code with ensure_medium_avatar_image
        file_path = user_avatar_path(user_profile)

        output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".png")
        if os.path.isfile(output_path):
            return

        image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
        with open(image_path, "rb") as f:
            image_data = f.read()
        resized_avatar = resize_avatar(image_data)
        write_local_file('avatars', file_path + '.png', resized_avatar)

    def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
                           user_profile: UserProfile) -> None:
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=emoji_file_name
        )

        image_data = emoji_file.read()
        resized_image_data = resize_emoji(image_data)
        write_local_file(
            'avatars',
            ".".join((emoji_path, "original")),
            image_data)
        write_local_file(
            'avatars',
            emoji_path,
            resized_image_data)

    def get_emoji_url(self, emoji_file_name: str, realm_id: int) -> str:
        return os.path.join(
            "/user_avatars",
            RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id, emoji_file_name=emoji_file_name))

    def upload_export_tarball(self, realm: Realm, tarball_path: str) -> str:
        path = os.path.join(
            'exports',
            str(realm.id),
            random_name(18),
            os.path.basename(tarball_path),
        )
        abs_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'avatars', path)
        os.makedirs(os.path.dirname(abs_path), exist_ok=True)
        shutil.copy(tarball_path, abs_path)
        public_url = realm.uri + '/user_avatars/' + path
        return public_url

    def delete_export_tarball(self, path_id: str) -> Optional[str]:
        # path_id has the form 'user_avatars/<file_path>'; keep only the part
        # after the first slash, i.e. the path relative to the avatars
        # directory.
        file_path = path_id.strip('/').split('/', 1)[-1]
        if delete_local_file('avatars', file_path):
            return path_id
        return None

# Common and wrappers
if settings.LOCAL_UPLOADS_DIR is not None:
    upload_backend = LocalUploadBackend()  # type: ZulipUploadBackend
else:
    upload_backend = S3UploadBackend()  # nocoverage

def delete_message_image(path_id: str) -> bool:
    return upload_backend.delete_message_image(path_id)

def upload_avatar_image(user_file: File, acting_user_profile: UserProfile,
                        target_user_profile: UserProfile,
                        content_type: Optional[str]=None) -> None:
    upload_backend.upload_avatar_image(user_file, acting_user_profile,
                                       target_user_profile, content_type=content_type)

def delete_avatar_image(user_profile: UserProfile) -> None:
    upload_backend.delete_avatar_image(user_profile)

def copy_avatar(source_profile: UserProfile, target_profile: UserProfile) -> None:
    upload_backend.copy_avatar(source_profile, target_profile)

def upload_icon_image(user_file: File, user_profile: UserProfile) -> None:
    upload_backend.upload_realm_icon_image(user_file, user_profile)

def upload_logo_image(user_file: File, user_profile: UserProfile, night: bool) -> None:
    upload_backend.upload_realm_logo_image(user_file, user_profile, night)

def upload_emoji_image(emoji_file: File, emoji_file_name: str, user_profile: UserProfile) -> None:
    upload_backend.upload_emoji_image(emoji_file, emoji_file_name, user_profile)

def upload_message_file(uploaded_file_name: str, uploaded_file_size: int,
                        content_type: Optional[str], file_data: bytes,
                        user_profile: UserProfile, target_realm: Optional[Realm]=None) -> str:
    return upload_backend.upload_message_file(uploaded_file_name, uploaded_file_size,
                                              content_type, file_data, user_profile,
                                              target_realm=target_realm)

def claim_attachment(user_profile: UserProfile,
                     path_id: str,
                     message: Message,
                     is_message_realm_public: bool) -> Attachment:
    attachment = Attachment.objects.get(path_id=path_id)
    attachment.messages.add(message)
    attachment.is_realm_public = attachment.is_realm_public or is_message_realm_public
    attachment.save()
    return attachment

def create_attachment(file_name: str, path_id: str, user_profile: UserProfile,
                      file_size: int) -> bool:
    attachment = Attachment.objects.create(file_name=file_name, path_id=path_id, owner=user_profile,
                                           realm=user_profile.realm, size=file_size)
    # Deferred import to avoid a circular import at module load time.
    from zerver.lib.actions import notify_attachment_update
    notify_attachment_update(user_profile, 'add', attachment.to_dict())
    return True

def upload_message_image_from_request(request: HttpRequest, user_file: File,
                                      user_profile: UserProfile) -> str:
    uploaded_file_name, uploaded_file_size, content_type = get_file_info(request, user_file)
    return upload_message_file(uploaded_file_name, uploaded_file_size,
                               content_type, user_file.read(), user_profile)

def upload_export_tarball(realm: Realm, tarball_path: str) -> str:
    return upload_backend.upload_export_tarball(realm, tarball_path)

def delete_export_tarball(path_id: str) -> Optional[str]:
    return upload_backend.delete_export_tarball(path_id)