2020-06-11 00:54:34 +02:00
|
|
|
import base64
|
|
|
|
import binascii
|
|
|
|
import io
|
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import random
|
|
|
|
import re
|
2020-09-05 04:02:13 +02:00
|
|
|
import secrets
|
2020-06-11 00:54:34 +02:00
|
|
|
import shutil
|
|
|
|
import unicodedata
|
|
|
|
import urllib
|
2020-04-08 00:27:24 +02:00
|
|
|
from datetime import timedelta
|
2020-06-11 00:54:34 +02:00
|
|
|
from mimetypes import guess_extension, guess_type
|
2021-08-03 17:54:31 +02:00
|
|
|
from typing import IO, Any, Callable, Optional, Tuple
|
2022-02-10 22:06:11 +01:00
|
|
|
from urllib.parse import urljoin
|
2020-04-08 00:27:24 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import boto3
|
|
|
|
import botocore
|
|
|
|
from boto3.session import Session
|
|
|
|
from botocore.client import Config
|
2013-06-18 20:47:37 +02:00
|
|
|
from django.conf import settings
|
2016-06-05 03:54:32 +02:00
|
|
|
from django.core.files import File
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.core.signing import BadSignature, TimestampSigner
|
2016-06-05 03:54:32 +02:00
|
|
|
from django.http import HttpRequest
|
2020-04-08 00:27:24 +02:00
|
|
|
from django.urls import reverse
|
2021-04-16 00:57:30 +02:00
|
|
|
from django.utils.translation import gettext as _
|
2022-01-13 22:54:47 +01:00
|
|
|
from markupsafe import Markup as mark_safe
|
2021-08-10 02:11:16 +02:00
|
|
|
from mypy_boto3_s3.client import S3Client
|
|
|
|
from mypy_boto3_s3.service_resource import Bucket, Object
|
2022-02-17 01:07:58 +01:00
|
|
|
from PIL import GifImagePlugin, Image, ImageOps, PngImagePlugin
|
2020-06-11 00:54:34 +02:00
|
|
|
from PIL.Image import DecompressionBombError
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2017-03-02 23:45:57 +01:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_path
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.exceptions import ErrorCode, JsonableError
|
2022-02-10 22:06:11 +01:00
|
|
|
from zerver.lib.outgoing_http import OutgoingSession
|
2021-07-25 16:31:12 +02:00
|
|
|
from zerver.lib.utils import assert_is_not_none
|
2022-01-26 20:17:12 +01:00
|
|
|
from zerver.models import (
|
|
|
|
Attachment,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmEmoji,
|
|
|
|
UserProfile,
|
|
|
|
is_cross_realm_bot_email,
|
|
|
|
)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2016-10-03 06:28:31 +02:00
|
|
|
# Pixel sizes for the image variants we generate.
DEFAULT_AVATAR_SIZE = 100
MEDIUM_AVATAR_SIZE = 500
DEFAULT_EMOJI_SIZE = 64

# These sizes were selected based on looking at the maximum common
# sizes in a library of animated custom emoji, balanced against the
# network cost of very large emoji images.
MAX_EMOJI_GIF_SIZE = 128
# NOTE(review): this value is 128 MiB; the original trailing comment
# labeled it "128 kb", so the intended limit may have been 128 * 1024 —
# confirm which is correct before relying on it.
MAX_EMOJI_GIF_FILE_SIZE_BYTES = 128 * 1024 * 1024

# Duration that the signed upload URLs that we redirect to when
# accessing uploaded files are available for clients to fetch before
# they expire.
SIGNED_UPLOAD_URL_DURATION = 60

# MIME types that may be served with an inline Content-Disposition;
# anything else is forced to download as an attachment.
INLINE_MIME_TYPES = [
    "application/pdf",
    "image/gif",
    "image/jpeg",
    "image/png",
    "image/webp",
    # To avoid cross-site scripting attacks, DO NOT add types such
    # as application/xhtml+xml, application/x-shockwave-flash,
    # image/svg+xml, text/html, or text/xml.
]

# Performance note:
#
# For writing files to S3, the file could either be stored in RAM
# (if it is less than 2.5MiB or so) or an actual temporary file on disk.
#
# Because we set FILE_UPLOAD_MAX_MEMORY_SIZE to 0, only the latter case
# should occur in practice.
#
# This is great, because passing the pseudofile object that Django gives
# you to boto would be a pain.

# To come up with a s3 key we randomly generate a "directory". The
# "file name" is the original filename provided by the user run
# through a sanitization function.
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-01-26 16:13:33 +01:00
|
|
|
class RealmUploadQuotaError(JsonableError):
    """Raised when an upload would push a realm past its upload quota."""

    code = ErrorCode.REALM_UPLOAD_QUOTA
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-11-27 20:21:55 +01:00
|
|
|
def sanitize_name(value: str) -> str:
    """
    Sanitizes a value to be safe to store in a Linux filesystem, in
    S3, and in a URL.  So Unicode is allowed, but not special
    characters other than ".", "-", and "_".

    This implementation is based on django.utils.text.slugify; it is
    modified by:
    * adding '.' to the list of allowed characters.
    * preserving the case of the value.
    * not stripping trailing dashes and underscores.
    """
    normalized = unicodedata.normalize("NFKC", value)
    # Keep only word characters, whitespace, ".", and "-", then trim
    # surrounding whitespace.
    cleaned = re.sub(r"[^\w\s.-]", "", normalized).strip()
    # Collapse runs of whitespace/dashes into a single dash.
    cleaned = re.sub(r"[-\s]+", "-", cleaned)
    assert cleaned not in {"", ".", ".."}
    return mark_safe(cleaned)
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-04-03 08:13:36 +02:00
|
|
|
class BadImageError(JsonableError):
    """Raised when uploaded image data cannot be decoded or is unsafe to process."""

    code = ErrorCode.BAD_IMAGE
|
2016-04-03 08:13:36 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def resize_avatar(image_data: bytes, size: int = DEFAULT_AVATAR_SIZE) -> bytes:
    """Crop/scale the uploaded image bytes to a size x size PNG.

    Raises BadImageError if the data cannot be decoded or trips
    Pillow's decompression-bomb protection.
    """
    try:
        image = Image.open(io.BytesIO(image_data))
        image = ImageOps.exif_transpose(image)
        image = ImageOps.fit(image, (size, size), Image.ANTIALIAS)
    except OSError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))
    # PNG cannot represent CMYK, so convert to RGB first.
    if image.mode == "CMYK":
        image = image.convert("RGB")
    buffer = io.BytesIO()
    image.save(buffer, format="png")
    return buffer.getvalue()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-16 01:26:55 +02:00
|
|
|
def resize_logo(image_data: bytes) -> bytes:
    """Scale the uploaded logo down to fit within 800x100, returning PNG bytes.

    Raises BadImageError if the data cannot be decoded or trips
    Pillow's decompression-bomb protection.
    """
    try:
        image = Image.open(io.BytesIO(image_data))
        image = ImageOps.exif_transpose(image)
        image.thumbnail((8 * DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE), Image.ANTIALIAS)
    except OSError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))
    # PNG cannot represent CMYK, so convert to RGB first.
    if image.mode == "CMYK":
        image = image.convert("RGB")
    buffer = io.BytesIO()
    image.save(buffer, format="png")
    return buffer.getvalue()
|
|
|
|
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2022-02-17 01:07:58 +01:00
|
|
|
def resize_animated(im: Image.Image, size: int = DEFAULT_EMOJI_SIZE) -> bytes:
    """Resize a multi-frame (animated GIF/APNG) image to size x size.

    Rebuilds the animation frame by frame, preserving per-frame
    durations, disposal methods, and the loop count, and returns the
    re-encoded bytes in the image's original format.

    Raises BadImageError if a frame lacks duration info or if the
    animated format is neither GIF nor APNG.
    """
    assert im.n_frames > 1
    frames = []
    duration_info = []
    disposals = []
    # If 'loop' info is not set then loop for infinite number of times.
    loop = im.info.get("loop", 0)
    for frame_num in range(0, im.n_frames):
        im.seek(frame_num)
        # Composite the frame over itself through its alpha channel so
        # partial frames render correctly, then pad to a square canvas.
        new_frame = im.copy()
        new_frame.paste(im, (0, 0), im.convert("RGBA"))
        new_frame = ImageOps.pad(new_frame, (size, size), Image.ANTIALIAS)
        frames.append(new_frame)
        if im.info.get("duration") is None:  # nocoverage
            raise BadImageError(_("Corrupt animated image."))
        duration_info.append(im.info["duration"])
        if isinstance(im, GifImagePlugin.GifImageFile):
            disposals.append(
                im.disposal_method  # type: ignore[attr-defined] # private member missing from stubs
            )
        elif isinstance(im, PngImagePlugin.PngImageFile):
            disposals.append(im.info.get("disposal", PngImagePlugin.APNG_DISPOSE_OP_NONE))
        else:  # nocoverage
            raise BadImageError(_("Unknown animated image format."))
    out = io.BytesIO()
    frames[0].save(
        out,
        save_all=True,
        optimize=False,
        format=im.format,
        append_images=frames[1:],
        duration=duration_info,
        disposal=disposals,
        loop=loop,
    )

    return out.getvalue()
|
|
|
|
|
2019-01-28 21:02:48 +01:00
|
|
|
|
2021-08-12 10:19:53 +02:00
|
|
|
def resize_emoji(
    image_data: bytes, size: int = DEFAULT_EMOJI_SIZE
) -> Tuple[bytes, bool, Optional[bytes]]:
    """Process an uploaded custom emoji image.

    Returns a 3-tuple:
    1) Emoji image data (for animated images, resized only when it is
       judged safe to do so; otherwise the original bytes).
    2) Whether the image is animated.
    3) For animated images, PNG bytes for a still (first-frame)
       version; None otherwise.

    Raises BadImageError on undecodable or oversized input.
    """
    try:
        im = Image.open(io.BytesIO(image_data))
        image_format = im.format
        if getattr(im, "n_frames", 1) > 1:
            # There are a number of bugs in Pillow which cause results
            # in resized images being broken. To work around this we
            # only resize under certain conditions to minimize the
            # chance of creating ugly images.
            should_resize = (
                im.size[0] != im.size[1]  # not square
                or im.size[0] > MAX_EMOJI_GIF_SIZE  # dimensions too large
                or len(image_data) > MAX_EMOJI_GIF_FILE_SIZE_BYTES  # filesize too large
            )

            # Generate a still image from the first frame. Since
            # we're converting the format to PNG anyway, we resize unconditionally.
            still_image = im.copy()
            still_image.seek(0)
            still_image = ImageOps.exif_transpose(still_image)
            still_image = ImageOps.fit(still_image, (size, size), Image.ANTIALIAS)
            out = io.BytesIO()
            still_image.save(out, format="PNG")
            still_image_data = out.getvalue()

            if should_resize:
                image_data = resize_animated(im, size)

            return image_data, True, still_image_data
        else:
            # Note that this is essentially duplicated in the
            # still_image code path, above.
            im = ImageOps.exif_transpose(im)
            im = ImageOps.fit(im, (size, size), Image.ANTIALIAS)
            out = io.BytesIO()
            im.save(out, format=image_format)
            return out.getvalue(), False, None
    except OSError:
        raise BadImageError(_("Could not decode image; did you upload an image file?"))
    except DecompressionBombError:
        raise BadImageError(_("Image size exceeds limit."))
|
2017-03-13 05:45:50 +01:00
|
|
|
|
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
### Common
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-11-05 11:37:41 +01:00
|
|
|
class ZulipUploadBackend:
    """Interface for Zulip's file-upload storage backends.

    Concrete subclasses (e.g. the S3 backend below) implement storage,
    URL generation, and deletion for message attachments, avatars,
    realm icons/logos, custom emoji, and data-export tarballs.
    """

    def get_public_upload_root_url(self) -> str:
        """Root URL under which publicly served uploads live."""
        raise NotImplementedError()

    def generate_message_upload_path(self, realm_id: str, uploaded_file_name: str) -> str:
        """Build the storage path/key for a new message attachment."""
        raise NotImplementedError()

    def upload_message_file(
        self,
        uploaded_file_name: str,
        uploaded_file_size: int,
        content_type: Optional[str],
        file_data: bytes,
        user_profile: UserProfile,
        target_realm: Optional[Realm] = None,
    ) -> str:
        """Store a message attachment and return its URL path."""
        raise NotImplementedError()

    def upload_avatar_image(
        self,
        user_file: IO[bytes],
        acting_user_profile: UserProfile,
        target_user_profile: UserProfile,
        content_type: Optional[str] = None,
    ) -> None:
        """Store the avatar (and derived variants) for target_user_profile."""
        raise NotImplementedError()

    def delete_avatar_image(self, user: UserProfile) -> None:
        """Delete all stored avatar images for the user."""
        raise NotImplementedError()

    def delete_message_image(self, path_id: str) -> bool:
        """Delete a message attachment; returns whether the file existed."""
        raise NotImplementedError()

    def get_avatar_url(self, hash_key: str, medium: bool = False) -> str:
        """URL for an avatar by hash key; medium selects the larger variant."""
        raise NotImplementedError()

    def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
        """Copy source_profile's avatar images to target_profile."""
        raise NotImplementedError()

    def ensure_avatar_image(self, user_profile: UserProfile, is_medium: bool = False) -> None:
        """Ensure the (medium or default) avatar image exists in storage."""
        raise NotImplementedError()

    def upload_realm_icon_image(self, icon_file: IO[bytes], user_profile: UserProfile) -> None:
        """Store the realm's icon image."""
        raise NotImplementedError()

    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        """URL for the realm icon at the given version."""
        raise NotImplementedError()

    def upload_realm_logo_image(
        self, logo_file: IO[bytes], user_profile: UserProfile, night: bool
    ) -> None:
        """Store the realm's logo image (night selects the dark-theme logo)."""
        raise NotImplementedError()

    def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
        """URL for the realm logo at the given version."""
        raise NotImplementedError()

    def upload_emoji_image(
        self, emoji_file: IO[bytes], emoji_file_name: str, user_profile: UserProfile
    ) -> bool:
        """Store a custom emoji image.

        Returns a bool — presumably whether the emoji is animated
        (cf. resize_emoji's second return value); confirm in subclasses.
        """
        raise NotImplementedError()

    def get_emoji_url(self, emoji_file_name: str, realm_id: int, still: bool = False) -> str:
        """URL for a custom emoji; still selects the non-animated variant."""
        raise NotImplementedError()

    def upload_export_tarball(
        self,
        realm: Realm,
        tarball_path: str,
        percent_callback: Optional[Callable[[Any], None]] = None,
    ) -> str:
        """Store a realm data-export tarball and return its URL."""
        raise NotImplementedError()

    def delete_export_tarball(self, export_path: str) -> Optional[str]:
        """Delete an export tarball; returns Optional[str] — presumably the
        deleted path on success and None otherwise; confirm in subclasses."""
        raise NotImplementedError()

    def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
        """Public URL for a stored export tarball."""
        raise NotImplementedError()

    def realm_avatar_and_logo_path(self, realm: Realm) -> str:
        """Storage directory for the realm's avatar/icon/logo files."""
        raise NotImplementedError()
|
2019-09-24 22:46:53 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
### S3
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-10 02:11:16 +02:00
|
|
|
def get_bucket(bucket_name: str, session: Optional[Session] = None) -> Bucket:
    """Return a boto3 Bucket resource for bucket_name, building a session
    from the configured S3 credentials when none is supplied."""
    if session is None:
        session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
    resource = session.resource(
        "s3", region_name=settings.S3_REGION, endpoint_url=settings.S3_ENDPOINT_URL
    )
    return resource.Bucket(bucket_name)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-06-18 20:47:37 +02:00
|
|
|
def upload_image_to_s3(
    bucket: Bucket,
    file_name: str,
    content_type: Optional[str],
    user_profile: UserProfile,
    contents: bytes,
) -> None:
    """Write contents to file_name in bucket, tagging the object with the
    uploader's user and realm IDs.

    Content types we don't know to be safe to render inline are served
    with an "attachment" disposition to prevent XSS via uploads.
    """
    key = bucket.Object(file_name)
    metadata = {
        "user_profile_id": str(user_profile.id),
        "realm_id": str(user_profile.realm_id),
    }

    if content_type is None:
        content_type = ""
    # Inverted form of the original check: inline-safe types get the
    # default (empty) disposition, everything else downloads.
    content_disposition = "" if content_type in INLINE_MIME_TYPES else "attachment"

    key.put(
        Body=contents,
        Metadata=metadata,
        ContentType=content_type,
        ContentDisposition=content_disposition,
    )
|
|
|
|
|
2013-06-18 20:47:37 +02:00
|
|
|
|
2018-01-26 16:13:33 +01:00
|
|
|
def check_upload_within_quota(realm: Realm, uploaded_file_size: int) -> None:
    """Raise RealmUploadQuotaError if adding uploaded_file_size bytes would
    exceed the realm's quota; a None quota means unlimited."""
    quota = realm.upload_quota_bytes()
    if quota is None:
        return
    if realm.currently_used_upload_space_bytes() + uploaded_file_size > quota:
        raise RealmUploadQuotaError(_("Upload would exceed your organization's upload quota."))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_file_info(request: HttpRequest, user_file: File) -> Tuple[str, int, Optional[str]]:
    """Return (file name, size, content type) for an uploaded file.

    The MIME type comes from the request's mimetype query parameter when
    present, otherwise it is guessed from the file name; conversely, when
    the client supplied a type, a matching extension is appended to the
    name when one can be determined.
    """
    file_name = user_file.name
    content_type = request.GET.get("mimetype")
    if content_type is None:
        # May legitimately remain None if the name has no known extension.
        content_type = guess_type(file_name)[0]
    else:
        extension = guess_extension(content_type)
        if extension is not None:
            file_name += extension

    return urllib.parse.unquote(file_name), user_file.size, content_type
|
2013-06-18 20:47:37 +02:00
|
|
|
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_signed_upload_url(path: str) -> str:
    """Return a presigned S3 GET URL (valid for SIGNED_UPLOAD_URL_DURATION
    seconds) for the given key in the authenticated-uploads bucket."""
    s3_client = boto3.client(
        "s3",
        aws_access_key_id=settings.S3_KEY,
        aws_secret_access_key=settings.S3_SECRET_KEY,
        region_name=settings.S3_REGION,
        endpoint_url=settings.S3_ENDPOINT_URL,
    )
    params = {"Bucket": settings.S3_AUTH_UPLOADS_BUCKET, "Key": path}
    return s3_client.generate_presigned_url(
        ClientMethod="get_object",
        Params=params,
        ExpiresIn=SIGNED_UPLOAD_URL_DURATION,
        HttpMethod="GET",
    )
|
|
|
|
|
2013-09-16 20:59:54 +02:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
class S3UploadBackend(ZulipUploadBackend):
|
2019-06-28 19:48:07 +02:00
|
|
|
def __init__(self) -> None:
    # One shared boto3 session for both buckets.
    self.session = boto3.Session(settings.S3_KEY, settings.S3_SECRET_KEY)
    self.avatar_bucket = get_bucket(settings.S3_AVATAR_BUCKET, self.session)
    self.uploads_bucket = get_bucket(settings.S3_AUTH_UPLOADS_BUCKET, self.session)

    # Lazily created, cached S3 client; see get_boto_client.
    self._boto_client: Optional[S3Client] = None
    # Precomputed once so per-key URL generation is a cheap string join.
    self.public_upload_url_base = self.construct_public_upload_url_base()
|
2021-06-19 11:48:22 +02:00
|
|
|
|
2021-06-30 15:12:08 +02:00
|
|
|
def construct_public_upload_url_base(self) -> str:
    """Compute the public-URL prefix for keys in the S3 avatar bucket."""
    # Return the pattern for public URL for a key in the S3 Avatar bucket.
    # For Amazon S3 itself, this will return the following:
    #     f"https://{self.avatar_bucket.name}.{network_location}/{key}"
    #
    # However, we need this function to properly handle S3 style
    # file upload backends that Zulip supports, which can have a
    # different URL format. Configuring no signature and providing
    # no access key makes `generate_presigned_url` just return the
    # normal public URL for a key.
    #
    # It unfortunately takes 2ms per query to call
    # generate_presigned_url, even with our cached boto
    # client. Since we need to potentially compute hundreds of
    # avatar URLs in single `GET /messages` request, we instead
    # back-compute the URL pattern here.

    DUMMY_KEY = "dummy_key_ignored"
    foo_url = self.get_boto_client().generate_presigned_url(
        ClientMethod="get_object",
        Params={
            "Bucket": self.avatar_bucket.name,
            "Key": DUMMY_KEY,
        },
        ExpiresIn=0,
    )
    split_url = urllib.parse.urlsplit(foo_url)
    assert split_url.path.endswith(f"/{DUMMY_KEY}")

    # Strip the dummy key (keeping the trailing slash) to get the base.
    return urllib.parse.urlunsplit(
        (split_url.scheme, split_url.netloc, split_url.path[: -len(DUMMY_KEY)], "", "")
    )
|
|
|
|
|
|
|
|
def get_public_upload_url(
    self,
    key: str,
) -> str:
    """Public URL for a key in the avatar bucket, using the precomputed
    URL base (no boto3 call per key)."""
    assert not key.startswith("/")
    return urllib.parse.urljoin(self.public_upload_url_base, key)
|
2020-10-27 16:45:23 +01:00
|
|
|
|
2021-08-10 02:11:16 +02:00
|
|
|
def get_boto_client(self) -> S3Client:
    """
    Creating the client takes a long time so we need to cache it.
    """
    # Guard-clause form of the original lazy initialization.
    if self._boto_client is not None:
        return self._boto_client

    # Unsigned config: the client is only used to build public URLs.
    self._boto_client = self.session.client(
        "s3",
        region_name=settings.S3_REGION,
        endpoint_url=settings.S3_ENDPOINT_URL,
        config=Config(signature_version=botocore.UNSIGNED),
    )
    return self._boto_client
|
|
|
|
|
2021-08-10 02:11:16 +02:00
|
|
|
def delete_file_from_s3(self, path_id: str, bucket: Bucket) -> bool:
    """Delete path_id from bucket.

    Returns True on success; returns False (after logging a warning)
    when the object does not exist.
    """
    key = bucket.Object(path_id)

    try:
        key.load()
    except botocore.exceptions.ClientError:
        file_name = path_id.rpartition("/")[2]
        logging.warning(
            "%s does not exist. Its entry in the database will be removed.", file_name
        )
        return False

    key.delete()
    return True
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2021-06-05 02:38:54 +02:00
|
|
|
def get_public_upload_root_url(self) -> str:
    """Root URL of the public (avatar) bucket, precomputed at construction."""
    return self.public_upload_url_base
|
2021-06-05 02:38:54 +02:00
|
|
|
|
2021-09-02 14:19:44 +02:00
|
|
|
def generate_message_upload_path(self, realm_id: str, uploaded_file_name: str) -> str:
    """Build the S3 key for a message upload:
    <realm_id>/<random directory>/<sanitized file name>."""
    random_directory = secrets.token_urlsafe(18)
    return f"{realm_id}/{random_directory}/{sanitize_name(uploaded_file_name)}"
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def upload_message_file(
    self,
    uploaded_file_name: str,
    uploaded_file_size: int,
    content_type: Optional[str],
    file_data: bytes,
    user_profile: UserProfile,
    target_realm: Optional[Realm] = None,
) -> str:
    """Store a message attachment in S3, record it as an Attachment row,
    and return its /user_uploads/... URL path.

    target_realm defaults to the uploader's realm.
    """
    if target_realm is None:
        target_realm = user_profile.realm
    s3_file_name = self.generate_message_upload_path(str(target_realm.id), uploaded_file_name)

    upload_image_to_s3(
        self.uploads_bucket,
        s3_file_name,
        content_type,
        user_profile,
        file_data,
    )
    create_attachment(
        uploaded_file_name, s3_file_name, user_profile, target_realm, uploaded_file_size
    )
    return f"/user_uploads/{s3_file_name}"
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def delete_message_image(self, path_id: str) -> bool:
    """Delete a message attachment from the uploads bucket; returns
    whether the object existed."""
    return self.delete_file_from_s3(path_id, self.uploads_bucket)
|
2016-06-09 07:53:35 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def write_avatar_images(
    self,
    s3_file_name: str,
    target_user_profile: UserProfile,
    image_data: bytes,
    content_type: Optional[str],
) -> None:
    """Upload the three S3 objects for an avatar: the untouched original
    under ".original", a 500px PNG under "-medium.png", and the default
    100px PNG under the bare key."""
    # Original bytes, exactly as uploaded.
    upload_image_to_s3(
        self.avatar_bucket,
        s3_file_name + ".original",
        content_type,
        target_user_profile,
        image_data,
    )

    # custom 500px wide version
    upload_image_to_s3(
        self.avatar_bucket,
        s3_file_name + "-medium.png",
        "image/png",
        target_user_profile,
        resize_avatar(image_data, MEDIUM_AVATAR_SIZE),
    )

    # Default-size version.
    upload_image_to_s3(
        self.avatar_bucket,
        s3_file_name,
        "image/png",
        target_user_profile,
        resize_avatar(image_data),
    )
|
|
|
|
# See avatar_url in avatar.py for URL. (That code also handles the case
|
|
|
|
# that users use gravatar.)
|
2013-10-28 16:13:53 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def upload_avatar_image(
    self,
    user_file: IO[bytes],
    acting_user_profile: UserProfile,
    target_user_profile: UserProfile,
    content_type: Optional[str] = None,
) -> None:
    """Read the uploaded avatar and write all of its S3 variants for the
    target user, guessing the content type from the file name when it is
    not supplied."""
    if content_type is None:
        content_type = guess_type(user_file.name)[0]
    avatar_path = user_avatar_path(target_user_profile)
    self.write_avatar_images(avatar_path, target_user_profile, user_file.read(), content_type)
|
2018-05-30 18:29:41 +02:00
|
|
|
|
2018-09-07 17:44:40 +02:00
|
|
|
def delete_avatar_image(self, user: UserProfile) -> None:
    """Remove all three stored avatar objects for the user."""
    path_id = user_avatar_path(user)
    # Same deletion order as before: original, medium, then default.
    for suffix in (".original", "-medium.png", ""):
        self.delete_file_from_s3(path_id + suffix, self.avatar_bucket)
|
2018-09-07 17:44:40 +02:00
|
|
|
|
2021-08-10 02:11:16 +02:00
|
|
|
def get_avatar_key(self, file_name: str) -> Object:
|
2020-06-19 01:09:20 +02:00
|
|
|
key = self.avatar_bucket.Object(file_name)
|
2018-06-06 14:30:26 +02:00
|
|
|
return key
|
|
|
|
|
|
|
|
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
|
|
|
|
s3_source_file_name = user_avatar_path(source_profile)
|
|
|
|
s3_target_file_name = user_avatar_path(target_profile)
|
|
|
|
|
|
|
|
key = self.get_avatar_key(s3_source_file_name + ".original")
|
2021-02-12 08:20:45 +01:00
|
|
|
image_data = key.get()["Body"].read()
|
2018-06-06 14:30:26 +02:00
|
|
|
content_type = key.content_type
|
|
|
|
|
2019-08-02 22:36:01 +02:00
|
|
|
self.write_avatar_images(s3_target_file_name, target_profile, image_data, content_type)
|
2018-06-06 14:30:26 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_avatar_url(self, hash_key: str, medium: bool = False) -> str:
|
2017-03-21 23:53:54 +01:00
|
|
|
medium_suffix = "-medium.png" if medium else ""
|
2021-10-14 00:50:26 +02:00
|
|
|
return self.get_public_upload_url(f"{hash_key}{medium_suffix}")
|
2016-09-20 21:48:48 +02:00
|
|
|
|
2019-09-24 22:46:53 +02:00
|
|
|
    def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
        """Return the world-readable URL for a realm export tarball."""
        # export_path has a leading /
        return self.get_public_upload_url(export_path[1:])
|
2019-09-24 22:46:53 +02:00
|
|
|
|
2019-07-19 14:11:54 +02:00
|
|
|
    def realm_avatar_and_logo_path(self, realm: Realm) -> str:
        """S3 key prefix under which the realm's icon/logo files are stored."""
        return os.path.join(str(realm.id), "realm")
|
2019-07-19 14:11:54 +02:00
|
|
|
|
2021-08-03 17:54:31 +02:00
|
|
|
    def upload_realm_icon_image(self, icon_file: IO[bytes], user_profile: UserProfile) -> None:
        """Upload the realm icon to S3: the raw original plus a resized PNG."""
        content_type = guess_type(icon_file.name)[0]
        s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), "icon")

        image_data = icon_file.read()
        # Keep the unmodified upload alongside the resized version.
        upload_image_to_s3(
            self.avatar_bucket,
            s3_file_name + ".original",
            content_type,
            user_profile,
            image_data,
        )

        resized_data = resize_avatar(image_data)
        upload_image_to_s3(
            self.avatar_bucket,
            s3_file_name + ".png",
            "image/png",
            user_profile,
            resized_data,
        )
        # See avatar_url in avatar.py for URL.  (That code also handles the case
        # that users use gravatar.)
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_realm_icon_url(self, realm_id: int, version: int) -> str:
|
2020-10-27 16:45:23 +01:00
|
|
|
public_url = self.get_public_upload_url(f"{realm_id}/realm/icon.png")
|
|
|
|
return public_url + f"?version={version}"
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def upload_realm_logo_image(
|
2021-08-03 17:54:31 +02:00
|
|
|
self, logo_file: IO[bytes], user_profile: UserProfile, night: bool
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> None:
|
2018-08-16 01:26:55 +02:00
|
|
|
content_type = guess_type(logo_file.name)[0]
|
2019-01-27 08:25:10 +01:00
|
|
|
if night:
|
2021-02-12 08:20:45 +01:00
|
|
|
basename = "night_logo"
|
2019-01-27 08:25:10 +01:00
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
basename = "logo"
|
2019-07-19 14:11:54 +02:00
|
|
|
s3_file_name = os.path.join(self.realm_avatar_and_logo_path(user_profile.realm), basename)
|
2018-08-16 01:26:55 +02:00
|
|
|
|
|
|
|
image_data = logo_file.read()
|
|
|
|
upload_image_to_s3(
|
2020-06-19 01:09:20 +02:00
|
|
|
self.avatar_bucket,
|
2018-08-16 01:26:55 +02:00
|
|
|
s3_file_name + ".original",
|
|
|
|
content_type,
|
|
|
|
user_profile,
|
|
|
|
image_data,
|
|
|
|
)
|
|
|
|
|
|
|
|
resized_data = resize_logo(image_data)
|
|
|
|
upload_image_to_s3(
|
2020-06-19 01:09:20 +02:00
|
|
|
self.avatar_bucket,
|
2018-08-16 01:26:55 +02:00
|
|
|
s3_file_name + ".png",
|
2021-02-12 08:20:45 +01:00
|
|
|
"image/png",
|
2018-08-16 01:26:55 +02:00
|
|
|
user_profile,
|
|
|
|
resized_data,
|
|
|
|
)
|
|
|
|
# See avatar_url in avatar.py for URL. (That code also handles the case
|
|
|
|
# that users use gravatar.)
|
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
|
|
|
|
if not night:
|
2021-02-12 08:20:45 +01:00
|
|
|
file_name = "logo.png"
|
2019-01-27 08:25:10 +01:00
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
file_name = "night_logo.png"
|
2020-10-27 16:45:23 +01:00
|
|
|
public_url = self.get_public_upload_url(f"{realm_id}/realm/{file_name}")
|
|
|
|
return public_url + f"?version={version}"
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2021-03-17 17:54:23 +01:00
|
|
|
    def ensure_avatar_image(self, user_profile: UserProfile, is_medium: bool = False) -> None:
        """Regenerate the resized avatar image from the stored ".original"
        object, unconditionally re-uploading it to S3."""
        # BUG: The else case should be user_avatar_path(user_profile) + ".png".
        # See #12852 for details on this bug and how to migrate it.
        file_extension = "-medium.png" if is_medium else ""
        file_path = user_avatar_path(user_profile)
        s3_file_name = file_path

        key = self.avatar_bucket.Object(file_path + ".original")
        image_data = key.get()["Body"].read()

        if is_medium:
            resized_avatar = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        else:
            resized_avatar = resize_avatar(image_data)
        upload_image_to_s3(
            self.avatar_bucket,
            s3_file_name + file_extension,
            "image/png",
            user_profile,
            resized_avatar,
        )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def upload_emoji_image(
        self, emoji_file: IO[bytes], emoji_file_name: str, user_profile: UserProfile
    ) -> bool:
        """Upload a custom emoji to S3 (original + resized; plus a still PNG
        frame for animated emoji).  Returns whether the emoji is animated."""
        content_type = guess_type(emoji_file_name)[0]
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=emoji_file_name,
        )

        image_data = emoji_file.read()
        resized_image_data, is_animated, still_image_data = resize_emoji(image_data)
        # Keep the unmodified upload for reference/re-processing.
        upload_image_to_s3(
            self.avatar_bucket,
            ".".join((emoji_path, "original")),
            content_type,
            user_profile,
            image_data,
        )
        upload_image_to_s3(
            self.avatar_bucket,
            emoji_path,
            content_type,
            user_profile,
            resized_image_data,
        )
        if is_animated:
            # Animated emoji also get a still (single-frame) PNG, used where
            # animation is undesirable.
            still_path = RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
                realm_id=user_profile.realm_id,
                emoji_filename_without_extension=os.path.splitext(emoji_file_name)[0],
            )
            assert still_image_data is not None
            upload_image_to_s3(
                self.avatar_bucket,
                still_path,
                "image/png",
                user_profile,
                still_image_data,
            )

        return is_animated
|
|
|
|
|
|
|
|
def get_emoji_url(self, emoji_file_name: str, realm_id: int, still: bool = False) -> str:
|
|
|
|
if still:
|
|
|
|
emoji_path = RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=realm_id,
|
|
|
|
emoji_filename_without_extension=os.path.splitext(emoji_file_name)[0],
|
|
|
|
)
|
|
|
|
return self.get_public_upload_url(emoji_path)
|
|
|
|
else:
|
|
|
|
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=realm_id, emoji_file_name=emoji_file_name
|
|
|
|
)
|
|
|
|
return self.get_public_upload_url(emoji_path)
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def upload_export_tarball(
        self,
        realm: Optional[Realm],
        tarball_path: str,
        percent_callback: Optional[Callable[[Any], None]] = None,
    ) -> str:
        """Upload a realm-export tarball to S3 and return its public URL.

        percent_callback, when given, is forwarded to boto3's upload_file
        Callback to report transfer progress.
        """
        # We use the avatar bucket, because it's world-readable.
        key = self.avatar_bucket.Object(
            os.path.join("exports", secrets.token_hex(16), os.path.basename(tarball_path))
        )

        if percent_callback is None:
            key.upload_file(Filename=tarball_path)
        else:
            key.upload_file(Filename=tarball_path, Callback=percent_callback)

        public_url = self.get_public_upload_url(key.key)
        return public_url
|
|
|
|
|
2020-09-13 05:41:39 +02:00
|
|
|
    def delete_export_tarball(self, export_path: str) -> Optional[str]:
        """Delete an export tarball; return export_path on success, else None."""
        assert export_path.startswith("/")
        # Strip the leading "/" to form the S3 key.
        path_id = export_path[1:]
        if self.delete_file_from_s3(path_id, self.avatar_bucket):
            return export_path
        return None
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
### Local
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def write_local_file(type: str, path: str, file_data: bytes) -> None:
    """Write file_data to LOCAL_UPLOADS_DIR/<type>/<path>, creating any
    missing intermediate directories."""
    file_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), type, path)

    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, "wb") as f:
        f.write(file_data)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
def read_local_file(type: str, path: str) -> bytes:
    """Read and return the contents of LOCAL_UPLOADS_DIR/<type>/<path>."""
    file_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), type, path)
    with open(file_path, "rb") as f:
        return f.read()
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-09-07 12:31:57 +02:00
|
|
|
def delete_local_file(type: str, path: str) -> bool:
    """Delete LOCAL_UPLOADS_DIR/<type>/<path>; return whether it existed."""
    file_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), type, path)
    if os.path.isfile(file_path):
        # This removes the file but the empty folders still remain.
        os.remove(file_path)
        return True
    file_name = path.split("/")[-1]
    logging.warning("%s does not exist. Its entry in the database will be removed.", file_name)
    return False
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def get_local_file_path(path_id: str) -> Optional[str]:
    """Return the absolute filesystem path for an uploaded file, or None if
    no such file exists."""
    local_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), "files", path_id)
    if not os.path.isfile(local_path):
        return None
    return local_path
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-08 00:27:24 +02:00
|
|
|
# Signer salt for short-lived, unauthenticated local-file access tokens.
LOCAL_FILE_ACCESS_TOKEN_SALT = "local_file_"
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-08 00:27:24 +02:00
|
|
|
def generate_unauthed_file_access_url(path_id: str) -> str:
    """Return a short-lived URL granting unauthenticated access to path_id.

    The path is timestamp-signed and base16-encoded into the URL; see
    get_local_file_path_id_from_token for validation and expiry.
    """
    signed_data = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT).sign(path_id)
    token = base64.b16encode(signed_data.encode()).decode()

    filename = path_id.split("/")[-1]
    return reverse("local_file_unauthed", args=[token, filename])
|
2020-04-08 00:27:24 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-04-08 00:27:24 +02:00
|
|
|
def get_local_file_path_id_from_token(token: str) -> Optional[str]:
    """Validate a token from generate_unauthed_file_access_url.

    Returns the signed path_id, or None if the token is malformed, has a bad
    signature, or is older than 60 seconds.
    """
    signer = TimestampSigner(salt=LOCAL_FILE_ACCESS_TOKEN_SALT)
    try:
        signed_data = base64.b16decode(token).decode()
        path_id = signer.unsign(signed_data, max_age=timedelta(seconds=60))
    except (BadSignature, binascii.Error):
        return None

    return path_id
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
class LocalUploadBackend(ZulipUploadBackend):
    """Upload backend that stores files under settings.LOCAL_UPLOADS_DIR."""

    def get_public_upload_root_url(self) -> str:
        """URL prefix under which all locally stored uploads are served."""
        return "/user_avatars/"
|
|
|
|
|
2021-09-02 14:19:44 +02:00
|
|
|
    def generate_message_upload_path(self, realm_id: str, uploaded_file_name: str) -> str:
        """Build a random, collision-resistant relative path for an upload."""
        # Split into 256 subdirectories to prevent directories from getting too big
        return "/".join(
            [
                realm_id,
                format(random.randint(0, 255), "x"),
                secrets.token_urlsafe(18),
                sanitize_name(uploaded_file_name),
            ]
        )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def upload_message_file(
        self,
        uploaded_file_name: str,
        uploaded_file_size: int,
        content_type: Optional[str],
        file_data: bytes,
        user_profile: UserProfile,
        target_realm: Optional[Realm] = None,
    ) -> str:
        """Store a message attachment on local disk, record it as an
        Attachment, and return its /user_uploads/ URL path."""
        if target_realm is None:
            target_realm = user_profile.realm

        path = self.generate_message_upload_path(str(target_realm.id), uploaded_file_name)

        write_local_file("files", path, file_data)
        create_attachment(uploaded_file_name, path, user_profile, target_realm, uploaded_file_size)
        return "/user_uploads/" + path
|
2016-06-09 07:53:35 +02:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
    def delete_message_image(self, path_id: str) -> bool:
        """Delete a locally stored message attachment; returns whether it existed."""
        return delete_local_file("files", path_id)
|
2016-06-09 07:53:35 +02:00
|
|
|
|
2018-05-30 16:35:58 +02:00
|
|
|
def write_avatar_images(self, file_path: str, image_data: bytes) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
write_local_file("avatars", file_path + ".original", image_data)
|
2016-06-09 07:53:35 +02:00
|
|
|
|
|
|
|
resized_data = resize_avatar(image_data)
|
2021-02-12 08:20:45 +01:00
|
|
|
write_local_file("avatars", file_path + ".png", resized_data)
|
2016-06-09 07:53:35 +02:00
|
|
|
|
2016-09-20 21:48:48 +02:00
|
|
|
resized_medium = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
|
2021-02-12 08:20:45 +01:00
|
|
|
write_local_file("avatars", file_path + "-medium.png", resized_medium)
|
2016-09-20 21:48:48 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def upload_avatar_image(
|
|
|
|
self,
|
2021-08-03 17:54:31 +02:00
|
|
|
user_file: IO[bytes],
|
2021-02-12 08:19:30 +01:00
|
|
|
acting_user_profile: UserProfile,
|
|
|
|
target_user_profile: UserProfile,
|
|
|
|
content_type: Optional[str] = None,
|
|
|
|
) -> None:
|
2018-05-30 16:35:58 +02:00
|
|
|
file_path = user_avatar_path(target_user_profile)
|
|
|
|
|
|
|
|
image_data = user_file.read()
|
|
|
|
self.write_avatar_images(file_path, image_data)
|
|
|
|
|
2018-09-07 17:44:40 +02:00
|
|
|
def delete_avatar_image(self, user: UserProfile) -> None:
|
|
|
|
path_id = user_avatar_path(user)
|
|
|
|
|
|
|
|
delete_local_file("avatars", path_id + ".original")
|
|
|
|
delete_local_file("avatars", path_id + ".png")
|
|
|
|
delete_local_file("avatars", path_id + "-medium.png")
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_avatar_url(self, hash_key: str, medium: bool = False) -> str:
|
2016-09-20 21:48:48 +02:00
|
|
|
medium_suffix = "-medium" if medium else ""
|
2021-10-14 00:50:26 +02:00
|
|
|
return f"/user_avatars/{hash_key}{medium_suffix}.png"
|
2016-09-20 21:48:48 +02:00
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
def copy_avatar(self, source_profile: UserProfile, target_profile: UserProfile) -> None:
|
|
|
|
source_file_path = user_avatar_path(source_profile)
|
|
|
|
target_file_path = user_avatar_path(target_profile)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
image_data = read_local_file("avatars", source_file_path + ".original")
|
2018-06-06 14:30:26 +02:00
|
|
|
self.write_avatar_images(target_file_path, image_data)
|
|
|
|
|
2019-07-19 14:11:54 +02:00
|
|
|
    def realm_avatar_and_logo_path(self, realm: Realm) -> str:
        """Relative directory (under LOCAL_UPLOADS_DIR) for realm icon/logo files."""
        return os.path.join("avatars", str(realm.id), "realm")
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-08-03 17:54:31 +02:00
|
|
|
    def upload_realm_icon_image(self, icon_file: IO[bytes], user_profile: UserProfile) -> None:
        """Store the realm icon locally: the raw original plus a resized PNG."""
        upload_path = self.realm_avatar_and_logo_path(user_profile.realm)
        image_data = icon_file.read()
        write_local_file(upload_path, "icon.original", image_data)

        resized_data = resize_avatar(image_data)
        write_local_file(upload_path, "icon.png", resized_data)
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
    def get_realm_icon_url(self, realm_id: int, version: int) -> str:
        """Versioned URL of the realm's icon as served from local uploads."""
        return f"/user_avatars/{realm_id}/realm/icon.png?version={version}"
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def upload_realm_logo_image(
|
2021-08-03 17:54:31 +02:00
|
|
|
self, logo_file: IO[bytes], user_profile: UserProfile, night: bool
|
2021-02-12 08:19:30 +01:00
|
|
|
) -> None:
|
2019-07-19 14:11:54 +02:00
|
|
|
upload_path = self.realm_avatar_and_logo_path(user_profile.realm)
|
2019-01-27 08:25:10 +01:00
|
|
|
if night:
|
2021-02-12 08:20:45 +01:00
|
|
|
original_file = "night_logo.original"
|
|
|
|
resized_file = "night_logo.png"
|
2019-01-27 08:25:10 +01:00
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
original_file = "logo.original"
|
|
|
|
resized_file = "logo.png"
|
2018-08-16 01:26:55 +02:00
|
|
|
image_data = logo_file.read()
|
2021-02-12 08:19:30 +01:00
|
|
|
write_local_file(upload_path, original_file, image_data)
|
2018-08-16 01:26:55 +02:00
|
|
|
|
|
|
|
resized_data = resize_logo(image_data)
|
2019-01-27 08:25:10 +01:00
|
|
|
write_local_file(upload_path, resized_file, resized_data)
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
def get_realm_logo_url(self, realm_id: int, version: int, night: bool) -> str:
|
|
|
|
if night:
|
2021-02-12 08:20:45 +01:00
|
|
|
file_name = "night_logo.png"
|
2019-01-27 08:25:10 +01:00
|
|
|
else:
|
2021-02-12 08:20:45 +01:00
|
|
|
file_name = "logo.png"
|
2020-06-10 06:41:04 +02:00
|
|
|
return f"/user_avatars/{realm_id}/realm/{file_name}?version={version}"
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2021-03-17 17:54:23 +01:00
|
|
|
    def ensure_avatar_image(self, user_profile: UserProfile, is_medium: bool = False) -> None:
        """Lazily (re)generate a resized avatar from the stored ".original"
        file; a no-op when the resized file already exists on disk."""
        file_extension = "-medium.png" if is_medium else ".png"
        file_path = user_avatar_path(user_profile)

        output_path = os.path.join(
            assert_is_not_none(settings.LOCAL_UPLOADS_DIR),
            "avatars",
            file_path + file_extension,
        )
        # Already generated; nothing to do.
        if os.path.isfile(output_path):
            return

        image_path = os.path.join(
            assert_is_not_none(settings.LOCAL_UPLOADS_DIR), "avatars", file_path + ".original"
        )
        with open(image_path, "rb") as f:
            image_data = f.read()
        if is_medium:
            resized_avatar = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        else:
            resized_avatar = resize_avatar(image_data)
        write_local_file("avatars", file_path + file_extension, resized_avatar)
|
2018-11-27 02:28:34 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def upload_emoji_image(
        self, emoji_file: IO[bytes], emoji_file_name: str, user_profile: UserProfile
    ) -> bool:
        """Store a custom emoji locally (original + resized; plus a still PNG
        frame for animated emoji).  Returns whether the emoji is animated."""
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=emoji_file_name,
        )

        image_data = emoji_file.read()
        resized_image_data, is_animated, still_image_data = resize_emoji(image_data)
        write_local_file("avatars", ".".join((emoji_path, "original")), image_data)
        write_local_file("avatars", emoji_path, resized_image_data)
        if is_animated:
            assert still_image_data is not None
            # Animated emoji also get a still (single-frame) PNG variant.
            still_path = RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
                realm_id=user_profile.realm_id,
                emoji_filename_without_extension=os.path.splitext(emoji_file_name)[0],
            )
            write_local_file("avatars", still_path, still_image_data)
        return is_animated
|
|
|
|
|
|
|
|
def get_emoji_url(self, emoji_file_name: str, realm_id: int, still: bool = False) -> str:
|
|
|
|
if still:
|
|
|
|
return os.path.join(
|
|
|
|
"/user_avatars",
|
|
|
|
RealmEmoji.STILL_PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=realm_id,
|
|
|
|
emoji_filename_without_extension=os.path.splitext(emoji_file_name)[0],
|
|
|
|
),
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
return os.path.join(
|
|
|
|
"/user_avatars",
|
|
|
|
RealmEmoji.PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=realm_id, emoji_file_name=emoji_file_name
|
|
|
|
),
|
|
|
|
)
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    def upload_export_tarball(
        self,
        realm: Realm,
        tarball_path: str,
        percent_callback: Optional[Callable[[Any], None]] = None,
    ) -> str:
        """Copy a realm-export tarball into the public avatars directory and
        return its public URL.

        percent_callback is accepted for interface parity with the S3 backend
        but is unused for a local filesystem copy.
        """
        path = os.path.join(
            "exports",
            str(realm.id),
            secrets.token_urlsafe(18),
            os.path.basename(tarball_path),
        )
        abs_path = os.path.join(assert_is_not_none(settings.LOCAL_UPLOADS_DIR), "avatars", path)
        os.makedirs(os.path.dirname(abs_path), exist_ok=True)
        shutil.copy(tarball_path, abs_path)
        public_url = realm.uri + "/user_avatars/" + path
        return public_url
|
|
|
|
|
2020-09-13 05:41:39 +02:00
|
|
|
    def delete_export_tarball(self, export_path: str) -> Optional[str]:
        """Delete an export tarball; return export_path on success, else None."""
        # Get the last element of a list in the form ['user_avatars', '<file_path>']
        assert export_path.startswith("/")
        file_path = export_path[1:].split("/", 1)[-1]
        if delete_local_file("avatars", file_path):
            return export_path
        return None
|
|
|
|
|
2019-09-24 22:46:53 +02:00
|
|
|
    def get_export_tarball_url(self, realm: Realm, export_path: str) -> str:
        """Return the public URL for a realm export tarball."""
        # export_path has a leading `/`
        return realm.uri + export_path
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
# Common and wrappers
|
|
|
|
# Select the concrete upload backend once at import time: local-disk storage
# when LOCAL_UPLOADS_DIR is configured, otherwise S3.
if settings.LOCAL_UPLOADS_DIR is not None:
    upload_backend: ZulipUploadBackend = LocalUploadBackend()
else:
    upload_backend = S3UploadBackend()  # nocoverage
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-05 02:38:54 +02:00
|
|
|
def get_public_upload_root_url() -> str:
    """Module-level convenience wrapper over the active upload backend."""
    return upload_backend.get_public_upload_root_url()
|
|
|
|
|
|
|
|
|
2018-05-11 01:40:23 +02:00
|
|
|
def delete_message_image(path_id: str) -> bool:
    """Delete a message attachment via the active upload backend."""
    return upload_backend.delete_message_image(path_id)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def upload_avatar_image(
    user_file: IO[bytes],
    acting_user_profile: UserProfile,
    target_user_profile: UserProfile,
    content_type: Optional[str] = None,
) -> None:
    """Upload an avatar for target_user_profile via the active backend."""
    upload_backend.upload_avatar_image(
        user_file, acting_user_profile, target_user_profile, content_type=content_type
    )
|
|
|
|
|
2013-10-28 16:13:53 +01:00
|
|
|
|
2018-10-12 01:11:20 +02:00
|
|
|
def delete_avatar_image(user_profile: UserProfile) -> None:
    """Delete all stored avatar variants via the active upload backend."""
    upload_backend.delete_avatar_image(user_profile)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
def copy_avatar(source_profile: UserProfile, target_profile: UserProfile) -> None:
    """Copy one user's avatar to another via the active upload backend."""
    upload_backend.copy_avatar(source_profile, target_profile)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-03 17:54:31 +02:00
|
|
|
def upload_icon_image(user_file: IO[bytes], user_profile: UserProfile) -> None:
    """Upload a realm icon via the active upload backend."""
    upload_backend.upload_realm_icon_image(user_file, user_profile)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-03 17:54:31 +02:00
|
|
|
def upload_logo_image(user_file: IO[bytes], user_profile: UserProfile, night: bool) -> None:
    """Upload a realm day/night logo via the active upload backend."""
    upload_backend.upload_realm_logo_image(user_file, user_profile, night)
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-08-03 17:54:31 +02:00
|
|
|
def upload_emoji_image(
    emoji_file: IO[bytes], emoji_file_name: str, user_profile: UserProfile
) -> bool:
    """Upload a custom emoji via the active backend; returns whether the
    emoji is animated."""
    return upload_backend.upload_emoji_image(emoji_file, emoji_file_name, user_profile)
|
2017-03-13 05:45:50 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def upload_message_file(
    uploaded_file_name: str,
    uploaded_file_size: int,
    content_type: Optional[str],
    file_data: bytes,
    user_profile: UserProfile,
    target_realm: Optional[Realm] = None,
) -> str:
    """Store an uploaded file's data via the backend.

    Returns the string the backend produces for the stored file
    (presumably its URL path; confirm against the backend implementation).
    """
    return upload_backend.upload_message_file(
        uploaded_file_name,
        uploaded_file_size,
        content_type,
        file_data,
        user_profile,
        target_realm=target_realm,
    )
|
|
|
|
|
|
|
|
|
|
|
|
def claim_attachment(
    user_profile: UserProfile,
    path_id: str,
    message: Message,
    is_message_realm_public: bool,
    is_message_web_public: bool = False,
) -> Attachment:
    """Associate the Attachment at path_id with message and save it.

    The visibility flags are only ever widened here: once an attachment
    has been referenced by a realm-public or web-public message, it
    stays public regardless of later references.
    """
    attachment = Attachment.objects.get(path_id=path_id)
    attachment.messages.add(message)
    if is_message_web_public:
        attachment.is_web_public = True
    if is_message_realm_public:
        attachment.is_realm_public = True
    attachment.save()
    return attachment
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def create_attachment(
    file_name: str, path_id: str, user_profile: UserProfile, realm: Realm, file_size: int
) -> bool:
    """Create the Attachment row for a newly uploaded file and notify clients.

    Always returns True.
    """
    # Uploads must land in the uploader's own realm, with an exception
    # for cross-realm bots.
    assert (user_profile.realm_id == realm.id) or is_cross_realm_bot_email(
        user_profile.delivery_email
    )
    new_attachment = Attachment.objects.create(
        file_name=file_name,
        path_id=path_id,
        owner=user_profile,
        realm=realm,
        size=file_size,
    )
    # Deferred import; presumably avoids a circular dependency with
    # zerver.lib.actions — confirm before hoisting to the top of the file.
    from zerver.lib.actions import notify_attachment_update

    notify_attachment_update(user_profile, "add", new_attachment.to_dict())
    return True
|
2013-10-28 16:13:53 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def upload_message_image_from_request(
    request: HttpRequest, user_file: File, user_profile: UserProfile
) -> str:
    """Extract name/size/content-type from an HTTP upload and store the file."""
    file_name, file_size, mimetype = get_file_info(request, user_file)
    return upload_message_file(file_name, file_size, mimetype, user_file.read(), user_profile)
|
|
|
|
|
|
|
|
|
|
|
|
def upload_export_tarball(
    realm: Realm, tarball_path: str, percent_callback: Optional[Callable[[Any], None]] = None
) -> str:
    """Upload a realm-export tarball via the backend.

    percent_callback, if given, is forwarded to the backend for upload
    progress reporting.  Returns the backend's result string (presumably
    the tarball's public URL).
    """
    return upload_backend.upload_export_tarball(
        realm, tarball_path, percent_callback=percent_callback
    )
|
2019-06-21 22:46:04 +02:00
|
|
|
|
2019-06-27 20:41:47 +02:00
|
|
|
|
2020-09-13 05:41:39 +02:00
|
|
|
def delete_export_tarball(export_path: str) -> Optional[str]:
    """Ask the backend to delete the export tarball at export_path.

    Returns the backend's result (an Optional[str]; see the backend's
    contract for its meaning).
    """
    return upload_backend.delete_export_tarball(export_path)
|
2022-02-10 22:06:11 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_emoji_file_content(
    session: "OutgoingSession", emoji_url: str, emoji_id: int, logger: logging.Logger
) -> bytes:  # nocoverage
    """Download a custom emoji's image data.

    First tries the ".original" variant of emoji_url, falling back to
    emoji_url itself; returns the response body of the first request
    that answers 200.

    Raises AssertionError if neither URL can be fetched.
    """
    original_emoji_url = emoji_url + ".original"

    logger.info("Downloading %s", original_emoji_url)
    response = session.get(original_emoji_url)
    if response.status_code == 200:
        # isinstance (not type(...) ==) is the correct runtime type check.
        assert isinstance(response.content, bytes)
        return response.content

    logger.info("Error fetching emoji from URL %s", original_emoji_url)
    logger.info("Trying %s instead", emoji_url)
    response = session.get(emoji_url)
    if response.status_code == 200:
        assert isinstance(response.content, bytes)
        return response.content
    logger.info("Error fetching emoji from URL %s", emoji_url)

    logger.error("Could not fetch emoji %s", emoji_id)
    raise AssertionError(f"Could not fetch emoji {emoji_id}")
|
|
|
|
|
|
|
|
|
2022-02-11 19:23:41 +01:00
|
|
|
def handle_reupload_emojis_event(realm: Realm, logger: logging.Logger) -> None:  # nocoverage
    """Re-download and re-upload every custom emoji in realm.

    Each emoji's current image is fetched over HTTP, passed back through
    upload_emoji_image, and the RealmEmoji's is_animated flag is updated
    from the upload result.
    """
    # Deferred import, matching the file's pattern for zerver-internal helpers.
    from zerver.lib.emoji import get_emoji_url

    session = OutgoingSession(role="reupload_emoji", timeout=3, max_retries=3)

    for realm_emoji in RealmEmoji.objects.filter(realm=realm).order_by("id"):
        logger.info("Processing emoji %s", realm_emoji.id)
        emoji_filename = realm_emoji.file_name
        emoji_url = get_emoji_url(emoji_filename, realm_emoji.realm_id)
        if emoji_url.startswith("/"):
            # Relative URL; resolve it against the realm's base URI.
            emoji_url = urljoin(realm_emoji.realm.uri, emoji_url)

        emoji_file_content = get_emoji_file_content(session, emoji_url, realm_emoji.id, logger)

        user_profile = realm_emoji.author
        # When this runs, emojis have already been migrated to always have .author set.
        assert user_profile is not None

        logger.info("Reuploading emoji %s", realm_emoji.id)
        realm_emoji.is_animated = upload_emoji_image(
            io.BytesIO(emoji_file_content), emoji_filename, user_profile
        )
        realm_emoji.save(update_fields=["is_animated"])
|