2018-07-30 22:16:26 +02:00
|
|
|
# See https://zulip.readthedocs.io/en/latest/subsystems/thumbnailing.html
|
2017-11-09 16:31:57 +01:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
|
|
|
from six.moves import urllib
|
|
|
|
from tornado.concurrent import return_future
|
2018-03-08 09:37:09 +01:00
|
|
|
from thumbor.loaders import LoaderResult, file_loader, https_loader
|
2017-11-09 16:31:57 +01:00
|
|
|
from tc_aws.loaders import s3_loader
|
|
|
|
from thumbor.context import Context
|
|
|
|
from .helpers import (
|
2018-03-08 09:37:09 +01:00
|
|
|
separate_url_and_source_type,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
THUMBOR_S3_TYPE, THUMBOR_LOCAL_FILE_TYPE, THUMBOR_EXTERNAL_TYPE,
|
2017-11-09 16:31:57 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
from typing import Any, Callable
|
|
|
|
|
2018-03-08 09:37:09 +01:00
|
|
|
import base64
|
|
|
|
import logging
|
|
|
|
|
2017-11-09 16:31:57 +01:00
|
|
|
def get_not_found_result():
    # type: () -> LoaderResult
    """Build a LoaderResult that signals a failed, not-found load."""
    not_found = LoaderResult()
    not_found.successful = False
    not_found.error = LoaderResult.ERROR_NOT_FOUND
    return not_found
|
|
|
|
|
2018-03-12 03:27:29 +01:00
|
|
|
@return_future
def load(context, url, callback):
    # type: (Context, str, Callable[..., Any]) -> None
    """Decode a thumbnail-source URL and dispatch it to the right loader.

    The incoming ``url`` is "<source type>/<urlsafe-base64 actual URL>";
    unknown source types are reported to ``callback`` as a not-found
    result.  S3 and local-file sources must point inside /user_uploads/.
    """
    source_type, encoded_url = separate_url_and_source_type(url)
    decoded = urllib.parse.unquote(encoded_url)
    actual_url = base64.urlsafe_b64decode(decoded).decode('utf-8')

    known_types = (THUMBOR_S3_TYPE, THUMBOR_LOCAL_FILE_TYPE,
                   THUMBOR_EXTERNAL_TYPE)
    if source_type not in known_types:
        # Report failure to the caller first, then record the bad input.
        callback(get_not_found_result())
        logging.warning('INVALID SOURCE TYPE: ' + source_type)
        return

    uploads_prefix = '/user_uploads/'
    if source_type == THUMBOR_S3_TYPE:
        if not actual_url.startswith(uploads_prefix):
            raise AssertionError("Unexpected s3 file.")
        # S3 keys are stored without the /user_uploads/ prefix.
        s3_loader.load(context, actual_url[len(uploads_prefix):], callback)
    elif source_type == THUMBOR_LOCAL_FILE_TYPE:
        if not actual_url.startswith(uploads_prefix):
            raise AssertionError("Unexpected local file.")
        # On disk, uploads live under a files/ directory.
        patched_local_url = 'files/' + actual_url[len(uploads_prefix):]
        file_loader.load(context, patched_local_url, callback)
    elif source_type == THUMBOR_EXTERNAL_TYPE:
        https_loader.load(context, actual_url, callback)
|