2020-06-11 00:54:34 +02:00
|
|
|
import datetime
|
|
|
|
import io
|
|
|
|
import os
|
|
|
|
import re
|
|
|
|
import shutil
|
|
|
|
import time
|
|
|
|
import urllib
|
|
|
|
from io import StringIO
|
|
|
|
from unittest import mock
|
|
|
|
from unittest.mock import patch
|
|
|
|
|
|
|
|
import botocore.exceptions
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2016-04-14 16:26:01 +02:00
|
|
|
from django.conf import settings
|
2020-06-11 00:54:34 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2020-06-25 02:29:22 +02:00
|
|
|
from django_sendfile.utils import _get_sendfile
|
2020-06-11 00:54:34 +02:00
|
|
|
from PIL import Image
|
2016-04-14 16:26:01 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import zerver.lib.upload
|
|
|
|
from zerver.lib.actions import (
|
|
|
|
do_change_icon_source,
|
|
|
|
do_change_logo_source,
|
|
|
|
do_change_plan_type,
|
2021-03-08 13:22:43 +01:00
|
|
|
do_create_realm,
|
2020-06-11 00:54:34 +02:00
|
|
|
do_delete_old_unclaimed_attachments,
|
|
|
|
do_set_realm_property,
|
|
|
|
internal_send_private_message,
|
2017-10-10 03:53:25 +02:00
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_field
|
2017-12-21 09:37:59 +01:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_path
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.cache import cache_get, get_realm_used_upload_space_cache_key
|
|
|
|
from zerver.lib.create_user import copy_user_settings
|
2020-03-06 18:40:46 +01:00
|
|
|
from zerver.lib.initial_password import initial_password
|
2017-02-21 03:41:20 +01:00
|
|
|
from zerver.lib.realm_icon import realm_icon_url
|
2019-08-19 19:46:45 +02:00
|
|
|
from zerver.lib.realm_logo import get_realm_logo_url
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.test_classes import UploadSerializeMixin, ZulipTestCase
|
2017-03-08 19:47:42 +01:00
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
avatar_disk_path,
|
2018-12-07 18:15:51 +01:00
|
|
|
create_s3_buckets,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_test_image_file,
|
2018-06-05 21:12:28 +02:00
|
|
|
queries_captured,
|
2020-06-11 00:54:34 +02:00
|
|
|
use_s3_backend,
|
2017-03-08 19:47:42 +01:00
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.upload import (
|
|
|
|
DEFAULT_AVATAR_SIZE,
|
|
|
|
DEFAULT_EMOJI_SIZE,
|
|
|
|
MEDIUM_AVATAR_SIZE,
|
|
|
|
BadImageError,
|
|
|
|
LocalUploadBackend,
|
|
|
|
S3UploadBackend,
|
|
|
|
ZulipUploadBackend,
|
|
|
|
delete_export_tarball,
|
|
|
|
delete_message_image,
|
|
|
|
exif_rotate,
|
|
|
|
resize_avatar,
|
|
|
|
resize_emoji,
|
|
|
|
sanitize_name,
|
|
|
|
upload_emoji_image,
|
|
|
|
upload_export_tarball,
|
|
|
|
upload_message_file,
|
2021-03-17 17:54:23 +01:00
|
|
|
write_local_file,
|
2017-08-18 12:26:43 +02:00
|
|
|
)
|
2018-08-01 10:53:40 +02:00
|
|
|
from zerver.lib.users import get_api_key
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Attachment,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmDomain,
|
|
|
|
RealmEmoji,
|
|
|
|
UserProfile,
|
|
|
|
get_realm,
|
|
|
|
get_system_bot,
|
|
|
|
get_user_by_delivery_email,
|
|
|
|
validate_attachment_request,
|
|
|
|
)
|
2017-03-08 19:47:42 +01:00
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def destroy_uploads() -> None:
    """Remove the local uploads directory (and everything in it) if present."""
    upload_dir = settings.LOCAL_UPLOADS_DIR
    if os.path.exists(upload_dir):
        shutil.rmtree(upload_dir)
|
|
|
|
|
2016-06-25 11:05:59 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_rest_endpoint(self) -> None:
    """
    Tests the /api/v1/user_uploads API endpoint. Here a single file is uploaded
    and downloaded using a username and api_key
    """
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"

    # Upload file via API
    result = self.api_post(self.example_user("hamlet"), "/api/v1/user_uploads", {"file": fp})
    self.assertIn("uri", result.json())
    uri = result.json()["uri"]
    base = "/user_uploads/"
    self.assertEqual(base, uri[: len(base)])

    # Download file via API; log out first to prove that API-key
    # authentication alone is sufficient to fetch the file.
    self.logout()
    response = self.api_get(self.example_user("hamlet"), uri)
    self.assertEqual(response.status_code, 200)
    # FileResponse bodies come back as streaming_content chunks.
    data = b"".join(response.streaming_content)
    self.assertEqual(b"zulip!", data)

    # Files uploaded through the API should be accessible via the web client
    self.login("hamlet")
    self.assert_url_serves_contents_of_file(uri, b"zulip!")
|
2016-06-25 11:05:59 +02:00
|
|
|
|
2018-04-13 19:04:39 +02:00
|
|
|
def test_mobile_api_endpoint(self) -> None:
    """
    Tests the /api/v1/user_uploads API endpoint with ?api_key
    auth. Here a single file is uploaded and downloaded using a
    username and api_key
    """
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"

    # Upload file via API
    result = self.api_post(self.example_user("hamlet"), "/api/v1/user_uploads", {"file": fp})
    self.assertIn("uri", result.json())
    uri = result.json()["uri"]
    base = "/user_uploads/"
    self.assertEqual(base, uri[: len(base)])

    # Log out so only the api_key query parameter can authenticate the download.
    self.logout()

    # Try to download file via API, passing URL and invalid API key
    user_profile = self.example_user("hamlet")

    response = self.client_get(uri, {"api_key": "invalid"})
    self.assertEqual(response.status_code, 401)

    # A valid api_key in the query string should serve the file contents.
    response = self.client_get(uri, {"api_key": get_api_key(user_profile)})
    self.assertEqual(response.status_code, 200)
    data = b"".join(response.streaming_content)
    self.assertEqual(b"zulip!", data)
|
|
|
|
|
2018-05-14 19:07:38 +02:00
|
|
|
def test_upload_file_with_supplied_mimetype(self) -> None:
    """
    When files are copied into the system clipboard and pasted for upload
    the filename may not be supplied so the extension is determined from a
    query string parameter.
    """
    pasted = StringIO("zulip!")
    pasted.name = "pasted_file"
    response = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads?mimetype=image/png", {"file": pasted}
    )
    self.assertEqual(response.status_code, 200)
    # The server derives the .png extension from the supplied mimetype.
    uploaded_uri = response.json()["uri"]
    self.assertTrue(uploaded_uri.endswith("pasted_file.png"))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_too_big_failure(self) -> None:
    """
    Attempting to upload big files should fail.
    """
    self.login("hamlet")
    upload = StringIO("bah!")
    upload.name = "a.txt"

    # Use MAX_FILE_UPLOAD_SIZE of 0, because the next increment
    # would be 1MB.
    with self.settings(MAX_FILE_UPLOAD_SIZE=0):
        response = self.client_post("/json/user_uploads", {"f1": upload})
    self.assert_json_error(response, "Uploaded file is larger than the allowed limit of 0 MiB")
|
2016-09-16 16:41:04 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_upload_failure(self) -> None:
    """
    Attempting to upload two files should fail.
    """
    self.login("hamlet")
    first = StringIO("bah!")
    first.name = "a.txt"
    second = StringIO("pshaw!")
    second.name = "b.txt"

    # Submitting both files in one request must be rejected by the endpoint.
    response = self.client_post("/json/user_uploads", {"f1": first, "f2": second})
    self.assert_json_error(response, "You may only upload one file at a time")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_no_file_upload_failure(self) -> None:
    """
    Calling this endpoint with no files should fail.
    """
    self.login("hamlet")

    # An empty POST body: the endpoint requires exactly one file.
    response = self.client_post("/json/user_uploads")
    self.assert_json_error(response, "You must specify a file to upload")
|
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
# This test will go through the code path for uploading files onto LOCAL storage
|
2020-10-23 02:43:28 +02:00
|
|
|
# when Zulip is in DEVELOPMENT mode.
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_upload_authed(self) -> None:
    """
    A call to /json/user_uploads should return a uri and actually create an
    entry in the database. This entry will be marked unclaimed till a message
    refers it.
    """
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"

    result = self.client_post("/json/user_uploads", {"file": fp})
    self.assert_json_success(result)
    self.assertIn("uri", result.json())
    uri = result.json()["uri"]
    base = "/user_uploads/"
    self.assertEqual(base, uri[: len(base)])

    # In the future, local file requests will follow the same style as S3
    # requests; they will be first authenticated and redirected
    self.assert_url_serves_contents_of_file(uri, b"zulip!")

    # check if DB has attachment marked as unclaimed
    entry = Attachment.objects.get(file_name="zulip.txt")
    self.assertEqual(entry.is_claimed(), False)

    # Referencing the upload in a message claims the attachment.
    self.subscribe(self.example_user("hamlet"), "Denmark")
    body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
    self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")

    # Now try the endpoint that's supposed to return a temporary URL for access
    # to the file.
    result = self.client_get("/json" + uri)
    self.assert_json_success(result)
    data = result.json()
    url_only_url = data["url"]
    # Ensure this is different from the original uri:
    self.assertNotEqual(url_only_url, uri)
    self.assertIn("user_uploads/temporary/", url_only_url)
    self.assertTrue(url_only_url.endswith("zulip.txt"))
    # The generated URL has a token authorizing the requestor to access the file
    # without being logged in.
    self.logout()
    self.assert_url_serves_contents_of_file(url_only_url, b"zulip!")
    # The original uri shouldn't work when logged out:
    result = self.client_get(uri)
    self.assertEqual(result.status_code, 401)
|
|
|
|
|
|
|
|
def test_serve_local_file_unauthed_invalid_token(self) -> None:
    """A bogus token on a temporary-URL download must be rejected."""
    response = self.client_get("/user_uploads/temporary/badtoken/file.png")
    self.assert_json_error(response, "Invalid token")
|
|
|
|
|
2020-04-18 16:11:13 +02:00
|
|
|
def test_serve_local_file_unauthed_altered_filename(self) -> None:
    """Changing the filename portion of a valid temporary URL must be rejected."""
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": fp})
    url = "/json" + result.json()["uri"]

    # Fetch the temporary (tokenized) URL for the upload.
    result = self.client_get(url)
    self.assert_json_success(result)
    data = result.json()
    url_only_url = data["url"]

    self.assertTrue(url_only_url.endswith("zulip.txt"))
    # Keep the valid token but swap the trailing filename; the server
    # must refuse to serve under the altered name.
    url_only_url_changed_filename = url_only_url.split("zulip.txt")[0] + "differentname.exe"
    result = self.client_get(url_only_url_changed_filename)
    self.assert_json_error(result, "Invalid filename")
|
|
|
|
|
2020-04-08 00:27:24 +02:00
|
|
|
def test_serve_local_file_unauthed_token_expires(self) -> None:
    """Temporary download URLs stop working once the signing token expires."""
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": fp})
    url = "/json" + result.json()["uri"]

    # Freeze the clock used by Django's signing machinery so the token's
    # age is fully under our control.
    start_time = time.time()
    with mock.patch("django.core.signing.time.time", return_value=start_time):
        result = self.client_get(url)
        self.assert_json_success(result)
        data = result.json()
        url_only_url = data["url"]

        # While the token is fresh, the file is served even when logged out.
        self.logout()
        self.assert_url_serves_contents_of_file(url_only_url, b"zulip!")

    # After over 60 seconds, the token should become invalid:
    with mock.patch("django.core.signing.time.time", return_value=start_time + 61):
        result = self.client_get(url_only_url)
        self.assert_json_error(result, "Invalid token")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_download_unauthed(self) -> None:
    """Download of an upload without any authentication must return 401."""
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    upload_result = self.client_post("/json/user_uploads", {"file": upload})
    uri = upload_result.json()["uri"]

    # Once the session is gone, the upload URL must not serve the file.
    self.logout()
    response = self.client_get(uri)
    self.assert_json_error(
        response, "Not logged in: API authentication or user session required", status_code=401
    )
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_removed_file_download(self) -> None:
    """
    Trying to download deleted files should return 404 error
    """
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    upload_result = self.client_post("/json/user_uploads", {"file": upload})

    # Wipe the uploads directory, simulating the file vanishing from disk.
    destroy_uploads()

    response = self.client_get(upload_result.json()["uri"])
    self.assertEqual(response.status_code, 404)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_non_existing_file_download(self) -> None:
    """
    Trying to download a file that was never uploaded will return a json_error
    """
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    missing_url = f"http://localhost:9991/user_uploads/{hamlet.realm_id}/ff/gg/abc.py"
    response = self.client_get(missing_url)
    self.assertEqual(response.status_code, 404)
    self.assert_in_response("File not found.", response)
|
2016-06-27 21:09:56 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_delete_old_unclaimed_attachments(self) -> None:
    """Unclaimed attachments older than the retention window are purged;
    claimed ones (referenced by a message) survive."""
    # Upload some files and make them older than a week
    self.login("hamlet")
    d1 = StringIO("zulip!")
    d1.name = "dummy_1.txt"
    result = self.client_post("/json/user_uploads", {"file": d1})
    d1_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    d2 = StringIO("zulip!")
    d2.name = "dummy_2.txt"
    result = self.client_post("/json/user_uploads", {"file": d2})
    d2_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    # Backdate both attachments so they fall outside the 2-week window.
    two_week_ago = timezone_now() - datetime.timedelta(weeks=2)
    d1_attachment = Attachment.objects.get(path_id=d1_path_id)
    d1_attachment.create_time = two_week_ago
    d1_attachment.save()
    self.assertEqual(str(d1_attachment), "<Attachment: dummy_1.txt>")
    d2_attachment = Attachment.objects.get(path_id=d2_path_id)
    d2_attachment.create_time = two_week_ago
    d2_attachment.save()

    # Send message referring only dummy_1
    self.subscribe(self.example_user("hamlet"), "Denmark")
    body = (
        "Some files here ...[zulip.txt](http://localhost:9991/user_uploads/" + d1_path_id + ")"
    )
    self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")

    # dummy_2 should not exist in database or the uploads folder
    do_delete_old_unclaimed_attachments(2)
    self.assertTrue(not Attachment.objects.filter(path_id=d2_path_id).exists())
    # Attempting to delete the already-removed image logs a warning and
    # returns False.
    with self.assertLogs(level="WARNING") as warn_log:
        self.assertTrue(not delete_message_image(d2_path_id))
    self.assertEqual(
        warn_log.output,
        ["WARNING:root:dummy_2.txt does not exist. Its entry in the database will be removed."],
    )
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_attachment_url_without_upload(self) -> None:
    """Referencing a never-uploaded path in a message must not create an Attachment row."""
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    message_body = f"Test message ...[zulip.txt](http://localhost:9991/user_uploads/{hamlet.realm_id}/64/fake_path_id.txt)"
    self.send_stream_message(self.example_user("hamlet"), "Denmark", message_body, "test")
    self.assertFalse(Attachment.objects.filter(path_id="1/64/fake_path_id.txt").exists())
|
2017-04-14 01:15:46 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_claim_attachments(self) -> None:
    """
    This test tries to claim the same attachment twice. The messages field in
    the Attachment model should have both the messages in its entry.
    """
    self.login("hamlet")
    d1 = StringIO("zulip!")
    d1.name = "dummy_1.txt"
    result = self.client_post("/json/user_uploads", {"file": d1})
    d1_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    self.subscribe(self.example_user("hamlet"), "Denmark")
    host = self.example_user("hamlet").realm.host
    # Reference the same upload from two different messages.
    body = f"First message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")
    body = f"Second message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")

    # Both messages should be linked to the single Attachment row.
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 2)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_claim_attachments_different_owners(self) -> None:
    """This test tries to claim the same attachment more than once, first
    with a private stream and then with different recipients."""
    self.login("hamlet")
    d1 = StringIO("zulip!")
    d1.name = "dummy_1.txt"
    result = self.client_post("/json/user_uploads", {"file": d1})
    d1_path_id = re.sub("/user_uploads/", "", result.json()["uri"])
    host = self.example_user("hamlet").realm.host

    self.make_stream("private_stream", invite_only=True)
    self.subscribe(self.example_user("hamlet"), "private_stream")

    # First, send the message to the new private stream.
    body = f"First message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_stream_message(self.example_user("hamlet"), "private_stream", body, "test")
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 1)

    # Then, try having a user who didn't receive the message try to publish it, and fail
    body = f"Illegal message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    cordelia = self.example_user("cordelia")
    with self.assertLogs(level="WARNING") as warn_log:
        self.send_stream_message(cordelia, "Denmark", body, "test")
    self.assertTrue(
        f"WARNING:root:User {cordelia.id} tried to share upload" in warn_log.output[0]
        and "but lacks permission" in warn_log.output[0]
    )
    # The unauthorized share neither claims the attachment nor changes visibility.
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 1)
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)

    # Then, have the owner PM it to another user, giving that other user access.
    body = f"Second message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_personal_message(self.example_user("hamlet"), self.example_user("othello"), body)
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 2)
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)

    # Then, have that new recipient user publish it.
    body = f"Third message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_stream_message(self.example_user("othello"), "Denmark", body, "test")
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 3)
    self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
    self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)

    # Finally send to Rome, the web-public stream, and confirm it's now web-public
    body = f"Fourth message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
    self.send_stream_message(self.example_user("othello"), "Rome", body, "test")
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 4)
    self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
    self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_web_public)
|
2017-04-14 00:59:59 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_check_attachment_reference_update(self) -> None:
    """Editing a message updates which Attachment rows reference it:
    dropped links are unclaimed, newly added links are claimed."""
    f1 = StringIO("file1")
    f1.name = "file1.txt"
    f2 = StringIO("file2")
    f2.name = "file2.txt"
    f3 = StringIO("file3")
    f3.name = "file3.txt"
    hamlet = self.example_user("hamlet")
    host = hamlet.realm.host

    self.login_user(hamlet)
    result = self.client_post("/json/user_uploads", {"file": f1})
    f1_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    result = self.client_post("/json/user_uploads", {"file": f2})
    f2_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    # Original message references f1 and f2.
    self.subscribe(hamlet, "test")
    body = (
        f"[f1.txt](http://{host}/user_uploads/" + f1_path_id + ") "
        "[f2.txt](http://{}/user_uploads/".format(host) + f2_path_id + ")"
    )
    msg_id = self.send_stream_message(hamlet, "test", body, "test")

    result = self.client_post("/json/user_uploads", {"file": f3})
    f3_path_id = re.sub("/user_uploads/", "", result.json()["uri"])

    # Edit the message to drop f1 and add f3 (f2 stays).
    new_body = (
        f"[f3.txt](http://{host}/user_uploads/" + f3_path_id + ") "
        "[f2.txt](http://{}/user_uploads/".format(host) + f2_path_id + ")"
    )
    result = self.client_patch(
        "/json/messages/" + str(msg_id),
        {
            "message_id": msg_id,
            "content": new_body,
        },
    )
    self.assert_json_success(result)

    message = Message.objects.get(id=msg_id)
    f1_attachment = Attachment.objects.get(path_id=f1_path_id)
    f2_attachment = Attachment.objects.get(path_id=f2_path_id)
    f3_attachment = Attachment.objects.get(path_id=f3_path_id)

    # f1 was dropped from the message; f2 and f3 are now referenced.
    self.assertTrue(message not in f1_attachment.messages.all())
    self.assertTrue(message in f2_attachment.messages.all())
    self.assertTrue(message in f3_attachment.messages.all())

    # Delete all the attachments from the message
    new_body = "(deleted)"
    result = self.client_patch(
        "/json/messages/" + str(msg_id),
        {
            "message_id": msg_id,
            "content": new_body,
        },
    )
    self.assert_json_success(result)

    message = Message.objects.get(id=msg_id)
    f1_attachment = Attachment.objects.get(path_id=f1_path_id)
    f2_attachment = Attachment.objects.get(path_id=f2_path_id)
    f3_attachment = Attachment.objects.get(path_id=f3_path_id)
    # With no links left, none of the attachments reference the message.
    self.assertTrue(message not in f1_attachment.messages.all())
    self.assertTrue(message not in f2_attachment.messages.all())
    self.assertTrue(message not in f3_attachment.messages.all())
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_name(self) -> None:
    """
    Unicode filenames should be processed correctly.
    """
    self.login("hamlet")
    for expected in ["Здравейте.txt", "test"]:
        upload = StringIO("bah!")
        # Browsers percent-encode filenames; the server must decode
        # and sanitize them before building the upload URI.
        upload.name = urllib.parse.quote(expected)

        response = self.client_post("/json/user_uploads", {"f1": upload})
        assert sanitize_name(expected) in response.json()["uri"]
|
2016-09-20 11:02:15 +02:00
|
|
|
|
2018-01-26 16:13:33 +01:00
|
|
|
def test_realm_quota(self) -> None:
    """
    Realm quota for uploading should not be exceeded.
    """
    self.login("hamlet")

    d1 = StringIO("zulip!")
    d1.name = "dummy_1.txt"
    result = self.client_post("/json/user_uploads", {"file": d1})
    d1_path_id = re.sub("/user_uploads/", "", result.json()["uri"])
    d1_attachment = Attachment.objects.get(path_id=d1_path_id)
    self.assert_json_success(result)

    # Give the realm a finite quota so the limit can be hit.
    realm = get_realm("zulip")
    realm.upload_quota_gb = 1
    realm.save(update_fields=["upload_quota_gb"])

    # The size of StringIO("zulip!") is 6 bytes. Setting the size of
    # d1_attachment to realm.upload_quota_bytes() - 11 should allow
    # us to upload only one more attachment.
    quota = realm.upload_quota_bytes()
    assert quota is not None
    d1_attachment.size = quota - 11
    d1_attachment.save(update_fields=["size"])

    # This upload fits within the remaining 11 bytes of quota.
    d2 = StringIO("zulip!")
    d2.name = "dummy_2.txt"
    result = self.client_post("/json/user_uploads", {"file": d2})
    self.assert_json_success(result)

    # A third upload would exceed the quota and must be rejected.
    d3 = StringIO("zulip!")
    d3.name = "dummy_3.txt"
    result = self.client_post("/json/user_uploads", {"file": d3})
    self.assert_json_error(result, "Upload would exceed your organization's upload quota.")

    # Clearing the quota (None = unlimited) lets the same upload succeed.
    realm.upload_quota_gb = None
    realm.save(update_fields=["upload_quota_gb"])
    result = self.client_post("/json/user_uploads", {"file": d3})
    self.assert_json_success(result)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_cross_realm_file_access(self) -> None:
        """An upload referenced in a cross-realm private message is readable
        by the PM's recipient on their own realm, but not by other users on
        that realm.
        """

        def create_user(email: str, realm_id: str) -> UserProfile:
            # Register through the normal signup flow, then fetch the
            # resulting UserProfile from the target realm.
            password = initial_password(email)
            if password is not None:
                self.register(email, password, subdomain=realm_id)
            return get_user_by_delivery_email(email, get_realm(realm_id))

        test_subdomain = "uploadtest.example.com"
        user1_email = "user1@uploadtest.example.com"
        user2_email = "test-og-bot@zulip.com"
        user3_email = "other-user@uploadtest.example.com"

        # Create a second realm with open signup so the test users above
        # can self-register on it.
        r1 = do_create_realm(string_id=test_subdomain, name=test_subdomain)
        do_set_realm_property(r1, "invite_required", False, acting_user=None)
        RealmDomain.objects.create(realm=r1, domain=test_subdomain)

        user_1 = create_user(user1_email, test_subdomain)
        user_2 = create_user(user2_email, "zulip")
        user_3 = create_user(user3_email, test_subdomain)
        host = user_3.realm.host

        # Send a message from @zulip.com -> @uploadtest.example.com
        self.login_user(user_2)
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        result = self.client_post("/json/user_uploads", {"file": fp})
        uri = result.json()["uri"]
        fp_path_id = re.sub("/user_uploads/", "", uri)
        body = f"First message ...[zulip.txt](http://{host}/user_uploads/" + fp_path_id + ")"
        # Temporarily treat user_2/user_3 as cross-realm bots so that the
        # cross-realm PM is permitted at all.
        with self.settings(CROSS_REALM_BOT_EMAILS={user_2.email, user_3.email}):
            internal_send_private_message(
                sender=get_system_bot(user_2.email, user_2.realm_id),
                recipient_user=user_1,
                content=body,
            )

        # The PM's recipient can download the file from their own realm.
        self.login_user(user_1)
        response = self.client_get(uri, subdomain=test_subdomain)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()

        # Confirm other cross-realm users can't read it.
        self.login_user(user_3)
        response = self.client_get(uri, subdomain=test_subdomain)
        self.assertEqual(response.status_code, 403)
        self.assert_in_response("You are not authorized to view this file.", response)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_file_download_authorization_invite_only(self) -> None:
        """On a private stream with history_public_to_subscribers=False, only
        users who actually received the message referencing an upload may
        download it; even a late subscriber cannot.
        """
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        realm = hamlet.realm
        subscribed_users = [hamlet, cordelia]
        unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
        stream_name = "test-subscribe"
        self.make_stream(
            stream_name, realm=realm, invite_only=True, history_public_to_subscribers=False
        )

        for subscribed_user in subscribed_users:
            self.subscribe(subscribed_user, stream_name)

        # Upload a file and reference it in a message on the private stream.
        self.login_user(hamlet)
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        result = self.client_post("/json/user_uploads", {"file": fp})
        uri = result.json()["uri"]
        fp_path_id = re.sub("/user_uploads/", "", uri)
        body = f"First message ...[zulip.txt](http://{realm.host}/user_uploads/" + fp_path_id + ")"
        self.send_stream_message(hamlet, stream_name, body, "test")
        self.logout()

        # Owner user should be able to view file
        self.login_user(hamlet)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()
        self.assert_length(queries, 5)

        # Subscribed user who received the message should be able to view file
        self.login_user(cordelia)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()
        self.assert_length(queries, 6)

        def assert_cannot_access_file(user: UserProfile) -> None:
            # Uses API (basic-auth) access rather than a browser session.
            response = self.api_get(user, uri)
            self.assertEqual(response.status_code, 403)
            self.assert_in_response("You are not authorized to view this file.", response)

        # A user subscribed only after the message was sent cannot access the
        # file, since the stream's history is not public to subscribers.
        late_subscribed_user = self.example_user("aaron")
        self.subscribe(late_subscribed_user, stream_name)
        assert_cannot_access_file(late_subscribed_user)

        # Unsubscribed user should not be able to view file
        for unsubscribed_user in unsubscribed_users:
            assert_cannot_access_file(unsubscribed_user)
|
|
|
|
|
|
|
|
    def test_file_download_authorization_invite_only_with_shared_history(self) -> None:
        """On a private stream with history_public_to_subscribers=True, all
        current subscribers — including those who joined after the message
        was sent — may download a referenced upload; non-subscribers may not.
        Also pins the expected database query counts for each access path.
        """
        user = self.example_user("hamlet")
        polonius = self.example_user("polonius")
        subscribed_users = [user, polonius]
        unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
        stream_name = "test-subscribe"
        self.make_stream(
            stream_name, realm=user.realm, invite_only=True, history_public_to_subscribers=True
        )

        for subscribed_user in subscribed_users:
            self.subscribe(subscribed_user, stream_name)

        # Upload a file and reference it in a message on the private stream.
        self.login_user(user)
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        result = self.client_post("/json/user_uploads", {"file": fp})
        uri = result.json()["uri"]
        fp_path_id = re.sub("/user_uploads/", "", uri)
        body = (
            f"First message ...[zulip.txt](http://{user.realm.host}/user_uploads/"
            + fp_path_id
            + ")"
        )
        self.send_stream_message(user, stream_name, body, "test")
        self.logout()

        # Add aaron as a subscribed after the message was sent
        late_subscribed_user = self.example_user("aaron")
        self.subscribe(late_subscribed_user, stream_name)
        subscribed_users.append(late_subscribed_user)

        # Owner user should be able to view file
        self.login_user(user)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()
        self.assert_length(queries, 5)

        # Originally subscribed user should be able to view file
        self.login_user(polonius)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()
        self.assert_length(queries, 6)

        # Subscribed user who did not receive the message should also be able to view file
        self.login_user(late_subscribed_user)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        self.logout()
        # It takes a few extra queries to verify access because of shared history.
        self.assert_length(queries, 9)

        def assert_cannot_access_file(user: UserProfile) -> None:
            self.login_user(user)
            with queries_captured() as queries:
                response = self.client_get(uri)
            self.assertEqual(response.status_code, 403)
            # It takes a few extra queries to verify lack of access with shared history.
            self.assert_length(queries, 8)
            self.assert_in_response("You are not authorized to view this file.", response)
            self.logout()

        # Unsubscribed user should not be able to view file
        for unsubscribed_user in unsubscribed_users:
            assert_cannot_access_file(unsubscribed_user)
|
|
|
|
|
|
|
|
    def test_multiple_message_attachment_file_download(self) -> None:
        """When the same upload is referenced by many messages across several
        private streams, the access check must not scale per-message; the
        query-count assertions below pin that behavior.
        """
        hamlet = self.example_user("hamlet")
        for i in range(0, 5):
            stream_name = f"test-subscribe {i}"
            self.make_stream(
                stream_name,
                realm=hamlet.realm,
                invite_only=True,
                history_public_to_subscribers=True,
            )
            self.subscribe(hamlet, stream_name)

        # Upload one file, then reference it from 20 messages spread across
        # the 5 private streams.
        self.login_user(hamlet)
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        result = self.client_post("/json/user_uploads", {"file": fp})
        uri = result.json()["uri"]
        fp_path_id = re.sub("/user_uploads/", "", uri)
        for i in range(20):
            body = (
                f"First message ...[zulip.txt](http://{hamlet.realm.host}/user_uploads/"
                + fp_path_id
                + ")"
            )
            self.send_stream_message(
                self.example_user("hamlet"), f"test-subscribe {i % 5}", body, "test"
            )
        self.logout()

        # A non-subscriber gets a 403.
        user = self.example_user("aaron")
        self.login_user(user)
        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 403)
        self.assert_in_response("You are not authorized to view this file.", response)
        self.assert_length(queries, 8)

        # Subscribing to some of the referencing streams grants access.
        self.subscribe(user, "test-subscribe 1")
        self.subscribe(user, "test-subscribe 2")

        with queries_captured() as queries:
            response = self.client_get(uri)
        self.assertEqual(response.status_code, 200)
        data = b"".join(response.streaming_content)
        self.assertEqual(b"zulip!", data)
        # If we were accidentally one query per message, this would be 20+
        self.assert_length(queries, 9)

        with queries_captured() as queries:
            self.assertTrue(validate_attachment_request(user, fp_path_id))
        self.assert_length(queries, 6)

        self.logout()
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_download_authorization_public(self) -> None:
|
2020-03-06 18:40:46 +01:00
|
|
|
subscribed_users = [self.example_user("hamlet"), self.example_user("iago")]
|
|
|
|
unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
|
2017-08-25 06:01:29 +02:00
|
|
|
realm = get_realm("zulip")
|
2020-03-06 18:40:46 +01:00
|
|
|
for subscribed_user in subscribed_users:
|
|
|
|
self.subscribe(subscribed_user, "test-subscribe")
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-06-17 19:48:17 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
|
|
|
uri = result.json()["uri"]
|
|
|
|
fp_path_id = re.sub("/user_uploads/", "", uri)
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"First message ...[zulip.txt](http://{realm.host}/user_uploads/" + fp_path_id + ")"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("hamlet"), "test-subscribe", body, "test")
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
|
|
|
# Now all users should be able to access the files
|
|
|
|
for user in subscribed_users + unsubscribed_users:
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2016-06-17 19:48:17 +02:00
|
|
|
response = self.client_get(uri)
|
|
|
|
data = b"".join(response.streaming_content)
|
|
|
|
self.assertEqual(b"zulip!", data)
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2018-02-12 18:18:03 +01:00
|
|
|
    def test_serve_local(self) -> None:
        """With the nginx sendfile backend, local-uploads downloads must be
        served via an X-Accel-Redirect header with a URL-encoded filename,
        the right Content-Disposition (attachment vs. inline), and private,
        immutable cache headers.
        """

        def check_xsend_links(
            name: str, name_str_for_test: str, content_disposition: str = ""
        ) -> None:
            with self.settings(SENDFILE_BACKEND="django_sendfile.backends.nginx"):
                _get_sendfile.cache_clear()  # To clearout cached version of backend from djangosendfile
                self.login("hamlet")
                fp = StringIO("zulip!")
                fp.name = name
                result = self.client_post("/json/user_uploads", {"file": fp})
                uri = result.json()["uri"]
                fp_path_id = re.sub("/user_uploads/", "", uri)
                fp_path = os.path.split(fp_path_id)[0]
                response = self.client_get(uri)
                # Clear again so later tests don't inherit the nginx backend.
                _get_sendfile.cache_clear()
                test_run, worker = os.path.split(os.path.dirname(settings.LOCAL_UPLOADS_DIR))
                self.assertEqual(
                    response["X-Accel-Redirect"],
                    "/serve_uploads/" + fp_path + "/" + name_str_for_test,
                )
                # An explicit filename in the disposition means "attachment";
                # otherwise the file is expected to be served inline.
                if content_disposition != "":
                    self.assertIn("attachment;", response["Content-disposition"])
                    self.assertIn(content_disposition, response["Content-disposition"])
                else:
                    self.assertIn("inline;", response["Content-disposition"])
                self.assertEqual(
                    set(response["Cache-Control"].split(", ")), {"private", "immutable"}
                )

        check_xsend_links("zulip.txt", "zulip.txt", 'filename="zulip.txt"')
        # Non-ASCII filenames must be percent-encoded in the redirect and
        # use the RFC 5987 filename* form in Content-Disposition.
        check_xsend_links(
            "áéБД.txt",
            "%C3%A1%C3%A9%D0%91%D0%94.txt",
            "filename*=UTF-8''%C3%A1%C3%A9%D0%91%D0%94.txt",
        )
        check_xsend_links("zulip.html", "zulip.html", 'filename="zulip.html"')
        check_xsend_links("zulip.sh", "zulip.sh", 'filename="zulip.sh"')
        check_xsend_links("zulip.jpeg", "zulip.jpeg")
        check_xsend_links("áéБД.pdf", "%C3%A1%C3%A9%D0%91%D0%94.pdf")
        check_xsend_links("zulip", "zulip", 'filename="zulip"')
|
2018-02-12 18:18:03 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def tearDown(self) -> None:
        # Remove files written to the local uploads directory before the
        # standard test teardown runs.
        destroy_uploads()
        super().tearDown()
|
2016-04-17 23:51:49 +02:00
|
|
|
|
2018-05-14 23:47:19 +02:00
|
|
|
|
2017-02-16 10:10:37 +01:00
|
|
|
class AvatarTest(UploadSerializeMixin, ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_avatar_field(self) -> None:
        """get_avatar_field produces a local user_avatars URL for uploaded
        avatars, a Gravatar URL for gravatar-sourced avatars, and None when
        the client computes gravatars itself (client_gravatar=True).
        """
        # Pin AVATAR_SALT so the hashed path in the expected URL is stable.
        with self.settings(AVATAR_SALT="salt"):
            url = get_avatar_field(
                user_id=17,
                realm_id=5,
                email="foo@example.com",
                avatar_source=UserProfile.AVATAR_FROM_USER,
                avatar_version=2,
                medium=True,
                client_gravatar=False,
            )

        self.assertEqual(
            url,
            "/user_avatars/5/fc2b9f1a81f4508a4df2d95451a2a77e0524ca0e-medium.png?x=x&version=2",
        )

        url = get_avatar_field(
            user_id=9999,
            realm_id=9999,
            email="foo@example.com",
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            avatar_version=2,
            medium=True,
            client_gravatar=False,
        )

        self.assertEqual(
            url,
            "https://secure.gravatar.com/avatar/b48def645758b95537d4424c84d1a9ff?d=identicon&s=500&version=2",
        )

        # client_gravatar=True means the client builds the gravatar URL, so
        # the server returns None.
        url = get_avatar_field(
            user_id=9999,
            realm_id=9999,
            email="foo@example.com",
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            avatar_version=2,
            medium=True,
            client_gravatar=True,
        )

        self.assertEqual(url, None)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_avatar_url(self) -> None:
        """Verifies URL schemes for avatars and realm icons."""
        # Local backend serves everything under /user_avatars/.
        backend: ZulipUploadBackend = LocalUploadBackend()
        self.assertEqual(backend.get_public_upload_root_url(), "/user_avatars/")
        self.assertEqual(backend.get_avatar_url("hash", False), "/user_avatars/hash.png?x=x")
        self.assertEqual(backend.get_avatar_url("hash", True), "/user_avatars/hash-medium.png?x=x")
        self.assertEqual(
            backend.get_realm_icon_url(15, 1), "/user_avatars/15/realm/icon.png?version=1"
        )
        self.assertEqual(
            backend.get_realm_logo_url(15, 1, False), "/user_avatars/15/realm/logo.png?version=1"
        )
        self.assertEqual(
            backend.get_realm_logo_url(15, 1, True),
            "/user_avatars/15/realm/night_logo.png?version=1",
        )

        # S3 backend serves from the configured avatar bucket instead.
        with self.settings(S3_AVATAR_BUCKET="bucket"):
            backend = S3UploadBackend()
            self.assertEqual(
                backend.get_avatar_url("hash", False), "https://bucket.s3.amazonaws.com/hash?x=x"
            )
            self.assertEqual(
                backend.get_avatar_url("hash", True),
                "https://bucket.s3.amazonaws.com/hash-medium.png?x=x",
            )
            self.assertEqual(
                backend.get_realm_icon_url(15, 1),
                "https://bucket.s3.amazonaws.com/15/realm/icon.png?version=1",
            )
            self.assertEqual(
                backend.get_realm_logo_url(15, 1, False),
                "https://bucket.s3.amazonaws.com/15/realm/logo.png?version=1",
            )
            self.assertEqual(
                backend.get_realm_logo_url(15, 1, True),
                "https://bucket.s3.amazonaws.com/15/realm/night_logo.png?version=1",
            )
|
2017-03-21 23:53:54 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_upload_failure(self) -> None:
|
2016-04-17 23:51:49 +02:00
|
|
|
"""
|
|
|
|
Attempting to upload two files should fail.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
|
|
|
|
result = self.client_post("/json/users/me/avatar", {"f1": fp1, "f2": fp2})
|
2016-04-17 23:51:49 +02:00
|
|
|
self.assert_json_error(result, "You must upload exactly one avatar.")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_no_file_upload_failure(self) -> None:
|
2016-04-17 23:51:49 +02:00
|
|
|
"""
|
|
|
|
Calling this endpoint with no files should fail.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-04-17 23:51:49 +02:00
|
|
|
|
2017-07-05 19:15:15 +02:00
|
|
|
result = self.client_post("/json/users/me/avatar")
|
2016-04-17 23:51:49 +02:00
|
|
|
self.assert_json_error(result, "You must upload exactly one avatar.")
|
|
|
|
|
2019-04-23 04:51:04 +02:00
|
|
|
    def test_avatar_changes_disabled_failure(self) -> None:
        """
        Attempting to upload avatar on a realm with avatar changes disabled should fail.
        """
        self.login("cordelia")
        # Flip the realm-level policy that blocks avatar changes.
        do_set_realm_property(
            self.example_user("cordelia").realm,
            "avatar_changes_disabled",
            True,
            acting_user=None,
        )

        with get_test_image_file("img.png") as fp1:
            result = self.client_post("/json/users/me/avatar", {"f1": fp1})
        self.assert_json_error(result, "Avatar changes are disabled in this organization.")
|
|
|
|
|
2016-04-17 23:57:03 +02:00
|
|
|
    # (input filename, expected resized output filename) pairs used by the
    # avatar-upload tests; None means the resized result is not compared
    # against a fixture.
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]
    # Inputs that must be rejected as invalid avatar images.
    corrupt_files = ["text.txt", "corrupt.png", "corrupt.gif"]
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_gravatar_avatar(self) -> None:
        """/avatar/<email> for a gravatar-sourced user redirects to the URL
        avatar_url computes, carrying through any extra query parameters.
        """
        self.login("hamlet")
        cordelia = self.example_user("cordelia")
        cordelia.email = cordelia.delivery_email
        cordelia.save()

        cordelia.avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
        cordelia.save()
        # avatar_url's output depends on ENABLE_GRAVATAR, so the comparison
        # happens inside each settings context.
        with self.settings(ENABLE_GRAVATAR=True):
            response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertEqual(redirect_url, str(avatar_url(cordelia)) + "&foo=bar")

        with self.settings(ENABLE_GRAVATAR=False):
            response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))
|
2016-07-13 01:56:59 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_user_avatar(self) -> None:
        """Exercises the /avatar/<email_or_id> redirect endpoint with both a
        browser session and HTTP basic auth, including cross-realm bots, and
        confirms unauthenticated access is rejected.
        """
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        cordelia = self.example_user("cordelia")
        cordelia.email = cordelia.delivery_email
        cordelia.save()

        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        cross_realm_bot = get_system_bot(settings.WELCOME_BOT, internal_realm.id)

        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()
        # Lookup by email.
        response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))

        # Lookup by user id.
        response = self.client_get(f"/avatar/{cordelia.id}", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))

        # Missing email/id is a 404, not a redirect.
        response = self.client_get("/avatar/")
        self.assertEqual(response.status_code, 404)

        self.logout()

        # Test /avatar/<email_or_id> endpoint with HTTP basic auth.
        response = self.api_get(hamlet, "/avatar/cordelia@zulip.com", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))

        response = self.api_get(hamlet, f"/avatar/{cordelia.id}", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))

        # Test cross_realm_bot avatar access using email.
        response = self.api_get(hamlet, "/avatar/welcome-bot@zulip.com", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + "&foo=bar"))

        # Test cross_realm_bot avatar access using id.
        response = self.api_get(hamlet, f"/avatar/{cross_realm_bot.id}", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + "&foo=bar"))

        # Without any authentication the endpoint returns 401.
        response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
        self.assert_json_error(
            response, "Not logged in: API authentication or user session required", status_code=401
        )
|
2018-08-13 19:09:09 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_user_avatar_medium(self) -> None:
        """Same checks as test_get_user_avatar, but for the medium-size
        variant at /avatar/<email_or_id>/medium.
        """
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        cordelia = self.example_user("cordelia")
        cordelia.email = cordelia.delivery_email
        cordelia.save()

        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()
        # Lookup by email; avatar_url(..., True) is the medium-size URL.
        response = self.client_get("/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "&foo=bar"))

        # Lookup by user id.
        response = self.client_get(f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "&foo=bar"))

        self.logout()

        # Test /avatar/<email_or_id>/medium endpoint with HTTP basic auth.
        response = self.api_get(hamlet, "/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "&foo=bar"))

        response = self.api_get(hamlet, f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "&foo=bar"))

        # Without any authentication the endpoint returns 401.
        response = self.client_get("/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
        self.assert_json_error(
            response, "Not logged in: API authentication or user session required", status_code=401
        )
|
2018-08-13 19:09:09 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_non_valid_user_avatar(self) -> None:
|
2016-07-13 01:56:59 +02:00
|
|
|
|
|
|
|
# It's debatable whether we should generate avatars for non-users,
|
|
|
|
# but this test just validates the current code's behavior.
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-07-13 01:56:59 +02:00
|
|
|
|
2020-09-13 00:11:30 +02:00
|
|
|
response = self.client_get("/avatar/nonexistent_user@zulip.com", {"foo": "bar"})
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = response["Location"]
|
|
|
|
actual_url = "https://secure.gravatar.com/avatar/444258b521f152129eb0c162996e572d?d=identicon&version=1&foo=bar"
|
2016-07-13 01:56:59 +02:00
|
|
|
self.assertEqual(redirect_url, actual_url)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_valid_avatars(self) -> None:
|
2016-04-17 23:57:03 +02:00
|
|
|
"""
|
2020-10-23 02:43:28 +02:00
|
|
|
A PUT request to /json/users/me/avatar with a valid file should return a URL and actually create an avatar.
|
2016-04-17 23:57:03 +02:00
|
|
|
"""
|
2017-01-28 19:05:20 +01:00
|
|
|
version = 2
|
2016-04-17 23:57:03 +02:00
|
|
|
for fname, rfname in self.correct_files:
|
|
|
|
# TODO: use self.subTest once we're exclusively on python 3 by uncommenting the line below.
|
|
|
|
# with self.subTest(fname=fname):
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-12-19 08:48:03 +01:00
|
|
|
with get_test_image_file(fname) as fp:
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/users/me/avatar", {"file": fp})
|
2016-04-17 23:57:03 +02:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
2017-08-16 09:52:16 +02:00
|
|
|
self.assertIn("avatar_url", result.json())
|
2021-02-12 08:20:45 +01:00
|
|
|
base = "/user_avatars/"
|
|
|
|
url = result.json()["avatar_url"]
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(base, url[: len(base)])
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2016-07-26 08:14:49 +02:00
|
|
|
if rfname is not None:
|
2016-07-28 00:38:45 +02:00
|
|
|
response = self.client_get(url)
|
2016-07-26 08:14:49 +02:00
|
|
|
data = b"".join(response.streaming_content)
|
2016-12-16 02:01:34 +01:00
|
|
|
self.assertEqual(Image.open(io.BytesIO(data)).size, (100, 100))
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2016-09-20 21:48:48 +02:00
|
|
|
# Verify that the medium-size avatar was created
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2016-12-19 08:48:03 +01:00
|
|
|
medium_avatar_disk_path = avatar_disk_path(user_profile, medium=True)
|
2016-09-20 21:48:48 +02:00
|
|
|
self.assertTrue(os.path.exists(medium_avatar_disk_path))
|
|
|
|
|
2018-05-14 23:47:19 +02:00
|
|
|
# Verify that ensure_medium_avatar_url does not overwrite this file if it exists
|
2021-02-12 08:20:45 +01:00
|
|
|
with mock.patch("zerver.lib.upload.write_local_file") as mock_write_local_file:
|
2021-03-17 17:54:23 +01:00
|
|
|
zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile, is_medium=True)
|
2018-05-14 23:47:19 +02:00
|
|
|
self.assertFalse(mock_write_local_file.called)
|
|
|
|
|
2016-09-20 21:48:48 +02:00
|
|
|
# Confirm that ensure_medium_avatar_url works to recreate
|
|
|
|
# medium size avatars from the original if needed
|
|
|
|
os.remove(medium_avatar_disk_path)
|
|
|
|
self.assertFalse(os.path.exists(medium_avatar_disk_path))
|
2021-03-17 17:54:23 +01:00
|
|
|
zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile, is_medium=True)
|
2016-09-20 21:48:48 +02:00
|
|
|
self.assertTrue(os.path.exists(medium_avatar_disk_path))
|
|
|
|
|
2017-01-28 19:05:20 +01:00
|
|
|
# Verify whether the avatar_version gets incremented with every new upload
|
|
|
|
self.assertEqual(user_profile.avatar_version, version)
|
|
|
|
version += 1
|
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
def test_copy_avatar_image(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
with get_test_image_file("img.png") as image_file:
|
|
|
|
self.client_post("/json/users/me/avatar", {"file": image_file})
|
2018-06-06 14:30:26 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
source_user_profile = self.example_user("hamlet")
|
|
|
|
target_user_profile = self.example_user("iago")
|
2018-06-06 14:30:26 +02:00
|
|
|
|
|
|
|
copy_user_settings(source_user_profile, target_user_profile)
|
|
|
|
|
|
|
|
source_path_id = avatar_disk_path(source_user_profile)
|
|
|
|
target_path_id = avatar_disk_path(target_user_profile)
|
|
|
|
self.assertNotEqual(source_path_id, target_path_id)
|
2020-10-24 09:33:54 +02:00
|
|
|
with open(source_path_id, "rb") as source, open(target_path_id, "rb") as target:
|
|
|
|
self.assertEqual(source.read(), target.read())
|
2018-06-06 14:30:26 +02:00
|
|
|
|
|
|
|
source_original_path_id = avatar_disk_path(source_user_profile, original=True)
|
|
|
|
target_original_path_id = avatar_disk_path(target_user_profile, original=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
with open(source_original_path_id, "rb") as source, open(
|
|
|
|
target_original_path_id, "rb"
|
|
|
|
) as target:
|
2020-10-24 09:33:54 +02:00
|
|
|
self.assertEqual(source.read(), target.read())
|
2018-06-06 14:30:26 +02:00
|
|
|
|
|
|
|
source_medium_path_id = avatar_disk_path(source_user_profile, medium=True)
|
|
|
|
target_medium_path_id = avatar_disk_path(target_user_profile, medium=True)
|
2021-02-12 08:19:30 +01:00
|
|
|
with open(source_medium_path_id, "rb") as source, open(
|
|
|
|
target_medium_path_id, "rb"
|
|
|
|
) as target:
|
2020-10-24 09:33:54 +02:00
|
|
|
self.assertEqual(source.read(), target.read())
|
2018-06-06 14:30:26 +02:00
|
|
|
|
2018-09-07 17:44:40 +02:00
|
|
|
def test_delete_avatar_image(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
with get_test_image_file("img.png") as image_file:
|
|
|
|
self.client_post("/json/users/me/avatar", {"file": image_file})
|
2018-09-07 17:44:40 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
2018-09-07 17:44:40 +02:00
|
|
|
|
|
|
|
avatar_path_id = avatar_disk_path(user)
|
|
|
|
avatar_original_path_id = avatar_disk_path(user, original=True)
|
|
|
|
avatar_medium_path_id = avatar_disk_path(user, medium=True)
|
|
|
|
|
|
|
|
self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_USER)
|
|
|
|
self.assertTrue(os.path.isfile(avatar_path_id))
|
|
|
|
self.assertTrue(os.path.isfile(avatar_original_path_id))
|
|
|
|
self.assertTrue(os.path.isfile(avatar_medium_path_id))
|
|
|
|
|
2020-06-29 12:47:44 +02:00
|
|
|
zerver.lib.actions.do_delete_avatar_image(user, acting_user=user)
|
2018-09-07 17:44:40 +02:00
|
|
|
|
|
|
|
self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)
|
|
|
|
self.assertFalse(os.path.isfile(avatar_path_id))
|
|
|
|
self.assertFalse(os.path.isfile(avatar_original_path_id))
|
|
|
|
self.assertFalse(os.path.isfile(avatar_medium_path_id))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_invalid_avatars(self) -> None:
|
2016-04-17 23:57:03 +02:00
|
|
|
"""
|
2016-12-21 18:34:03 +01:00
|
|
|
A PUT request to /json/users/me/avatar with an invalid file should fail.
|
2016-04-17 23:57:03 +02:00
|
|
|
"""
|
|
|
|
for fname in self.corrupt_files:
|
|
|
|
# with self.subTest(fname=fname):
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-12-19 08:48:03 +01:00
|
|
|
with get_test_image_file(fname) as fp:
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/users/me/avatar", {"file": fp})
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2017-02-26 20:17:34 +01:00
|
|
|
self.assert_json_error(result, "Could not decode image; did you upload an image file?")
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-01-28 19:05:20 +01:00
|
|
|
self.assertEqual(user_profile.avatar_version, 1)
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_delete_avatar(self) -> None:
|
2016-12-21 18:34:03 +01:00
|
|
|
"""
|
2019-03-09 17:43:48 +01:00
|
|
|
A DELETE request to /json/users/me/avatar should delete the profile picture and return gravatar URL
|
2016-12-21 18:34:03 +01:00
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("cordelia")
|
2019-04-23 04:51:04 +02:00
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
|
|
|
|
cordelia.save()
|
|
|
|
|
2021-03-01 11:33:24 +01:00
|
|
|
do_set_realm_property(cordelia.realm, "avatar_changes_disabled", True, acting_user=None)
|
2019-04-23 04:51:04 +02:00
|
|
|
result = self.client_delete("/json/users/me/avatar")
|
|
|
|
self.assert_json_error(result, "Avatar changes are disabled in this organization.", 400)
|
2016-12-21 18:34:03 +01:00
|
|
|
|
2021-03-01 11:33:24 +01:00
|
|
|
do_set_realm_property(cordelia.realm, "avatar_changes_disabled", False, acting_user=None)
|
2016-12-21 18:34:03 +01:00
|
|
|
result = self.client_delete("/json/users/me/avatar")
|
2019-04-23 04:51:04 +02:00
|
|
|
user_profile = self.example_user("cordelia")
|
2016-12-21 18:34:03 +01:00
|
|
|
|
|
|
|
self.assert_json_success(result)
|
2017-08-16 09:52:16 +02:00
|
|
|
self.assertIn("avatar_url", result.json())
|
|
|
|
self.assertEqual(result.json()["avatar_url"], avatar_url(user_profile))
|
2016-12-21 18:34:03 +01:00
|
|
|
|
|
|
|
self.assertEqual(user_profile.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)
|
2017-01-28 19:05:20 +01:00
|
|
|
self.assertEqual(user_profile.avatar_version, 2)
|
2016-12-21 18:34:03 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_avatar_upload_file_size_error(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2017-03-06 06:22:28 +01:00
|
|
|
with get_test_image_file(self.correct_files[0][0]) as fp:
|
2021-05-29 08:51:07 +02:00
|
|
|
with self.settings(MAX_AVATAR_FILE_SIZE_MIB=0):
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/users/me/avatar", {"file": fp})
|
2020-06-15 23:22:24 +02:00
|
|
|
self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")
|
2017-03-06 06:22:28 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def tearDown(self) -> None:
        # Remove any files written to the uploads directory during the test
        # before the base class performs its own cleanup.
        destroy_uploads()
        super().tearDown()
|
2016-04-14 23:44:39 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-04-15 13:20:36 +02:00
|
|
|
class EmojiTest(UploadSerializeMixin, ZulipTestCase):
    # While testing GIF resizing, we can't test if the final GIF has the same
    # number of frames as the original one because PIL drops duplicate frames
    # with a corresponding increase in the duration of the previous frame.
    def test_resize_emoji(self) -> None:
        """Exercise resize_emoji across size limits and corrupt input."""
        # Test unequal width and height of animated GIF image
        with get_test_image_file("animated_unequal_img.gif") as f:
            animated_unequal_img_data = f.read()
        resized_img_data = resize_emoji(animated_unequal_img_data, size=50)
        im = Image.open(io.BytesIO(resized_img_data))
        self.assertEqual((50, 50), im.size)

        # Test corrupt image exception
        with get_test_image_file("corrupt.gif") as f:
            corrupted_img_data = f.read()
        with self.assertRaises(BadImageError):
            resize_emoji(corrupted_img_data)

        # The remaining scenarios all operate on the same source image, so
        # read it from disk once instead of once per scenario.
        with get_test_image_file("animated_large_img.gif") as f:
            animated_large_img_data = f.read()

        # Test an image larger than max is resized
        with patch("zerver.lib.upload.MAX_EMOJI_GIF_SIZE", 128):
            resized_img_data = resize_emoji(animated_large_img_data, size=50)
            im = Image.open(io.BytesIO(resized_img_data))
            self.assertEqual((50, 50), im.size)

        # Test an image file larger than max is resized
        with patch("zerver.lib.upload.MAX_EMOJI_GIF_FILE_SIZE_BYTES", 3 * 1024 * 1024):
            resized_img_data = resize_emoji(animated_large_img_data, size=50)
            im = Image.open(io.BytesIO(resized_img_data))
            self.assertEqual((50, 50), im.size)

        # Test an image smaller than max and smaller than file size max is not resized
        with patch("zerver.lib.upload.MAX_EMOJI_GIF_SIZE", 512):
            resized_img_data = resize_emoji(animated_large_img_data, size=50)
            im = Image.open(io.BytesIO(resized_img_data))
            self.assertEqual((256, 256), im.size)

    def tearDown(self) -> None:
        # Remove files written during the test before base-class cleanup.
        destroy_uploads()
        super().tearDown()
|
2018-04-15 13:20:36 +02:00
|
|
|
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
class RealmIconTest(UploadSerializeMixin, ZulipTestCase):
    """Tests for uploading, fetching, and deleting realm icons."""

    def test_multiple_upload_failure(self) -> None:
        """
        Attempting to upload two files should fail.
        """
        # Log in as admin
        self.login("iago")
        with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
            result = self.client_post("/json/realm/icon", {"f1": fp1, "f2": fp2})
        self.assert_json_error(result, "You must upload exactly one icon.")

    def test_no_file_upload_failure(self) -> None:
        """
        Calling this endpoint with no files should fail.
        """
        self.login("iago")

        result = self.client_post("/json/realm/icon")
        self.assert_json_error(result, "You must upload exactly one icon.")

    # Pairs of (uploaded filename, expected resized fixture); None means the
    # resized output is not byte-stable across platforms.
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]
    corrupt_files = ["text.txt", "corrupt.png", "corrupt.gif"]

    def test_no_admin_user_upload(self) -> None:
        self.login("hamlet")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post("/json/realm/icon", {"file": fp})
        self.assert_json_error(result, "Must be an organization administrator")

    def test_get_gravatar_icon(self) -> None:
        self.login("hamlet")
        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_FROM_GRAVATAR, acting_user=None)
        with self.settings(ENABLE_GRAVATAR=True):
            response = self.client_get("/json/realm/icon", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertEqual(redirect_url, realm_icon_url(realm) + "&foo=bar")

        with self.settings(ENABLE_GRAVATAR=False):
            response = self.client_get("/json/realm/icon", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(realm_icon_url(realm) + "&foo=bar"))

    def test_get_realm_icon(self) -> None:
        self.login("hamlet")

        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)
        response = self.client_get("/json/realm/icon", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(realm_icon_url(realm) + "&foo=bar"))

    def test_valid_icons(self) -> None:
        """
        A PUT request to /json/realm/icon with a valid file should return a URL
        and actually create a realm icon.
        """
        for fname, rfname in self.correct_files:
            # subTest lets the remaining formats run even if one fails.
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/realm/icon", {"file": fp})
                    realm = get_realm("zulip")
                self.assert_json_success(result)
                self.assertIn("icon_url", result.json())
                base = f"/user_avatars/{realm.id}/realm/icon.png"
                url = result.json()["icon_url"]
                self.assertEqual(base, url[: len(base)])

                if rfname is not None:
                    response = self.client_get(url)
                    data = b"".join(response.streaming_content)
                    self.assertEqual(Image.open(io.BytesIO(data)).size, (100, 100))

    def test_invalid_icons(self) -> None:
        """
        A PUT request to /json/realm/icon with an invalid file should fail.
        """
        for fname in self.corrupt_files:
            # subTest lets the remaining corrupt files run even if one fails.
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/realm/icon", {"file": fp})

                self.assert_json_error(
                    result, "Could not decode image; did you upload an image file?"
                )

    def test_delete_icon(self) -> None:
        """
        A DELETE request to /json/realm/icon should delete the realm icon and return gravatar URL
        """
        self.login("iago")
        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)

        result = self.client_delete("/json/realm/icon")

        self.assert_json_success(result)
        self.assertIn("icon_url", result.json())
        realm = get_realm("zulip")
        self.assertEqual(result.json()["icon_url"], realm_icon_url(realm))
        self.assertEqual(realm.icon_source, Realm.ICON_FROM_GRAVATAR)

    def test_realm_icon_version(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        icon_version = realm.icon_version
        self.assertEqual(icon_version, 1)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            self.client_post("/json/realm/icon", {"file": fp})
        realm = get_realm("zulip")
        self.assertEqual(realm.icon_version, icon_version + 1)

    def test_realm_icon_upload_file_size_error(self) -> None:
        self.login("iago")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            with self.settings(MAX_ICON_FILE_SIZE_MIB=0):
                result = self.client_post("/json/realm/icon", {"file": fp})
        self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")

    def tearDown(self) -> None:
        # Remove files written during the test before base-class cleanup.
        destroy_uploads()
        super().tearDown()
|
2017-02-21 03:41:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-16 01:26:55 +02:00
|
|
|
class RealmLogoTest(UploadSerializeMixin, ZulipTestCase):
    """
    Tests for uploading, fetching, and deleting realm logos.

    RealmNightLogoTest reruns this entire class with night=True, so every
    test must branch on self.night rather than hard-coding a mode.
    """

    night = False

    def test_multiple_upload_failure(self) -> None:
        """
        Attempting to upload two files should fail.
        """
        # Log in as admin
        self.login("iago")
        with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
            result = self.client_post(
                "/json/realm/logo",
                {"f1": fp1, "f2": fp2, "night": orjson.dumps(self.night).decode()},
            )
        self.assert_json_error(result, "You must upload exactly one logo.")

    def test_no_file_upload_failure(self) -> None:
        """
        Calling this endpoint with no files should fail.
        """
        self.login("iago")

        result = self.client_post("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        self.assert_json_error(result, "You must upload exactly one logo.")

    # Pairs of (uploaded filename, expected resized fixture); None means the
    # resized output is not byte-stable across platforms.
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]
    corrupt_files = ["text.txt", "corrupt.png", "corrupt.gif"]

    def test_no_admin_user_upload(self) -> None:
        self.login("hamlet")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        self.assert_json_error(result, "Must be an organization administrator")

    def test_upload_limited_plan_type(self) -> None:
        user_profile = self.example_user("iago")
        do_change_plan_type(user_profile.realm, Realm.LIMITED, acting_user=None)
        self.login_user(user_profile)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        self.assert_json_error(result, "Available on Zulip Standard. Upgrade to access.")

    def test_get_default_logo(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_DEFAULT, self.night, acting_user=user_profile)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        is_night_str = str(self.night).lower()
        self.assertEqual(
            redirect_url, f"/static/images/logo/zulip-org-logo.svg?version=0&night={is_night_str}"
        )

    def test_get_realm_logo(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, self.night, acting_user=user_profile)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        self.assertTrue(
            redirect_url.endswith(
                get_realm_logo_url(realm, self.night) + f"&night={str(self.night).lower()}"
            )
        )

        is_night_str = str(self.night).lower()

        if self.night:
            file_name = "night_logo.png"
        else:
            file_name = "logo.png"
        self.assertEqual(
            redirect_url,
            f"/user_avatars/{realm.id}/realm/{file_name}?version=2&night={is_night_str}",
        )

        # On a limited plan the uploaded logo stays recorded but the endpoint
        # falls back to the default logo.
        do_change_plan_type(realm, Realm.LIMITED, acting_user=user_profile)
        if self.night:
            self.assertEqual(realm.night_logo_source, Realm.LOGO_UPLOADED)
        else:
            self.assertEqual(realm.logo_source, Realm.LOGO_UPLOADED)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        self.assertEqual(
            redirect_url, f"/static/images/logo/zulip-org-logo.svg?version=0&night={is_night_str}"
        )

    def test_valid_logos(self) -> None:
        """
        A PUT request to /json/realm/logo with a valid file should return a URL
        and actually create a realm logo.
        """
        for fname, rfname in self.correct_files:
            # subTest lets the remaining formats run even if one fails.
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post(
                        "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
                    )
                realm = get_realm("zulip")
                self.assert_json_success(result)
                logo_url = get_realm_logo_url(realm, self.night)

                if rfname is not None:
                    response = self.client_get(logo_url)
                    data = b"".join(response.streaming_content)
                    # size should be 100 x 100 because thumbnail keeps aspect ratio
                    # while trying to fit in a 800 x 100 box without losing part of the image
                    self.assertEqual(Image.open(io.BytesIO(data)).size, (100, 100))

    def test_invalid_logo_upload(self) -> None:
        """
        A PUT request to /json/realm/logo with an invalid file should fail.
        """
        for fname in self.corrupt_files:
            # subTest lets the remaining corrupt files run even if one fails.
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post(
                        "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
                    )

                self.assert_json_error(
                    result, "Could not decode image; did you upload an image file?"
                )

    def test_delete_logo(self) -> None:
        """
        A DELETE request to /json/realm/logo should delete the realm logo and return gravatar URL
        """
        user_profile = self.example_user("iago")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, self.night, acting_user=user_profile)
        result = self.client_delete(
            "/json/realm/logo", {"night": orjson.dumps(self.night).decode()}
        )
        self.assert_json_success(result)
        realm = get_realm("zulip")
        if self.night:
            self.assertEqual(realm.night_logo_source, Realm.LOGO_DEFAULT)
        else:
            self.assertEqual(realm.logo_source, Realm.LOGO_DEFAULT)

    def test_logo_version(self) -> None:
        self.login("iago")
        realm = get_realm("zulip")
        if self.night:
            version = realm.night_logo_version
        else:
            version = realm.logo_version
        self.assertEqual(version, 1)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        realm = get_realm("zulip")
        if self.night:
            self.assertEqual(realm.night_logo_version, version + 1)
        else:
            self.assertEqual(realm.logo_version, version + 1)

    def test_logo_upload_file_size_error(self) -> None:
        self.login("iago")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            with self.settings(MAX_LOGO_FILE_SIZE_MIB=0):
                result = self.client_post(
                    "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
                )
        self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")

    def tearDown(self) -> None:
        # Remove files written during the test before base-class cleanup.
        destroy_uploads()
        super().tearDown()
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
# Inherits every test method from RealmLogoTest unchanged; only the class
# attribute differs, so the whole suite reruns against the night-mode logo.
class RealmNightLogoTest(RealmLogoTest):
    # Run the same tests as for RealmLogoTest, just with night mode enabled
    night = True
|
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
class LocalStorageTest(UploadSerializeMixin, ZulipTestCase):
    def test_file_upload_local(self) -> None:
        """upload_message_file on the local backend returns a /user_uploads/
        URI, writes the bytes under LOCAL_UPLOADS_DIR/files, and records an
        Attachment row with the matching size."""
        sender = self.example_user("hamlet")
        payload = b"zulip!"
        uri = upload_message_file("dummy.txt", len(payload), "text/plain", payload, sender)

        prefix = "/user_uploads/"
        self.assertEqual(prefix, uri[: len(prefix)])
        path_id = re.sub("/user_uploads/", "", uri)
        on_disk = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", path_id)
        self.assertTrue(os.path.isfile(on_disk))

        uploaded_file = Attachment.objects.get(owner=sender, path_id=path_id)
        self.assert_length(payload, uploaded_file.size)
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_delete_message_image_local(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-04-20 21:50:56 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2016-04-20 21:50:56 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
path_id = re.sub("/user_uploads/", "", result.json()["uri"])
|
2016-06-09 07:53:35 +02:00
|
|
|
self.assertTrue(delete_message_image(path_id))
|
2016-04-20 21:50:56 +02:00
|
|
|
|
2021-03-17 17:54:23 +01:00
|
|
|
    def test_ensure_avatar_image_local(self) -> None:
        """Seed only the ".original" avatar file on local storage, then check
        that ensure_avatar_image materializes correctly resized standard and
        medium variants next to it."""
        user_profile = self.example_user("hamlet")
        file_path = user_avatar_path(user_profile)

        # Write only the original; the resized variants are what
        # ensure_avatar_image is expected to produce.
        with get_test_image_file("img.png") as image_file:
            write_local_file("avatars", file_path + ".original", image_file.read())

        image_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".original")
        with open(image_path, "rb") as f:
            image_data = f.read()

        # Standard-size avatar: compare against our own resize of the data.
        resized_avatar = resize_avatar(image_data)
        zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile)
        output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + ".png")
        with open(output_path, "rb") as original_file:
            self.assertEqual(resized_avatar, original_file.read())

        # Medium-size avatar.
        resized_avatar = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile, is_medium=True)
        output_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", file_path + "-medium.png")
        with open(output_path, "rb") as original_file:
            self.assertEqual(resized_avatar, original_file.read())
|
2018-05-15 00:10:30 +02:00
|
|
|
    def test_emoji_upload_local(self) -> None:
        """upload_emoji_image on the local backend stores the raw upload as
        "<emoji_path>.original" and a resized copy at the emoji path."""
        user_profile = self.example_user("hamlet")
        file_name = "emoji.png"

        with get_test_image_file("img.png") as image_file:
            upload_emoji_image(image_file, file_name, user_profile)

        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=file_name,
        )

        # The ".original" file must be byte-identical to the upload.
        file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path)
        with get_test_image_file("img.png") as image_file, open(
            file_path + ".original", "rb"
        ) as original_file:
            self.assertEqual(image_file.read(), original_file.read())

        # The served copy is resized to the square default emoji size.
        expected_size = (DEFAULT_EMOJI_SIZE, DEFAULT_EMOJI_SIZE)
        with Image.open(file_path) as resized_image:
            self.assertEqual(expected_size, resized_image.size)
|
2018-05-15 00:25:06 +02:00
|
|
|
def test_get_emoji_url_local(self) -> None:
|
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
file_name = "emoji.png"
|
|
|
|
|
2020-10-24 09:33:54 +02:00
|
|
|
with get_test_image_file("img.png") as image_file:
|
|
|
|
upload_emoji_image(image_file, file_name, user_profile)
|
2018-05-15 00:25:06 +02:00
|
|
|
url = zerver.lib.upload.upload_backend.get_emoji_url(file_name, user_profile.realm_id)
|
|
|
|
|
|
|
|
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=user_profile.realm_id,
|
|
|
|
emoji_file_name=file_name,
|
|
|
|
)
|
2020-06-09 00:25:09 +02:00
|
|
|
expected_url = f"/user_avatars/{emoji_path}"
|
2018-05-15 00:25:06 +02:00
|
|
|
self.assertEqual(expected_url, url)
|
|
|
|
|
2019-06-27 20:41:47 +02:00
|
|
|
def test_tarball_upload_and_deletion_local(self) -> None:
|
2019-06-21 22:46:04 +02:00
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertTrue(user_profile.is_realm_admin)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
tarball_path = os.path.join(settings.TEST_WORKER_DIR, "tarball.tar.gz")
|
|
|
|
with open(tarball_path, "w") as f:
|
|
|
|
f.write("dummy")
|
2019-06-21 22:46:04 +02:00
|
|
|
|
|
|
|
uri = upload_export_tarball(user_profile.realm, tarball_path)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertTrue(
|
2021-02-12 08:20:45 +01:00
|
|
|
os.path.isfile(os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", tarball_path))
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-06-21 22:46:04 +02:00
|
|
|
|
|
|
|
result = re.search(re.compile(r"([A-Za-z0-9\-_]{24})"), uri)
|
|
|
|
if result is not None:
|
|
|
|
random_name = result.group(1)
|
2020-06-10 06:40:53 +02:00
|
|
|
expected_url = f"http://zulip.testserver/user_avatars/exports/{user_profile.realm_id}/{random_name}/tarball.tar.gz"
|
2019-06-21 22:46:04 +02:00
|
|
|
self.assertEqual(expected_url, uri)
|
|
|
|
|
2019-06-27 20:41:47 +02:00
|
|
|
# Delete the tarball.
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs(level="WARNING") as warn_log:
|
|
|
|
self.assertIsNone(delete_export_tarball("/not_a_file"))
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
warn_log.output,
|
2021-02-12 08:20:45 +01:00
|
|
|
["WARNING:root:not_a_file does not exist. Its entry in the database will be removed."],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-06-27 20:41:47 +02:00
|
|
|
path_id = urllib.parse.urlparse(uri).path
|
|
|
|
self.assertEqual(delete_export_tarball(path_id), path_id)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def tearDown(self) -> None:
        # Remove the files these tests wrote to the upload directory before
        # running the base class's cleanup.
        destroy_uploads()
        super().tearDown()
|
2018-05-15 00:10:30 +02:00
|
|
|
|
2016-08-23 02:08:42 +02:00
|
|
|
class S3Test(ZulipTestCase):
    @use_s3_backend
    def test_file_upload_s3(self) -> None:
        """upload_message_file with the S3 backend stores the bytes in the
        auth-uploads bucket, records an Attachment row with the right size,
        and returns a /user_uploads/ URI usable from a message."""
        bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]

        user_profile = self.example_user("hamlet")
        uri = upload_message_file(
            "dummy.txt", len(b"zulip!"), "text/plain", b"zulip!", user_profile
        )

        base = "/user_uploads/"
        self.assertEqual(base, uri[: len(base)])
        # The S3 key is the URI with the /user_uploads/ prefix stripped.
        path_id = re.sub("/user_uploads/", "", uri)
        content = bucket.Object(path_id).get()["Body"].read()
        self.assertEqual(b"zulip!", content)

        uploaded_file = Attachment.objects.get(owner=user_profile, path_id=path_id)
        self.assert_length(b"zulip!", uploaded_file.size)

        # Also exercise referencing the upload from a stream message.
        self.subscribe(self.example_user("hamlet"), "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
        self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")
|
2018-05-14 20:46:59 +02:00
|
|
|
@use_s3_backend
|
|
|
|
def test_file_upload_s3_with_undefined_content_type(self) -> None:
|
2018-12-07 18:15:51 +01:00
|
|
|
bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]
|
2018-05-14 20:46:59 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
|
|
|
uri = upload_message_file("dummy.txt", len(b"zulip!"), None, b"zulip!", user_profile)
|
2018-05-14 20:46:59 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
path_id = re.sub("/user_uploads/", "", uri)
|
|
|
|
self.assertEqual(b"zulip!", bucket.Object(path_id).get()["Body"].read())
|
2018-05-14 20:46:59 +02:00
|
|
|
uploaded_file = Attachment.objects.get(owner=user_profile, path_id=path_id)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(b"zulip!", uploaded_file.size)
|
2018-05-14 20:46:59 +02:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
@use_s3_backend
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_message_image_delete_s3(self) -> None:
|
2018-12-07 18:15:51 +01:00
|
|
|
create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)
|
2016-04-20 21:51:21 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2021-02-12 08:19:30 +01:00
|
|
|
uri = upload_message_file(
|
2021-02-12 08:20:45 +01:00
|
|
|
"dummy.txt", len(b"zulip!"), "text/plain", b"zulip!", user_profile
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2016-04-20 21:51:21 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
path_id = re.sub("/user_uploads/", "", uri)
|
2016-06-09 07:53:35 +02:00
|
|
|
self.assertTrue(delete_message_image(path_id))
|
2016-04-20 21:51:21 +02:00
|
|
|
|
2018-05-14 21:56:49 +02:00
|
|
|
@use_s3_backend
|
|
|
|
def test_message_image_delete_when_file_doesnt_exist(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs(level="WARNING") as warn_log:
|
|
|
|
self.assertEqual(False, delete_message_image("non-existant-file"))
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
warn_log.output,
|
|
|
|
[
|
2021-02-12 08:20:45 +01:00
|
|
|
"WARNING:root:non-existant-file does not exist. Its entry in the database will be removed."
|
2021-02-12 08:19:30 +01:00
|
|
|
],
|
|
|
|
)
|
2018-05-14 21:56:49 +02:00
|
|
|
|
2016-06-09 07:53:35 +02:00
|
|
|
    @use_s3_backend
    def test_file_upload_authed(self) -> None:
        """
        A call to /json/user_uploads should return a uri and actually create an object.
        """
        bucket = create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET)[0]

        self.login("hamlet")
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"

        result = self.client_post("/json/user_uploads", {"file": fp})
        self.assert_json_success(result)
        self.assertIn("uri", result.json())
        base = "/user_uploads/"
        uri = result.json()["uri"]
        self.assertEqual(base, uri[: len(base)])

        # Fetching the URI redirects to an S3 URL; the redirect's path (minus
        # the leading slash) is the S3 key, whose content we verify.
        response = self.client_get(uri)
        redirect_url = response["Location"]
        path = urllib.parse.urlparse(redirect_url).path
        assert path.startswith("/")
        key = path[1:]
        self.assertEqual(b"zulip!", bucket.Object(key).get()["Body"].read())

        # Now try the endpoint that's supposed to return a temporary URL for access
        # to the file.
        result = self.client_get("/json" + uri)
        self.assert_json_success(result)
        data = result.json()
        url_only_url = data["url"]
        path = urllib.parse.urlparse(url_only_url).path
        assert path.startswith("/")
        key = path[1:]
        self.assertEqual(b"zulip!", bucket.Object(key).get()["Body"].read())

        # Note: Depending on whether the calls happened in the same
        # second (resulting in the same timestamp+signature),
        # url_only_url may or may not equal redirect_url.

        # Also exercise referencing the upload from a stream message.
        self.subscribe(self.example_user("hamlet"), "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + uri + ")"
        self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")
|
2017-12-21 09:37:59 +01:00
|
|
|
@use_s3_backend
|
|
|
|
def test_upload_avatar_image(self) -> None:
|
2018-12-07 18:15:51 +01:00
|
|
|
bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
|
2017-12-21 09:37:59 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_profile = self.example_user("hamlet")
|
2017-12-21 09:37:59 +01:00
|
|
|
path_id = user_avatar_path(user_profile)
|
|
|
|
original_image_path_id = path_id + ".original"
|
|
|
|
medium_path_id = path_id + "-medium.png"
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with get_test_image_file("img.png") as image_file:
|
2021-02-12 08:19:30 +01:00
|
|
|
zerver.lib.upload.upload_backend.upload_avatar_image(
|
|
|
|
image_file, user_profile, user_profile
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
with open(get_test_image_file("img.png").name, "rb") as f:
|
2019-07-14 21:37:08 +02:00
|
|
|
test_image_data = f.read()
|
2017-12-21 09:37:59 +01:00
|
|
|
test_medium_image_data = resize_avatar(test_image_data, MEDIUM_AVATAR_SIZE)
|
|
|
|
|
2018-12-07 17:52:01 +01:00
|
|
|
original_image_key = bucket.Object(original_image_path_id)
|
2017-12-21 09:37:59 +01:00
|
|
|
self.assertEqual(original_image_key.key, original_image_path_id)
|
2021-02-12 08:20:45 +01:00
|
|
|
image_data = original_image_key.get()["Body"].read()
|
2017-12-21 09:37:59 +01:00
|
|
|
self.assertEqual(image_data, test_image_data)
|
|
|
|
|
2018-12-07 17:52:01 +01:00
|
|
|
medium_image_key = bucket.Object(medium_path_id)
|
2017-12-21 09:37:59 +01:00
|
|
|
self.assertEqual(medium_image_key.key, medium_path_id)
|
2021-02-12 08:20:45 +01:00
|
|
|
medium_image_data = medium_image_key.get()["Body"].read()
|
2017-12-21 09:37:59 +01:00
|
|
|
self.assertEqual(medium_image_data, test_medium_image_data)
|
|
|
|
|
2018-12-07 17:52:01 +01:00
|
|
|
bucket.Object(medium_image_key.key).delete()
|
2021-03-17 17:54:23 +01:00
|
|
|
zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile, is_medium=True)
|
2018-12-07 17:52:01 +01:00
|
|
|
medium_image_key = bucket.Object(medium_path_id)
|
2017-12-21 09:37:59 +01:00
|
|
|
self.assertEqual(medium_image_key.key, medium_path_id)
|
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
    @use_s3_backend
    def test_copy_avatar_image(self) -> None:
        """copy_user_settings duplicates the source user's avatar files
        (base, ".original", and "-medium.png") to the target user's own S3
        keys, preserving content and content type."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        self.login("hamlet")
        with get_test_image_file("img.png") as image_file:
            self.client_post("/json/users/me/avatar", {"file": image_file})

        source_user_profile = self.example_user("hamlet")
        target_user_profile = self.example_user("othello")

        copy_user_settings(source_user_profile, target_user_profile)

        source_path_id = user_avatar_path(source_user_profile)
        target_path_id = user_avatar_path(target_user_profile)
        self.assertNotEqual(source_path_id, target_path_id)

        # Base avatar image.
        source_image_key = bucket.Object(source_path_id)
        target_image_key = bucket.Object(target_path_id)
        self.assertEqual(target_image_key.key, target_path_id)
        self.assertEqual(source_image_key.content_type, target_image_key.content_type)
        # NOTE(review): these two reads are never compared; the variables are
        # overwritten below — this looks like a missing assertEqual.
        source_image_data = source_image_key.get()["Body"].read()
        target_image_data = target_image_key.get()["Body"].read()

        # Original (unresized) image.
        source_original_image_path_id = source_path_id + ".original"
        target_original_image_path_id = target_path_id + ".original"
        target_original_image_key = bucket.Object(target_original_image_path_id)
        self.assertEqual(target_original_image_key.key, target_original_image_path_id)
        source_original_image_key = bucket.Object(source_original_image_path_id)
        self.assertEqual(
            source_original_image_key.content_type, target_original_image_key.content_type
        )
        source_image_data = source_original_image_key.get()["Body"].read()
        target_image_data = target_original_image_key.get()["Body"].read()
        self.assertEqual(source_image_data, target_image_data)

        # Medium-size image.
        target_medium_path_id = target_path_id + "-medium.png"
        source_medium_path_id = source_path_id + "-medium.png"
        source_medium_image_key = bucket.Object(source_medium_path_id)
        target_medium_image_key = bucket.Object(target_medium_path_id)
        self.assertEqual(target_medium_image_key.key, target_medium_path_id)
        self.assertEqual(source_medium_image_key.content_type, target_medium_image_key.content_type)
        source_medium_image_data = source_medium_image_key.get()["Body"].read()
        target_medium_image_data = target_medium_image_key.get()["Body"].read()
        self.assertEqual(source_medium_image_data, target_medium_image_data)
|
|
2018-09-07 17:44:40 +02:00
|
|
|
    @use_s3_backend
    def test_delete_avatar_image(self) -> None:
        """do_delete_avatar_image resets the avatar source to Gravatar and
        removes the base, original, and medium avatar objects from S3."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        self.login("hamlet")
        with get_test_image_file("img.png") as image_file:
            self.client_post("/json/users/me/avatar", {"file": image_file})

        user = self.example_user("hamlet")

        avatar_path_id = user_avatar_path(user)
        avatar_original_image_path_id = avatar_path_id + ".original"
        avatar_medium_path_id = avatar_path_id + "-medium.png"

        self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_USER)
        # NOTE(review): bucket.Object(...) never returns None, so these
        # assertions are vacuous; .load() (as used below) would actually
        # verify existence.
        self.assertIsNotNone(bucket.Object(avatar_path_id))
        self.assertIsNotNone(bucket.Object(avatar_original_image_path_id))
        self.assertIsNotNone(bucket.Object(avatar_medium_path_id))

        zerver.lib.actions.do_delete_avatar_image(user, acting_user=user)

        self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)

        # Confirm that the avatar files no longer exist in S3.
        with self.assertRaises(botocore.exceptions.ClientError):
            bucket.Object(avatar_path_id).load()
        with self.assertRaises(botocore.exceptions.ClientError):
            bucket.Object(avatar_original_image_path_id).load()
        with self.assertRaises(botocore.exceptions.ClientError):
            bucket.Object(avatar_medium_path_id).load()
|
2018-05-14 22:55:03 +02:00
|
|
|
    @use_s3_backend
    def test_upload_realm_icon_image(self) -> None:
        """upload_realm_icon_image stores the raw upload at
        "<realm_id>/realm/icon.original" and a resized square copy at
        "<realm_id>/realm/icon.png" in the avatar bucket."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        user_profile = self.example_user("hamlet")
        with get_test_image_file("img.png") as image_file:
            zerver.lib.upload.upload_backend.upload_realm_icon_image(image_file, user_profile)

        original_path_id = os.path.join(str(user_profile.realm.id), "realm", "icon.original")
        original_key = bucket.Object(original_path_id)
        with get_test_image_file("img.png") as image_file:
            self.assertEqual(image_file.read(), original_key.get()["Body"].read())

        resized_path_id = os.path.join(str(user_profile.realm.id), "realm", "icon.png")
        resized_data = bucket.Object(resized_path_id).get()["Body"].read()
        # The served icon is resized to the square default avatar size.
        resized_image = Image.open(io.BytesIO(resized_data)).size
        self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE))
|
|
|
|
    @use_s3_backend
    def _test_upload_logo_image(self, night: bool, file_name: str) -> None:
        """Shared helper: upload a realm logo (day or night variant per
        `night`) and verify both the stored original and the resized copy
        under "<realm_id>/realm/<file_name>.{original,png}"."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        user_profile = self.example_user("hamlet")
        with get_test_image_file("img.png") as image_file:
            zerver.lib.upload.upload_backend.upload_realm_logo_image(
                image_file, user_profile, night
            )

        original_path_id = os.path.join(
            str(user_profile.realm.id), "realm", f"{file_name}.original"
        )
        original_key = bucket.Object(original_path_id)
        with get_test_image_file("img.png") as image_file:
            self.assertEqual(image_file.read(), original_key.get()["Body"].read())

        resized_path_id = os.path.join(str(user_profile.realm.id), "realm", f"{file_name}.png")
        resized_data = bucket.Object(resized_path_id).get()["Body"].read()
        resized_image = Image.open(io.BytesIO(resized_data)).size
        self.assertEqual(resized_image, (DEFAULT_AVATAR_SIZE, DEFAULT_AVATAR_SIZE))
|
2019-01-27 08:25:10 +01:00
|
|
|
def test_upload_realm_logo_image(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self._test_upload_logo_image(night=False, file_name="logo")
|
|
|
|
self._test_upload_logo_image(night=True, file_name="night_logo")
|
2019-01-27 08:25:10 +01:00
|
|
|
|
2018-05-14 23:23:32 +02:00
|
|
|
    @use_s3_backend
    def test_upload_emoji_image(self) -> None:
        """upload_emoji_image on the S3 backend stores the raw upload at
        "<emoji_path>.original" and a resized copy at the emoji path."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        user_profile = self.example_user("hamlet")
        emoji_name = "emoji.png"
        with get_test_image_file("img.png") as image_file:
            zerver.lib.upload.upload_backend.upload_emoji_image(
                image_file, emoji_name, user_profile
            )

        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=user_profile.realm_id,
            emoji_file_name=emoji_name,
        )
        # The ".original" object must be byte-identical to the upload.
        original_key = bucket.Object(emoji_path + ".original")
        with get_test_image_file("img.png") as image_file:
            self.assertEqual(image_file.read(), original_key.get()["Body"].read())

        # The served copy is resized to the square default emoji size.
        resized_data = bucket.Object(emoji_path).get()["Body"].read()
        resized_image = Image.open(io.BytesIO(resized_data))
        self.assertEqual(resized_image.size, (DEFAULT_EMOJI_SIZE, DEFAULT_EMOJI_SIZE))
|
2021-03-17 17:54:23 +01:00
|
|
|
    @use_s3_backend
    def test_ensure_avatar_image(self) -> None:
        """Seed an avatar via upload_avatar_image, then check that
        ensure_avatar_image (re)creates correctly resized standard and
        medium variants in the S3 avatar bucket."""
        bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]

        user_profile = self.example_user("hamlet")
        base_file_path = user_avatar_path(user_profile)
        # Bug: This should have + ".png", but the implementation is wrong.
        # (The test intentionally matches the backend's current behavior of
        # writing the standard-size avatar at the bare path.)
        file_path = base_file_path
        original_file_path = base_file_path + ".original"
        medium_file_path = base_file_path + "-medium.png"

        with get_test_image_file("img.png") as image_file:
            zerver.lib.upload.upload_backend.upload_avatar_image(
                image_file, user_profile, user_profile
            )

        # Read back the uploaded original to compute the expected resizes.
        key = bucket.Object(original_file_path)
        image_data = key.get()["Body"].read()

        # Standard-size avatar.
        zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile)
        resized_avatar = resize_avatar(image_data)
        key = bucket.Object(file_path)
        self.assertEqual(resized_avatar, key.get()["Body"].read())

        # Medium-size avatar.
        zerver.lib.upload.upload_backend.ensure_avatar_image(user_profile, is_medium=True)
        resized_avatar = resize_avatar(image_data, MEDIUM_AVATAR_SIZE)
        key = bucket.Object(medium_file_path)
        self.assertEqual(resized_avatar, key.get()["Body"].read())
|
2018-05-14 23:37:02 +02:00
|
|
|
@use_s3_backend
|
|
|
|
def test_get_emoji_url(self) -> None:
|
|
|
|
emoji_name = "emoji.png"
|
|
|
|
realm_id = 1
|
|
|
|
bucket = settings.S3_AVATAR_BUCKET
|
|
|
|
path = RealmEmoji.PATH_ID_TEMPLATE.format(realm_id=realm_id, emoji_file_name=emoji_name)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
url = zerver.lib.upload.upload_backend.get_emoji_url("emoji.png", realm_id)
|
2018-05-14 23:37:02 +02:00
|
|
|
|
2020-06-09 00:25:09 +02:00
|
|
|
expected_url = f"https://{bucket}.s3.amazonaws.com/{path}"
|
2018-05-14 23:37:02 +02:00
|
|
|
self.assertEqual(expected_url, url)
|
|
|
|
|
2019-06-21 22:46:04 +02:00
|
|
|
@use_s3_backend
|
2019-06-27 20:41:47 +02:00
|
|
|
def test_tarball_upload_and_deletion(self) -> None:
|
2019-06-21 22:46:04 +02:00
|
|
|
bucket = create_s3_buckets(settings.S3_AVATAR_BUCKET)[0]
|
|
|
|
|
|
|
|
user_profile = self.example_user("iago")
|
|
|
|
self.assertTrue(user_profile.is_realm_admin)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
tarball_path = os.path.join(settings.TEST_WORKER_DIR, "tarball.tar.gz")
|
|
|
|
with open(tarball_path, "w") as f:
|
|
|
|
f.write("dummy")
|
2019-06-21 22:46:04 +02:00
|
|
|
|
2020-07-30 22:10:15 +02:00
|
|
|
total_bytes_transferred = 0
|
|
|
|
|
|
|
|
def percent_callback(bytes_transferred: int) -> None:
|
|
|
|
nonlocal total_bytes_transferred
|
|
|
|
total_bytes_transferred += bytes_transferred
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
uri = upload_export_tarball(
|
|
|
|
user_profile.realm, tarball_path, percent_callback=percent_callback
|
|
|
|
)
|
2020-07-30 22:10:15 +02:00
|
|
|
# Verify the percent_callback API works
|
|
|
|
self.assertEqual(total_bytes_transferred, 5)
|
2019-06-21 22:46:04 +02:00
|
|
|
|
|
|
|
result = re.search(re.compile(r"([0-9a-fA-F]{32})"), uri)
|
|
|
|
if result is not None:
|
|
|
|
hex_value = result.group(1)
|
2020-06-10 06:40:53 +02:00
|
|
|
expected_url = f"https://{bucket.name}.s3.amazonaws.com/exports/{hex_value}/{os.path.basename(tarball_path)}"
|
2019-06-21 22:46:04 +02:00
|
|
|
self.assertEqual(uri, expected_url)
|
|
|
|
|
2019-06-27 20:41:47 +02:00
|
|
|
# Delete the tarball.
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs(level="WARNING") as warn_log:
|
|
|
|
self.assertIsNone(delete_export_tarball("/not_a_file"))
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
warn_log.output,
|
2021-02-12 08:20:45 +01:00
|
|
|
["WARNING:root:not_a_file does not exist. Its entry in the database will be removed."],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-06-27 20:41:47 +02:00
|
|
|
path_id = urllib.parse.urlparse(uri).path
|
|
|
|
self.assertEqual(delete_export_tarball(path_id), path_id)
|
2018-05-14 19:53:26 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-01 04:19:54 +02:00
|
|
|
class SanitizeNameTests(ZulipTestCase):
    def test_file_name(self) -> None:
        """sanitize_name strips disallowed characters while preserving
        dotfiles, multi-part extensions, and non-ASCII word characters."""
        cases = [
            ("test.txt", "test.txt"),
            (".hidden", ".hidden"),
            (".hidden.txt", ".hidden.txt"),
            ("tarball.tar.gz", "tarball.tar.gz"),
            (".hidden_tarball.tar.gz", ".hidden_tarball.tar.gz"),
            ("Testing{}*&*#().ta&&%$##&&r.gz", "Testing.tar.gz"),
            ("*testingfile?*.txt", "testingfile.txt"),
            ("snowman☃.txt", "snowman.txt"),
            ("테스트.txt", "테스트.txt"),
            ('~/."\\`\\?*"u0`000ssh/test.t**{}ar.gz', ".u0000sshtest.tar.gz"),
        ]
        for raw, expected in cases:
            self.assertEqual(sanitize_name(raw), expected)
|
|
|
|
|
|
|
|
class UploadSpaceTests(UploadSerializeMixin, ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.realm = get_realm("zulip")
        self.user_profile = self.example_user("hamlet")

    def test_currently_used_upload_space(self) -> None:
        """Track how upload_message_file and Attachment changes affect both
        currently_used_upload_space_bytes and its cache entry."""
        # No uploads yet: cache is cold, usage is 0, and computing the usage
        # populates the cache.
        self.assertEqual(None, cache_get(get_realm_used_upload_space_cache_key(self.realm)))
        self.assertEqual(0, self.realm.currently_used_upload_space_bytes())
        self.assertEqual(0, cache_get(get_realm_used_upload_space_cache_key(self.realm))[0])

        data = b"zulip!"
        upload_message_file("dummy.txt", len(data), "text/plain", data, self.user_profile)
        # notify_attachment_update function calls currently_used_upload_space_bytes which
        # updates the cache.
        self.assert_length(data, cache_get(get_realm_used_upload_space_cache_key(self.realm))[0])
        self.assert_length(data, self.realm.currently_used_upload_space_bytes())

        # A second upload adds to the total.
        data2 = b"more-data!"
        upload_message_file("dummy2.txt", len(data2), "text/plain", data2, self.user_profile)
        self.assertEqual(
            len(data) + len(data2), cache_get(get_realm_used_upload_space_cache_key(self.realm))[0]
        )
        self.assertEqual(len(data) + len(data2), self.realm.currently_used_upload_space_bytes())

        # Renaming an attachment does not change the used space.
        attachment = Attachment.objects.get(file_name="dummy.txt")
        attachment.file_name = "dummy1.txt"
        attachment.save(update_fields=["file_name"])
        self.assertEqual(
            len(data) + len(data2), cache_get(get_realm_used_upload_space_cache_key(self.realm))[0]
        )
        self.assertEqual(len(data) + len(data2), self.realm.currently_used_upload_space_bytes())

        # Deleting an attachment invalidates the cache; recomputing yields
        # only the remaining upload's size.
        attachment.delete()
        self.assertEqual(None, cache_get(get_realm_used_upload_space_cache_key(self.realm)))
        self.assert_length(data2, self.realm.currently_used_upload_space_bytes())
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-01 04:19:54 +02:00
|
|
|
class ExifRotateTests(ZulipTestCase):
    def test_image_do_not_rotate(self) -> None:
        """Images without usable EXIF orientation data pass through unchanged."""
        unrotated_images = [
            # Image does not have _getexif method.
            "img.png",
            # Image with no exif data.
            "img_no_exif.jpg",
            # Orientation of the image is 1.
            "img.jpg",
        ]
        for image_name in unrotated_images:
            with get_test_image_file(image_name) as f, Image.open(f) as img:
                self.assertEqual(exif_rotate(img), img)

    def test_image_rotate(self) -> None:
        """EXIF orientations 3, 6, and 8 each trigger the matching rotation."""
        expected_rotations = [
            ("img_orientation_3.jpg", 180),
            ("img_orientation_6.jpg", 270),
            ("img_orientation_8.jpg", 90),
        ]
        with mock.patch("PIL.Image.Image.rotate") as rotate:
            for image_name, degrees in expected_rotations:
                with get_test_image_file(image_name) as f, Image.open(f) as img:
                    exif_rotate(img)
                    rotate.assert_called_with(degrees, expand=True)
|
2019-01-13 07:27:30 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-13 07:27:30 +01:00
|
|
|
class DecompressionBombTests(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        # Map each image-upload endpoint to the error string it reports
        # when handed a decompression-bomb image.
        self.test_urls = {
            "/json/users/me/avatar": "Image size exceeds limit.",
            "/json/realm/logo": "Image size exceeds limit.",
            "/json/realm/icon": "Image size exceeds limit.",
            "/json/realm/emoji/bomb_emoji": "Image file upload failed.",
        }

    def test_decompression_bomb(self) -> None:
        """Every image-upload endpoint rejects a decompression-bomb image."""
        self.login("iago")
        with get_test_image_file("bomb.png") as fp:
            for url, error_string in self.test_urls.items():
                # Rewind so each endpoint reads the file from the beginning.
                fp.seek(0, 0)
                payload = {"f1": fp}
                if url == "/json/realm/logo":
                    # The logo endpoint additionally takes a "night" flag.
                    payload["night"] = orjson.dumps(False).decode()
                result = self.client_post(url, payload)
                self.assert_json_error(result, error_string)
|