2020-06-11 00:54:34 +02:00
|
|
|
import os
|
|
|
|
import re
|
2024-08-29 23:39:07 +02:00
|
|
|
from datetime import timedelta
|
2020-06-11 00:54:34 +02:00
|
|
|
from io import StringIO
|
|
|
|
from unittest import mock
|
2024-07-11 21:10:17 +02:00
|
|
|
from unittest.mock import patch
|
2023-12-05 21:14:17 +01:00
|
|
|
from urllib.parse import quote
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2024-06-13 05:48:22 +02:00
|
|
|
import pyvips
|
2024-08-29 23:39:07 +02:00
|
|
|
import time_machine
|
2016-04-14 16:26:01 +02:00
|
|
|
from django.conf import settings
|
2024-09-06 16:46:13 +02:00
|
|
|
from django.core.files.uploadedfile import SimpleUploadedFile
|
2024-04-19 03:06:53 +02:00
|
|
|
from django.utils.timezone import now as timezone_now
|
2024-07-11 04:50:44 +02:00
|
|
|
from pyvips import at_least_libvips
|
|
|
|
from pyvips import version as libvips_version
|
2023-10-12 19:43:45 +02:00
|
|
|
from typing_extensions import override
|
2016-04-14 16:26:01 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
import zerver.lib.upload
|
2024-04-19 03:06:53 +02:00
|
|
|
from analytics.models import RealmCount
|
2022-04-14 23:58:15 +02:00
|
|
|
from zerver.actions.create_realm import do_create_realm
|
2024-07-24 13:51:19 +02:00
|
|
|
from zerver.actions.create_user import do_create_user
|
2022-04-14 23:50:10 +02:00
|
|
|
from zerver.actions.message_send import internal_send_private_message
|
2022-04-14 23:39:22 +02:00
|
|
|
from zerver.actions.realm_icon import do_change_icon_source
|
2022-04-14 23:37:16 +02:00
|
|
|
from zerver.actions.realm_logo import do_change_logo_source
|
2022-04-14 23:57:15 +02:00
|
|
|
from zerver.actions.realm_settings import do_change_realm_plan_type, do_set_realm_property
|
2022-04-14 23:49:26 +02:00
|
|
|
from zerver.actions.user_settings import do_delete_avatar_image
|
2023-12-15 20:03:19 +01:00
|
|
|
from zerver.lib.attachments import validate_attachment_request
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.avatar import avatar_url, get_avatar_field
|
2024-04-19 03:06:53 +02:00
|
|
|
from zerver.lib.cache import cache_delete, cache_get, get_realm_used_upload_space_cache_key
|
2021-06-29 18:08:42 +02:00
|
|
|
from zerver.lib.create_user import copy_default_settings
|
2020-03-06 18:40:46 +01:00
|
|
|
from zerver.lib.initial_password import initial_password
|
2017-02-21 03:41:20 +01:00
|
|
|
from zerver.lib.realm_icon import realm_icon_url
|
2019-08-19 19:46:45 +02:00
|
|
|
from zerver.lib.realm_logo import get_realm_logo_url
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.test_classes import UploadSerializeMixin, ZulipTestCase
|
2024-07-16 23:53:27 +02:00
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
avatar_disk_path,
|
|
|
|
consume_response,
|
|
|
|
get_test_image_file,
|
|
|
|
ratelimit_rule,
|
|
|
|
)
|
2024-06-20 18:12:58 +02:00
|
|
|
from zerver.lib.upload import sanitize_name, upload_message_attachment
|
|
|
|
from zerver.lib.upload.base import ZulipUploadBackend
|
2023-02-28 16:32:43 +01:00
|
|
|
from zerver.lib.upload.local import LocalUploadBackend
|
2022-12-14 21:51:37 +01:00
|
|
|
from zerver.lib.upload.s3 import S3UploadBackend
|
2018-08-01 10:53:40 +02:00
|
|
|
from zerver.lib.users import get_api_key
|
2023-12-15 20:03:19 +01:00
|
|
|
from zerver.models import Attachment, Message, Realm, RealmDomain, UserProfile
|
2023-12-15 02:14:24 +01:00
|
|
|
from zerver.models.realms import get_realm
|
2023-12-15 01:16:00 +01:00
|
|
|
from zerver.models.users import get_system_bot, get_user_by_delivery_email
|
2017-03-08 19:47:42 +01:00
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
class FileUploadTest(UploadSerializeMixin, ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_rest_endpoint(self) -> None:
    """
    Tests the /api/v1/user_uploads API endpoint. Here a single file is uploaded
    and downloaded using a username and api_key
    """
    attachment = StringIO("zulip!")
    attachment.name = "zulip.txt"

    # Upload the file over the API.
    result = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads", {"file": attachment}
    )
    response_dict = self.assert_json_success(result)
    for key in ("uri", "url"):
        self.assertIn(key, response_dict)
    url = response_dict["url"]
    # Both keys must point at the same path.
    self.assertEqual(response_dict["uri"], url)
    base = "/user_uploads/"
    self.assertEqual(base, url[: len(base)])

    # Fetch the file back over the API, with no web session active.
    self.logout()
    response = self.api_get(self.example_user("hamlet"), url)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.getvalue(), b"zulip!")

    # Files uploaded through the API should be accessible via the web client
    self.login("hamlet")
    self.assertEqual(self.client_get(url).getvalue(), b"zulip!")
|
2016-06-25 11:05:59 +02:00
|
|
|
|
2018-04-13 19:04:39 +02:00
|
|
|
def test_mobile_api_endpoint(self) -> None:
    """
    Tests the /api/v1/user_uploads API endpoint with ?api_key
    auth. Here a single file is uploaded and downloaded using a
    username and api_key
    """
    attachment = StringIO("zulip!")
    attachment.name = "zulip.txt"

    # Upload the file over the API.
    result = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads", {"file": attachment}
    )
    response_dict = self.assert_json_success(result)
    for key in ("uri", "url"):
        self.assertIn(key, response_dict)
    url = response_dict["url"]
    # Both keys must point at the same path.
    self.assertEqual(response_dict["uri"], url)
    base = "/user_uploads/"
    self.assertEqual(base, url[: len(base)])

    self.logout()

    # Try to download file via API, passing URL and invalid API key
    user_profile = self.example_user("hamlet")

    response = self.client_get(url, {"api_key": "invalid"})
    self.assertEqual(response.status_code, 401)

    # A valid api_key query parameter grants access without a session.
    response = self.client_get(url, {"api_key": get_api_key(user_profile)})
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.getvalue(), b"zulip!")
|
2018-04-13 19:04:39 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_too_big_failure(self) -> None:
    """
    Attempting to upload big files should fail.
    """
    self.login("hamlet")
    oversized = StringIO("bah!")
    oversized.name = "a.txt"

    # Use MAX_FILE_UPLOAD_SIZE of 0, because the next increment
    # would be 1MB.
    with self.settings(MAX_FILE_UPLOAD_SIZE=0):
        result = self.client_post("/json/user_uploads", {"f1": oversized})
        self.assert_json_error(
            result,
            "File is larger than this server's configured maximum upload size (0 MiB).",
        )
|
|
|
|
|
|
|
|
def test_file_too_big_failure_standard_plan(self) -> None:
    """
    Verify error message where a plan is involved.
    """
    self.login("hamlet")
    oversized = StringIO("bah!")
    oversized.name = "a.txt"

    # Put the realm on a limited plan so the plan-specific error is used.
    realm = get_realm("zulip")
    realm.plan_type = Realm.PLAN_TYPE_LIMITED
    realm.save()
    with self.settings(MAX_FILE_UPLOAD_SIZE=0):
        result = self.client_post("/json/user_uploads", {"f1": oversized})
        self.assert_json_error(
            result,
            "File is larger than the maximum upload size (0 MiB) allowed by your organization's plan.",
        )
|
2016-09-16 16:41:04 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_upload_failure(self) -> None:
    """
    Attempting to upload two files should fail.
    """
    self.login("hamlet")
    first = StringIO("bah!")
    first.name = "a.txt"
    second = StringIO("pshaw!")
    second.name = "b.txt"

    # The endpoint accepts exactly one file per request.
    result = self.client_post("/json/user_uploads", {"f1": first, "f2": second})
    self.assert_json_error(result, "You may only upload one file at a time")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_no_file_upload_failure(self) -> None:
    """
    Calling this endpoint with no files should fail.
    """
    self.login("hamlet")

    # POST with no multipart file attached at all.
    result = self.client_post("/json/user_uploads")
    self.assert_json_error(result, "You must specify a file to upload")
|
|
|
|
|
2022-07-27 22:54:31 +02:00
|
|
|
def test_guess_content_type_from_filename(self) -> None:
    """
    Test coverage for files without content-type in the metadata;
    in which case we try to guess the content-type from the filename.
    """
    # No extension and no content-type: the server has nothing to
    # guess from, so the download falls back to the generic
    # application/octet-stream.
    uploaded_file = SimpleUploadedFile("somefile", b"zulip!", content_type="")
    result = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads", {"file": uploaded_file}
    )

    self.login("hamlet")
    response_dict = self.assert_json_success(result)
    url = response_dict["url"]
    result = self.client_get(url)
    self.assertEqual(result.status_code, 200)
    self.assertEqual(result["Content-Type"], "application/octet-stream")

    # A .txt extension (still with empty content-type metadata) should
    # be guessed as text/plain on download.
    uploaded_file = SimpleUploadedFile("somefile.txt", b"zulip!", content_type="")
    result = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads", {"file": uploaded_file}
    )
    # Note: the original code called assert_json_success twice on the
    # same response; the redundant call has been removed.
    response_dict = self.assert_json_success(result)
    url = response_dict["url"]
    result = self.client_get(url)
    self.assertEqual(result.status_code, 200)
    self.assertEqual(result["Content-Type"], "text/plain")
|
|
|
|
|
2024-09-04 20:19:25 +02:00
|
|
|
def test_preserve_provided_content_type(self) -> None:
    """An explicit content-type on upload wins over filename guessing."""
    # The filename says .txt, but the uploader declared image/png.
    uploaded_file = SimpleUploadedFile("somefile.txt", b"zulip!", content_type="image/png")
    result = self.api_post(
        self.example_user("hamlet"), "/api/v1/user_uploads", {"file": uploaded_file}
    )

    self.login("hamlet")
    url = self.assert_json_success(result)["url"]
    result = self.client_get(url)
    self.assertEqual(result.status_code, 200)
    # The declared type is served back, not one guessed from ".txt".
    self.assertEqual(result["Content-Type"], "image/png")
|
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
# This test will go through the code path for uploading files onto LOCAL storage
|
2020-10-23 02:43:28 +02:00
|
|
|
# when Zulip is in DEVELOPMENT mode.
|
2017-11-05 10:51:25 +01:00
|
|
|
# This test will go through the code path for uploading files onto LOCAL storage
# when Zulip is in DEVELOPMENT mode.
def test_file_upload_authed(self) -> None:
    """
    A call to /json/user_uploads should return a url and actually create an
    entry in the database. This entry will be marked unclaimed till a message
    refers it.
    """
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"

    result = self.client_post("/json/user_uploads", {"file": fp})
    response_dict = self.assert_json_success(result)
    # Both "uri" and "url" are returned and must agree.
    self.assertIn("uri", response_dict)
    self.assertIn("url", response_dict)
    url = response_dict["url"]
    self.assertEqual(response_dict["uri"], url)
    base = "/user_uploads/"
    self.assertEqual(base, url[: len(base)])

    # While logged in, the raw URL serves the file contents directly.
    self.assertEqual(self.client_get(url).getvalue(), b"zulip!")

    # Check the download endpoint
    download_url = url.replace("/user_uploads/", "/user_uploads/download/")
    result = self.client_get(download_url)
    self.assertEqual(result.getvalue(), b"zulip!")
    # The /download/ variant forces a save-as via Content-Disposition.
    self.assertIn("attachment;", result.headers["Content-Disposition"])

    # check if DB has attachment marked as unclaimed
    entry = Attachment.objects.get(file_name="zulip.txt")
    self.assertEqual(entry.is_claimed(), False)

    # Referencing the upload in a message claims the attachment.
    hamlet = self.example_user("hamlet")
    self.subscribe(hamlet, "Denmark")
    body = f"First message ...[zulip.txt]({hamlet.realm.host}{url})"
    self.send_stream_message(hamlet, "Denmark", body, "test")

    # Now try the endpoint that's supposed to return a temporary URL for access
    # to the file.
    result = self.client_get("/json" + url)
    data = self.assert_json_success(result)
    url_only_url = data["url"]
    # Ensure this is different from the original url:
    self.assertNotEqual(url_only_url, url)
    self.assertIn("user_uploads/temporary/", url_only_url)
    self.assertTrue(url_only_url.endswith("zulip.txt"))
    # The generated URL has a token authorizing the requester to access the file
    # without being logged in.
    self.logout()
    self.assertEqual(self.client_get(url_only_url).getvalue(), b"zulip!")
    # The original url shouldn't work when logged out -- it redirects to the login page
    result = self.client_get(url)
    self.assertEqual(result.status_code, 302)
    self.assertTrue(result.headers["Location"].endswith(f"/login/?next={url}"))
|
2021-11-02 15:42:58 +01:00
|
|
|
|
|
|
|
def test_serve_file_unauthed(self) -> None:
    """Spectator (logged-out) access to uploads: allowed only for files
    referenced from web-public streams, and subject to a per-file rate
    limit (domain "spectator_attachment_access_by_file")."""
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip_web_public.txt"

    result = self.client_post("/json/user_uploads", {"file": fp})
    url = self.assert_json_success(result)["url"]

    with ratelimit_rule(86400, 1000, domain="spectator_attachment_access_by_file"):
        # Deny file access for non-web-public stream
        self.subscribe(self.example_user("hamlet"), "Denmark")
        host = self.example_user("hamlet").realm.host
        body = f"First message ...[zulip.txt](http://{host}" + url + ")"
        self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")

        # Logged out, the file is not yet reachable: redirect to login.
        self.logout()
        response = self.client_get(url)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.headers["Location"].endswith(f"/login/?next={url}"))

        # Allow file access for web-public stream
        self.login("hamlet")
        self.make_stream("web-public-stream", is_web_public=True)
        self.subscribe(self.example_user("hamlet"), "web-public-stream")
        body = f"First message ...[zulip.txt](http://{host}" + url + ")"
        self.send_stream_message(self.example_user("hamlet"), "web-public-stream", body, "test")

        # Once referenced from a web-public stream, spectators can fetch it.
        self.logout()
        response = self.client_get(url)
        self.assertEqual(response.status_code, 200)
        consume_response(response)

    # Deny file access since rate limited
    with ratelimit_rule(86400, 0, domain="spectator_attachment_access_by_file"):
        response = self.client_get(url)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.headers["Location"].endswith(f"/login/?next={url}"))

    # Check that the /download/ variant works as well
    download_url = url.replace("/user_uploads/", "/user_uploads/download/")
    with ratelimit_rule(86400, 1000, domain="spectator_attachment_access_by_file"):
        response = self.client_get(download_url)
        self.assertEqual(response.status_code, 200)
        consume_response(response)
    with ratelimit_rule(86400, 0, domain="spectator_attachment_access_by_file"):
        response = self.client_get(download_url)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response.headers["Location"].endswith(f"/login/?next={download_url}"))

    # Deny random file access
    response = self.client_get(
        "/user_uploads/2/71/QYB7LA-ULMYEad-QfLMxmI2e/zulip-non-existent.txt"
    )
    self.assertEqual(response.status_code, 404)
|
2020-04-08 00:27:24 +02:00
|
|
|
|
|
|
|
def test_serve_local_file_unauthed_invalid_token(self) -> None:
    """A bogus token in the temporary-URL path is rejected."""
    response = self.client_get("/user_uploads/temporary/badtoken/file.png")
    self.assert_json_error(response, "Invalid token")
|
|
|
|
|
2020-04-18 16:11:13 +02:00
|
|
|
def test_serve_local_file_unauthed_altered_filename(self) -> None:
    """A temporary-access token is bound to the original filename; swapping
    in a different filename must be rejected."""
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": upload})
    json_url = "/json" + self.assert_json_success(result)["url"]

    # Obtain a tokenized temporary URL for the upload.
    result = self.client_get(json_url)
    temporary_url = self.assert_json_success(result)["url"]

    self.assertTrue(temporary_url.endswith("zulip.txt"))
    # Keep the token, replace the trailing filename.
    tampered_url = temporary_url.split("zulip.txt")[0] + "differentname.exe"
    result = self.client_get(tampered_url)
    self.assert_json_error(result, "Invalid filename")
|
|
|
|
|
2020-04-08 00:27:24 +02:00
|
|
|
def test_serve_local_file_unauthed_token_expires(self) -> None:
    """Temporary-access tokens work while fresh and stop working after
    their lifetime elapses (just over 60 seconds here)."""
    self.login("hamlet")
    fp = StringIO("zulip!")
    fp.name = "zulip.txt"
    now = timezone_now()
    # Freeze time so the token's issue instant is exactly `now`.
    with time_machine.travel(now, tick=False):
        result = self.client_post("/json/user_uploads", {"file": fp})
        response_dict = self.assert_json_success(result)
        url = "/json" + response_dict["url"]

        result = self.client_get(url)
        data = self.assert_json_success(result)
        url_only_url = data["url"]
        self.logout()
        # The tokenized URL works without a session at t=0.
        self.assertEqual(self.client_get(url_only_url).getvalue(), b"zulip!")

    # Still valid at t=30s.
    with time_machine.travel(now + timedelta(seconds=30), tick=False):
        self.assertEqual(self.client_get(url_only_url).getvalue(), b"zulip!")

    # After over 60 seconds, the token should become invalid:
    with time_machine.travel(now + timedelta(seconds=61), tick=False):
        result = self.client_get(url_only_url)
        self.assert_json_error(result, "Invalid token")
|
|
|
|
|
|
|
|
def test_serve_local_file_unauthed_token_deleted(self) -> None:
    """A temporary-access token stops working once the underlying
    Attachment row has been deleted."""
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    frozen_now = timezone_now()
    with time_machine.travel(frozen_now, tick=False):
        result = self.client_post("/json/user_uploads", {"file": upload})
        upload_response = self.assert_json_success(result)
        json_url = "/json" + upload_response["url"]

        # Obtain a tokenized temporary URL for the upload.
        result = self.client_get(json_url)
        temporary_url = self.assert_json_success(result)["url"]

        # Remove the attachment record out from under the token.
        attachment_path = upload_response["url"].removeprefix("/user_uploads/")
        Attachment.objects.get(path_id=attachment_path).delete()

        result = self.client_get(temporary_url)
        self.assert_json_error(result, "Invalid token")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_download_unauthed(self) -> None:
    """Fetching an upload while logged out redirects to the login page."""
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": upload})
    url = self.assert_json_success(result)["url"]

    self.logout()
    response = self.client_get(url)
    self.assertEqual(response.status_code, 302)
    # The redirect preserves the requested file as the ?next target.
    self.assertTrue(response.headers["Location"].endswith(f"/login/?next={url}"))
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2023-11-21 22:03:02 +01:00
|
|
|
def test_image_download_unauthed(self) -> None:
    """
    As the above, but with an Accept header that prefers images.
    """
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": upload})
    url = self.assert_json_success(result)["url"]

    self.logout()
    response = self.client_get(
        url,
        # This is what Chrome sends for <img> tags
        headers={"Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8"},
    )
    # An image-preferring client gets a 403 with an image body rather
    # than an HTML login redirect.
    self.assertEqual(response.status_code, 403)
    self.assertEqual(response.headers["Content-Type"], "image/png")
    consume_response(response)
|
2023-11-21 22:03:02 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_removed_file_download(self) -> None:
    """
    Trying to download deleted files should return 404 error
    """
    self.login("hamlet")
    upload = StringIO("zulip!")
    upload.name = "zulip.txt"
    result = self.client_post("/json/user_uploads", {"file": upload})
    response_dict = self.assert_json_success(result)

    # Wipe the on-disk storage behind the upload.
    assert settings.LOCAL_UPLOADS_DIR is not None
    self.rm_tree(settings.LOCAL_UPLOADS_DIR)

    # Both the legacy "uri" and the "url" paths should now 404.
    for key in ("uri", "url"):
        response = self.client_get(response_dict[key])
        self.assertEqual(response.status_code, 404)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_non_existing_file_download(self) -> None:
    """
    Trying to download a file that was never uploaded will return a json_error
    """
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    missing_url = f"http://{hamlet.realm.host}/user_uploads/{hamlet.realm_id}/ff/gg/abc.py"
    response = self.client_get(missing_url)
    self.assertEqual(response.status_code, 404)
    self.assert_in_response("This file does not exist or has been deleted.", response)
|
2016-06-27 21:09:56 +02:00
|
|
|
|
2023-11-21 22:03:02 +01:00
|
|
|
def test_non_existing_image_download(self) -> None:
    """
    As the above method, but with an Accept header that prefers images to text
    """
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    missing_url = f"http://{hamlet.realm.host}/user_uploads/{hamlet.realm_id}/ff/gg/abc.png"

    response = self.client_get(
        missing_url,
        # This is what Chrome sends for <img> tags
        headers={"Accept": "image/avif,image/webp,image/apng,image/*,*/*;q=0.8"},
    )
    # Image-preferring clients get an image 404 body.
    self.assertEqual(response.status_code, 404)
    self.assertEqual(response.headers["Content-Type"], "image/png")
    consume_response(response)

    response = self.client_get(
        missing_url,
        # Ask for something neither image nor text -- you get text as a default
        headers={"Accept": "audio/*,application/octet-stream"},
    )
    self.assertEqual(response.status_code, 404)
    self.assertEqual(response.headers["Content-Type"], "text/html; charset=utf-8")
    self.assert_in_response("This file does not exist or has been deleted.", response)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_attachment_url_without_upload(self) -> None:
    """Sending a message that links a user_uploads path which was never
    uploaded must not create an Attachment row, and logs a warning."""
    hamlet = self.example_user("hamlet")
    self.login_user(hamlet)
    # Capture the warning emitted while the message is sent.
    with self.assertLogs(level="WARNING") as warn_log:
        body = f"Test message ...[zulip.txt](http://{hamlet.realm.host}/user_uploads/{hamlet.realm_id}/64/fake_path_id.txt)"
        message_id = self.send_stream_message(
            self.example_user("hamlet"), "Denmark", body, "test"
        )
    # No Attachment row is created for the fabricated path.
    self.assertFalse(
        Attachment.objects.filter(path_id=f"{hamlet.realm_id}/64/fake_path_id.txt").exists()
    )

    self.assertEqual(
        warn_log.output,
        [
            f"WARNING:root:User {hamlet.id} tried to share upload {hamlet.realm_id}/64/fake_path_id.txt in message {message_id}, but lacks permission"
        ],
    )
|
2017-04-14 01:15:46 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_claim_attachments(self) -> None:
    """
    This test tries to claim the same attachment twice. The messages field in
    the Attachment model should have both the messages in its entry.
    """
    self.login("hamlet")
    d1 = StringIO("zulip!")
    d1.name = "dummy_1.txt"
    result = self.client_post("/json/user_uploads", {"file": d1})
    response_dict = self.assert_json_success(result)
    d1_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])

    self.subscribe(self.example_user("hamlet"), "Denmark")
    host = self.example_user("hamlet").realm.host
    # Reference the same upload from two separate messages.
    for prefix in ("First", "Second"):
        body = f"{prefix} message ...[zulip.txt](http://{host}/user_uploads/{d1_path_id})"
        self.send_stream_message(self.example_user("hamlet"), "Denmark", body, "test")

    # The single attachment should now be linked to both messages.
    self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 2)
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_claim_attachments_different_owners(self) -> None:
|
2017-04-14 00:59:59 +02:00
|
|
|
"""This test tries to claim the same attachment more than once, first
|
2017-11-09 16:26:38 +01:00
|
|
|
with a private stream and then with different recipients."""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2017-04-14 00:59:59 +02:00
|
|
|
d1 = StringIO("zulip!")
|
|
|
|
d1.name = "dummy_1.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": d1})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2024-07-15 07:06:38 +02:00
|
|
|
d1_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])
|
2021-02-12 08:20:45 +01:00
|
|
|
host = self.example_user("hamlet").realm.host
|
2017-04-14 00:59:59 +02:00
|
|
|
|
|
|
|
self.make_stream("private_stream", invite_only=True)
|
2017-08-25 06:01:29 +02:00
|
|
|
self.subscribe(self.example_user("hamlet"), "private_stream")
|
2017-04-14 00:59:59 +02:00
|
|
|
|
2019-12-13 03:56:59 +01:00
|
|
|
# First, send the message to the new private stream.
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"First message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("hamlet"), "private_stream", body, "test")
|
2017-04-14 00:59:59 +02:00
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
|
|
|
|
self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 1)
|
|
|
|
|
|
|
|
# Then, try having a user who didn't receive the message try to publish it, and fail
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"Illegal message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
|
2021-07-10 09:26:03 +02:00
|
|
|
cordelia = self.example_user("cordelia")
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertLogs(level="WARNING") as warn_log:
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.send_stream_message(cordelia, "Verona", body, "test")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertTrue(
|
2021-07-10 09:26:03 +02:00
|
|
|
f"WARNING:root:User {cordelia.id} tried to share upload" in warn_log.output[0]
|
2021-02-12 08:20:45 +01:00
|
|
|
and "but lacks permission" in warn_log.output[0]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2017-04-14 00:59:59 +02:00
|
|
|
self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 1)
|
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
|
2020-08-01 03:17:21 +02:00
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)
|
2017-04-14 00:59:59 +02:00
|
|
|
|
2023-06-19 16:26:12 +02:00
|
|
|
# Then, have it in a direct message to another user, giving that other user access.
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"Second message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_personal_message(self.example_user("hamlet"), self.example_user("othello"), body)
|
2017-04-14 00:59:59 +02:00
|
|
|
self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 2)
|
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
|
2020-08-01 03:17:21 +02:00
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)
|
2017-04-14 00:59:59 +02:00
|
|
|
|
|
|
|
# Then, have that new recipient user publish it.
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"Third message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.send_stream_message(self.example_user("othello"), "Verona", body, "test")
|
2017-04-14 00:59:59 +02:00
|
|
|
self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 3)
|
|
|
|
self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
|
2020-08-01 03:17:21 +02:00
|
|
|
self.assertFalse(Attachment.objects.get(path_id=d1_path_id).is_web_public)
|
|
|
|
|
|
|
|
# Finally send to Rome, the web-public stream, and confirm it's now web-public
|
|
|
|
body = f"Fourth message ...[zulip.txt](http://{host}/user_uploads/" + d1_path_id + ")"
|
tests: Ensure stream senders get a UserMessage row.
We now complain if a test author sends a stream message
that does not result in the sender getting a
UserMessage row for the message.
This is basically 100% equivalent to complaining that
the author failed to subscribe the sender to the stream
as part of the test setup, as far as I can tell, so the
AssertionError instructs the author to subscribe the
sender to the stream.
We exempt bots from this check, although it is
plausible we should only exempt the system bots like
the notification bot.
I considered auto-subscribing the sender to the stream,
but that can be a little more expensive than the
current check, and we generally want test setup to be
explicit.
If there is some legitimate way than a subscribed human
sender can't get a UserMessage, then we probably want
an explicit test for that, or we may want to change the
backend to just write a UserMessage row in that
hypothetical situation.
For most tests, including almost all the ones fixed
here, the author just wants their test setup to
realistically reflect normal operation, and often devs
may not realize that Cordelia is not subscribed to
Denmark or not realize that Hamlet is not subscribed to
Scotland.
Some of us don't remember our Shakespeare from high
school, and our stream subscriptions don't even
necessarily reflect which countries the Bard placed his
characters in.
There may also be some legitimate use case where an
author wants to simulate sending a message to an
unsubscribed stream, but for those edge cases, they can
always set allow_unsubscribed_sender to True.
2021-12-10 13:55:48 +01:00
|
|
|
self.subscribe(self.example_user("othello"), "Rome")
|
2020-08-01 03:17:21 +02:00
|
|
|
self.send_stream_message(self.example_user("othello"), "Rome", body, "test")
|
|
|
|
self.assertEqual(Attachment.objects.get(path_id=d1_path_id).messages.count(), 4)
|
|
|
|
self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_realm_public)
|
|
|
|
self.assertTrue(Attachment.objects.get(path_id=d1_path_id).is_web_public)
|
2017-04-14 00:59:59 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_check_attachment_reference_update(self) -> None:
|
2016-07-07 09:47:15 +02:00
|
|
|
f1 = StringIO("file1")
|
|
|
|
f1.name = "file1.txt"
|
|
|
|
f2 = StringIO("file2")
|
|
|
|
f2.name = "file2.txt"
|
|
|
|
f3 = StringIO("file3")
|
|
|
|
f3.name = "file3.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2019-12-13 03:56:59 +01:00
|
|
|
host = hamlet.realm.host
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(hamlet)
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": f1})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2024-07-15 07:06:38 +02:00
|
|
|
f1_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": f2})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2024-07-15 07:06:38 +02:00
|
|
|
f2_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2019-12-13 03:56:59 +01:00
|
|
|
self.subscribe(hamlet, "test")
|
2021-02-12 08:19:30 +01:00
|
|
|
body = (
|
|
|
|
f"[f1.txt](http://{host}/user_uploads/" + f1_path_id + ") "
|
2023-08-03 00:28:59 +02:00
|
|
|
f"[f2.txt](http://{host}/user_uploads/" + f2_path_id + ")"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-03-07 11:43:05 +01:00
|
|
|
msg_id = self.send_stream_message(hamlet, "test", body, "test")
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": f3})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2024-07-15 07:06:38 +02:00
|
|
|
f3_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])
|
2016-07-07 09:47:15 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
new_body = (
|
|
|
|
f"[f3.txt](http://{host}/user_uploads/" + f3_path_id + ") "
|
2023-08-03 00:28:59 +02:00
|
|
|
f"[f2.txt](http://{host}/user_uploads/" + f2_path_id + ")"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_body,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2016-07-07 09:47:15 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
f1_attachment = Attachment.objects.get(path_id=f1_path_id)
|
|
|
|
f2_attachment = Attachment.objects.get(path_id=f2_path_id)
|
2016-07-24 22:03:22 +02:00
|
|
|
f3_attachment = Attachment.objects.get(path_id=f3_path_id)
|
2016-07-07 09:47:15 +02:00
|
|
|
|
|
|
|
self.assertTrue(message not in f1_attachment.messages.all())
|
|
|
|
self.assertTrue(message in f2_attachment.messages.all())
|
|
|
|
self.assertTrue(message in f3_attachment.messages.all())
|
|
|
|
|
2016-07-23 07:06:13 +02:00
|
|
|
# Delete all the attachments from the message
|
|
|
|
new_body = "(deleted)"
|
2021-02-12 08:19:30 +01:00
|
|
|
result = self.client_patch(
|
|
|
|
"/json/messages/" + str(msg_id),
|
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"content": new_body,
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
)
|
2016-07-23 07:06:13 +02:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
message = Message.objects.get(id=msg_id)
|
|
|
|
f1_attachment = Attachment.objects.get(path_id=f1_path_id)
|
|
|
|
f2_attachment = Attachment.objects.get(path_id=f2_path_id)
|
|
|
|
f3_attachment = Attachment.objects.get(path_id=f3_path_id)
|
|
|
|
self.assertTrue(message not in f1_attachment.messages.all())
|
|
|
|
self.assertTrue(message not in f2_attachment.messages.all())
|
|
|
|
self.assertTrue(message not in f3_attachment.messages.all())
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_name(self) -> None:
|
2016-09-20 11:02:15 +02:00
|
|
|
"""
|
|
|
|
Unicode filenames should be processed correctly.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-09-20 11:02:15 +02:00
|
|
|
for expected in ["Здравейте.txt", "test"]:
|
|
|
|
fp = StringIO("bah!")
|
2023-12-05 21:14:17 +01:00
|
|
|
fp.name = quote(expected)
|
2016-09-20 11:02:15 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"f1": fp})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
assert sanitize_name(expected) in response_dict["uri"]
|
2024-07-15 07:06:38 +02:00
|
|
|
assert sanitize_name(expected) in response_dict["url"]
|
2016-09-20 11:02:15 +02:00
|
|
|
|
2023-07-03 20:40:57 +02:00
|
|
|
def test_sanitize_file_name(self) -> None:
|
|
|
|
self.login("hamlet")
|
2024-08-30 04:13:01 +02:00
|
|
|
for uploaded_filename, expected_url, expected_filename in [
|
|
|
|
("../foo", "foo", "foo"),
|
|
|
|
(".. ", "uploaded-file", ".. "),
|
|
|
|
("/", "f1", "f1"),
|
|
|
|
("./", "f1", "f1"),
|
|
|
|
("././", "f1", "f1"),
|
|
|
|
(".!", "uploaded-file", ".!"),
|
|
|
|
("**", "uploaded-file", "**"),
|
|
|
|
("foo bar--baz", "foo-bar-baz", "foo bar--baz"),
|
2023-07-03 20:40:57 +02:00
|
|
|
]:
|
|
|
|
fp = StringIO("bah!")
|
2023-12-05 21:14:17 +01:00
|
|
|
fp.name = quote(uploaded_filename)
|
2023-07-03 20:40:57 +02:00
|
|
|
|
|
|
|
result = self.client_post("/json/user_uploads", {"f1": fp})
|
|
|
|
response_dict = self.assert_json_success(result)
|
|
|
|
self.assertNotIn(response_dict["uri"], uploaded_filename)
|
2024-08-30 04:13:01 +02:00
|
|
|
self.assertTrue(response_dict["uri"].endswith("/" + expected_url))
|
2024-07-15 07:06:38 +02:00
|
|
|
self.assertNotIn(response_dict["url"], uploaded_filename)
|
2024-08-30 04:13:01 +02:00
|
|
|
self.assertTrue(response_dict["url"].endswith("/" + expected_url))
|
|
|
|
self.assertEqual(response_dict["filename"], expected_filename)
|
2023-07-03 20:40:57 +02:00
|
|
|
|
2018-01-26 16:13:33 +01:00
|
|
|
def test_realm_quota(self) -> None:
|
|
|
|
"""
|
|
|
|
Realm quota for uploading should not be exceeded.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2018-01-26 16:13:33 +01:00
|
|
|
|
|
|
|
d1 = StringIO("zulip!")
|
|
|
|
d1.name = "dummy_1.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": d1})
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2024-07-15 07:06:38 +02:00
|
|
|
d1_path_id = re.sub(r"/user_uploads/", "", response_dict["url"])
|
2021-02-12 08:19:30 +01:00
|
|
|
d1_attachment = Attachment.objects.get(path_id=d1_path_id)
|
2018-01-26 16:13:33 +01:00
|
|
|
|
|
|
|
realm = get_realm("zulip")
|
2024-03-10 22:57:56 +01:00
|
|
|
realm.custom_upload_quota_gb = 1
|
|
|
|
realm.save(update_fields=["custom_upload_quota_gb"])
|
2018-01-26 16:13:33 +01:00
|
|
|
|
|
|
|
# The size of StringIO("zulip!") is 6 bytes. Setting the size of
|
|
|
|
# d1_attachment to realm.upload_quota_bytes() - 11 should allow
|
|
|
|
# us to upload only one more attachment.
|
2018-02-19 06:39:38 +01:00
|
|
|
quota = realm.upload_quota_bytes()
|
2021-02-12 08:19:30 +01:00
|
|
|
assert quota is not None
|
2018-02-19 06:39:38 +01:00
|
|
|
d1_attachment.size = quota - 11
|
2021-02-12 08:20:45 +01:00
|
|
|
d1_attachment.save(update_fields=["size"])
|
2018-01-26 16:13:33 +01:00
|
|
|
|
|
|
|
d2 = StringIO("zulip!")
|
|
|
|
d2.name = "dummy_2.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": d2})
|
2018-01-26 16:13:33 +01:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
d3 = StringIO("zulip!")
|
|
|
|
d3.name = "dummy_3.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": d3})
|
2018-01-26 16:13:33 +01:00
|
|
|
self.assert_json_error(result, "Upload would exceed your organization's upload quota.")
|
|
|
|
|
2024-03-10 22:57:56 +01:00
|
|
|
realm.custom_upload_quota_gb = None
|
|
|
|
realm.save(update_fields=["custom_upload_quota_gb"])
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": d3})
|
2018-01-26 16:13:33 +01:00
|
|
|
self.assert_json_success(result)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_cross_realm_file_access(self) -> None:
|
2018-05-11 01:39:38 +02:00
|
|
|
def create_user(email: str, realm_id: str) -> UserProfile:
|
2020-03-06 18:40:46 +01:00
|
|
|
password = initial_password(email)
|
|
|
|
if password is not None:
|
|
|
|
self.register(email, password, subdomain=realm_id)
|
2022-01-23 20:30:46 +01:00
|
|
|
# self.register has the side-effect of ending up with a logged in session
|
|
|
|
# for the new user. We don't want that in these tests.
|
|
|
|
self.logout()
|
2020-03-12 14:17:25 +01:00
|
|
|
return get_user_by_delivery_email(email, get_realm(realm_id))
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2017-08-26 01:20:47 +02:00
|
|
|
test_subdomain = "uploadtest.example.com"
|
2021-02-12 08:20:45 +01:00
|
|
|
user1_email = "user1@uploadtest.example.com"
|
|
|
|
user2_email = "test-og-bot@zulip.com"
|
|
|
|
user3_email = "other-user@uploadtest.example.com"
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2021-03-08 13:22:43 +01:00
|
|
|
r1 = do_create_realm(string_id=test_subdomain, name=test_subdomain)
|
2021-03-01 11:33:24 +01:00
|
|
|
do_set_realm_property(r1, "invite_required", False, acting_user=None)
|
2017-08-26 01:20:47 +02:00
|
|
|
RealmDomain.objects.create(realm=r1, domain=test_subdomain)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
user_1 = create_user(user1_email, test_subdomain)
|
2021-02-12 08:20:45 +01:00
|
|
|
user_2 = create_user(user2_email, "zulip")
|
2019-12-13 03:56:59 +01:00
|
|
|
user_3 = create_user(user3_email, test_subdomain)
|
|
|
|
host = user_3.realm.host
|
2016-06-17 19:48:17 +02:00
|
|
|
|
|
|
|
# Send a message from @zulip.com -> @uploadtest.example.com
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user_2)
|
2016-06-17 19:48:17 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2024-07-15 07:06:38 +02:00
|
|
|
url = self.assert_json_success(result)["url"]
|
2024-04-26 20:30:22 +02:00
|
|
|
fp_path_id = re.sub(r"/user_uploads/", "", url)
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"First message ...[zulip.txt](http://{host}/user_uploads/" + fp_path_id + ")"
|
2021-02-12 08:19:30 +01:00
|
|
|
with self.settings(CROSS_REALM_BOT_EMAILS={user_2.email, user_3.email}):
|
2017-08-18 12:26:43 +02:00
|
|
|
internal_send_private_message(
|
2021-03-08 11:54:39 +01:00
|
|
|
sender=get_system_bot(user_2.email, user_2.realm_id),
|
2020-03-12 14:17:25 +01:00
|
|
|
recipient_user=user_1,
|
2017-08-18 12:26:43 +02:00
|
|
|
content=body,
|
|
|
|
)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user_1)
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url, subdomain=test_subdomain)
|
2016-06-17 19:48:17 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
|
|
|
# Confirm other cross-realm users can't read it.
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user_3)
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url, subdomain=test_subdomain)
|
2016-06-17 19:48:17 +02:00
|
|
|
self.assertEqual(response.status_code, 403)
|
|
|
|
self.assert_in_response("You are not authorized to view this file.", response)
|
|
|
|
|
2021-11-02 15:42:58 +01:00
|
|
|
# Verify that cross-realm access to files for spectators is denied.
|
|
|
|
self.logout()
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url, subdomain=test_subdomain)
|
2023-11-21 22:23:22 +01:00
|
|
|
self.assertEqual(response.status_code, 302)
|
|
|
|
self.assertTrue(response.headers["Location"].endswith(f"/login/?next={url}"))
|
2021-11-02 15:42:58 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_download_authorization_invite_only(self) -> None:
|
2020-03-06 18:40:46 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2020-03-06 18:40:46 +01:00
|
|
|
realm = hamlet.realm
|
|
|
|
subscribed_users = [hamlet, cordelia]
|
2020-03-10 11:48:26 +01:00
|
|
|
unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
|
2018-06-05 21:12:28 +02:00
|
|
|
stream_name = "test-subscribe"
|
2021-02-12 08:19:30 +01:00
|
|
|
self.make_stream(
|
|
|
|
stream_name, realm=realm, invite_only=True, history_public_to_subscribers=False
|
|
|
|
)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
for subscribed_user in subscribed_users:
|
|
|
|
self.subscribe(subscribed_user, stream_name)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(hamlet)
|
2016-06-17 19:48:17 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2024-07-15 07:06:38 +02:00
|
|
|
url = self.assert_json_success(result)["url"]
|
2024-04-26 20:30:22 +02:00
|
|
|
fp_path_id = re.sub(r"/user_uploads/", "", url)
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"First message ...[zulip.txt](http://{realm.host}/user_uploads/" + fp_path_id + ")"
|
2020-03-06 18:40:46 +01:00
|
|
|
self.send_stream_message(hamlet, stream_name, body, "test")
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2018-06-05 21:12:28 +02:00
|
|
|
# Owner user should be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(hamlet)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(6):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2016-06-17 19:48:17 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
2020-03-28 01:25:56 +01:00
|
|
|
# Subscribed user who received the message should be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(cordelia)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(7):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
2020-03-10 11:48:26 +01:00
|
|
|
def assert_cannot_access_file(user: UserProfile) -> None:
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.api_get(user, url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 403)
|
|
|
|
self.assert_in_response("You are not authorized to view this file.", response)
|
|
|
|
|
|
|
|
late_subscribed_user = self.example_user("aaron")
|
|
|
|
self.subscribe(late_subscribed_user, stream_name)
|
2020-03-10 11:48:26 +01:00
|
|
|
assert_cannot_access_file(late_subscribed_user)
|
2016-06-17 19:48:17 +02:00
|
|
|
|
|
|
|
# Unsubscribed user should not be able to view file
|
2020-03-10 11:48:26 +01:00
|
|
|
for unsubscribed_user in unsubscribed_users:
|
2018-06-05 21:12:28 +02:00
|
|
|
assert_cannot_access_file(unsubscribed_user)
|
|
|
|
|
|
|
|
def test_file_download_authorization_invite_only_with_shared_history(self) -> None:
|
|
|
|
user = self.example_user("hamlet")
|
2021-02-12 08:20:45 +01:00
|
|
|
polonius = self.example_user("polonius")
|
2020-03-06 18:40:46 +01:00
|
|
|
subscribed_users = [user, polonius]
|
|
|
|
unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
|
2018-06-05 21:12:28 +02:00
|
|
|
stream_name = "test-subscribe"
|
2021-02-12 08:19:30 +01:00
|
|
|
self.make_stream(
|
|
|
|
stream_name, realm=user.realm, invite_only=True, history_public_to_subscribers=True
|
|
|
|
)
|
2018-06-05 21:12:28 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
for subscribed_user in subscribed_users:
|
|
|
|
self.subscribe(subscribed_user, stream_name)
|
2018-06-05 21:12:28 +02:00
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2018-06-05 21:12:28 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2024-07-15 07:06:38 +02:00
|
|
|
url = self.assert_json_success(result)["url"]
|
2024-04-26 20:30:22 +02:00
|
|
|
fp_path_id = re.sub(r"/user_uploads/", "", url)
|
2021-02-12 08:19:30 +01:00
|
|
|
body = (
|
|
|
|
f"First message ...[zulip.txt](http://{user.realm.host}/user_uploads/"
|
|
|
|
+ fp_path_id
|
|
|
|
+ ")"
|
|
|
|
)
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(user, stream_name, body, "test")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
|
|
|
# Add aaron as a subscribed after the message was sent
|
|
|
|
late_subscribed_user = self.example_user("aaron")
|
|
|
|
self.subscribe(late_subscribed_user, stream_name)
|
2020-03-06 18:40:46 +01:00
|
|
|
subscribed_users.append(late_subscribed_user)
|
2018-06-05 21:12:28 +02:00
|
|
|
|
|
|
|
# Owner user should be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(6):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
|
|
|
# Originally subscribed user should be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(polonius)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(7):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
|
|
|
# Subscribed user who did not receive the message should also be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(late_subscribed_user)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(10):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
# It takes a few extra queries to verify access because of shared history.
|
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
def assert_cannot_access_file(user: UserProfile) -> None:
|
|
|
|
self.login_user(user)
|
2022-10-15 22:47:40 +02:00
|
|
|
# It takes a few extra queries to verify lack of access with shared history.
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(9):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2016-06-17 19:48:17 +02:00
|
|
|
self.assertEqual(response.status_code, 403)
|
|
|
|
self.assert_in_response("You are not authorized to view this file.", response)
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2018-06-05 21:12:28 +02:00
|
|
|
# Unsubscribed user should not be able to view file
|
2020-03-06 18:40:46 +01:00
|
|
|
for unsubscribed_user in unsubscribed_users:
|
2018-06-05 21:12:28 +02:00
|
|
|
assert_cannot_access_file(unsubscribed_user)
|
|
|
|
|
|
|
|
def test_multiple_message_attachment_file_download(self) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
2023-09-01 23:05:08 +02:00
|
|
|
for i in range(5):
|
2020-06-10 06:41:04 +02:00
|
|
|
stream_name = f"test-subscribe {i}"
|
2021-02-12 08:19:30 +01:00
|
|
|
self.make_stream(
|
|
|
|
stream_name,
|
|
|
|
realm=hamlet.realm,
|
|
|
|
invite_only=True,
|
|
|
|
history_public_to_subscribers=True,
|
|
|
|
)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.subscribe(hamlet, stream_name)
|
|
|
|
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(hamlet)
|
2018-06-05 21:12:28 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2024-07-15 07:06:38 +02:00
|
|
|
url = self.assert_json_success(result)["url"]
|
2024-04-26 20:30:22 +02:00
|
|
|
fp_path_id = re.sub(r"/user_uploads/", "", url)
|
2018-06-05 21:12:28 +02:00
|
|
|
for i in range(20):
|
2021-02-12 08:19:30 +01:00
|
|
|
body = (
|
|
|
|
f"First message ...[zulip.txt](http://{hamlet.realm.host}/user_uploads/"
|
|
|
|
+ fp_path_id
|
|
|
|
+ ")"
|
|
|
|
)
|
|
|
|
self.send_stream_message(
|
|
|
|
self.example_user("hamlet"), f"test-subscribe {i % 5}", body, "test"
|
|
|
|
)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.logout()
|
|
|
|
|
|
|
|
user = self.example_user("aaron")
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(9):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 403)
|
|
|
|
self.assert_in_response("You are not authorized to view this file.", response)
|
|
|
|
|
|
|
|
self.subscribe(user, "test-subscribe 1")
|
|
|
|
self.subscribe(user, "test-subscribe 2")
|
|
|
|
|
2022-10-15 22:47:40 +02:00
|
|
|
# If we were accidentally one query per message, this would be 20+
|
2023-09-27 02:10:49 +02:00
|
|
|
with self.assert_database_query_count(10):
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2018-06-05 21:12:28 +02:00
|
|
|
self.assertEqual(response.status_code, 200)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2018-06-05 21:12:28 +02:00
|
|
|
|
2022-10-15 22:47:40 +02:00
|
|
|
with self.assert_database_query_count(6):
|
2024-09-04 20:04:03 +02:00
|
|
|
self.assertTrue(validate_attachment_request(user, fp_path_id)[0])
|
2018-06-05 21:12:28 +02:00
|
|
|
|
|
|
|
self.logout()
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_file_download_authorization_public(self) -> None:
|
2020-03-06 18:40:46 +01:00
|
|
|
subscribed_users = [self.example_user("hamlet"), self.example_user("iago")]
|
|
|
|
unsubscribed_users = [self.example_user("othello"), self.example_user("prospero")]
|
2017-08-25 06:01:29 +02:00
|
|
|
realm = get_realm("zulip")
|
2020-03-06 18:40:46 +01:00
|
|
|
for subscribed_user in subscribed_users:
|
|
|
|
self.subscribe(subscribed_user, "test-subscribe")
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-06-17 19:48:17 +02:00
|
|
|
fp = StringIO("zulip!")
|
|
|
|
fp.name = "zulip.txt"
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_post("/json/user_uploads", {"file": fp})
|
2024-07-15 07:06:38 +02:00
|
|
|
url = self.assert_json_success(result)["url"]
|
2024-04-26 20:30:22 +02:00
|
|
|
fp_path_id = re.sub(r"/user_uploads/", "", url)
|
2020-06-09 00:25:09 +02:00
|
|
|
body = f"First message ...[zulip.txt](http://{realm.host}/user_uploads/" + fp_path_id + ")"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("hamlet"), "test-subscribe", body, "test")
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
|
|
|
# Now all users should be able to access the files
|
|
|
|
for user in subscribed_users + unsubscribed_users:
|
2020-03-06 18:40:46 +01:00
|
|
|
self.login_user(user)
|
2023-04-08 07:01:50 +02:00
|
|
|
response = self.client_get(url)
|
2023-06-16 01:07:40 +02:00
|
|
|
self.assertEqual(response.getvalue(), b"zulip!")
|
2017-04-18 03:23:32 +02:00
|
|
|
self.logout()
|
2016-06-17 19:48:17 +02:00
|
|
|
|
2018-02-12 18:18:03 +01:00
|
|
|
    def test_serve_local(self) -> None:
        """Verify local-backend file serving via nginx X-Accel-Redirect.

        Uploads a small file, fetches it with DEVELOPMENT=False (so the
        internal-redirect code path is taken), and checks the redirect
        target, Content-Disposition, and Cache-Control headers.
        """

        def check_xsend_links(
            name: str,
            name_str_for_test: str,
            content_disposition: str = "",
            download: bool = False,
            returned_attachment: bool = False,
        ) -> None:
            # Upload a file with the given name as hamlet.
            self.login("hamlet")
            fp = StringIO("zulip!")
            fp.name = name
            result = self.client_post("/json/user_uploads", {"file": fp})
            url = self.assert_json_success(result)["url"]
            fp_path_id = re.sub(r"/user_uploads/", "", url)
            fp_path = os.path.split(fp_path_id)[0]
            if download:
                # The /download/ variant forces an attachment response.
                url = url.replace("/user_uploads/", "/user_uploads/download/")
            # DEVELOPMENT=False makes the server emit an internal nginx
            # redirect instead of serving the bytes itself.
            with self.settings(DEVELOPMENT=False):
                response = self.client_get(url)
            assert settings.LOCAL_UPLOADS_DIR is not None
            test_run, worker = os.path.split(os.path.dirname(settings.LOCAL_UPLOADS_DIR))
            self.assertEqual(
                response["X-Accel-Redirect"],
                "/internal/local/uploads/" + fp_path + "/" + name_str_for_test,
            )
            if returned_attachment:
                self.assertIn("attachment;", response["Content-disposition"])
            else:
                self.assertIn("inline;", response["Content-disposition"])
            if content_disposition != "":
                self.assertIn(content_disposition, response["Content-disposition"])
            # Uploaded content is immutable, so it is privately cacheable.
            self.assertEqual(set(response["Cache-Control"].split(", ")), {"private", "immutable"})

        # Plain text files are served as attachments.
        check_xsend_links(
            "zulip.txt", "zulip.txt", 'filename="zulip.txt"', returned_attachment=True
        )
        # Non-ASCII filenames are percent-encoded in the redirect path and
        # use the RFC 5987 filename* form in Content-Disposition.
        check_xsend_links(
            "áéБД.txt",
            "%C3%A1%C3%A9%D0%91%D0%94.txt",
            "filename*=utf-8''%C3%A1%C3%A9%D0%91%D0%94.txt",
            returned_attachment=True,
        )
        check_xsend_links(
            "zulip.html",
            "zulip.html",
            'filename="zulip.html"',
            returned_attachment=True,
        )
        check_xsend_links(
            "zulip.sh",
            "zulip.sh",
            'filename="zulip.sh"',
            returned_attachment=True,
        )
        # Images are served inline by default...
        check_xsend_links(
            "zulip.jpeg",
            "zulip.jpeg",
            'filename="zulip.jpeg"',
            returned_attachment=False,
        )
        # ...but become attachments when downloaded explicitly.
        check_xsend_links(
            "zulip.jpeg",
            "zulip.jpeg",
            download=True,
            content_disposition='filename="zulip.jpeg"',
            returned_attachment=True,
        )
        check_xsend_links(
            "áéБД.pdf",
            "%C3%A1%C3%A9%D0%91%D0%94.pdf",
            "filename*=utf-8''%C3%A1%C3%A9%D0%91%D0%94.pdf",
            returned_attachment=False,
        )
        # Spaces and parens are sanitized out of the stored name, but the
        # original filename is preserved in Content-Disposition.
        check_xsend_links(
            "some file (with spaces).png",
            "some-file-with-spaces.png",
            'filename="some file (with spaces).png"',
            returned_attachment=False,
        )
        check_xsend_links(
            "some file (with spaces).png",
            "some-file-with-spaces.png",
            'filename="some file (with spaces).png"',
            download=True,
            returned_attachment=True,
        )
        # A name that sanitizes to nothing falls back to "uploaded-file".
        check_xsend_links(
            ".().",
            "uploaded-file",
            'filename=".()."',
            returned_attachment=True,
        )
        check_xsend_links("zulip", "zulip", 'filename="zulip"', returned_attachment=True)
|
2018-02-12 18:18:03 +01:00
|
|
|
|
2018-05-14 23:47:19 +02:00
|
|
|
|
2017-02-16 10:10:37 +01:00
|
|
|
class AvatarTest(UploadSerializeMixin, ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_avatar_field(self) -> None:
        """Exercise get_avatar_field for the three main cases: an uploaded
        avatar, a Gravatar fallback, and client-side Gravatar rendering."""
        # Uploaded avatar: the URL embeds a hash derived from AVATAR_SALT,
        # so pin the salt to make the expected URL deterministic.
        with self.settings(AVATAR_SALT="salt"):
            url = get_avatar_field(
                user_id=17,
                realm_id=5,
                email="foo@example.com",
                avatar_source=UserProfile.AVATAR_FROM_USER,
                avatar_version=2,
                medium=True,
                client_gravatar=False,
            )

        self.assertEqual(
            url,
            "/user_avatars/5/ff062b0fee41738b38c4312bb33bdf3fe2aad463-medium.png",
        )

        # Gravatar source with server-side rendering: returns a full
        # secure.gravatar.com URL (s=500 for the medium size).
        url = get_avatar_field(
            user_id=9999,
            realm_id=9999,
            email="foo@example.com",
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            avatar_version=2,
            medium=True,
            client_gravatar=False,
        )

        self.assertEqual(
            url,
            "https://secure.gravatar.com/avatar/b48def645758b95537d4424c84d1a9ff?d=identicon&s=500&version=2",
        )

        # With client_gravatar=True, the client computes the Gravatar URL
        # itself, so the server returns None.
        url = get_avatar_field(
            user_id=9999,
            realm_id=9999,
            email="foo@example.com",
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            avatar_version=2,
            medium=True,
            client_gravatar=True,
        )

        self.assertEqual(url, None)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_avatar_url(self) -> None:
        """Verifies URL schemes for avatars and realm icons."""
        # Local backend: URLs are rooted at /user_avatars/.
        backend: ZulipUploadBackend = LocalUploadBackend()
        self.assertEqual(backend.get_public_upload_root_url(), "/user_avatars/")
        self.assertEqual(backend.get_avatar_url("hash", False), "/user_avatars/hash.png")
        # medium=True appends the -medium suffix.
        self.assertEqual(backend.get_avatar_url("hash", True), "/user_avatars/hash-medium.png")
        self.assertEqual(
            backend.get_realm_icon_url(15, 1), "/user_avatars/15/realm/icon.png?version=1"
        )
        self.assertEqual(
            backend.get_realm_logo_url(15, 1, False), "/user_avatars/15/realm/logo.png?version=1"
        )
        # night=True selects the dark-theme logo file.
        self.assertEqual(
            backend.get_realm_logo_url(15, 1, True),
            "/user_avatars/15/realm/night_logo.png?version=1",
        )

        # S3 backend: same paths, but rooted at the bucket's S3 URL.
        with self.settings(S3_AVATAR_BUCKET="bucket"):
            backend = S3UploadBackend()
            self.assertEqual(
                backend.get_avatar_url("hash", False), "https://bucket.s3.amazonaws.com/hash.png"
            )
            self.assertEqual(
                backend.get_avatar_url("hash", True),
                "https://bucket.s3.amazonaws.com/hash-medium.png",
            )
            self.assertEqual(
                backend.get_realm_icon_url(15, 1),
                "https://bucket.s3.amazonaws.com/15/realm/icon.png?version=1",
            )
            self.assertEqual(
                backend.get_realm_logo_url(15, 1, False),
                "https://bucket.s3.amazonaws.com/15/realm/logo.png?version=1",
            )
            self.assertEqual(
                backend.get_realm_logo_url(15, 1, True),
                "https://bucket.s3.amazonaws.com/15/realm/night_logo.png?version=1",
            )
|
2017-03-21 23:53:54 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_multiple_upload_failure(self) -> None:
|
2016-04-17 23:51:49 +02:00
|
|
|
"""
|
|
|
|
Attempting to upload two files should fail.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
|
|
|
|
result = self.client_post("/json/users/me/avatar", {"f1": fp1, "f2": fp2})
|
2016-04-17 23:51:49 +02:00
|
|
|
self.assert_json_error(result, "You must upload exactly one avatar.")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_no_file_upload_failure(self) -> None:
|
2016-04-17 23:51:49 +02:00
|
|
|
"""
|
|
|
|
Calling this endpoint with no files should fail.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-04-17 23:51:49 +02:00
|
|
|
|
2017-07-05 19:15:15 +02:00
|
|
|
result = self.client_post("/json/users/me/avatar")
|
2016-04-17 23:51:49 +02:00
|
|
|
self.assert_json_error(result, "You must upload exactly one avatar.")
|
|
|
|
|
2019-04-23 04:51:04 +02:00
|
|
|
def test_avatar_changes_disabled_failure(self) -> None:
|
|
|
|
"""
|
|
|
|
Attempting to upload avatar on a realm with avatar changes disabled should fail.
|
|
|
|
"""
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("cordelia")
|
2021-03-01 11:33:24 +01:00
|
|
|
do_set_realm_property(
|
|
|
|
self.example_user("cordelia").realm,
|
|
|
|
"avatar_changes_disabled",
|
|
|
|
True,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2019-04-23 04:51:04 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with get_test_image_file("img.png") as fp1:
|
|
|
|
result = self.client_post("/json/users/me/avatar", {"f1": fp1})
|
2019-04-23 04:51:04 +02:00
|
|
|
self.assert_json_error(result, "Avatar changes are disabled in this organization.")
|
|
|
|
|
2016-04-17 23:57:03 +02:00
|
|
|
    # (input fixture filename, expected resized fixture or None) pairs used
    # by test_valid_avatars; None means the resized output is not compared
    # byte-for-byte because the encoder output varies by platform.
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_get_gravatar_avatar(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
2020-03-12 14:17:25 +01:00
|
|
|
cordelia.email = cordelia.delivery_email
|
|
|
|
cordelia.save()
|
2016-07-13 01:56:59 +02:00
|
|
|
|
|
|
|
cordelia.avatar_source = UserProfile.AVATAR_FROM_GRAVATAR
|
|
|
|
cordelia.save()
|
|
|
|
with self.settings(ENABLE_GRAVATAR=True):
|
2020-09-13 00:11:30 +02:00
|
|
|
response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = response["Location"]
|
|
|
|
self.assertEqual(redirect_url, str(avatar_url(cordelia)) + "&foo=bar")
|
2016-07-13 01:56:59 +02:00
|
|
|
|
|
|
|
with self.settings(ENABLE_GRAVATAR=False):
|
2020-09-13 00:11:30 +02:00
|
|
|
response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = response["Location"]
|
|
|
|
self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "&foo=bar"))
|
2016-07-13 01:56:59 +02:00
|
|
|
|
2023-01-25 00:30:24 +01:00
|
|
|
def test_get_settings_avatar(self) -> None:
|
|
|
|
self.login("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
cordelia.email = cordelia.delivery_email
|
|
|
|
cordelia.save()
|
|
|
|
with self.settings(
|
|
|
|
ENABLE_GRAVATAR=False, DEFAULT_AVATAR_URI="http://other.server/avatar.svg"
|
|
|
|
):
|
|
|
|
response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
|
|
|
|
redirect_url = response["Location"]
|
|
|
|
self.assertEqual(redirect_url, "http://other.server/avatar.svg?version=1&foo=bar")
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_user_avatar(self) -> None:
        """Exercise the /avatar/<email_or_id> redirect endpoint: session
        auth, HTTP basic auth, cross-realm bots, spectator access, and
        restricted (guest) access."""
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        cordelia = self.example_user("cordelia")
        cordelia.email = cordelia.delivery_email
        cordelia.save()

        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        cross_realm_bot = get_system_bot(settings.WELCOME_BOT, internal_realm.id)

        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()
        # Lookup by email and by user ID both redirect to the avatar URL,
        # with extra query parameters appended.
        response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "?foo=bar"))

        response = self.client_get(f"/avatar/{cordelia.id}", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "?foo=bar"))

        # An empty identifier is a 404, not an error page.
        response = self.client_get("/avatar/")
        self.assertEqual(response.status_code, 404)

        self.logout()

        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            # Test /avatar/<email_or_id> endpoint with HTTP basic auth.
            response = self.api_get(hamlet, "/avatar/cordelia@zulip.com", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "?foo=bar"))

            response = self.api_get(hamlet, f"/avatar/{cordelia.id}", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia)) + "?foo=bar"))

            # Test cross_realm_bot avatar access using email.
            response = self.api_get(hamlet, "/avatar/welcome-bot@zulip.com", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + "&foo=bar"))

            # Test cross_realm_bot avatar access using id.
            response = self.api_get(hamlet, f"/avatar/{cross_realm_bot.id}", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cross_realm_bot)) + "&foo=bar"))

            # Without spectators enabled, no unauthenticated access.
            response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
            self.assert_json_error(
                response,
                "Not logged in: API authentication or user session required",
                status_code=401,
            )

        # Allow unauthenticated/spectator requests by ID.
        response = self.client_get(f"/avatar/{cordelia.id}", {"foo": "bar"})
        self.assertEqual(302, response.status_code)

        # Disallow unauthenticated/spectator requests by email.
        response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
        self.assert_json_error(
            response,
            "Not logged in: API authentication or user session required",
            status_code=401,
        )

        # A guest with restricted user-access sees the generic
        # unknown-user avatar rather than the real one.
        self.set_up_db_for_testing_user_access()
        self.login("polonius")

        response = self.client_get(f"/avatar/{cordelia.id}", {"foo": "bar"})
        self.assertEqual(302, response.status_code)
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith("images/unknown-user-avatar.png?foo=bar"))

        response = self.client_get("/avatar/cordelia@zulip.com", {"foo": "bar"})
        self.assertEqual(302, response.status_code)
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith("images/unknown-user-avatar.png?foo=bar"))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_get_user_avatar_medium(self) -> None:
        """Same coverage as test_get_user_avatar, but for the /medium
        variant, plus spectator rate limiting."""
        hamlet = self.example_user("hamlet")
        self.login_user(hamlet)
        cordelia = self.example_user("cordelia")
        cordelia.email = cordelia.delivery_email
        cordelia.save()

        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()
        # Lookup by email and ID redirect to the medium-size avatar URL.
        response = self.client_get("/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "?foo=bar"))

        response = self.client_get(f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "?foo=bar"))

        self.logout()

        with self.settings(WEB_PUBLIC_STREAMS_ENABLED=False):
            # Test /avatar/<email_or_id>/medium endpoint with HTTP basic auth.
            response = self.api_get(hamlet, "/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "?foo=bar"))

            response = self.api_get(hamlet, f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(str(avatar_url(cordelia, True)) + "?foo=bar"))

            # Without spectators enabled, no unauthenticated access.
            response = self.client_get("/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
            self.assert_json_error(
                response,
                "Not logged in: API authentication or user session required",
                status_code=401,
            )

        # Allow unauthenticated/spectator requests by ID.
        response = self.client_get(f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
        self.assertEqual(302, response.status_code)

        # Disallow unauthenticated/spectator requests by email.
        response = self.client_get("/avatar/cordelia@zulip.com/medium", {"foo": "bar"})
        self.assert_json_error(
            response,
            "Not logged in: API authentication or user session required",
            status_code=401,
        )

        # Allow unauthenticated/spectator requests by ID for a reasonable number of requests.
        with ratelimit_rule(86400, 1000, domain="spectator_attachment_access_by_file"):
            response = self.client_get(f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
            self.assertEqual(302, response.status_code)

        # Deny file access since rate limited
        with ratelimit_rule(86400, 0, domain="spectator_attachment_access_by_file"):
            response = self.client_get(f"/avatar/{cordelia.id}/medium", {"foo": "bar"})
            self.assertEqual(429, response.status_code)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_non_valid_user_avatar(self) -> None:
|
2016-07-13 01:56:59 +02:00
|
|
|
# It's debatable whether we should generate avatars for non-users,
|
|
|
|
# but this test just validates the current code's behavior.
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2016-07-13 01:56:59 +02:00
|
|
|
|
2020-09-13 00:11:30 +02:00
|
|
|
response = self.client_get("/avatar/nonexistent_user@zulip.com", {"foo": "bar"})
|
2021-02-12 08:20:45 +01:00
|
|
|
redirect_url = response["Location"]
|
|
|
|
actual_url = "https://secure.gravatar.com/avatar/444258b521f152129eb0c162996e572d?d=identicon&version=1&foo=bar"
|
2016-07-13 01:56:59 +02:00
|
|
|
self.assertEqual(redirect_url, actual_url)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_valid_avatars(self) -> None:
        """
        A PUT request to /json/users/me/avatar with a valid file should return a URL and actually create an avatar.
        """
        # avatar_version starts at 1; the first upload bumps it to 2, and
        # each subsequent upload in the loop increments it again.
        version = 2
        for fname, rfname in self.correct_files:
            with self.subTest(fname=fname):
                self.login("hamlet")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/users/me/avatar", {"file": fp})

                response_dict = self.assert_json_success(result)
                self.assertIn("avatar_url", response_dict)
                base = "/user_avatars/"
                url = self.assert_json_success(result)["avatar_url"]
                self.assertEqual(base, url[: len(base)])

                # When the resized output is deterministic, check the
                # served avatar is a 100x100 image.
                if rfname is not None:
                    response = self.client_get(url)
                    data = response.getvalue()
                    avatar_image = pyvips.Image.new_from_buffer(data, "")
                    self.assertEqual(avatar_image.height, 100)
                    self.assertEqual(avatar_image.width, 100)

                # Verify that the medium-size avatar was created
                user_profile = self.example_user("hamlet")
                medium_avatar_disk_path = avatar_disk_path(user_profile, medium=True)
                self.assertTrue(os.path.exists(medium_avatar_disk_path))

                # Verify that ensure_medium_avatar_url does not overwrite this file if it exists
                with mock.patch(
                    "zerver.lib.upload.local.write_local_file"
                ) as mock_write_local_file:
                    zerver.lib.upload.ensure_avatar_image(user_profile, medium=True)
                self.assertFalse(mock_write_local_file.called)

                # Confirm that ensure_medium_avatar_url works to recreate
                # medium size avatars from the original if needed
                os.remove(medium_avatar_disk_path)
                self.assertFalse(os.path.exists(medium_avatar_disk_path))
                zerver.lib.upload.ensure_avatar_image(user_profile, medium=True)
                self.assertTrue(os.path.exists(medium_avatar_disk_path))

                # Verify whether the avatar_version gets incremented with every new upload
                self.assertEqual(user_profile.avatar_version, version)
                version += 1
|
2017-01-28 19:05:20 +01:00
|
|
|
|
2018-06-06 14:30:26 +02:00
|
|
|
    def test_copy_avatar_image(self) -> None:
        """Copying default settings from one user to a new user should
        duplicate the source user's avatar files (standard, original, and
        medium renditions) under the target user's own avatar path."""
        # Upload an avatar for the source user (hamlet).
        self.login("hamlet")
        with get_test_image_file("img.png") as image_file:
            self.client_post("/json/users/me/avatar", {"file": image_file})

        source_user_profile = self.example_user("hamlet")
        target_user_profile = do_create_user(
            "user@zulip.com", "password", get_realm("zulip"), "user", acting_user=None
        )

        # This is what should copy the avatar files to the target user.
        copy_default_settings(source_user_profile, target_user_profile)

        # The two users must have distinct on-disk paths, but identical bytes.
        source_path_id = avatar_disk_path(source_user_profile)
        target_path_id = avatar_disk_path(target_user_profile)
        self.assertNotEqual(source_path_id, target_path_id)
        with open(source_path_id, "rb") as source, open(target_path_id, "rb") as target:
            self.assertEqual(source.read(), target.read())

        # The full-resolution "original" rendition is copied too.
        source_original_path_id = avatar_disk_path(source_user_profile, original=True)
        target_original_path_id = avatar_disk_path(target_user_profile, original=True)
        with (
            open(source_original_path_id, "rb") as source,
            open(target_original_path_id, "rb") as target,
        ):
            self.assertEqual(source.read(), target.read())

        # ... as is the medium-size rendition.
        source_medium_path_id = avatar_disk_path(source_user_profile, medium=True)
        target_medium_path_id = avatar_disk_path(target_user_profile, medium=True)
        with (
            open(source_medium_path_id, "rb") as source,
            open(target_medium_path_id, "rb") as target,
        ):
            self.assertEqual(source.read(), target.read())
|
2018-06-06 14:30:26 +02:00
|
|
|
|
2018-09-07 17:44:40 +02:00
|
|
|
def test_delete_avatar_image(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
with get_test_image_file("img.png") as image_file:
|
|
|
|
self.client_post("/json/users/me/avatar", {"file": image_file})
|
2018-09-07 17:44:40 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
2018-09-07 17:44:40 +02:00
|
|
|
|
|
|
|
avatar_path_id = avatar_disk_path(user)
|
|
|
|
avatar_original_path_id = avatar_disk_path(user, original=True)
|
|
|
|
avatar_medium_path_id = avatar_disk_path(user, medium=True)
|
|
|
|
|
|
|
|
self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_USER)
|
|
|
|
self.assertTrue(os.path.isfile(avatar_path_id))
|
|
|
|
self.assertTrue(os.path.isfile(avatar_original_path_id))
|
|
|
|
self.assertTrue(os.path.isfile(avatar_medium_path_id))
|
|
|
|
|
2022-04-14 23:49:26 +02:00
|
|
|
do_delete_avatar_image(user, acting_user=user)
|
2018-09-07 17:44:40 +02:00
|
|
|
|
|
|
|
self.assertEqual(user.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)
|
|
|
|
self.assertFalse(os.path.isfile(avatar_path_id))
|
|
|
|
self.assertFalse(os.path.isfile(avatar_original_path_id))
|
|
|
|
self.assertFalse(os.path.isfile(avatar_medium_path_id))
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_invalid_avatars(self) -> None:
        """
        A POST request to /json/users/me/avatar with an invalid file should fail.
        """
        # Each entry is (filename, is_valid_image_format): files whose format
        # is recognized but whose content is corrupt produce a decode error;
        # unrecognized formats produce a format error.
        corrupt_files = [
            ("text.txt", False),
            ("unsupported.bmp", False),
            ("actually-a-bmp.png", True),
            ("corrupt.png", True),
            ("corrupt.gif", True),
        ]
        for fname, is_valid_image_format in corrupt_files:
            with self.subTest(fname=fname):
                # Older libvips versions cannot sniff the mismatched
                # magic-bytes case, so skip it there.
                if not at_least_libvips(8, 13) and "actually-a-" in fname:  # nocoverage
                    self.skipTest(
                        f"libvips is only version {libvips_version(0)}.{libvips_version(1)}"
                    )
                self.login("hamlet")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/users/me/avatar", {"file": fp})

                if is_valid_image_format:
                    self.assert_json_error(
                        result, "Could not decode image; did you upload an image file?"
                    )
                else:
                    self.assert_json_error(result, "Invalid image format")
                # A failed upload must not bump the avatar version.
                user_profile = self.example_user("hamlet")
                self.assertEqual(user_profile.avatar_version, 1)
|
2016-04-17 23:57:03 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
    def test_delete_avatar(self) -> None:
        """
        A DELETE request to /json/users/me/avatar should delete the profile picture and return gravatar URL
        """
        self.login("cordelia")
        cordelia = self.example_user("cordelia")
        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()

        # While avatar changes are disabled for the realm, deletion must fail.
        do_set_realm_property(cordelia.realm, "avatar_changes_disabled", True, acting_user=None)
        result = self.client_delete("/json/users/me/avatar")
        self.assert_json_error(result, "Avatar changes are disabled in this organization.", 400)

        # Re-enable avatar changes; the deletion should now succeed.
        do_set_realm_property(cordelia.realm, "avatar_changes_disabled", False, acting_user=None)
        result = self.client_delete("/json/users/me/avatar")
        user_profile = self.example_user("cordelia")

        response_dict = self.assert_json_success(result)
        self.assertIn("avatar_url", response_dict)
        self.assertEqual(response_dict["avatar_url"], avatar_url(user_profile))

        # The user falls back to gravatar, and the version is bumped so
        # clients invalidate cached copies.
        self.assertEqual(user_profile.avatar_source, UserProfile.AVATAR_FROM_GRAVATAR)
        self.assertEqual(user_profile.avatar_version, 2)
|
2016-12-21 18:34:03 +01:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_avatar_upload_file_size_error(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
2024-07-14 20:30:42 +02:00
|
|
|
with (
|
|
|
|
get_test_image_file(self.correct_files[0][0]) as fp,
|
|
|
|
self.settings(MAX_AVATAR_FILE_SIZE_MIB=0),
|
|
|
|
):
|
|
|
|
result = self.client_post("/json/users/me/avatar", {"file": fp})
|
2020-06-15 23:22:24 +02:00
|
|
|
self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")
|
2017-03-06 06:22:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
class RealmIconTest(UploadSerializeMixin, ZulipTestCase):
    """Tests for the /json/realm/icon endpoints: upload, fetch, and delete of
    the organization's icon."""

    def test_multiple_upload_failure(self) -> None:
        """
        Attempting to upload two files should fail.
        """
        # Log in as admin
        self.login("iago")
        with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
            result = self.client_post("/json/realm/icon", {"f1": fp1, "f2": fp2})
        self.assert_json_error(result, "You must upload exactly one icon.")

    def test_no_file_upload_failure(self) -> None:
        """
        Calling this endpoint with no files should fail.
        """
        self.login("iago")

        result = self.client_post("/json/realm/icon")
        self.assert_json_error(result, "You must upload exactly one icon.")

    # (source filename, expected resized fixture or None when the result is
    # platform-dependent and can't be compared byte-for-byte)
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]

    def test_no_admin_user_upload(self) -> None:
        # Only organization administrators may change the realm icon.
        self.login("hamlet")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post("/json/realm/icon", {"file": fp})
        self.assert_json_error(result, "Must be an organization administrator")

    def test_get_gravatar_icon(self) -> None:
        self.login("hamlet")
        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_FROM_GRAVATAR, acting_user=None)
        # Extra query parameters on the request are appended to the redirect.
        with self.settings(ENABLE_GRAVATAR=True):
            response = self.client_get("/json/realm/icon", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertEqual(redirect_url, realm_icon_url(realm) + "&foo=bar")

        with self.settings(ENABLE_GRAVATAR=False):
            response = self.client_get("/json/realm/icon", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertTrue(redirect_url.endswith(realm_icon_url(realm) + "&foo=bar"))

    def test_get_settings_realm_icon(self) -> None:
        # With gravatar disabled, the DEFAULT_AVATAR_URI setting is used.
        self.login("hamlet")
        with self.settings(
            ENABLE_GRAVATAR=False, DEFAULT_AVATAR_URI="http://other.server/icon.svg"
        ):
            response = self.client_get("/json/realm/icon", {"foo": "bar"})
            redirect_url = response["Location"]
            self.assertEqual(redirect_url, "http://other.server/icon.svg?foo=bar")

    def test_get_uploaded_realm_icon(self) -> None:
        self.login("hamlet")

        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)
        response = self.client_get("/json/realm/icon", {"foo": "bar"})
        redirect_url = response["Location"]
        self.assertTrue(redirect_url.endswith(realm_icon_url(realm) + "&foo=bar"))

    def test_valid_icons(self) -> None:
        """
        A POST request to /json/realm/icon with a valid file should return a URL
        and actually create a realm icon.
        """
        for fname, rfname in self.correct_files:
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/realm/icon", {"file": fp})
                realm = get_realm("zulip")
                response_dict = self.assert_json_success(result)
                self.assertIn("icon_url", response_dict)
                base = f"/user_avatars/{realm.id}/realm/icon.png"
                url = response_dict["icon_url"]
                self.assertEqual(base, url[: len(base)])

                if rfname is not None:
                    # Fetch the stored icon and verify it was resized to 100x100.
                    response = self.client_get(url)
                    data = response.getvalue()
                    response_image = pyvips.Image.new_from_buffer(data, "")
                    self.assertEqual(response_image.height, 100)
                    self.assertEqual(response_image.width, 100)

    def test_invalid_icons(self) -> None:
        """
        A POST request to /json/realm/icon with an invalid file should fail.
        """
        # (filename, is_valid_image_format): recognized-but-corrupt formats
        # produce a decode error; unrecognized formats a format error.
        corrupt_files = [
            ("text.txt", False),
            ("unsupported.bmp", False),
            ("corrupt.png", True),
            ("corrupt.gif", True),
        ]
        for fname, is_valid_image_format in corrupt_files:
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post("/json/realm/icon", {"file": fp})

                if is_valid_image_format:
                    self.assert_json_error(
                        result, "Could not decode image; did you upload an image file?"
                    )
                else:
                    self.assert_json_error(result, "Invalid image format")

    def test_delete_icon(self) -> None:
        """
        A DELETE request to /json/realm/icon should delete the realm icon and return gravatar URL
        """
        self.login("iago")
        realm = get_realm("zulip")
        do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)

        result = self.client_delete("/json/realm/icon")

        response_dict = self.assert_json_success(result)
        self.assertIn("icon_url", response_dict)
        # Re-fetch the realm to see the updated icon_source.
        realm = get_realm("zulip")
        self.assertEqual(response_dict["icon_url"], realm_icon_url(realm))
        self.assertEqual(realm.icon_source, Realm.ICON_FROM_GRAVATAR)

    def test_realm_icon_version(self) -> None:
        # Every new upload should increment the realm's icon_version.
        self.login("iago")
        realm = get_realm("zulip")
        icon_version = realm.icon_version
        self.assertEqual(icon_version, 1)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            self.client_post("/json/realm/icon", {"file": fp})
        realm = get_realm("zulip")
        self.assertEqual(realm.icon_version, icon_version + 1)

    def test_realm_icon_upload_file_size_error(self) -> None:
        # With the limit forced to 0 MiB, any real upload is rejected.
        self.login("iago")
        with (
            get_test_image_file(self.correct_files[0][0]) as fp,
            self.settings(MAX_ICON_FILE_SIZE_MIB=0),
        ):
            result = self.client_post("/json/realm/icon", {"file": fp})
        self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")
|
2017-03-06 06:22:28 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-16 01:26:55 +02:00
|
|
|
class RealmLogoTest(UploadSerializeMixin, ZulipTestCase):
    """Tests for the /json/realm/logo endpoints.  The `night` class attribute
    selects which logo variant is exercised; RealmNightLogoTest re-runs the
    whole suite with night=True."""

    night = False

    def test_multiple_upload_failure(self) -> None:
        """
        Attempting to upload two files should fail.
        """
        # Log in as admin
        self.login("iago")
        with get_test_image_file("img.png") as fp1, get_test_image_file("img.png") as fp2:
            result = self.client_post(
                "/json/realm/logo",
                {"f1": fp1, "f2": fp2, "night": orjson.dumps(self.night).decode()},
            )
        self.assert_json_error(result, "You must upload exactly one logo.")

    def test_no_file_upload_failure(self) -> None:
        """
        Calling this endpoint with no files should fail.
        """
        self.login("iago")

        result = self.client_post("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        self.assert_json_error(result, "You must upload exactly one logo.")

    # (source filename, expected resized fixture or None when the result is
    # platform-dependent and can't be compared byte-for-byte)
    correct_files = [
        ("img.png", "png_resized.png"),
        ("img.jpg", None),  # jpeg resizing is platform-dependent
        ("img.gif", "gif_resized.png"),
        ("img.tif", "tif_resized.png"),
        ("cmyk.jpg", None),
    ]

    def test_no_admin_user_upload(self) -> None:
        # Only organization administrators may change the realm logo.
        self.login("hamlet")
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        self.assert_json_error(result, "Must be an organization administrator")

    def test_upload_limited_plan_type(self) -> None:
        # Custom logos are a paid feature; limited-plan realms are rejected.
        user_profile = self.example_user("iago")
        do_change_realm_plan_type(user_profile.realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
        self.login_user(user_profile)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            result = self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        self.assert_json_error(result, "Available on Zulip Cloud Standard. Upgrade to access.")

    def test_get_default_logo(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_DEFAULT, self.night, acting_user=user_profile)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        is_night_str = str(self.night).lower()
        # With the default source, we redirect to the static Zulip logo.
        self.assertEqual(
            redirect_url,
            f"http://testserver/static/images/logo/zulip-org-logo.svg?version=0&night={is_night_str}",
        )

    def test_get_settings_logo(self) -> None:
        # DEFAULT_LOGO_URI overrides the built-in static logo.
        self.login("hamlet")
        with self.settings(DEFAULT_LOGO_URI="http://other.server/logo.svg"):
            response = self.client_get(
                "/json/realm/logo", {"night": orjson.dumps(self.night).decode()}
            )
            redirect_url = response["Location"]
            self.assertEqual(
                redirect_url,
                f"http://other.server/logo.svg?night={str(self.night).lower()}",
            )

    def test_get_realm_logo(self) -> None:
        user_profile = self.example_user("hamlet")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, self.night, acting_user=user_profile)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        self.assertTrue(
            redirect_url.endswith(
                get_realm_logo_url(realm, self.night) + f"&night={str(self.night).lower()}"
            )
        )

        is_night_str = str(self.night).lower()

        # The uploaded file name depends on which variant is being tested.
        if self.night:
            file_name = "night_logo.png"
        else:
            file_name = "logo.png"
        self.assertEqual(
            redirect_url,
            f"/user_avatars/{realm.id}/realm/{file_name}?version=2&night={is_night_str}",
        )

        # Downgrading to the limited plan keeps logo_source as UPLOADED in the
        # database, but the endpoint falls back to the default static logo.
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=user_profile)
        if self.night:
            self.assertEqual(realm.night_logo_source, Realm.LOGO_UPLOADED)
        else:
            self.assertEqual(realm.logo_source, Realm.LOGO_UPLOADED)
        response = self.client_get("/json/realm/logo", {"night": orjson.dumps(self.night).decode()})
        redirect_url = response["Location"]
        self.assertEqual(
            redirect_url,
            f"http://testserver/static/images/logo/zulip-org-logo.svg?version=0&night={is_night_str}",
        )

    def test_valid_logos(self) -> None:
        """
        A POST request to /json/realm/logo with a valid file should return a URL
        and actually create a realm logo.
        """
        for fname, rfname in self.correct_files:
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post(
                        "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
                    )
                realm = get_realm("zulip")
                self.assert_json_success(result)
                logo_url = get_realm_logo_url(realm, self.night)

                if rfname is not None:
                    response = self.client_get(logo_url)
                    data = response.getvalue()
                    # size should be 100 x 100 because thumbnail keeps aspect ratio
                    # while trying to fit in a 800 x 100 box without losing part of the image
                    response_image = pyvips.Image.new_from_buffer(data, "")
                    self.assertEqual(response_image.height, 100)
                    self.assertEqual(response_image.width, 100)

    def test_invalid_logo_upload(self) -> None:
        """
        A POST request to /json/realm/logo with an invalid file should fail.
        """
        # (filename, is_valid_image_format): recognized-but-corrupt formats
        # produce a decode error; unrecognized formats a format error.
        corrupt_files = [
            ("text.txt", False),
            ("unsupported.bmp", False),
            ("corrupt.png", True),
            ("corrupt.gif", True),
        ]
        for fname, is_valid_image_format in corrupt_files:
            with self.subTest(fname=fname):
                self.login("iago")
                with get_test_image_file(fname) as fp:
                    result = self.client_post(
                        "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
                    )

                if is_valid_image_format:
                    self.assert_json_error(
                        result, "Could not decode image; did you upload an image file?"
                    )
                else:
                    self.assert_json_error(result, "Invalid image format")

    def test_delete_logo(self) -> None:
        """
        A DELETE request to /json/realm/logo should delete the realm logo and return gravatar URL
        """
        user_profile = self.example_user("iago")
        self.login_user(user_profile)
        realm = user_profile.realm
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, self.night, acting_user=user_profile)
        result = self.client_delete(
            "/json/realm/logo", {"night": orjson.dumps(self.night).decode()}
        )
        self.assert_json_success(result)
        # Re-fetch the realm to see the reset logo source.
        realm = get_realm("zulip")
        if self.night:
            self.assertEqual(realm.night_logo_source, Realm.LOGO_DEFAULT)
        else:
            self.assertEqual(realm.logo_source, Realm.LOGO_DEFAULT)

    def test_logo_version(self) -> None:
        # Every new upload should increment the relevant logo version counter.
        self.login("iago")
        realm = get_realm("zulip")
        if self.night:
            version = realm.night_logo_version
        else:
            version = realm.logo_version
        self.assertEqual(version, 1)
        with get_test_image_file(self.correct_files[0][0]) as fp:
            self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        realm = get_realm("zulip")
        if self.night:
            self.assertEqual(realm.night_logo_version, version + 1)
        else:
            self.assertEqual(realm.logo_version, version + 1)

    def test_logo_upload_file_size_error(self) -> None:
        # With the limit forced to 0 MiB, any real upload is rejected.
        self.login("iago")
        with (
            get_test_image_file(self.correct_files[0][0]) as fp,
            self.settings(MAX_LOGO_FILE_SIZE_MIB=0),
        ):
            result = self.client_post(
                "/json/realm/logo", {"file": fp, "night": orjson.dumps(self.night).decode()}
            )
        self.assert_json_error(result, "Uploaded file is larger than the allowed limit of 0 MiB")
|
2018-08-16 01:26:55 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-27 08:25:10 +01:00
|
|
|
class RealmNightLogoTest(RealmLogoTest):
    # Run the same tests as for RealmLogoTest, just with dark theme enabled
    night = True
|
|
|
|
|
2016-03-24 20:24:01 +01:00
|
|
|
|
2024-07-11 21:10:17 +02:00
|
|
|
class EmojiTest(UploadSerializeMixin, ZulipTestCase):
    """Tests for uploading custom realm emoji via /json/realm/emoji/new,
    exercising validation and the resize path (mocked)."""

    def test_upload_emoji(self) -> None:
        # A valid PNG uploads successfully.
        self.login("iago")
        with get_test_image_file("img.png") as f:
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
        self.assert_json_success(result)

    def test_non_image(self) -> None:
        """Non-image is not resized"""
        self.login("iago")
        with (
            get_test_image_file("text.txt") as f,
            patch("zerver.lib.upload.resize_emoji", return_value=(b"a", None)) as resize_mock,
        ):
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
            self.assert_json_error(result, "Invalid image format")
            # Format validation rejects the file before resizing is attempted.
            resize_mock.assert_not_called()

    # NOTE(review): method name has a typo ("upsupported" -> "unsupported");
    # left as-is since renaming would change the test's public identifier.
    def test_upsupported_format(self) -> None:
        """Invalid format is not resized"""
        self.login("iago")
        with (
            get_test_image_file("img.bmp") as f,
            patch("zerver.lib.upload.resize_emoji", return_value=(b"a", None)) as resize_mock,
        ):
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
            self.assert_json_error(result, "Invalid image format")
            # Format validation rejects the file before resizing is attempted.
            resize_mock.assert_not_called()

    def test_upload_too_big_after_resize(self) -> None:
        """Non-animated image is too big after resizing"""
        self.login("iago")
        with (
            get_test_image_file("img.png") as f,
            patch(
                "zerver.lib.upload.resize_emoji", return_value=(b"a" * (200 * 1024), None)
            ) as resize_mock,
        ):
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
            self.assert_json_error(result, "Image size exceeds limit")
            resize_mock.assert_called_once()

    def test_upload_big_after_animated_resize(self) -> None:
        """A big animated image is fine as long as the still is small"""
        self.login("iago")
        with (
            get_test_image_file("animated_img.gif") as f,
            patch(
                "zerver.lib.upload.resize_emoji", return_value=(b"a" * (200 * 1024), b"aaa")
            ) as resize_mock,
        ):
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
            self.assert_json_success(result)
            resize_mock.assert_called_once()

    def test_upload_too_big_after_animated_resize_still(self) -> None:
        """Still of animated image is too big after resizing"""
        self.login("iago")
        with (
            get_test_image_file("animated_img.gif") as f,
            patch(
                "zerver.lib.upload.resize_emoji", return_value=(b"aaa", b"a" * (200 * 1024))
            ) as resize_mock,
        ):
            result = self.client_post("/json/realm/emoji/new", {"f1": f})
            self.assert_json_error(result, "Image size exceeds limit")
            resize_mock.assert_called_once()
|
2024-07-11 21:10:17 +02:00
|
|
|
|
|
|
|
|
2020-07-01 04:19:54 +02:00
|
|
|
class SanitizeNameTests(ZulipTestCase):
    """Checks that sanitize_name strips disallowed characters while
    preserving extensions, leading dots, and non-ASCII word characters."""

    def test_file_name(self) -> None:
        # Table of (raw filename, expected sanitized filename) pairs.
        cases = [
            ("test.txt", "test.txt"),
            (".hidden", ".hidden"),
            (".hidden.txt", ".hidden.txt"),
            ("tarball.tar.gz", "tarball.tar.gz"),
            (".hidden_tarball.tar.gz", ".hidden_tarball.tar.gz"),
            ("Testing{}*&*#().ta&&%$##&&r.gz", "Testing.tar.gz"),
            ("*testingfile?*.txt", "testingfile.txt"),
            ("snowman☃.txt", "snowman.txt"),
            ("테스트.txt", "테스트.txt"),
            ('~/."\\`\\?*"u0`000ssh/test.t**{}ar.gz', ".u0000sshtest.tar.gz"),
        ]
        for raw_name, expected in cases:
            self.assertEqual(sanitize_name(raw_name), expected)
|
2018-05-14 21:33:51 +02:00
|
|
|
|
|
|
|
|
|
|
|
class UploadSpaceTests(UploadSerializeMixin, ZulipTestCase):
|
2023-10-12 19:43:45 +02:00
|
|
|
    @override
    def setUp(self) -> None:
        # Cache the realm and an example user so each upload-space test can
        # reference them without repeated lookups.
        super().setUp()
        self.realm = get_realm("zulip")
        self.user_profile = self.example_user("hamlet")
|
2018-05-14 21:33:51 +02:00
|
|
|
|
|
|
|
def test_currently_used_upload_space(self) -> None:
|
2023-06-08 21:46:38 +02:00
|
|
|
self.assertEqual(None, cache_get(get_realm_used_upload_space_cache_key(self.realm.id)))
|
2019-01-11 13:41:52 +01:00
|
|
|
self.assertEqual(0, self.realm.currently_used_upload_space_bytes())
|
2023-06-08 21:46:38 +02:00
|
|
|
self.assertEqual(0, cache_get(get_realm_used_upload_space_cache_key(self.realm.id))[0])
|
2018-05-14 21:33:51 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
data = b"zulip!"
|
2024-06-20 19:52:55 +02:00
|
|
|
upload_message_attachment("dummy.txt", "text/plain", data, self.user_profile)
|
2019-01-17 12:05:09 +01:00
|
|
|
# notify_attachment_update function calls currently_used_upload_space_bytes which
|
|
|
|
# updates the cache.
|
2023-06-08 21:46:38 +02:00
|
|
|
self.assert_length(data, cache_get(get_realm_used_upload_space_cache_key(self.realm.id))[0])
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(data, self.realm.currently_used_upload_space_bytes())
|
2018-05-14 21:33:51 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
data2 = b"more-data!"
|
2024-06-20 19:52:55 +02:00
|
|
|
upload_message_attachment("dummy2.txt", "text/plain", data2, self.user_profile)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2023-06-08 21:46:38 +02:00
|
|
|
len(data) + len(data2),
|
|
|
|
cache_get(get_realm_used_upload_space_cache_key(self.realm.id))[0],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-01-17 12:05:09 +01:00
|
|
|
self.assertEqual(len(data) + len(data2), self.realm.currently_used_upload_space_bytes())
|
2019-01-14 07:46:31 +01:00
|
|
|
|
|
|
|
attachment = Attachment.objects.get(file_name="dummy.txt")
|
|
|
|
attachment.file_name = "dummy1.txt"
|
|
|
|
attachment.save(update_fields=["file_name"])
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2023-06-08 21:46:38 +02:00
|
|
|
len(data) + len(data2),
|
|
|
|
cache_get(get_realm_used_upload_space_cache_key(self.realm.id))[0],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2019-01-14 07:46:31 +01:00
|
|
|
self.assertEqual(len(data) + len(data2), self.realm.currently_used_upload_space_bytes())
|
|
|
|
|
|
|
|
attachment.delete()
|
2023-06-08 21:46:38 +02:00
|
|
|
self.assertEqual(None, cache_get(get_realm_used_upload_space_cache_key(self.realm.id)))
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(data2, self.realm.currently_used_upload_space_bytes())
|
2019-01-17 12:04:54 +01:00
|
|
|
|
2024-04-19 03:06:53 +02:00
|
|
|
now = timezone_now()
|
|
|
|
RealmCount.objects.create(
|
|
|
|
realm=self.realm,
|
|
|
|
property="upload_quota_used_bytes::day",
|
|
|
|
end_time=now,
|
|
|
|
value=len(data2),
|
|
|
|
)
|
|
|
|
# Purge the cache since we want to actually execute the function.
|
|
|
|
cache_delete(get_realm_used_upload_space_cache_key(self.realm.id))
|
|
|
|
|
|
|
|
self.assert_length(data2, self.realm.currently_used_upload_space_bytes())
|
|
|
|
|
|
|
|
data3 = b"even-more-data!"
|
2024-06-20 19:52:55 +02:00
|
|
|
upload_message_attachment("dummy3.txt", "text/plain", data3, self.user_profile)
|
2024-04-19 03:06:53 +02:00
|
|
|
self.assertEqual(len(data2) + len(data3), self.realm.currently_used_upload_space_bytes())
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-01-13 07:27:30 +01:00
|
|
|
class DecompressionBombTests(ZulipTestCase):
    """Verify that every image-accepting endpoint rejects a PNG
    decompression bomb with an error instead of attempting to inflate it."""

    @override
    def setUp(self) -> None:
        super().setUp()
        # All endpoints that accept an uploaded image from the client.
        self.test_urls = [
            "/json/users/me/avatar",
            "/json/realm/logo",
            "/json/realm/icon",
            "/json/realm/emoji/bomb_emoji",
        ]

    def test_decompression_bomb(self) -> None:
        self.login("iago")
        with get_test_image_file("bomb.png") as fp:
            for url in self.test_urls:
                # Rewind so every endpoint receives the full file.
                fp.seek(0, 0)
                payload: dict[str, object] = {"f1": fp}
                if url == "/json/realm/logo":
                    # The logo endpoint additionally requires the
                    # day/night variant flag.
                    payload["night"] = orjson.dumps(False).decode()
                result = self.client_post(url, payload)
                self.assert_json_error(result, "Image size exceeds limit.")
|