import datetime
import os
import shutil
from typing import Any, Callable, Dict, FrozenSet, List, Optional, Set, Tuple
from unittest.mock import patch

import orjson
from django.conf import settings
from django.db.models import Q
from django.utils.timezone import now as timezone_now

from analytics.models import UserCount
from zerver.lib import upload
from zerver.lib.actions import (
    check_add_reaction,
    check_add_realm_emoji,
    do_add_alert_words,
    do_add_reaction,
    do_change_icon_source,
    do_change_logo_source,
    do_change_realm_plan_type,
    do_create_user,
    do_deactivate_user,
    do_mute_topic,
    do_mute_user,
    do_update_user_activity,
    do_update_user_activity_interval,
    do_update_user_custom_profile_data_if_changed,
    do_update_user_presence,
    do_update_user_status,
    try_add_realm_custom_profile_field,
)
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.bot_config import set_bot_config
from zerver.lib.bot_lib import StateHandler
from zerver.lib.export import Record, do_export_realm, do_export_user, export_usermessages_batch
from zerver.lib.import_realm import do_import_realm, get_incoming_message_ids
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    create_s3_buckets,
    get_test_image_file,
    most_recent_message,
    most_recent_usermessage,
    read_test_image_file,
    use_s3_backend,
)
from zerver.lib.upload import claim_attachment, upload_avatar_image, upload_message_file
from zerver.lib.user_topics import add_topic_mute
from zerver.models import (
    AlertWord,
    Attachment,
    BotConfigData,
    BotStorageData,
    CustomProfileField,
    CustomProfileFieldValue,
    Huddle,
    Message,
    MutedUser,
    Reaction,
    Realm,
    RealmAuditLog,
    RealmEmoji,
    RealmUserDefault,
    Recipient,
    Stream,
    Subscription,
    UserGroup,
    UserGroupMembership,
    UserHotspot,
    UserMessage,
    UserPresence,
    UserProfile,
    UserStatus,
    UserTopic,
    get_active_streams,
    get_client,
    get_huddle_hash,
    get_stream,
)
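
# Most of the helpers and assertions below operate on the export tree written
# under get_output_dir(): realm.json and messages-000001.json at the top
# level, plus uploads/, avatars/, emoji/, and realm_icons/ subdirectories that
# each carry a records.json manifest. This summary is inferred from how the
# tests read the output; the authoritative layout lives in zerver.lib.export.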
def make_datetime(val: float) -> datetime.datetime:
    return datetime.datetime.fromtimestamp(val, tz=datetime.timezone.utc)


def get_output_dir() -> str:
    return os.path.join(settings.TEST_WORKER_DIR, "test-export")


def make_export_output_dir() -> str:
    output_dir = get_output_dir()
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)
    return output_dir


def read_json(fn: str) -> Any:
    output_dir = get_output_dir()
    full_fn = os.path.join(output_dir, fn)
    with open(full_fn, "rb") as f:
        return orjson.loads(f.read())


def export_fn(fn: str) -> str:
    output_dir = get_output_dir()
    return os.path.join(output_dir, fn)


def get_user_id(r: Realm, full_name: str) -> int:
    return UserProfile.objects.get(realm=r, full_name=full_name).id


def get_huddle_hashes(r: Realm) -> str:
    cordelia_full_name = "Cordelia, Lear's daughter"
    hamlet_full_name = "King Hamlet"
    othello_full_name = "Othello, the Moor of Venice"

    user_id_list = [
        get_user_id(r, cordelia_full_name),
        get_user_id(r, hamlet_full_name),
        get_user_id(r, othello_full_name),
    ]

    huddle_hash = get_huddle_hash(user_id_list)
    return huddle_hash


class ExportFile(ZulipTestCase):
    """This class is a container for shared helper functions
    used for both the realm-level and user-level export tests."""

    def setUp(self) -> None:
        super().setUp()
        self.rm_tree(settings.LOCAL_UPLOADS_DIR)

        # Deleting LOCAL_UPLOADS_DIR results in the test database
        # having RealmEmoji records without associated files.
        #
        # Even if we didn't delete them, the way that the test runner
        # varies settings.LOCAL_UPLOADS_DIR for each test worker
        # process would likely result in this being necessary anyway.
        RealmEmoji.objects.all().delete()
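
    # upload_files_for_user() creates the per-user fixtures that the export
    # assertions expect: a "dummy.txt" attachment claimed against the user's
    # most recent message, an uploaded avatar (with avatar_source forced to
    # "U"), and one realm emoji authored by the user.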
    def upload_files_for_user(
        self, user_profile: UserProfile, *, emoji_name: str = "whatever"
    ) -> None:
        message = most_recent_message(user_profile)
        url = upload_message_file(
            "dummy.txt", len(b"zulip!"), "text/plain", b"zulip!", user_profile
        )
        attachment_path_id = url.replace("/user_uploads/", "")
        claim_attachment(
            user_profile=user_profile,
            path_id=attachment_path_id,
            message=message,
            is_message_realm_public=True,
        )

        with get_test_image_file("img.png") as img_file:
            upload_avatar_image(img_file, user_profile, user_profile)

        user_profile.avatar_source = "U"
        user_profile.save()

        realm = user_profile.realm

        with get_test_image_file("img.png") as img_file:
            check_add_realm_emoji(realm, emoji_name, user_profile, img_file)

    def upload_files_for_realm(self, user_profile: UserProfile) -> None:
        realm = user_profile.realm

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_icon_image(img_file, user_profile)
            do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(img_file, user_profile, night=False)
            do_change_logo_source(realm, Realm.LOGO_UPLOADED, False, acting_user=user_profile)
        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(img_file, user_profile, night=True)
            do_change_logo_source(realm, Realm.LOGO_UPLOADED, True, acting_user=user_profile)

    def verify_attachment_json(self, user: UserProfile) -> None:
        attachment = Attachment.objects.get(owner=user)
        (record,) = read_json("attachment.json")["zerver_attachment"]
        self.assertEqual(record["path_id"], attachment.path_id)
        self.assertEqual(record["owner"], attachment.owner_id)
        self.assertEqual(record["realm"], attachment.realm_id)
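
    # verify_uploads() checks both the exported file contents and the
    # uploads/records.json manifest. Based on the assertions below, each record
    # looks roughly like the following (a sketch, not an exhaustive schema):
    #
    #   {"path": "<realm_id>/.../dummy.txt",
    #    "s3_path": "<same as path>",
    #    "realm_id": <realm_id>,          # asserted for S3-backed exports
    #    "user_profile_id": <user_id>}    # asserted for S3-backed exports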
    def verify_uploads(self, user: UserProfile, is_s3: bool) -> None:
        realm = user.realm

        attachment = Attachment.objects.get(owner=user)
        path_id = attachment.path_id

        # Test uploads
        fn = export_fn(f"uploads/{path_id}")
        with open(fn) as f:
            self.assertEqual(f.read(), "zulip!")
        (record,) = read_json("uploads/records.json")
        self.assertEqual(record["path"], path_id)
        self.assertEqual(record["s3_path"], path_id)

        if is_s3:
            realm_str, random_hash, file_name = path_id.split("/")
            self.assertEqual(realm_str, str(realm.id))
            self.assert_length(random_hash, 24)
            self.assertEqual(file_name, "dummy.txt")

            self.assertEqual(record["realm_id"], realm.id)
            self.assertEqual(record["user_profile_id"], user.id)
        else:
            realm_str, slot, random_hash, file_name = path_id.split("/")
            self.assertEqual(realm_str, str(realm.id))
            # We randomly pick a number between 0 and 255 and turn it into
            # hex in order to avoid large directories.
            assert len(slot) <= 2
            self.assert_length(random_hash, 24)
            self.assertEqual(file_name, "dummy.txt")

    def verify_emojis(self, user: UserProfile, is_s3: bool) -> None:
        realm = user.realm

        realm_emoji = RealmEmoji.objects.get(author=user)
        file_name = realm_emoji.file_name
        assert file_name.endswith(".png")

        emoji_path = f"{realm.id}/emoji/images/{file_name}"
        emoji_dir = export_fn(f"emoji/{realm.id}/emoji/images")
        self.assertEqual(os.listdir(emoji_dir), [file_name])

        (record,) = read_json("emoji/records.json")
        self.assertEqual(record["file_name"], file_name)
        self.assertEqual(record["path"], emoji_path)
        self.assertEqual(record["s3_path"], emoji_path)

        if is_s3:
            self.assertEqual(record["realm_id"], realm.id)
            self.assertEqual(record["user_profile_id"], user.id)

    def verify_realm_logo_and_icon(self) -> None:
        records = read_json("realm_icons/records.json")
        image_files = set()

        for record in records:
            self.assertEqual(record["path"], record["s3_path"])
            image_path = export_fn(f"realm_icons/{record['path']}")
            if image_path.endswith(".original"):
                with open(image_path, "rb") as image_file:
                    image_data = image_file.read()
                self.assertEqual(image_data, read_test_image_file("img.png"))
            else:
                self.assertTrue(os.path.exists(image_path))

            image_files.add(os.path.basename(image_path))

        self.assertEqual(
            set(image_files),
            {
                "night_logo.png",
                "logo.original",
                "logo.png",
                "icon.png",
                "night_logo.original",
                "icon.original",
            },
        )

    def verify_avatars(self, user: UserProfile) -> None:
        records = read_json("avatars/records.json")
        exported_paths = set()

        # Make sure all files in records.json got written.
        for record in records:
            self.assertEqual(record["path"], record["s3_path"])
            path = record["path"]
            fn = export_fn(f"avatars/{path}")
            assert os.path.exists(fn)

            if path.endswith(".original"):
                exported_paths.add(path)

                # For now we know that all our tests use
                # avatars based on img.png. This may change some
                # day.
                with open(fn, "rb") as fb:
                    fn_data = fb.read()

                self.assertEqual(fn_data, read_test_image_file("img.png"))

        assert exported_paths

        # Right now we expect only our user to have an uploaded avatar.
        db_paths = {user_avatar_path(user) + ".original"}
        self.assertEqual(exported_paths, db_paths)


class RealmImportExportTest(ExportFile):
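    # export_realm() drives do_export_realm() with threads=0, which leaves the
    # user messages in a messages-000001.json.partial file; the explicit
    # export_usermessages_batch() call converts that into the final
    # messages-000001.json that the assertions read via read_json().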
    def export_realm(
        self,
        realm: Realm,
        exportable_user_ids: Optional[Set[int]] = None,
        consent_message_id: Optional[int] = None,
    ) -> None:
        output_dir = make_export_output_dir()
        with patch("zerver.lib.export.create_soft_link"), self.assertLogs(level="INFO"):
            do_export_realm(
                realm=realm,
                output_dir=output_dir,
                threads=0,
                exportable_user_ids=exportable_user_ids,
                consent_message_id=consent_message_id,
            )
            export_usermessages_batch(
                input_path=os.path.join(output_dir, "messages-000001.json.partial"),
                output_path=os.path.join(output_dir, "messages-000001.json"),
                consent_message_id=consent_message_id,
            )

    def test_export_files_from_local(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm
        self.upload_files_for_user(user)
        self.upload_files_for_realm(user)
        self.export_realm(realm)

        self.verify_attachment_json(user)
        self.verify_uploads(user, is_s3=False)
        self.verify_avatars(user)
        self.verify_emojis(user, is_s3=False)
        self.verify_realm_logo_and_icon()

    @use_s3_backend
    def test_export_files_from_s3(self) -> None:
        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)

        user = self.example_user("hamlet")
        realm = user.realm

        self.upload_files_for_user(user)
        self.upload_files_for_realm(user)
        self.export_realm(realm)

        self.verify_attachment_json(user)
        self.verify_uploads(user, is_s3=True)
        self.verify_avatars(user)
        self.verify_emojis(user, is_s3=True)
        self.verify_realm_logo_and_icon()

    def test_zulip_realm(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        default_bot = self.example_user("default_bot")
        pm_a_msg_id = self.send_personal_message(self.example_user("AARON"), default_bot)
        pm_b_msg_id = self.send_personal_message(default_bot, self.example_user("iago"))
        pm_c_msg_id = self.send_personal_message(
            self.example_user("othello"), self.example_user("hamlet")
        )

        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        realm_user_default.default_language = "de"
        realm_user_default.save()

        self.export_realm(realm)

        data = read_json("realm.json")
        self.assert_length(data["zerver_userprofile_crossrealm"], 3)
        self.assert_length(data["zerver_userprofile_mirrordummy"], 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {"Denmark", "Rome", "Scotland", "Venice", "Verona", "core team"},
        )

        exported_alert_words = data["zerver_alertword"]

        # We set up 4 alert words for Hamlet, Cordelia, etc.
        # when we populate the test database.
        num_zulip_users = 10
        self.assert_length(exported_alert_words, num_zulip_users * 4)

        self.assertIn("robotics", {r["word"] for r in exported_alert_words})

        exported_realm_user_default = data["zerver_realmuserdefault"]
        self.assert_length(exported_realm_user_default, 1)
        self.assertEqual(exported_realm_user_default[0]["default_language"], "de")

        data = read_json("messages-000001.json")
        um = UserMessage.objects.all()[0]
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)

    def test_export_realm_with_exportable_user_ids(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        user_ids = {iago.id, hamlet.id}

        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        self.export_realm(realm, exportable_user_ids=user_ids)

        data = read_json("realm.json")

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertNotIn("default-bot@zulip.com", exported_user_emails)
        self.assertNotIn(self.example_email("cordelia"), exported_user_emails)

        dummy_user_emails = self.get_set(data["zerver_userprofile_mirrordummy"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), dummy_user_emails)
        self.assertIn(self.example_email("othello"), dummy_user_emails)
        self.assertIn("default-bot@zulip.com", dummy_user_emails)
        self.assertNotIn(self.example_email("iago"), dummy_user_emails)
        self.assertNotIn(self.example_email("hamlet"), dummy_user_emails)

        data = read_json("messages-000001.json")

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertNotIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)
        self.assertIn(pm_d_msg_id, exported_message_ids)
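    # With consent_message_id set, full message history is exported only where
    # a consenting user has access: the assertions below check that private
    # streams and huddles with no consenting member are excluded, while
    # public-stream traffic is always exported. Here "consent" means reacting
    # to the designated consent message (iago and hamlet react to it below).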
    def test_export_realm_with_member_consent(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        # Create private streams and subscribe users for testing export
        create_stream_if_needed(realm, "Private A", invite_only=True)
        self.subscribe(self.example_user("iago"), "Private A")
        self.subscribe(self.example_user("othello"), "Private A")
        self.send_stream_message(self.example_user("iago"), "Private A", "Hello stream A")

        create_stream_if_needed(realm, "Private B", invite_only=True)
        self.subscribe(self.example_user("prospero"), "Private B")
        stream_b_message_id = self.send_stream_message(
            self.example_user("prospero"), "Private B", "Hello stream B"
        )
        self.subscribe(self.example_user("hamlet"), "Private B")

        create_stream_if_needed(realm, "Private C", invite_only=True)
        self.subscribe(self.example_user("othello"), "Private C")
        self.subscribe(self.example_user("prospero"), "Private C")
        stream_c_message_id = self.send_stream_message(
            self.example_user("othello"), "Private C", "Hello stream C"
        )

        # Create huddles
        self.send_huddle_message(
            self.example_user("iago"), [self.example_user("cordelia"), self.example_user("AARON")]
        )
        huddle_a = Huddle.objects.last()
        self.send_huddle_message(
            self.example_user("ZOE"),
            [self.example_user("hamlet"), self.example_user("AARON"), self.example_user("othello")],
        )
        huddle_b = Huddle.objects.last()

        huddle_c_message_id = self.send_huddle_message(
            self.example_user("AARON"),
            [self.example_user("cordelia"), self.example_user("ZOE"), self.example_user("othello")],
        )

        # Create PMs
        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        # Send message advertising export and make users react
        self.send_stream_message(
            self.example_user("othello"),
            "Verona",
            topic_name="Export",
            content="Thumbs up for export",
        )
        message = Message.objects.last()
        consented_user_ids = [self.example_user(user).id for user in ["iago", "hamlet"]]
        do_add_reaction(
            self.example_user("iago"), message, "outbox", "1f4e4", Reaction.UNICODE_EMOJI
        )
        do_add_reaction(
            self.example_user("hamlet"), message, "outbox", "1f4e4", Reaction.UNICODE_EMOJI
        )

        assert message is not None
        self.export_realm(realm, consent_message_id=message.id)

        data = read_json("realm.json")

        self.assert_length(data["zerver_userprofile_crossrealm"], 3)
        self.assert_length(data["zerver_userprofile_mirrordummy"], 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("othello"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {
                "core team",
                "Denmark",
                "Rome",
                "Scotland",
                "Venice",
                "Verona",
                "Private A",
                "Private B",
                "Private C",
            },
        )

        data = read_json("messages-000001.json")
        exported_usermessages = UserMessage.objects.filter(
            user_profile__in=[self.example_user("iago"), self.example_user("hamlet")]
        )
        um = exported_usermessages[0]
        self.assert_length(data["zerver_usermessage"], len(exported_usermessages))
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        public_stream_names = ["Denmark", "Rome", "Scotland", "Venice", "Verona"]
        public_stream_ids = Stream.objects.filter(name__in=public_stream_names).values_list(
            "id", flat=True
        )
        public_stream_recipients = Recipient.objects.filter(
            type_id__in=public_stream_ids, type=Recipient.STREAM
        )
        public_stream_message_ids = Message.objects.filter(
            recipient__in=public_stream_recipients
        ).values_list("id", flat=True)

        # Messages from Private stream C are not exported since no member gave consent
        private_stream_ids = Stream.objects.filter(
            name__in=["Private A", "Private B", "core team"]
        ).values_list("id", flat=True)
        private_stream_recipients = Recipient.objects.filter(
            type_id__in=private_stream_ids, type=Recipient.STREAM
        )
        private_stream_message_ids = Message.objects.filter(
            recipient__in=private_stream_recipients
        ).values_list("id", flat=True)

        pm_recipients = Recipient.objects.filter(
            type_id__in=consented_user_ids, type=Recipient.PERSONAL
        )
        pm_query = Q(recipient__in=pm_recipients) | Q(sender__in=consented_user_ids)
        exported_pm_ids = Message.objects.filter(pm_query).values_list("id", flat=True)

        # Third huddle is not exported since none of the members gave consent
        assert huddle_a is not None and huddle_b is not None
        huddle_recipients = Recipient.objects.filter(
            type_id__in=[huddle_a.id, huddle_b.id], type=Recipient.HUDDLE
        )
        pm_query = Q(recipient__in=huddle_recipients) | Q(sender__in=consented_user_ids)
        exported_huddle_ids = Message.objects.filter(pm_query).values_list("id", flat=True)

        exported_msg_ids = (
            set(public_stream_message_ids)
            | set(private_stream_message_ids)
            | set(exported_pm_ids)
            | set(exported_huddle_ids)
        )
        self.assertEqual(self.get_set(data["zerver_message"], "id"), exported_msg_ids)

        # TODO: This behavior is wrong and should be fixed. The message should not be exported
        # since it was sent before the only consented subscriber, hamlet, joined the stream.
        self.assertIn(stream_b_message_id, exported_msg_ids)

        self.assertNotIn(stream_c_message_id, exported_msg_ids)
        self.assertNotIn(huddle_c_message_id, exported_msg_ids)

        self.assertNotIn(pm_a_msg_id, exported_msg_ids)
        self.assertIn(pm_b_msg_id, exported_msg_ids)
        self.assertIn(pm_c_msg_id, exported_msg_ids)
        self.assertIn(pm_d_msg_id, exported_msg_ids)

    """
    Tests for import_realm
    """
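    # test_import_realm() populates many different row types in the "zulip"
    # realm, snapshots every registered realm getter, exports the realm,
    # imports it as "test-zulip", and then asserts both that the original
    # realm was left untouched and that each getter returns the same values
    # for the imported realm.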
    def test_import_realm(self) -> None:

        original_realm = Realm.objects.get(string_id="zulip")

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        with get_test_image_file("img.png") as img_file:
            realm_emoji = check_add_realm_emoji(
                realm=hamlet.realm, name="hawaii", author=hamlet, image_file=img_file
            )
            assert realm_emoji
            self.assertEqual(realm_emoji.name, "hawaii")

        # Deactivate a user to ensure such a case is covered.
        do_deactivate_user(self.example_user("aaron"), acting_user=None)
        # data to test import of huddles
        huddle = [
            self.example_user("hamlet"),
            self.example_user("othello"),
        ]
        self.send_huddle_message(
            self.example_user("cordelia"),
            huddle,
            "test huddle message",
        )

        user_mention_message = "@**King Hamlet** Hello"
        self.send_stream_message(self.example_user("iago"), "Verona", user_mention_message)

        stream_mention_message = "Subscribe to #**Denmark**"
        self.send_stream_message(self.example_user("hamlet"), "Verona", stream_mention_message)

        user_group_mention_message = "Hello @*hamletcharacters*"
        self.send_stream_message(self.example_user("othello"), "Verona", user_group_mention_message)

        special_characters_message = "```\n'\n```\n@**Polonius**"
        self.send_stream_message(self.example_user("iago"), "Denmark", special_characters_message)

        sample_user = self.example_user("hamlet")

        check_add_reaction(
            user_profile=cordelia,
            message_id=most_recent_message(hamlet).id,
            emoji_name="hawaii",
            emoji_code=None,
            reaction_type=None,
        )
        reaction = Reaction.objects.order_by("id").last()
        assert reaction

        # Verify strange invariant for Reaction/RealmEmoji.
        self.assertEqual(reaction.emoji_code, str(realm_emoji.id))

        # data to test import of hotspots
        UserHotspot.objects.create(
            user=sample_user,
            hotspot="intro_streams",
        )

        # data to test import of muted topic
        stream = get_stream("Verona", original_realm)
        recipient = stream.recipient
        assert recipient is not None
        add_topic_mute(
            user_profile=sample_user,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name="Verona2",
        )

        # data to test import of muted users
        do_mute_user(hamlet, cordelia)
        do_mute_user(cordelia, hamlet)
        do_mute_user(cordelia, othello)

        client = get_client("website")

        do_update_user_presence(sample_user, client, timezone_now(), UserPresence.ACTIVE)

        # send Cordelia to the islands
        do_update_user_status(
            user_profile=cordelia,
            away=True,
            status_text="in Hawaii",
            client_id=client.id,
            emoji_name="hawaii",
            emoji_code=str(realm_emoji.id),
            reaction_type=Reaction.REALM_EMOJI,
        )

        user_status = UserStatus.objects.order_by("id").last()
        assert user_status

        # Verify strange invariant for UserStatus/RealmEmoji.
        self.assertEqual(user_status.emoji_code, str(realm_emoji.id))

        # data to test import of botstoragedata and botconfigdata
        bot_profile = do_create_user(
            email="bot-1@zulip.com",
            password="test",
            realm=original_realm,
            full_name="bot",
            bot_type=UserProfile.EMBEDDED_BOT,
            bot_owner=sample_user,
            acting_user=None,
        )
        storage = StateHandler(bot_profile)
        storage.put("some key", "some value")

        set_bot_config(bot_profile, "entry 1", "value 1")

        realm_user_default = RealmUserDefault.objects.get(realm=original_realm)
        realm_user_default.default_language = "de"
        realm_user_default.twenty_four_hour_time = True
        realm_user_default.save()

        # We want to have an extra, malformed RealmEmoji with no .author
        # to test that upon import that gets fixed.
        with get_test_image_file("img.png") as img_file:
            new_realm_emoji = check_add_realm_emoji(
                realm=hamlet.realm, name="hawaii2", author=hamlet, image_file=img_file
            )
            assert new_realm_emoji is not None
        original_realm_emoji_count = RealmEmoji.objects.count()
        self.assertGreaterEqual(original_realm_emoji_count, 2)
        new_realm_emoji.author = None
        new_realm_emoji.save()

        getters = self.get_realm_getters()

        snapshots: Dict[str, object] = {}

        for f in getters:
            snapshots[f.__name__] = f(original_realm)

        self.export_realm(original_realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")

        # Make sure our export/import didn't somehow leak info into the
        # original realm.
        for f in getters:
            # One way this will fail is if you make a getter that doesn't
            # properly restrict its results to a single realm.
            if f(original_realm) != snapshots[f.__name__]:
                raise AssertionError(
                    f"""
                    The export/import process is corrupting your
                    original realm according to {f.__name__}!

                    If you wrote that getter, are you sure you
                    are only grabbing objects from one realm?
                    """
                )

        imported_realm = Realm.objects.get(string_id="test-zulip")

        # test realm
        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        self.assertNotEqual(imported_realm.id, original_realm.id)

        def assert_realm_values(f: Callable[[Realm], object]) -> None:
            orig_realm_result = f(original_realm)
            imported_realm_result = f(imported_realm)
            # orig_realm_result should be truthy and have some values, otherwise
            # the test is kind of meaningless
            assert orig_realm_result

            # It may be helpful to do print(f.__name__) if you are having
            # trouble debugging this.

            # print(f.__name__, orig_realm_result, imported_realm_result)
            self.assertEqual(orig_realm_result, imported_realm_result)

        for f in getters:
            assert_realm_values(f)

        self.verify_emoji_code_foreign_keys()

        # Our huddle hashes change, because hashes use ids that change.
        self.assertNotEqual(get_huddle_hashes(original_realm), get_huddle_hashes(imported_realm))

        # Test to highlight that bs4, which we use to do data-**id
        # replacements, modifies the HTML sometimes, e.g. replacing <br>
        # with </br> and ' with \'. The modifications don't affect how
        # the browser displays the rendered_content, so we are okay with
        # using bs4 for this. The lxml package has similar behavior.
        orig_polonius_user = self.example_user("polonius")
        original_msg = Message.objects.get(
            content=special_characters_message, sender__realm=original_realm
        )
        self.assertEqual(
            original_msg.rendered_content,
            '<div class="codehilite"><pre><span></span><code>&#39;\n</code></pre></div>\n'
            f'<p><span class="user-mention" data-user-id="{orig_polonius_user.id}">@Polonius</span></p>',
        )
        imported_polonius_user = UserProfile.objects.get(
            delivery_email=self.example_email("polonius"), realm=imported_realm
        )
        imported_msg = Message.objects.get(
            content=special_characters_message, sender__realm=imported_realm
        )
        self.assertEqual(
            imported_msg.rendered_content,
            '<div class="codehilite"><pre><span></span><code>\'\n</code></pre></div>\n'
            f'<p><span class="user-mention" data-user-id="{imported_polonius_user.id}">@Polonius</span></p>',
        )

        # Check recipient_id was generated correctly for the imported users and streams.
        for user_profile in UserProfile.objects.filter(realm=imported_realm):
            self.assertEqual(
                user_profile.recipient_id,
                Recipient.objects.get(type=Recipient.PERSONAL, type_id=user_profile.id).id,
            )
        for stream in Stream.objects.filter(realm=imported_realm):
            self.assertEqual(
                stream.recipient_id,
                Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id).id,
            )

        for huddle_object in Huddle.objects.all():
            # Huddles don't have a realm column, so we just test all Huddles for simplicity.
            self.assertEqual(
                huddle_object.recipient_id,
                Recipient.objects.get(type=Recipient.HUDDLE, type_id=huddle_object.id).id,
            )

        for user_profile in UserProfile.objects.filter(realm=imported_realm):
            # Check that all Subscriptions have the correct is_user_active set.
            self.assertEqual(
                Subscription.objects.filter(
                    user_profile=user_profile, is_user_active=user_profile.is_active
                ).count(),
                Subscription.objects.filter(user_profile=user_profile).count(),
            )
        # Verify that we've actually tested something meaningful instead of a blind import
        # with is_user_active=True used for everything.
        self.assertTrue(Subscription.objects.filter(is_user_active=False).exists())

        all_imported_realm_emoji = RealmEmoji.objects.filter(realm=imported_realm)
        self.assertEqual(all_imported_realm_emoji.count(), original_realm_emoji_count)
        for imported_realm_emoji in all_imported_realm_emoji:
            self.assertNotEqual(imported_realm_emoji.author, None)
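
    # get_realm_getters() builds the registry of realm-scoped probe functions
    # used by test_import_realm(): each @getter-decorated function takes a
    # Realm and returns comparable data, so the same probe can be run against
    # both the original and the imported realm.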
def get_realm_getters(self) -> List[Callable[[Realm], object]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
names = set()
|
2021-12-17 01:16:11 +01:00
|
|
|
getters: List[Callable[[Realm], object]] = []
|
2021-12-07 17:36:30 +01:00
|
|
|
|
2021-12-17 01:16:11 +01:00
|
|
|
def getter(f: Callable[[Realm], object]) -> Callable[[Realm], object]:
|
2021-12-07 17:36:30 +01:00
|
|
|
getters.append(f)
|
|
|
|
assert f.__name__.startswith("get_")
|
2021-12-07 18:46:35 +01:00
|
|
|
|
|
|
|
# Avoid dups
|
|
|
|
assert f.__name__ not in names
|
|
|
|
names.add(f.__name__)
|
2021-12-07 17:36:30 +01:00
|
|
|
return f
|
|
|
|
|
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_admin_bot_emails(r: Realm) -> Set[str]:
|
|
|
|
return {user.email for user in r.get_admin_users_and_bots()}
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_active_emails(r: Realm) -> Set[str]:
|
|
|
|
return {user.email for user in r.get_active_users()}
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_active_stream_names(r: Realm) -> Set[str]:
|
|
|
|
return {stream.name for stream in get_active_streams(r)}
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test recipients
|
2020-02-18 17:25:43 +01:00
|
|
|
def get_recipient_stream(r: Realm) -> Recipient:
|
2021-02-12 08:20:45 +01:00
|
|
|
return Stream.objects.get(name="Verona", realm=r).recipient
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2020-02-18 17:13:47 +01:00
|
|
|
def get_recipient_user(r: Realm) -> Recipient:
|
2021-02-12 08:20:45 +01:00
|
|
|
return UserProfile.objects.get(full_name="Iago", realm=r).recipient
|
2018-06-04 18:21:58 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_stream_recipient_type(r: Realm) -> int:
|
|
|
|
return get_recipient_stream(r).type
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_user_recipient_type(r: Realm) -> int:
|
|
|
|
return get_recipient_user(r).type
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test subscription
|
2018-07-10 21:12:02 +02:00
|
|
|
def get_subscribers(recipient: Recipient) -> Set[str]:
|
|
|
|
subscriptions = Subscription.objects.filter(recipient=recipient)
|
|
|
|
users = {sub.user_profile.email for sub in subscriptions}
|
|
|
|
return users
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_stream_subscribers(r: Realm) -> Set[str]:
|
|
|
|
return get_subscribers(get_recipient_stream(r))
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_user_subscribers(r: Realm) -> Set[str]:
|
|
|
|
return get_subscribers(get_recipient_user(r))
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test custom profile fields
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_custom_profile_field_names(r: Realm) -> Set[str]:
|
2018-07-10 21:12:02 +02:00
|
|
|
custom_profile_fields = CustomProfileField.objects.filter(realm=r)
|
|
|
|
custom_profile_field_names = {field.name for field in custom_profile_fields}
|
|
|
|
return custom_profile_field_names
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_custom_profile_with_field_type_user(
|
|
|
|
r: Realm,
|
2021-12-17 01:16:11 +01:00
|
|
|
) -> Tuple[Set[object], Set[object], Set[FrozenSet[str]]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
fields = CustomProfileField.objects.filter(field_type=CustomProfileField.USER, realm=r)
|
2018-07-16 17:15:42 +02:00
|
|
|
|
|
|
|
def get_email(user_id: int) -> str:
|
|
|
|
return UserProfile.objects.get(id=user_id).email
|
|
|
|
|
|
|
|
def get_email_from_value(field_value: CustomProfileFieldValue) -> Set[str]:
|
2020-08-07 01:09:47 +02:00
|
|
|
user_id_list = orjson.loads(field_value.value)
|
2018-07-16 17:15:42 +02:00
|
|
|
return {get_email(user_id) for user_id in user_id_list}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def custom_profile_field_values_for(
|
|
|
|
fields: List[CustomProfileField],
|
|
|
|
) -> Set[FrozenSet[str]]:
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
user_emails: Set[FrozenSet[str]] = set()
|
2018-07-16 17:15:42 +02:00
|
|
|
for field in fields:
|
|
|
|
values = CustomProfileFieldValue.objects.filter(field=field)
|
|
|
|
for value in values:
|
|
|
|
user_emails.add(frozenset(get_email_from_value(value)))
|
|
|
|
return user_emails
|
|
|
|
|
|
|
|
field_names, field_hints = (set() for i in range(2))
|
|
|
|
for field in fields:
|
|
|
|
field_names.add(field.name)
|
|
|
|
field_hints.add(field.hint)
|
|
|
|
|
|
|
|
return (field_hints, field_names, custom_profile_field_values_for(fields))
|
|
|
|
|
2018-07-05 20:08:40 +02:00
|
|
|
# test realmauditlog
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_realm_audit_log_event_type(r: Realm) -> Set[str]:
|
2019-02-13 02:22:36 +01:00
|
|
|
realmauditlogs = RealmAuditLog.objects.filter(realm=r).exclude(
|
2021-04-29 17:22:48 +02:00
|
|
|
event_type__in=[RealmAuditLog.REALM_PLAN_TYPE_CHANGED, RealmAuditLog.STREAM_CREATED]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-10 21:12:02 +02:00
|
|
|
realmauditlog_event_type = {log.event_type for log in realmauditlogs}
|
|
|
|
return realmauditlog_event_type
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2020-07-16 14:56:12 +02:00
|
|
|
def get_huddle_message(r: Realm) -> str:
|
2021-12-12 14:59:26 +01:00
|
|
|
huddle_hash = get_huddle_hashes(r)
|
2018-07-12 17:34:31 +02:00
|
|
|
huddle_id = Huddle.objects.get(huddle_hash=huddle_hash).id
|
|
|
|
huddle_recipient = Recipient.objects.get(type_id=huddle_id, type=3)
|
|
|
|
huddle_message = Message.objects.get(recipient=huddle_recipient)
|
2021-12-07 17:54:05 +01:00
|
|
|
self.assertEqual(huddle_message.content, "test huddle message")
|
2018-07-12 17:34:31 +02:00
|
|
|
return huddle_message.content
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2020-07-16 16:11:34 +02:00
|
|
|
def get_alertwords(r: Realm) -> Set[str]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return {rec.word for rec in AlertWord.objects.filter(realm_id=r.id)}
|
2020-07-16 16:11:34 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-05 14:26:42 +01:00
|
|
|
def get_realm_emoji_names(r: Realm) -> Set[str]:
|
|
|
|
names = {rec.name for rec in RealmEmoji.objects.filter(realm_id=r.id)}
|
|
|
|
assert "hawaii" in names
|
|
|
|
return names
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-05 13:42:04 +01:00
|
|
|
def get_realm_user_statuses(r: Realm) -> Set[Tuple[str, str, int, str]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2021-12-05 13:42:04 +01:00
|
|
|
tups = {
|
|
|
|
(rec.user_profile.full_name, rec.emoji_name, rec.status, rec.status_text)
|
|
|
|
for rec in UserStatus.objects.filter(user_profile__realm_id=r.id)
|
|
|
|
}
|
|
|
|
assert (cordelia.full_name, "hawaii", UserStatus.AWAY, "in Hawaii") in tups
|
|
|
|
return tups
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-05 14:26:42 +01:00
|
|
|
def get_realm_emoji_reactions(r: Realm) -> Set[Tuple[str, str]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2021-12-05 14:26:42 +01:00
|
|
|
tups = {
|
|
|
|
(rec.emoji_name, rec.user_profile.full_name)
|
|
|
|
for rec in Reaction.objects.filter(
|
|
|
|
user_profile__realm_id=r.id, reaction_type=Reaction.REALM_EMOJI
|
|
|
|
)
|
|
|
|
}
|
|
|
|
self.assertEqual(tups, {("hawaii", cordelia.full_name)})
|
|
|
|
return tups
|
|
|
|

        # test userhotspot
        @getter
        def get_user_hotspots(r: Realm) -> Set[str]:
            user_id = get_user_id(r, "King Hamlet")
            hotspots = UserHotspot.objects.filter(user_id=user_id)
            user_hotspots = {hotspot.hotspot for hotspot in hotspots}
            return user_hotspots

        # test muted topics
        @getter
        def get_muted_topics(r: Realm) -> Set[str]:
            user_profile_id = get_user_id(r, "King Hamlet")
            muted_topics = UserTopic.objects.filter(
                user_profile_id=user_profile_id, visibility_policy=UserTopic.MUTED
            )
            topic_names = {muted_topic.topic_name for muted_topic in muted_topics}
            return topic_names

        @getter
        def get_muted_users(r: Realm) -> Set[Tuple[str, str, str]]:
            mute_objects = MutedUser.objects.filter(user_profile__realm=r)
            muter_tuples = {
                (
                    mute_object.user_profile.full_name,
                    mute_object.muted_user.full_name,
                    str(mute_object.date_muted),
                )
                for mute_object in mute_objects
            }
            return muter_tuples

        @getter
        def get_user_group_names(r: Realm) -> Set[str]:
            return {group.name for group in UserGroup.objects.filter(realm=r)}

        @getter
        def get_user_membership(r: Realm) -> Set[str]:
            usergroup = UserGroup.objects.get(realm=r, name="hamletcharacters")
            usergroup_membership = UserGroupMembership.objects.filter(user_group=usergroup)
            users = {membership.user_profile.email for membership in usergroup_membership}
            return users

        # test botstoragedata and botconfigdata
        @getter
        def get_botstoragedata(r: Realm) -> Dict[str, object]:
            bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
            bot_storage_data = BotStorageData.objects.get(bot_profile=bot_profile)
            return {"key": bot_storage_data.key, "data": bot_storage_data.value}

        @getter
        def get_botconfigdata(r: Realm) -> Dict[str, object]:
            bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
            bot_config_data = BotConfigData.objects.get(bot_profile=bot_profile)
            return {"key": bot_config_data.key, "data": bot_config_data.value}

        # test messages
        def get_stream_messages(r: Realm) -> Message:
            recipient = get_recipient_stream(r)
            messages = Message.objects.filter(recipient=recipient)
            return messages

        @getter
        def get_stream_topics(r: Realm) -> Set[str]:
            messages = get_stream_messages(r)
            topics = {m.topic_name() for m in messages}
            return topics

        # test usermessages
        @getter
        def get_usermessages_user(r: Realm) -> Set[object]:
            messages = get_stream_messages(r).order_by("content")
            usermessage = UserMessage.objects.filter(message=messages[0])
            usermessage_user = {um.user_profile.email for um in usermessage}
            return usermessage_user

        # tests to make sure that various data-*-ids in rendered_content
        # are replaced correctly with the values of the new realm.
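        # (For example, a user mention is rendered with markup along the lines of
        # '<span class="user-mention" data-user-id="42">@King Hamlet</span>'; the
        # ids embedded in such markup must be remapped to the importing realm.)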
        @getter
        def get_user_mention(r: Realm) -> str:
            mentioned_user = UserProfile.objects.get(
                delivery_email=self.example_email("hamlet"), realm=r
            )
            data_user_id = f'data-user-id="{mentioned_user.id}"'
            mention_message = get_stream_messages(r).get(rendered_content__contains=data_user_id)
            return mention_message.content

        @getter
        def get_stream_mention(r: Realm) -> str:
            mentioned_stream = get_stream("Denmark", r)
            data_stream_id = f'data-stream-id="{mentioned_stream.id}"'
            mention_message = get_stream_messages(r).get(rendered_content__contains=data_stream_id)
            return mention_message.content

        @getter
        def get_user_group_mention(r: Realm) -> str:
            user_group = UserGroup.objects.get(realm=r, name="hamletcharacters")
            data_usergroup_id = f'data-user-group-id="{user_group.id}"'
            mention_message = get_stream_messages(r).get(
                rendered_content__contains=data_usergroup_id
            )
            return mention_message.content

        @getter
        def get_userpresence_timestamp(r: Realm) -> Set[object]:
            # It should be sufficient to compare UserPresence timestamps to verify
            # they got exported/imported correctly.
            return set(UserPresence.objects.filter(realm=r).values_list("timestamp", flat=True))

        @getter
        def get_realm_user_default_values(r: Realm) -> Dict[str, object]:
            realm_user_default = RealmUserDefault.objects.get(realm=r)
            return {
                "default_language": realm_user_default.default_language,
                "twenty_four_hour_time": realm_user_default.twenty_four_hour_time,
            }

        return getters

    def test_import_realm_with_no_realm_user_default_table(self) -> None:
        original_realm = Realm.objects.get(string_id="zulip")

        RealmUserDefault.objects.get(realm=original_realm).delete()
        self.export_realm(original_realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")

        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        imported_realm = Realm.objects.get(string_id="test-zulip")

        # A RealmUserDefault row with default values is created if it is not
        # present in the import data.
        self.assertTrue(RealmUserDefault.objects.filter(realm=imported_realm).exists())

        realm_user_default = RealmUserDefault.objects.get(realm=imported_realm)
        self.assertEqual(realm_user_default.default_language, "en")
        self.assertEqual(realm_user_default.twenty_four_hour_time, False)

    def test_import_files_from_local(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm

        self.upload_files_for_user(user)
        self.upload_files_for_realm(user)

        self.export_realm(realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")
        imported_realm = Realm.objects.get(string_id="test-zulip")

        # Test attachments
        uploaded_file = Attachment.objects.get(realm=imported_realm)
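        # The uploaded fixture file contains the bytes b"zulip!", so the imported
        # Attachment's recorded size should equal the length of that payload.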
        self.assert_length(b"zulip!", uploaded_file.size)

        attachment_file_path = os.path.join(
            settings.LOCAL_UPLOADS_DIR, "files", uploaded_file.path_id
        )
        self.assertTrue(os.path.isfile(attachment_file_path))

        # Test emojis
        realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=imported_realm.id,
            emoji_file_name=realm_emoji.file_name,
        )
        emoji_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path)
        self.assertTrue(os.path.isfile(emoji_file_path))

        # Test avatars
        user_profile = UserProfile.objects.get(full_name=user.full_name, realm=imported_realm)
        avatar_path_id = user_avatar_path(user_profile) + ".original"
        avatar_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_path_id)
        self.assertTrue(os.path.isfile(avatar_file_path))

        # Test realm icon and logo
        upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)
        full_upload_path = os.path.join(settings.LOCAL_UPLOADS_DIR, upload_path)

        test_image_data = read_test_image_file("img.png")
        self.assertIsNotNone(test_image_data)

        with open(os.path.join(full_upload_path, "icon.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "icon.png")))
        self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)

        with open(os.path.join(full_upload_path, "logo.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "logo.png")))
        self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)

        with open(os.path.join(full_upload_path, "night_logo.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "night_logo.png")))
        self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

    @use_s3_backend
    def test_import_files_from_s3(self) -> None:
        uploads_bucket, avatar_bucket = create_s3_buckets(
            settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET
        )

        user = self.example_user("hamlet")
        realm = user.realm

        self.upload_files_for_realm(user)
        self.upload_files_for_user(user)
        self.export_realm(realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")

        imported_realm = Realm.objects.get(string_id="test-zulip")
        test_image_data = read_test_image_file("img.png")

        # Test attachments
        uploaded_file = Attachment.objects.get(realm=imported_realm)
        self.assert_length(b"zulip!", uploaded_file.size)

        attachment_content = uploads_bucket.Object(uploaded_file.path_id).get()["Body"].read()
        self.assertEqual(b"zulip!", attachment_content)

        # Test emojis
        realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=imported_realm.id,
            emoji_file_name=realm_emoji.file_name,
        )
        emoji_key = avatar_bucket.Object(emoji_path)
        self.assertIsNotNone(emoji_key.get()["Body"].read())
        self.assertEqual(emoji_key.key, emoji_path)

        # Test avatars
        user_profile = UserProfile.objects.get(full_name=user.full_name, realm=imported_realm)
        avatar_path_id = user_avatar_path(user_profile) + ".original"
        original_image_key = avatar_bucket.Object(avatar_path_id)
        self.assertEqual(original_image_key.key, avatar_path_id)
        image_data = avatar_bucket.Object(avatar_path_id).get()["Body"].read()
        self.assertEqual(image_data, test_image_data)

        # Test realm icon and logo
        upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)

        original_icon_path_id = os.path.join(upload_path, "icon.original")
        original_icon_key = avatar_bucket.Object(original_icon_path_id)
        self.assertEqual(original_icon_key.get()["Body"].read(), test_image_data)
        resized_icon_path_id = os.path.join(upload_path, "icon.png")
        resized_icon_key = avatar_bucket.Object(resized_icon_path_id)
        self.assertEqual(resized_icon_key.key, resized_icon_path_id)
        self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)

        original_logo_path_id = os.path.join(upload_path, "logo.original")
        original_logo_key = avatar_bucket.Object(original_logo_path_id)
        self.assertEqual(original_logo_key.get()["Body"].read(), test_image_data)
        resized_logo_path_id = os.path.join(upload_path, "logo.png")
        resized_logo_key = avatar_bucket.Object(resized_logo_path_id)
        self.assertEqual(resized_logo_key.key, resized_logo_path_id)
        self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)

        night_logo_original_path_id = os.path.join(upload_path, "night_logo.original")
        night_logo_original_key = avatar_bucket.Object(night_logo_original_path_id)
        self.assertEqual(night_logo_original_key.get()["Body"].read(), test_image_data)
        resized_night_logo_path_id = os.path.join(upload_path, "night_logo.png")
        resized_night_logo_key = avatar_bucket.Object(resized_night_logo_path_id)
        self.assertEqual(resized_night_logo_key.key, resized_night_logo_path_id)
        self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

    def test_get_incoming_message_ids(self) -> None:
        import_dir = os.path.join(
            settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "import_fixtures"
        )
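        # The fixture directory contains three messages; with sort_by_date=True they
        # come back ordered by date sent, and with sort_by_date=False they come back
        # in ascending id order.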
        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=True,
        )

        self.assertEqual(message_ids, [888, 999, 555])

        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=False,
        )

        self.assertEqual(message_ids, [555, 888, 999])

    def test_plan_type(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)

        self.upload_files_for_user(user)
        self.export_realm(realm)
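
        # With billing enabled, the imported realm keeps the exported LIMITED plan
        # type and its associated limits; with billing disabled, it is imported as
        # SELF_HOSTED with no upload quota or message-history limit.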
        with self.settings(BILLING_ENABLED=True), self.assertLogs(level="INFO"):
            realm = do_import_realm(
                os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip-1"
            )
        self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_LIMITED)
        self.assertEqual(realm.max_invites, 100)
        self.assertEqual(realm.upload_quota_gb, 5)
        self.assertEqual(realm.message_visibility_limit, 10000)
        self.assertTrue(
            RealmAuditLog.objects.filter(
                realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED
            ).exists()
        )
        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            realm = do_import_realm(
                os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip-2"
            )
        self.assertEqual(realm.plan_type, Realm.PLAN_TYPE_SELF_HOSTED)
        self.assertEqual(realm.max_invites, 100)
        self.assertEqual(realm.upload_quota_gb, None)
        self.assertEqual(realm.message_visibility_limit, None)
        self.assertTrue(
            RealmAuditLog.objects.filter(
                realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED
            ).exists()
        )


class SingleUserExportTest(ExportFile):
    def do_files_test(self, is_s3: bool) -> None:
        output_dir = make_export_output_dir()

        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        self.upload_files_for_user(cordelia)
        self.upload_files_for_user(othello, emoji_name="bogus")  # try to pollute export

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        self.verify_uploads(cordelia, is_s3=is_s3)
        self.verify_avatars(cordelia)
        self.verify_emojis(cordelia, is_s3=is_s3)

    def test_local_files(self) -> None:
        self.do_files_test(is_s3=False)

    @use_s3_backend
    def test_s3_files(self) -> None:
        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
        self.do_files_test(is_s3=True)

    def test_message_data(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        polonius = self.example_user("polonius")

        self.subscribe(cordelia, "Denmark")

        smile_message_id = self.send_stream_message(hamlet, "Denmark", "SMILE!")

        check_add_reaction(
            user_profile=cordelia,
            message_id=smile_message_id,
            emoji_name="smile",
            emoji_code=None,
            reaction_type=None,
        )
        reaction = Reaction.objects.order_by("id").last()
        assert reaction

        # Send a message that Cordelia should not have in the export.
        self.send_stream_message(othello, "Denmark", "bogus")

        hi_stream_message_id = self.send_stream_message(cordelia, "Denmark", "hi stream")
        assert most_recent_usermessage(cordelia).message_id == hi_stream_message_id

        # Try to fool the export again
        self.send_personal_message(othello, hamlet)
        self.send_huddle_message(othello, [hamlet, polonius])

        hi_hamlet_message_id = self.send_personal_message(cordelia, hamlet, "hi hamlet")

        hi_peeps_message_id = self.send_huddle_message(cordelia, [hamlet, othello], "hi peeps")
        bye_peeps_message_id = self.send_huddle_message(othello, [cordelia, hamlet], "bye peeps")

        bye_hamlet_message_id = self.send_personal_message(cordelia, hamlet, "bye hamlet")

        hi_myself_message_id = self.send_personal_message(cordelia, cordelia, "hi myself")
        bye_stream_message_id = self.send_stream_message(cordelia, "Denmark", "bye stream")

        output_dir = make_export_output_dir()
        cordelia = self.example_user("cordelia")

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        messages = read_json("messages-000001.json")

        huddle_name = "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice"

        excerpt = [
            (rec["id"], rec["content"], rec["recipient_name"])
            for rec in messages["zerver_message"][-8:]
        ]
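        # The tail of the exported zerver_message data should match the messages set
        # up above, in order, with huddle recipients rendered using the participants'
        # full names; the decoy messages sent purely among other users must not
        # appear here.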
        self.assertEqual(
            excerpt,
            [
                (smile_message_id, "SMILE!", "Denmark"),
                (hi_stream_message_id, "hi stream", "Denmark"),
                (hi_hamlet_message_id, "hi hamlet", hamlet.full_name),
                (hi_peeps_message_id, "hi peeps", huddle_name),
                (bye_peeps_message_id, "bye peeps", huddle_name),
                (bye_hamlet_message_id, "bye hamlet", hamlet.full_name),
                (hi_myself_message_id, "hi myself", cordelia.full_name),
                (bye_stream_message_id, "bye stream", "Denmark"),
            ],
        )

    def test_user_data(self) -> None:
        # We register checkers during test setup, and then we call them at the end.
        checkers: Dict[str, Callable[[List[Record]], None]] = {}

        def checker(f: Callable[[List[Record]], None]) -> Callable[[List[Record]], None]:
            # Every checker function that gets decorated here should be named
            # after one of the tables that we export in the single-user
            # export. The table name is then used by code toward the end of the
            # test to determine which portion of the data from user.json
            # to pass into the checker.
            table_name = f.__name__
            assert table_name not in checkers
            checkers[table_name] = f
            return f
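
        # For example, a checker named "zerver_alertword" will be handed
        # user["zerver_alertword"], i.e. the exported AlertWord rows.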

        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        realm = cordelia.realm
        scotland = get_stream("Scotland", realm)
        client = get_client("some_app")
        now = timezone_now()

        @checker
        def zerver_userprofile(records: List[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["id"], cordelia.id)
            self.assertEqual(rec["email"], cordelia.email)
            self.assertEqual(rec["full_name"], cordelia.full_name)

        """
        Try to set up the test data roughly in order of table name, where
        possible, just to make it a bit easier to read the test.
        """

        do_add_alert_words(cordelia, ["pizza"])
        do_add_alert_words(hamlet, ["bogus"])

        @checker
        def zerver_alertword(records: List[Record]) -> None:
            self.assertEqual(records[-1]["word"], "pizza")

        favorite_city = try_add_realm_custom_profile_field(
            realm,
            "Favorite city",
            CustomProfileField.SHORT_TEXT,
        )

        def set_favorite_city(user: UserProfile, city: str) -> None:
            do_update_user_custom_profile_data_if_changed(
                user, [dict(id=favorite_city.id, value=city)]
            )

        set_favorite_city(cordelia, "Seattle")
        set_favorite_city(othello, "Moscow")

        @checker
        def zerver_customprofilefieldvalue(records: List[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["field"], favorite_city.id)
            self.assertEqual(rec["rendered_value"], "<p>Seattle</p>")

        do_mute_user(cordelia, othello)
        do_mute_user(hamlet, cordelia)  # should be ignored

        @checker
        def zerver_muteduser(records: List[Record]) -> None:
            self.assertEqual(records[-1]["muted_user"], othello.id)

        smile_message_id = self.send_stream_message(hamlet, "Denmark")

        check_add_reaction(
            user_profile=cordelia,
            message_id=smile_message_id,
            emoji_name="smile",
            emoji_code=None,
            reaction_type=None,
        )
        reaction = Reaction.objects.order_by("id").last()
        assert reaction

        @checker
        def zerver_reaction(records: List[Record]) -> None:
            (exported_reaction,) = records
            self.assertEqual(
                exported_reaction,
                dict(
                    id=reaction.id,
                    user_profile=cordelia.id,
                    emoji_name="smile",
                    reaction_type="unicode_emoji",
                    emoji_code=reaction.emoji_code,
                    message=smile_message_id,
                ),
            )

        self.subscribe(cordelia, "Scotland")

        create_stream_if_needed(realm, "bogus")
        self.subscribe(othello, "bogus")

        @checker
        def zerver_recipient(records: List[Record]) -> None:
            last_recipient = Recipient.objects.get(id=records[-1]["id"])
            self.assertEqual(last_recipient.type, Recipient.STREAM)
            stream_id = last_recipient.type_id
            self.assertEqual(stream_id, get_stream("Scotland", realm).id)

        @checker
        def zerver_stream(records: List[Record]) -> None:
            streams = {rec["name"] for rec in records}
            self.assertEqual(streams, {"Scotland", "Verona"})

        @checker
        def zerver_subscription(records: List[Record]) -> None:
            last_recipient = Recipient.objects.get(id=records[-1]["recipient"])
            self.assertEqual(last_recipient.type, Recipient.STREAM)
            stream_id = last_recipient.type_id
            self.assertEqual(stream_id, get_stream("Scotland", realm).id)

        do_update_user_activity(cordelia.id, client.id, "/some/endpoint", 2, now)
        do_update_user_activity(cordelia.id, client.id, "/some/endpoint", 3, now)
        do_update_user_activity(othello.id, client.id, "/bogus", 20, now)
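
        # Cordelia's two updates for /some/endpoint (counts 2 and 3) should be
        # aggregated into a single row with count=5; Othello's activity should not
        # appear in her export at all.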
        @checker
        def zerver_useractivity(records: List[Record]) -> None:
            (rec,) = records
            self.assertEqual(
                rec,
                dict(
                    client=client.id,
                    count=5,
                    id=rec["id"],
                    last_visit=rec["last_visit"],
                    query="/some/endpoint",
                    user_profile=cordelia.id,
                ),
            )
            self.assertEqual(make_datetime(rec["last_visit"]), now)

        do_update_user_activity_interval(cordelia, now)
        do_update_user_activity_interval(othello, now)

        @checker
        def zerver_useractivityinterval(records: List[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["user_profile"], cordelia.id)
            self.assertEqual(make_datetime(rec["start"]), now)

        do_update_user_presence(cordelia, client, now, UserPresence.ACTIVE)
        do_update_user_presence(othello, client, now, UserPresence.IDLE)

        @checker
        def zerver_userpresence(records: List[Record]) -> None:
            self.assertEqual(records[-1]["status"], UserPresence.ACTIVE)
            self.assertEqual(records[-1]["client"], client.id)
            self.assertEqual(make_datetime(records[-1]["timestamp"]), now)

        do_update_user_status(
            user_profile=cordelia,
            away=True,
            status_text="on vacation",
            client_id=client.id,
            emoji_name=None,
            emoji_code=None,
            reaction_type=None,
        )

        do_update_user_status(
            user_profile=othello,
            away=False,
            status_text="at my desk",
            client_id=client.id,
            emoji_name=None,
            emoji_code=None,
            reaction_type=None,
        )

        @checker
        def zerver_userstatus(records: List[Record]) -> None:
            rec = records[-1]
            self.assertEqual(rec["status_text"], "on vacation")
            self.assertEqual(rec["status"], UserStatus.AWAY)

        do_mute_topic(cordelia, scotland, "bagpipe music")
        do_mute_topic(othello, scotland, "nessie")

        @checker
        def zerver_usertopic(records: List[Record]) -> None:
            rec = records[-1]
            self.assertEqual(rec["topic_name"], "bagpipe music")
            self.assertEqual(rec["visibility_policy"], UserTopic.MUTED)

        """
        For some tables we don't bother with super realistic test data
        setup.
        """
        UserCount.objects.create(
            user=cordelia, realm=realm, property="whatever", value=42, end_time=now
        )
        UserCount.objects.create(
            user=othello, realm=realm, property="bogus", value=999999, end_time=now
        )

        @checker
        def analytics_usercount(records: List[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["value"], 42)

        UserHotspot.objects.create(user=cordelia, hotspot="topics")
        UserHotspot.objects.create(user=othello, hotspot="bogus")

        @checker
        def zerver_userhotspot(records: List[Record]) -> None:
            self.assertEqual(records[-1]["hotspot"], "topics")

        """
        The zerver_realmauditlog checker basically assumes that
        we subscribed Cordelia to Scotland.
        """

        @checker
        def zerver_realmauditlog(records: List[Record]) -> None:
            self.assertEqual(records[-1]["modified_stream"], scotland.id)

        output_dir = make_export_output_dir()

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        user = read_json("user.json")

        for table_name, f in checkers.items():
            f(user[table_name])

        for table_name in user:
            if table_name not in checkers:
                raise AssertionError(
                    f"""
                    Please create a checker called "{table_name}"
                    to check the user["{table_name}"] data in user.json.

                    Please be thoughtful about where you introduce
                    the new code--if you read the test, the patterns
                    for how to test table data should be clear.
                    Try to mostly keep checkers in alphabetical order.
                    """
                )