import json
import os
import shutil
import uuid
from collections import defaultdict
from collections.abc import Callable, Iterable
from datetime import datetime, timedelta, timezone
from typing import Any
from unittest.mock import patch

import orjson
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db.models import Q, QuerySet
from django.utils.timezone import now as timezone_now
from typing_extensions import override

from analytics.models import UserCount
from version import ZULIP_VERSION
from zerver.actions.alert_words import do_add_alert_words
from zerver.actions.create_user import do_create_user
from zerver.actions.custom_profile_fields import (
    do_update_user_custom_profile_data_if_changed,
    try_add_realm_custom_profile_field,
)
from zerver.actions.muted_users import do_mute_user
from zerver.actions.presence import do_update_user_presence
from zerver.actions.reactions import check_add_reaction
from zerver.actions.realm_emoji import check_add_realm_emoji
from zerver.actions.realm_icon import do_change_icon_source
from zerver.actions.realm_logo import do_change_logo_source
from zerver.actions.realm_settings import (
    do_change_realm_plan_type,
    do_set_realm_authentication_methods,
)
from zerver.actions.scheduled_messages import check_schedule_message
from zerver.actions.user_activity import do_update_user_activity_interval
from zerver.actions.user_settings import do_change_user_setting
from zerver.actions.user_status import do_update_user_status
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
from zerver.actions.users import do_deactivate_user
from zerver.lib import upload
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.bot_config import set_bot_config
from zerver.lib.bot_lib import StateHandler
from zerver.lib.export import (
    AppMigrations,
    MigrationStatusJson,
    Record,
    do_export_realm,
    do_export_user,
    export_usermessages_batch,
)
from zerver.lib.import_realm import do_import_realm, get_incoming_message_ids
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    activate_push_notification_service,
    create_s3_buckets,
    get_test_image_file,
    most_recent_message,
    most_recent_usermessage,
    read_test_image_file,
    use_s3_backend,
)
from zerver.lib.thumbnail import BadImageError
from zerver.lib.upload import claim_attachment, upload_avatar_image, upload_message_attachment
from zerver.lib.utils import assert_is_not_none
from zerver.models import (
    AlertWord,
    Attachment,
    BotConfigData,
    BotStorageData,
    CustomProfileField,
    CustomProfileFieldValue,
    DirectMessageGroup,
    GroupGroupMembership,
    Message,
    MutedUser,
    NamedUserGroup,
    OnboardingStep,
    OnboardingUserMessage,
    Reaction,
    Realm,
    RealmAuditLog,
    RealmEmoji,
    RealmExport,
    RealmUserDefault,
    Recipient,
    ScheduledMessage,
    Stream,
    Subscription,
    UserActivity,
    UserGroup,
    UserGroupMembership,
    UserMessage,
    UserPresence,
    UserProfile,
    UserStatus,
    UserTopic,
)
from zerver.models.clients import get_client
from zerver.models.groups import SystemGroups
from zerver.models.messages import ImageAttachment
from zerver.models.presence import PresenceSequence
from zerver.models.realm_audit_logs import AuditLogEventType
from zerver.models.realms import get_realm
from zerver.models.recipients import get_direct_message_group_hash
from zerver.models.streams import get_active_streams, get_stream
from zerver.models.users import get_system_bot, get_user_by_delivery_email


def make_datetime(val: float) -> datetime:
    return datetime.fromtimestamp(val, tz=timezone.utc)


def get_output_dir() -> str:
    return os.path.join(settings.TEST_WORKER_DIR, "test-export")


def make_export_output_dir() -> str:
    output_dir = get_output_dir()
    if os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    os.makedirs(output_dir)
    return output_dir


def read_json(fn: str) -> Any:
    output_dir = get_output_dir()
    full_fn = os.path.join(output_dir, fn)
    with open(full_fn, "rb") as f:
        return orjson.loads(f.read())


def export_fn(fn: str) -> str:
    output_dir = get_output_dir()
    return os.path.join(output_dir, fn)


def get_user_id(r: Realm, full_name: str) -> int:
    return UserProfile.objects.get(realm=r, full_name=full_name).id


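# The direct message group hash is derived from the sorted list of member
# user ids (see get_direct_message_group_hash); the helper below recomputes
# it for a fixed trio of test users so tests can match exported recipients.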
def get_direct_message_group_hashes(r: Realm) -> str:
    cordelia_full_name = "Cordelia, Lear's daughter"
    hamlet_full_name = "King Hamlet"
    othello_full_name = "Othello, the Moor of Venice"

    user_id_list = [
        get_user_id(r, cordelia_full_name),
        get_user_id(r, hamlet_full_name),
        get_user_id(r, othello_full_name),
    ]

    direct_message_group_hash = get_direct_message_group_hash(user_id_list)
    return direct_message_group_hash


class ExportFile(ZulipTestCase):
    """This class is a container for shared helper functions
    used for both the realm-level and user-level export tests."""

    @override
    def setUp(self) -> None:
        super().setUp()
        assert settings.LOCAL_UPLOADS_DIR is not None
        self.rm_tree(settings.LOCAL_UPLOADS_DIR)

        # Deleting LOCAL_UPLOADS_DIR results in the test database
        # having RealmEmoji records without associated files.
        #
        # Even if we didn't delete them, the way that the test runner
        # varies settings.LOCAL_UPLOADS_DIR for each test worker
        # process would likely result in this being necessary anyway.
        RealmEmoji.objects.all().delete()

    def upload_files_for_user(
        self, user_profile: UserProfile, *, emoji_name: str = "whatever"
    ) -> None:
        message = most_recent_message(user_profile)
        url = upload_message_attachment("dummy.txt", "text/plain", b"zulip!", user_profile)[0]
        attachment_path_id = url.replace("/user_uploads/", "")
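        # Claiming ties the upload to a message that references it; uploads
        # that no message claims are treated as orphaned, so the export
        # tests always claim the file explicitly.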
        claim_attachment(
            path_id=attachment_path_id,
            message=message,
            is_message_realm_public=True,
        )

        with get_test_image_file("img.png") as img_file:
            upload_avatar_image(img_file, user_profile, future=False)

        user_profile.avatar_source = "U"
        user_profile.save()

        realm = user_profile.realm

        with get_test_image_file("img.png") as img_file:
            check_add_realm_emoji(realm, emoji_name, user_profile, img_file, "image/png")

    def upload_files_for_realm(self, user_profile: UserProfile) -> None:
        realm = user_profile.realm

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_icon_image(img_file, user_profile, "image/png")
            do_change_icon_source(realm, Realm.ICON_UPLOADED, acting_user=None)

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(
                img_file, user_profile, night=False, content_type="image/png"
            )
            do_change_logo_source(realm, Realm.LOGO_UPLOADED, False, acting_user=user_profile)
        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(
                img_file, user_profile, night=True, content_type="image/png"
            )
            do_change_logo_source(realm, Realm.LOGO_UPLOADED, True, acting_user=user_profile)

    def verify_attachment_json(self, user: UserProfile) -> None:
        attachment = Attachment.objects.get(owner=user)
        (record,) = read_json("attachment.json")["zerver_attachment"]
        self.assertEqual(record["path_id"], attachment.path_id)
        self.assertEqual(record["owner"], attachment.owner_id)
        self.assertEqual(record["realm"], attachment.realm_id)

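    # Exported upload records mirror the storage backend's path layout: S3
    # paths look like "<realm_id>/<hash>/<filename>", while the local
    # backend inserts an extra two-hex-digit "slot" directory.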
    def verify_uploads(self, user: UserProfile, is_s3: bool) -> None:
        realm = user.realm

        attachment = Attachment.objects.get(owner=user)
        path_id = attachment.path_id

        # Test uploads
        fn = export_fn(f"uploads/{path_id}")
        with open(fn) as f:
            self.assertEqual(f.read(), "zulip!")
        (record,) = read_json("uploads/records.json")
        self.assertEqual(record["path"], path_id)
        self.assertEqual(record["s3_path"], path_id)

        if is_s3:
            realm_str, random_hash, file_name = path_id.split("/")
            self.assertEqual(realm_str, str(realm.id))
            self.assert_length(random_hash, 24)
            self.assertEqual(file_name, "dummy.txt")

            self.assertEqual(record["realm_id"], realm.id)
            self.assertEqual(record["user_profile_id"], user.id)
        else:
            realm_str, slot, random_hash, file_name = path_id.split("/")
            self.assertEqual(realm_str, str(realm.id))
            # We randomly pick a number between 0 and 255 and turn it into
            # hex in order to avoid large directories.
            assert len(slot) <= 2
            self.assert_length(random_hash, 24)
            self.assertEqual(file_name, "dummy.txt")

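    # Each exported emoji appears twice on disk: the processed image under
    # its file_name and the original upload with an ".original" suffix.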
    def verify_emojis(self, user: UserProfile, is_s3: bool) -> None:
        realm = user.realm

        realm_emoji = RealmEmoji.objects.get(author=user)
        file_name = realm_emoji.file_name
        assert file_name is not None
        assert file_name.endswith(".png")

        emoji_path = f"{realm.id}/emoji/images/{file_name}"
        emoji_dir = export_fn(f"emoji/{realm.id}/emoji/images")
        self.assertEqual(set(os.listdir(emoji_dir)), {file_name, file_name + ".original"})

        (record1, record2) = read_json("emoji/records.json")
        # The return order is not guaranteed, so sort it so that we can reliably
        # know which record is for the .original file and which for the actual emoji.
        record, record_original = sorted(
            (record1, record2), key=lambda r: r["path"].endswith(".original")
        )

        self.assertEqual(record["file_name"], file_name)
        self.assertEqual(record["path"], emoji_path)
        self.assertEqual(record["s3_path"], emoji_path)
        self.assertEqual(record_original["file_name"], file_name)
        self.assertEqual(record_original["path"], emoji_path + ".original")
        self.assertEqual(record_original["s3_path"], emoji_path + ".original")

        if is_s3:
            self.assertEqual(record["realm_id"], realm.id)
            self.assertEqual(record["user_profile_id"], user.id)
            self.assertEqual(record_original["realm_id"], realm.id)
            self.assertEqual(record_original["user_profile_id"], user.id)

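    # Realm icons and logos share the realm_icons/ export directory; both
    # the processed PNGs and the ".original" uploads should be present.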
    def verify_realm_logo_and_icon(self) -> None:
        records = read_json("realm_icons/records.json")
        image_files = set()

        for record in records:
            self.assertEqual(record["path"], record["s3_path"])
            image_path = export_fn(f"realm_icons/{record['path']}")
            if image_path.endswith(".original"):
                with open(image_path, "rb") as image_file:
                    image_data = image_file.read()
                self.assertEqual(image_data, read_test_image_file("img.png"))
            else:
                self.assertTrue(os.path.exists(image_path))

            image_files.add(os.path.basename(image_path))

        self.assertEqual(
            set(image_files),
            {
                "night_logo.png",
                "logo.original",
                "logo.png",
                "icon.png",
                "night_logo.original",
                "icon.original",
            },
        )

    def verify_avatars(self, user: UserProfile) -> None:
        records = read_json("avatars/records.json")
        exported_paths = set()

        # Make sure all files in records.json got written.
        for record in records:
            self.assertEqual(record["path"], record["s3_path"])
            path = record["path"]
            fn = export_fn(f"avatars/{path}")
            assert os.path.exists(fn)

            if path.endswith(".original"):
                exported_paths.add(path)

                # For now we know that all our tests use
                # avatars based on img.png. This may change some
                # day.
                with open(fn, "rb") as fb:
                    fn_data = fb.read()

                self.assertEqual(fn_data, read_test_image_file("img.png"))

        assert exported_paths

        # Right now we expect only our user to have an uploaded avatar.
        db_paths = {user_avatar_path(user) + ".original"}
        self.assertEqual(exported_paths, db_paths)

    def get_applied_migrations_fixture(self, fixture_name: str) -> AppMigrations:
        fixture = orjson.loads(
            self.fixture_data(fixture_name, "import_fixtures/applied_migrations_fixtures")
        )
        return fixture

    def get_applied_migrations_error_message(self, fixture_name: str) -> str:
        fixture = self.fixture_data(fixture_name, "import_fixtures/check_migrations_errors")
        fixture = fixture.format(version_placeholder=ZULIP_VERSION)
        return fixture.strip()

    def verify_migration_status_json(self) -> None:
        # Assert that the generated migration_status.json has the structure
        # that the assertions in import_realm.py rely on. It doesn't matter
        # if the individual apps' migrations in the fixture are outdated;
        # the fixture just needs to remain a subset of the export.
        exported: MigrationStatusJson = read_json("migration_status.json")
        fixture: MigrationStatusJson = orjson.loads(
            self.fixture_data("migration_status.json", "import_fixtures")
        )
        for app, migrations in fixture["migrations_by_app"].items():
            self.assertTrue(
                set(migrations).issubset(set(exported["migrations_by_app"].get(app, []))),
            )


class RealmImportExportTest(ExportFile):
    def create_user_and_login(self, email: str, realm: Realm) -> None:
        self.register(email, "test", subdomain=realm.subdomain)

    def export_realm(
        self,
        realm: Realm,
        export_type: int,
        exportable_user_ids: set[int] | None = None,
    ) -> None:
        output_dir = make_export_output_dir()
        with patch("zerver.lib.export.create_soft_link"), self.assertLogs(level="INFO"):
            do_export_realm(
                realm=realm,
                output_dir=output_dir,
                threads=0,
                export_type=export_type,
                exportable_user_ids=exportable_user_ids,
            )

        # This is a unique field and thus the cycle of export->import
        # within the same server (which is what happens in our tests)
        # will cause a conflict - so rotate it.
        realm.uuid = uuid.uuid4()
        realm.save()
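
        # With threads=0, do_export_realm leaves the message batch as a
        # .partial file without UserMessage data filled in; invoke the
        # post-processing step directly (normally run by worker
        # subprocesses) to produce the final messages-000001.json.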
        export_usermessages_batch(
            input_path=os.path.join(output_dir, "messages-000001.json.partial"),
            output_path=os.path.join(output_dir, "messages-000001.json"),
            export_full_with_consent=export_type == RealmExport.EXPORT_FULL_WITH_CONSENT,
        )

    def export_realm_and_create_auditlog(
        self,
        original_realm: Realm,
        export_type: int = RealmExport.EXPORT_FULL_WITHOUT_CONSENT,
        exportable_user_ids: set[int] | None = None,
    ) -> None:
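        # Record the REALM_EXPORTED audit-log entry that the production
        # export paths would normally create, then run the export.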
        RealmAuditLog.objects.create(
            realm=original_realm,
            event_type=AuditLogEventType.REALM_EXPORTED,
            event_time=timezone_now(),
        )
        self.export_realm(original_realm, export_type, exportable_user_ids)

    def test_export_files_from_local(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm
        self.upload_files_for_user(user)
        self.upload_files_for_realm(user)
        self.export_realm_and_create_auditlog(realm)

        self.verify_attachment_json(user)
        self.verify_uploads(user, is_s3=False)
        self.verify_avatars(user)
        self.verify_emojis(user, is_s3=False)
        self.verify_realm_logo_and_icon()
        self.verify_migration_status_json()

    def test_public_only_export_files_private_uploads_not_included(self) -> None:
        """
        This test verifies that when doing a public_only export, private uploads
        don't get included in the exported data.
        """

        user_profile = self.example_user("hamlet")
        realm = user_profile.realm

        # We create an attachment tied to a personal message. That means it shouldn't be
        # included in a public export, as it's private data.
        personal_message_id = self.send_personal_message(user_profile, self.example_user("othello"))
        url = upload_message_attachment("dummy.txt", "text/plain", b"zulip!", user_profile)[0]
        attachment_path_id = url.replace("/user_uploads/", "")
        attachment = claim_attachment(
            path_id=attachment_path_id,
            message=Message.objects.get(id=personal_message_id),
            is_message_realm_public=True,
        )

        self.export_realm_and_create_auditlog(realm, export_type=RealmExport.EXPORT_PUBLIC)

        # The attachment row shouldn't have been exported:
        self.assertEqual(read_json("attachment.json")["zerver_attachment"], [])

        # Aside from the attachment row, we also need to verify that the file itself
        # isn't included.
        fn = export_fn(f"uploads/{attachment.path_id}")
        self.assertFalse(os.path.exists(fn))

    @use_s3_backend
    def test_export_files_from_s3(self) -> None:
        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)

        user = self.example_user("hamlet")
        realm = user.realm

        self.upload_files_for_user(user)
        self.upload_files_for_realm(user)
        self.export_realm_and_create_auditlog(realm)

        self.verify_attachment_json(user)
        self.verify_uploads(user, is_s3=True)
        self.verify_avatars(user)
        self.verify_emojis(user, is_s3=True)
        self.verify_realm_logo_and_icon()
        self.verify_migration_status_json()

    def test_zulip_realm(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        default_bot = self.example_user("default_bot")
        pm_a_msg_id = self.send_personal_message(self.example_user("AARON"), default_bot)
        pm_b_msg_id = self.send_personal_message(default_bot, self.example_user("iago"))
        pm_c_msg_id = self.send_personal_message(
            self.example_user("othello"), self.example_user("hamlet")
        )

        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        realm_user_default.default_language = "de"
        realm_user_default.save()

        welcome_bot = get_system_bot(settings.WELCOME_BOT, realm.id)
        onboarding_message_id = self.send_stream_message(
            welcome_bot, str(Realm.ZULIP_SANDBOX_CHANNEL_NAME), recipient_realm=realm
        )
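        # Seed an OnboardingUserMessage row so that its export (including
        # the flags bitmask) is exercised below.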
        OnboardingUserMessage.objects.create(
            realm=realm,
            message_id=onboarding_message_id,
            flags=OnboardingUserMessage.flags.starred,
        )

        self.export_realm_and_create_auditlog(realm)

        data = read_json("realm.json")
        self.assert_length(data["zerver_userprofile_crossrealm"], 3)
        self.assert_length(data["zerver_userprofile_mirrordummy"], 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {
                "Denmark",
                "Rome",
                "Scotland",
                "Venice",
                "Verona",
                "core team",
                "Zulip",
                "sandbox",
            },
        )

        exported_alert_words = data["zerver_alertword"]

        # We set up 4 alert words for Hamlet, Cordelia, etc.
        # when we populate the test database.
        num_zulip_users = 10
        self.assert_length(exported_alert_words, num_zulip_users * 4)

        self.assertIn("robotics", {r["word"] for r in exported_alert_words})

        exported_realm_user_default = data["zerver_realmuserdefault"]
        self.assert_length(exported_realm_user_default, 1)
        self.assertEqual(exported_realm_user_default[0]["default_language"], "de")

        exported_usergroups = data["zerver_usergroup"]
        self.assert_length(exported_usergroups, 11)
        self.assertFalse("direct_members" in exported_usergroups[2])
        self.assertFalse("direct_subgroups" in exported_usergroups[2])

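        # NamedUserGroup uses multi-table inheritance from UserGroup, so its
        # exported rows carry a usergroup_ptr and the realm_for_sharding
        # column rather than a plain realm foreign key.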
        exported_namedusergroups = data["zerver_namedusergroup"]
        self.assert_length(exported_namedusergroups, 9)
        self.assertEqual(exported_namedusergroups[2]["name"], "role:administrators")
        self.assertTrue("usergroup_ptr" in exported_namedusergroups[2])
        self.assertTrue("realm_for_sharding" in exported_namedusergroups[2])
        self.assertFalse("realm" in exported_namedusergroups[2])
        self.assertFalse("direct_members" in exported_namedusergroups[2])
        self.assertFalse("direct_subgroups" in exported_namedusergroups[2])

        exported_onboarding_usermessages = data["zerver_onboardingusermessage"]
        self.assert_length(exported_onboarding_usermessages, 1)
        self.assertEqual(exported_onboarding_usermessages[0]["message"], onboarding_message_id)
        self.assertEqual(
            exported_onboarding_usermessages[0]["flags_mask"],
            OnboardingUserMessage.flags.starred.mask,
        )
        self.assertEqual(exported_onboarding_usermessages[0]["realm"], realm.id)

        data = read_json("messages-000001.json")
        um = UserMessage.objects.all()[0]
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)

    def test_export_realm_with_exportable_user_ids(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        user_ids = {iago.id, hamlet.id}

        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        self.export_realm_and_create_auditlog(realm, exportable_user_ids=user_ids)

        data = read_json("realm.json")

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertNotIn("default-bot@zulip.com", exported_user_emails)
        self.assertNotIn(self.example_email("cordelia"), exported_user_emails)

        dummy_user_emails = self.get_set(data["zerver_userprofile_mirrordummy"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), dummy_user_emails)
        self.assertIn(self.example_email("othello"), dummy_user_emails)
        self.assertIn("default-bot@zulip.com", dummy_user_emails)
        self.assertNotIn(self.example_email("iago"), dummy_user_emails)
        self.assertNotIn(self.example_email("hamlet"), dummy_user_emails)

        data = read_json("messages-000001.json")

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertNotIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)
        self.assertIn(pm_d_msg_id, exported_message_ids)

    def test_export_realm_with_member_consent(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        # Create private streams and subscribe users for testing export
        create_stream_if_needed(realm, "Private A", invite_only=True)
        self.subscribe(self.example_user("iago"), "Private A")
        self.subscribe(self.example_user("othello"), "Private A")
        self.send_stream_message(self.example_user("iago"), "Private A", "Hello stream A")

        create_stream_if_needed(realm, "Private B", invite_only=True)
        self.subscribe(self.example_user("prospero"), "Private B")
        stream_b_first_message_id = self.send_stream_message(
            self.example_user("prospero"), "Private B", "Hello stream B"
        )
        # Hamlet subscribes now, so due to protected history, he will not have access
        # to the first message. This means that his consent will not be sufficient
        # for the export of that message.
        self.subscribe(self.example_user("hamlet"), "Private B")
        stream_b_second_message_id = self.send_stream_message(
            self.example_user("prospero"), "Private B", "Hello again stream B"
        )

        create_stream_if_needed(realm, "Private C", invite_only=True)
        self.subscribe(self.example_user("othello"), "Private C")
        self.subscribe(self.example_user("prospero"), "Private C")
        stream_c_message_id = self.send_stream_message(
            self.example_user("othello"), "Private C", "Hello stream C"
        )

        create_stream_if_needed(
            realm, "Private D", invite_only=True, history_public_to_subscribers=True
        )
        self.subscribe(self.example_user("prospero"), "Private D")
        self.send_stream_message(self.example_user("prospero"), "Private D", "Hello stream D")
        # Hamlet subscribes now, but since the stream's history is public to
        # subscribers, his consent is sufficient to export even messages sent
        # before he was added to the stream.
        self.subscribe(self.example_user("hamlet"), "Private D")
        self.send_stream_message(self.example_user("prospero"), "Private D", "Hello again stream D")

        # Create direct message groups
        self.send_group_direct_message(
            self.example_user("iago"), [self.example_user("cordelia"), self.example_user("AARON")]
        )
        direct_message_group_a = DirectMessageGroup.objects.last()
        self.send_group_direct_message(
            self.example_user("ZOE"),
            [self.example_user("hamlet"), self.example_user("AARON"), self.example_user("othello")],
        )
        direct_message_group_b = DirectMessageGroup.objects.last()

        direct_message_group_c_message_id = self.send_group_direct_message(
            self.example_user("AARON"),
            [self.example_user("cordelia"), self.example_user("ZOE"), self.example_user("othello")],
        )

        # Create direct messages
        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        # Iago and Hamlet consented to export their private data.
        consented_user_ids = [self.example_user(user).id for user in ["iago", "hamlet"]]
        do_change_user_setting(
            self.example_user("iago"), "allow_private_data_export", True, acting_user=None
        )
        do_change_user_setting(
            self.example_user("hamlet"), "allow_private_data_export", True, acting_user=None
        )

        self.export_realm_and_create_auditlog(
            realm, export_type=RealmExport.EXPORT_FULL_WITH_CONSENT
        )

        data = read_json("realm.json")

        self.assert_length(data["zerver_userprofile_crossrealm"], 3)
        self.assert_length(data["zerver_userprofile_mirrordummy"], 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("othello"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {
                "core team",
                "Denmark",
                "Rome",
                "Scotland",
                "Venice",
                "Verona",
                "Zulip",
                "sandbox",
                "Private A",
                "Private B",
                "Private C",
                "Private D",
            },
        )

        data = read_json("messages-000001.json")
        exported_usermessages = UserMessage.objects.filter(
            user_profile__in=[self.example_user("iago"), self.example_user("hamlet")]
        )
        um = exported_usermessages[0]
        self.assert_length(data["zerver_usermessage"], len(exported_usermessages))
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        public_stream_names = [
            "Denmark",
            "Rome",
            "Scotland",
            "Venice",
            "Verona",
            "Zulip",
            "sandbox",
        ]
        public_stream_ids = Stream.objects.filter(name__in=public_stream_names).values_list(
            "id", flat=True
        )
        public_stream_recipients = Recipient.objects.filter(
            type_id__in=public_stream_ids, type=Recipient.STREAM
        )
        public_stream_message_ids = Message.objects.filter(
            realm_id=realm.id, recipient__in=public_stream_recipients
        ).values_list("id", flat=True)

        # Messages from Private stream C are not exported since no member gave consent
        # Only the second message from Private stream B is exported, so that gets handled
        # separately.
        private_stream_ids = Stream.objects.filter(
            name__in=["Private A", "Private D", "core team"]
        ).values_list("id", flat=True)
        private_stream_recipients = Recipient.objects.filter(
            type_id__in=private_stream_ids, type=Recipient.STREAM
        )
        private_stream_message_ids = Message.objects.filter(
            realm_id=realm.id, recipient__in=private_stream_recipients
        ).values_list("id", flat=True)

        pm_recipients = Recipient.objects.filter(
            type_id__in=consented_user_ids, type=Recipient.PERSONAL
        )
        pm_query = Q(recipient__in=pm_recipients) | Q(sender__in=consented_user_ids)
        exported_pm_ids = Message.objects.filter(pm_query, realm=realm.id).values_list(
            "id", flat=True
        )

        # Third direct message group is not exported since none of
        # the members gave consent
        assert direct_message_group_a is not None and direct_message_group_b is not None
        direct_message_group_recipients = Recipient.objects.filter(
            type_id__in=[direct_message_group_a.id, direct_message_group_b.id],
            type=Recipient.DIRECT_MESSAGE_GROUP,
        )
        pm_query = Q(recipient__in=direct_message_group_recipients) | Q(
            sender__in=consented_user_ids
        )
        exported_direct_message_group_ids = Message.objects.filter(
            pm_query, realm=realm.id
        ).values_list("id", flat=True)
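
        # The expected export set: all public-stream traffic, the fully
        # exportable private streams, the one Private B message sent after
        # Hamlet subscribed, plus DMs and group DMs involving a consenting
        # user.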
        exported_msg_ids = {
            *public_stream_message_ids,
            *private_stream_message_ids,
            stream_b_second_message_id,
            *exported_pm_ids,
            *exported_direct_message_group_ids,
        }
        self.assertEqual(self.get_set(data["zerver_message"], "id"), exported_msg_ids)

        self.assertNotIn(stream_b_first_message_id, exported_msg_ids)

        self.assertNotIn(stream_c_message_id, exported_msg_ids)
        self.assertNotIn(direct_message_group_c_message_id, exported_msg_ids)

        self.assertNotIn(pm_a_msg_id, exported_msg_ids)
        self.assertIn(pm_b_msg_id, exported_msg_ids)
        self.assertIn(pm_c_msg_id, exported_msg_ids)
        self.assertIn(pm_d_msg_id, exported_msg_ids)

    """
    Tests for import_realm
    """

    def test_import_realm(self) -> None:
        original_realm = Realm.objects.get(string_id="zulip")

        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        denmark_stream = get_stream("Denmark", original_realm)
        denmark_stream.creator = hamlet
        denmark_stream.save()

        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        cross_realm_bot = get_system_bot(settings.WELCOME_BOT, internal_realm.id)

        with get_test_image_file("img.png") as img_file:
            realm_emoji = check_add_realm_emoji(
                realm=hamlet.realm,
                name="hawaii",
                author=hamlet,
                image_file=img_file,
                content_type="image/png",
            )
            self.assertEqual(realm_emoji.name, "hawaii")

        # We want to set up some image data to verify image attachment thumbnailing works correctly
        # in the import.
        # We'll create a new user to use as the sender of the messages with such images,
        # so that we can easily find them after importing - by fetching messages sent
        # by the thumbnailing_test_user_email account.
        thumbnailing_test_user_email = "thumbnailing_test@zulip.com"
        self.create_user_and_login(thumbnailing_test_user_email, original_realm)
        thumbnailing_test_user = get_user_by_delivery_email(
            thumbnailing_test_user_email, original_realm
        )

        # Send a message with the image. After the import, we'll verify that this message
        # and the associated ImageAttachment have been created correctly.
        image_path_id = self.upload_and_thumbnail_image("img.png")
        self.send_stream_message(
            sender=thumbnailing_test_user,
            stream_name="Verona",
            content=f"An [image](/user_uploads/{image_path_id})",
        )
        image_attachment = ImageAttachment.objects.get(path_id=image_path_id)
        # Malform some ImageAttachment info. These shouldn't get exported (and certainly not imported!)
        # anyway, so we can test that this misinformation doesn't make its way into the imported realm.
        image_attachment.original_width_px = 9999
        image_attachment.original_height_px = 9999
        image_attachment.save()

        # Deactivate a user to ensure such a case is covered.
        do_deactivate_user(self.example_user("aaron"), acting_user=None)

        # Change some authentication_methods so that some are enabled and some disabled
        # for this to be properly tested, as opposed to some special case
        # with e.g. everything enabled.
        authentication_methods = original_realm.authentication_methods_dict()
        authentication_methods["Email"] = False
        authentication_methods["Dev"] = True

        do_set_realm_authentication_methods(
            original_realm, authentication_methods, acting_user=None
        )
|
|
|
|
|
2023-05-18 20:33:12 +02:00
|
|
|
# Set up an edge-case RealmAuditLog with acting_user in a different realm. Such an acting_user can't be covered
|
|
|
|
# by the export, so we'll test that it is handled by getting set to None.
|
|
|
|
self.assertTrue(
|
|
|
|
RealmAuditLog.objects.filter(
|
2024-08-30 18:15:41 +02:00
|
|
|
modified_user=hamlet, event_type=AuditLogEventType.USER_CREATED
|
2023-05-18 20:33:12 +02:00
|
|
|
).count(),
|
|
|
|
1,
|
|
|
|
)
|
|
|
|
RealmAuditLog.objects.filter(
|
2024-08-30 18:15:41 +02:00
|
|
|
modified_user=hamlet, event_type=AuditLogEventType.USER_CREATED
|
2023-05-18 20:33:12 +02:00
|
|
|
).update(acting_user_id=cross_realm_bot.id)
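# (cross_realm_bot lives in the separate system bot realm rather than
# original_realm, which is what makes this acting_user unexportable.)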
|
|
|
|
|
2024-07-04 14:05:48 +02:00
|
|
|
# data to test import of direct message groups
|
|
|
|
direct_message_group = [
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("hamlet"),
|
|
|
|
self.example_user("othello"),
|
2018-05-25 18:54:22 +02:00
|
|
|
]
|
2024-07-04 14:05:48 +02:00
|
|
|
self.send_group_direct_message(
|
2021-02-12 08:20:45 +01:00
|
|
|
self.example_user("cordelia"),
|
2024-07-04 14:05:48 +02:00
|
|
|
direct_message_group,
|
|
|
|
"test group direct message",
|
2018-05-25 18:54:22 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_mention_message = "@**King Hamlet** Hello"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("iago"), "Verona", user_mention_message)
|
2019-05-23 13:58:10 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
stream_mention_message = "Subscribe to #**Denmark**"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("hamlet"), "Verona", stream_mention_message)
|
2019-05-28 13:06:48 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_group_mention_message = "Hello @*hamletcharacters*"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("othello"), "Verona", user_group_mention_message)
|
2019-05-28 13:47:41 +02:00
|
|
|
|
2019-06-02 23:57:03 +02:00
|
|
|
special_characters_message = "```\n'\n```\n@**Polonius**"
|
2020-03-07 11:43:05 +01:00
|
|
|
self.send_stream_message(self.example_user("iago"), "Denmark", special_characters_message)
|
2019-06-02 22:24:30 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
sample_user = self.example_user("hamlet")
|
2018-07-14 16:10:45 +02:00
|
|
|
|
2021-12-05 14:26:42 +01:00
|
|
|
check_add_reaction(
|
|
|
|
user_profile=cordelia,
|
|
|
|
message_id=most_recent_message(hamlet).id,
|
|
|
|
emoji_name="hawaii",
|
|
|
|
emoji_code=None,
|
|
|
|
reaction_type=None,
|
|
|
|
)
|
|
|
|
reaction = Reaction.objects.order_by("id").last()
|
|
|
|
assert reaction
|
|
|
|
|
|
|
|
# Verify strange invariant for Reaction/RealmEmoji.
|
|
|
|
self.assertEqual(reaction.emoji_code, str(realm_emoji.id))
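# (The invariant: for realm emoji, emoji_code is the RealmEmoji row's id
# serialized as a string, not a unicode codepoint - hence this comparison.)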
|
|
|
|
|
2024-05-10 15:37:43 +02:00
|
|
|
# data to test import of onboarding step
|
2023-12-01 08:20:48 +01:00
|
|
|
OnboardingStep.objects.create(
|
2021-02-12 08:19:30 +01:00
|
|
|
user=sample_user,
|
2024-05-10 15:37:43 +02:00
|
|
|
onboarding_step="intro_inbox_view_modal",
|
2018-07-12 16:34:26 +02:00
|
|
|
)
|
|
|
|
|
2018-07-14 16:10:45 +02:00
|
|
|
# data to test import of muted topic
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = get_stream("Verona", original_realm)
|
2023-02-03 12:57:43 +01:00
|
|
|
do_set_user_topic_visibility_policy(
|
2023-03-03 18:00:27 +01:00
|
|
|
sample_user,
|
|
|
|
stream,
|
|
|
|
"Verona2",
|
2023-03-12 16:19:42 +01:00
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.MUTED,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-14 16:10:45 +02:00
|
|
|
|
2021-04-08 21:21:45 +02:00
|
|
|
# data to test import of muted users
|
|
|
|
do_mute_user(hamlet, cordelia)
|
|
|
|
do_mute_user(cordelia, hamlet)
|
|
|
|
do_mute_user(cordelia, othello)
|
|
|
|
|
2021-12-05 13:42:04 +01:00
|
|
|
client = get_client("website")
|
|
|
|
|
2020-06-11 16:03:47 +02:00
|
|
|
do_update_user_presence(
|
|
|
|
sample_user, client, timezone_now(), UserPresence.LEGACY_STATUS_ACTIVE_INT
|
|
|
|
)
|
2024-05-26 02:38:57 +02:00
|
|
|
user_presence_last_update_ids = set(
|
|
|
|
UserPresence.objects.filter(realm=original_realm)
|
|
|
|
.values_list("last_update_id", flat=True)
|
|
|
|
.distinct("last_update_id")
|
|
|
|
)
|
|
|
|
presence_sequence = PresenceSequence.objects.get(realm=original_realm)
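# Sanity sketch (not part of the original test, assuming last_update_id values
# are allocated from the realm's PresenceSequence counter): none of the values
# recorded above should exceed the counter's current value.
assert max(user_presence_last_update_ids) <= presence_sequence.last_update_id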
|
2021-12-05 13:42:04 +01:00
|
|
|
|
2023-04-29 20:45:22 +02:00
|
|
|
# Set up scheduled messages.
|
|
|
|
ScheduledMessage.objects.filter(realm=original_realm).delete()
|
|
|
|
check_schedule_message(
|
|
|
|
sender=hamlet,
|
|
|
|
client=get_client("website"),
|
|
|
|
recipient_type_name="stream",
|
|
|
|
message_to=[Stream.objects.get(name="Denmark", realm=original_realm).id],
|
|
|
|
topic_name="test-import",
|
|
|
|
message_content="test message",
|
2023-11-19 19:45:19 +01:00
|
|
|
deliver_at=timezone_now() + timedelta(days=365),
|
2023-04-29 20:45:22 +02:00
|
|
|
realm=original_realm,
|
|
|
|
)
|
|
|
|
original_scheduled_message = ScheduledMessage.objects.filter(realm=original_realm).last()
|
|
|
|
assert original_scheduled_message is not None
|
|
|
|
|
2021-12-05 13:42:04 +01:00
|
|
|
# send Cordelia to the islands
|
|
|
|
do_update_user_status(
|
|
|
|
user_profile=cordelia,
|
|
|
|
away=True,
|
|
|
|
status_text="in Hawaii",
|
|
|
|
client_id=client.id,
|
|
|
|
emoji_name="hawaii",
|
|
|
|
emoji_code=str(realm_emoji.id),
|
|
|
|
reaction_type=Reaction.REALM_EMOJI,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-02-18 14:58:29 +01:00
|
|
|
|
2021-12-05 13:42:04 +01:00
|
|
|
user_status = UserStatus.objects.order_by("id").last()
|
|
|
|
assert user_status
|
|
|
|
|
|
|
|
# Verify strange invariant for UserStatus/RealmEmoji.
|
|
|
|
self.assertEqual(user_status.emoji_code, str(realm_emoji.id))
|
|
|
|
|
2018-07-17 19:11:16 +02:00
|
|
|
# data to test import of botstoragedata and botconfigdata
|
|
|
|
bot_profile = do_create_user(
|
|
|
|
email="bot-1@zulip.com",
|
|
|
|
password="test",
|
|
|
|
realm=original_realm,
|
|
|
|
full_name="bot",
|
|
|
|
bot_type=UserProfile.EMBEDDED_BOT,
|
2021-02-12 08:19:30 +01:00
|
|
|
bot_owner=sample_user,
|
2021-02-06 14:27:06 +01:00
|
|
|
acting_user=None,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-17 19:11:16 +02:00
|
|
|
storage = StateHandler(bot_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
storage.put("some key", "some value")
|
2018-07-17 19:11:16 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
set_bot_config(bot_profile, "entry 1", "value 1")
|
2018-07-17 19:11:16 +02:00
|
|
|
|
2021-06-01 12:55:44 +02:00
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=original_realm)
|
|
|
|
realm_user_default.default_language = "de"
|
|
|
|
realm_user_default.twenty_four_hour_time = True
|
|
|
|
realm_user_default.save()
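# (Deliberately non-default values: the post-import check can then tell a real
# round-trip apart from a freshly created row with stock defaults.)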
|
|
|
|
|
2024-07-22 16:51:07 +02:00
|
|
|
# Data to test import of onboarding usermessages
|
|
|
|
onboarding_message_id = self.send_stream_message(
|
|
|
|
cross_realm_bot,
|
|
|
|
str(Realm.ZULIP_SANDBOX_CHANNEL_NAME),
|
|
|
|
"onboarding message",
|
|
|
|
recipient_realm=original_realm,
|
|
|
|
)
|
|
|
|
OnboardingUserMessage.objects.create(
|
|
|
|
realm=original_realm,
|
|
|
|
message_id=onboarding_message_id,
|
|
|
|
flags=OnboardingUserMessage.flags.starred,
|
|
|
|
)
|
|
|
|
|
2022-02-11 18:21:38 +01:00
|
|
|
# We want to have an extra, malformed RealmEmoji with no .author
|
|
|
|
# to test that it gets fixed upon import.
|
|
|
|
with get_test_image_file("img.png") as img_file:
|
|
|
|
new_realm_emoji = check_add_realm_emoji(
|
2024-07-11 20:46:33 +02:00
|
|
|
realm=hamlet.realm,
|
|
|
|
name="hawaii2",
|
|
|
|
author=hamlet,
|
|
|
|
image_file=img_file,
|
|
|
|
content_type="image/png",
|
2022-02-11 18:21:38 +01:00
|
|
|
)
|
|
|
|
assert new_realm_emoji is not None
|
|
|
|
original_realm_emoji_count = RealmEmoji.objects.count()
|
|
|
|
self.assertGreaterEqual(original_realm_emoji_count, 2)
|
|
|
|
new_realm_emoji.author = None
|
|
|
|
new_realm_emoji.save()
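# (The post-import assertions below only require that every imported
# RealmEmoji ends up with some non-None author; the importer is expected to
# backfill one for this malformed row.)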
|
|
|
|
|
2024-04-17 13:53:32 +02:00
|
|
|
RealmAuditLog.objects.create(
|
2024-09-03 15:58:19 +02:00
|
|
|
realm=original_realm,
|
|
|
|
event_type=AuditLogEventType.REALM_EXPORTED,
|
|
|
|
event_time=timezone_now(),
|
2024-04-17 13:53:32 +02:00
|
|
|
)
|
|
|
|
|
2021-12-07 18:48:22 +01:00
|
|
|
getters = self.get_realm_getters()
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
snapshots: dict[str, object] = {}
|
2021-12-07 18:48:22 +01:00
|
|
|
|
|
|
|
for f in getters:
|
|
|
|
snapshots[f.__name__] = f(original_realm)
|
|
|
|
|
2024-09-26 12:18:55 +02:00
|
|
|
self.export_realm(original_realm, export_type=RealmExport.EXPORT_FULL_WITHOUT_CONSENT)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
2024-10-17 21:20:49 +02:00
|
|
|
with (
|
|
|
|
self.settings(BILLING_ENABLED=False),
|
|
|
|
self.assertLogs(level="INFO"),
|
|
|
|
# With captureOnCommitCallbacks we ensure that tasks delegated to the queue workers
|
|
|
|
# are executed immediately. We use this to make thumbnailing run in the import
|
|
|
|
# process in this test.
|
|
|
|
self.captureOnCommitCallbacks(execute=True),
|
|
|
|
):
|
2022-08-26 23:19:01 +02:00
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
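# (captureOnCommitCallbacks is Django's TestCase helper: with execute=True,
# the transaction.on_commit callbacks captured in the block - including the
# enqueued thumbnailing job - are executed when the block exits.)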
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2021-12-07 18:48:22 +01:00
|
|
|
# Make sure our export/import didn't somehow leak info into the
|
|
|
|
# original realm.
|
|
|
|
for f in getters:
|
|
|
|
# One way this will fail is if you make a getter that doesn't
|
|
|
|
# properly restrict its results to a single realm.
|
|
|
|
if f(original_realm) != snapshots[f.__name__]:
|
|
|
|
raise AssertionError(
|
|
|
|
f"""
|
|
|
|
The export/import process is corrupting your
|
|
|
|
original realm according to {f.__name__}!
|
|
|
|
|
|
|
|
If you wrote that getter, are you sure you
|
|
|
|
are only grabbing objects from one realm?
|
|
|
|
"""
|
|
|
|
)
|
2021-12-07 18:46:35 +01:00
|
|
|
|
|
|
|
imported_realm = Realm.objects.get(string_id="test-zulip")
|
|
|
|
|
|
|
|
# test realm
|
|
|
|
self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
|
|
|
|
self.assertNotEqual(imported_realm.id, original_realm.id)
|
|
|
|
|
2021-12-17 01:16:11 +01:00
|
|
|
def assert_realm_values(f: Callable[[Realm], object]) -> None:
|
2021-12-07 18:46:35 +01:00
|
|
|
orig_realm_result = f(original_realm)
|
|
|
|
imported_realm_result = f(imported_realm)
|
|
|
|
# orig_realm_result should be truthy and have some values, otherwise
|
|
|
|
# the test is kind of meaningless
|
2023-03-04 01:24:14 +01:00
|
|
|
assert orig_realm_result
|
2021-12-07 18:46:35 +01:00
|
|
|
|
|
|
|
# It may be helpful to do print(f.__name__) if you are having
|
|
|
|
# trouble debugging this.
|
|
|
|
|
|
|
|
# print(f.__name__, orig_realm_result, imported_realm_result)
|
|
|
|
self.assertEqual(orig_realm_result, imported_realm_result)
|
|
|
|
|
|
|
|
for f in getters:
|
|
|
|
assert_realm_values(f)
|
|
|
|
|
|
|
|
self.verify_emoji_code_foreign_keys()
|
|
|
|
|
2024-07-04 14:05:48 +02:00
|
|
|
# Our direct message group hashes change, because the hashes
|
|
|
|
# are computed from user ids, which change on import.
|
|
|
|
self.assertNotEqual(
|
|
|
|
get_direct_message_group_hashes(original_realm),
|
|
|
|
get_direct_message_group_hashes(imported_realm),
|
|
|
|
)
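# (Why inequality is expected: in current Zulip the hash is a SHA-1 over the
# sorted, comma-joined participant user ids, and user ids are remapped during
# import. Equal hashes here would actually indicate a bug.)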
|
2021-12-07 18:46:35 +01:00
|
|
|
|
|
|
|
# test to highlight that bs4 which we use to do data-**id
|
|
|
|
# replacements modifies the HTML sometimes. eg replacing <br>
|
|
|
|
# with <br/>, &#39; with ' etc. These modifications don't
|
|
|
|
# affect how the browser displays the rendered_content, so we
|
|
|
|
# are okay with using bs4 for this. The lxml package has
|
|
|
|
# similar behavior.
|
|
|
|
orig_polonius_user = self.example_user("polonius")
|
|
|
|
original_msg = Message.objects.get(
|
|
|
|
content=special_characters_message, sender__realm=original_realm
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
original_msg.rendered_content,
|
|
|
|
'<div class="codehilite"><pre><span></span><code>'\n</code></pre></div>\n'
|
|
|
|
f'<p><span class="user-mention" data-user-id="{orig_polonius_user.id}">@Polonius</span></p>',
|
|
|
|
)
|
|
|
|
imported_polonius_user = UserProfile.objects.get(
|
|
|
|
delivery_email=self.example_email("polonius"), realm=imported_realm
|
|
|
|
)
|
|
|
|
imported_msg = Message.objects.get(
|
|
|
|
content=special_characters_message, sender__realm=imported_realm
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
imported_msg.rendered_content,
|
|
|
|
'<div class="codehilite"><pre><span></span><code>\'\n</code></pre></div>\n'
|
|
|
|
f'<p><span class="user-mention" data-user-id="{imported_polonius_user.id}">@Polonius</span></p>',
|
|
|
|
)
|
|
|
|
|
2024-05-14 03:40:42 +02:00
|
|
|
imported_hamlet_user = UserProfile.objects.get(
|
|
|
|
delivery_email=self.example_email("hamlet"), realm=imported_realm
|
|
|
|
)
|
|
|
|
imported_denmark_stream = Stream.objects.get(name="Denmark", realm=imported_realm)
|
|
|
|
self.assertEqual(imported_denmark_stream.creator, imported_hamlet_user)
|
|
|
|
|
2021-12-07 18:46:35 +01:00
|
|
|
# Check recipient_id was generated correctly for the imported users and streams.
|
|
|
|
for user_profile in UserProfile.objects.filter(realm=imported_realm):
|
|
|
|
self.assertEqual(
|
|
|
|
user_profile.recipient_id,
|
|
|
|
Recipient.objects.get(type=Recipient.PERSONAL, type_id=user_profile.id).id,
|
|
|
|
)
|
|
|
|
for stream in Stream.objects.filter(realm=imported_realm):
|
|
|
|
self.assertEqual(
|
|
|
|
stream.recipient_id,
|
|
|
|
Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id).id,
|
|
|
|
)
|
|
|
|
|
2024-07-05 13:13:40 +02:00
|
|
|
for dm_group in DirectMessageGroup.objects.all():
|
|
|
|
# Direct Message groups don't have a realm column, so we just test all
|
|
|
|
# Direct Message groups for simplicity.
|
2021-12-07 18:46:35 +01:00
|
|
|
self.assertEqual(
|
2024-07-04 14:05:48 +02:00
|
|
|
dm_group.recipient_id,
|
|
|
|
Recipient.objects.get(type=Recipient.DIRECT_MESSAGE_GROUP, type_id=dm_group.id).id,
|
2021-12-07 18:46:35 +01:00
|
|
|
)
|
|
|
|
|
2023-04-29 20:45:22 +02:00
|
|
|
self.assertEqual(ScheduledMessage.objects.filter(realm=imported_realm).count(), 1)
|
|
|
|
imported_scheduled_message = ScheduledMessage.objects.first()
|
|
|
|
assert imported_scheduled_message is not None
|
|
|
|
self.assertEqual(imported_scheduled_message.content, original_scheduled_message.content)
|
|
|
|
self.assertEqual(
|
|
|
|
imported_scheduled_message.scheduled_timestamp,
|
|
|
|
original_scheduled_message.scheduled_timestamp,
|
|
|
|
)
|
|
|
|
|
2021-12-07 18:46:35 +01:00
|
|
|
for user_profile in UserProfile.objects.filter(realm=imported_realm):
|
|
|
|
# Check that all Subscriptions have the correct is_user_active set.
|
|
|
|
self.assertEqual(
|
|
|
|
Subscription.objects.filter(
|
|
|
|
user_profile=user_profile, is_user_active=user_profile.is_active
|
|
|
|
).count(),
|
|
|
|
Subscription.objects.filter(user_profile=user_profile).count(),
|
|
|
|
)
|
|
|
|
# Verify that we've actually tested something meaningful instead of a blind import
|
|
|
|
# with is_user_active=True used for everything.
|
|
|
|
self.assertTrue(Subscription.objects.filter(is_user_active=False).exists())
|
|
|
|
|
2022-02-11 18:21:38 +01:00
|
|
|
all_imported_realm_emoji = RealmEmoji.objects.filter(realm=imported_realm)
|
|
|
|
self.assertEqual(all_imported_realm_emoji.count(), original_realm_emoji_count)
|
|
|
|
for imported_realm_emoji in all_imported_realm_emoji:
|
|
|
|
self.assertNotEqual(imported_realm_emoji.author, None)
|
|
|
|
|
2022-08-26 14:34:22 +02:00
|
|
|
self.assertEqual(
|
|
|
|
original_realm.authentication_methods_dict(),
|
|
|
|
imported_realm.authentication_methods_dict(),
|
|
|
|
)
|
|
|
|
|
2023-05-18 20:33:12 +02:00
|
|
|
imported_hamlet = get_user_by_delivery_email(hamlet.delivery_email, imported_realm)
|
|
|
|
realmauditlog = RealmAuditLog.objects.get(
|
2024-08-30 18:15:41 +02:00
|
|
|
modified_user=imported_hamlet, event_type=AuditLogEventType.USER_CREATED
|
2023-05-18 20:33:12 +02:00
|
|
|
)
|
|
|
|
self.assertEqual(realmauditlog.realm, imported_realm)
|
|
|
|
# As explained above when setting up the RealmAuditLog row, the .acting_user should have been
|
|
|
|
# set to None due to being unexportable.
|
|
|
|
self.assertEqual(realmauditlog.acting_user, None)
|
|
|
|
|
2024-05-26 02:38:57 +02:00
|
|
|
# Verify the PresenceSequence for the realm got imported correctly.
|
|
|
|
imported_presence_sequence = PresenceSequence.objects.get(realm=imported_realm)
|
|
|
|
self.assertEqual(
|
|
|
|
presence_sequence.last_update_id, imported_presence_sequence.last_update_id
|
|
|
|
)
|
|
|
|
imported_last_update_ids = set(
|
|
|
|
UserPresence.objects.filter(realm=imported_realm)
|
|
|
|
.values_list("last_update_id", flat=True)
|
|
|
|
.distinct("last_update_id")
|
|
|
|
)
|
|
|
|
self.assertEqual(user_presence_last_update_ids, imported_last_update_ids)
|
|
|
|
self.assertEqual(imported_presence_sequence.last_update_id, max(imported_last_update_ids))
|
|
|
|
|
2022-09-27 21:42:31 +02:00
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.filter(realm=original_realm).count(),
|
|
|
|
Message.objects.filter(realm=imported_realm).count(),
|
|
|
|
)
|
|
|
|
|
2024-10-17 21:20:49 +02:00
|
|
|
# Verify thumbnailing.
|
|
|
|
imported_thumbnailing_test_user = get_user_by_delivery_email(
|
|
|
|
thumbnailing_test_user_email, imported_realm
|
|
|
|
)
|
|
|
|
imported_messages_with_thumbnail = Message.objects.filter(
|
|
|
|
sender=imported_thumbnailing_test_user, realm=imported_realm
|
|
|
|
)
|
|
|
|
imported_message_with_thumbnail = imported_messages_with_thumbnail.latest("id")
|
|
|
|
attachment_with_thumbnail = Attachment.objects.get(
|
|
|
|
owner=imported_thumbnailing_test_user, messages=imported_message_with_thumbnail
|
|
|
|
)
|
|
|
|
|
|
|
|
path_id = attachment_with_thumbnail.path_id
|
|
|
|
# An ImageAttachment has been created in the import process.
|
|
|
|
imported_image_attachment = ImageAttachment.objects.get(
|
|
|
|
path_id=path_id, realm=imported_realm
|
|
|
|
)
|
|
|
|
|
|
|
|
# It figured out the dimensions correctly and didn't inherit the bad data in the
|
|
|
|
# original ImageAttachment.
|
|
|
|
self.assertEqual(imported_image_attachment.original_width_px, 128)
|
|
|
|
self.assertEqual(imported_image_attachment.original_height_px, 128)
|
|
|
|
# ImageAttachment.thumbnail_metadata contains information about thumbnails that actually
|
|
|
|
# got generated. By asserting it's not empty, we make sure thumbnailing ran for the image
|
|
|
|
# and that we didn't merely create the ImageAttachment row in the database.
|
|
|
|
self.assertNotEqual(len(imported_image_attachment.thumbnail_metadata), 0)
|
|
|
|
self.assertTrue(imported_image_attachment.thumbnail_metadata[0])
|
|
|
|
|
|
|
|
# Content and rendered_content got updated correctly: they point to the new
|
|
|
|
# path_id and include the HTML for the image preview using the thumbnail.
|
|
|
|
self.assertEqual(
|
|
|
|
imported_message_with_thumbnail.content, f"An [image](/user_uploads/{path_id})"
|
|
|
|
)
|
|
|
|
expected_rendered_preview = (
|
|
|
|
f'<p>An <a href="/user_uploads/{path_id}">image</a></p>\n'
|
|
|
|
f'<div class="message_inline_image"><a href="/user_uploads/{path_id}" title="image">'
|
|
|
|
f'<img data-original-dimensions="128x128" src="/user_uploads/thumbnail/{path_id}/840x560.webp"></a></div>'
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
imported_message_with_thumbnail.rendered_content, expected_rendered_preview
|
|
|
|
)
|
|
|
|
|
2024-05-30 02:57:56 +02:00
|
|
|
def test_import_message_edit_history(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
iago = self.example_user("iago")
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
user_mention_message = f"@**King Hamlet|{hamlet.id}** Hello"
|
|
|
|
|
|
|
|
self.login_user(iago)
|
|
|
|
message_id = self.send_stream_message(
|
|
|
|
self.example_user("iago"), "Verona", user_mention_message
|
|
|
|
)
|
|
|
|
|
|
|
|
new_content = "new content"
|
|
|
|
result = self.client_patch(
|
|
|
|
f"/json/messages/{message_id}",
|
|
|
|
{
|
|
|
|
"content": new_content,
|
|
|
|
},
|
|
|
|
)
|
|
|
|
self.assert_json_success(result)
|
|
|
|
|
|
|
|
self.export_realm_and_create_auditlog(realm)
|
|
|
|
with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
|
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
|
|
|
|
imported_realm = Realm.objects.get(string_id="test-zulip")
|
|
|
|
|
|
|
|
imported_message = Message.objects.filter(realm=imported_realm).latest("id")
|
|
|
|
imported_hamlet_id = UserProfile.objects.get(
|
|
|
|
delivery_email=hamlet.delivery_email, realm=imported_realm
|
|
|
|
).id
|
|
|
|
imported_iago_id = UserProfile.objects.get(
|
|
|
|
delivery_email=iago.delivery_email, realm=imported_realm
|
|
|
|
).id
|
|
|
|
|
|
|
|
edit_history_json = imported_message.edit_history
|
|
|
|
assert edit_history_json is not None
|
|
|
|
edit_history = orjson.loads(edit_history_json)
|
|
|
|
self.assert_length(edit_history, 1)
|
|
|
|
|
|
|
|
prev_version_of_message = edit_history[0]
|
|
|
|
# Ensure the "user_id" (of the sender) was updated correctly
|
|
|
|
# to the imported id in the data.
|
|
|
|
self.assertEqual(prev_version_of_message["user_id"], imported_iago_id)
|
|
|
|
|
|
|
|
# The mention metadata in the rendered content should be updated.
|
|
|
|
self.assertIn(
|
|
|
|
f'data-user-id="{imported_hamlet_id}"', prev_version_of_message["prev_rendered_content"]
|
|
|
|
)
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_getters(self) -> list[Callable[[Realm], object]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
names = set()
|
2024-07-12 02:30:17 +02:00
|
|
|
getters: list[Callable[[Realm], object]] = []
|
2021-12-07 17:36:30 +01:00
|
|
|
|
2021-12-17 01:16:11 +01:00
|
|
|
def getter(f: Callable[[Realm], object]) -> Callable[[Realm], object]:
|
2021-12-07 17:36:30 +01:00
|
|
|
getters.append(f)
|
|
|
|
assert f.__name__.startswith("get_")
|
2021-12-07 18:46:35 +01:00
|
|
|
|
|
|
|
# Avoid dups
|
|
|
|
assert f.__name__ not in names
|
|
|
|
names.add(f.__name__)
|
2021-12-07 17:36:30 +01:00
|
|
|
return f
|
|
|
|
|
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_admin_bot_emails(r: Realm) -> set[str]:
|
2021-12-07 17:25:26 +01:00
|
|
|
return {user.email for user in r.get_admin_users_and_bots()}
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_active_emails(r: Realm) -> set[str]:
|
2021-12-07 17:25:26 +01:00
|
|
|
return {user.email for user in r.get_active_users()}
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_active_stream_names(r: Realm) -> set[str]:
|
2021-12-07 17:25:26 +01:00
|
|
|
return {stream.name for stream in get_active_streams(r)}
|
|
|
|
|
2023-08-09 15:06:56 +02:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_group_names_for_group_settings(r: Realm) -> set[str]:
|
2023-08-09 15:06:56 +02:00
|
|
|
return {
|
2024-04-18 18:59:50 +02:00
|
|
|
getattr(r, permission_name).named_user_group.name
|
2023-10-09 20:54:10 +02:00
|
|
|
for permission_name in Realm.REALM_PERMISSION_GROUP_SETTINGS
|
2023-08-09 15:06:56 +02:00
|
|
|
}
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test recipients
|
2020-02-18 17:25:43 +01:00
|
|
|
def get_recipient_stream(r: Realm) -> Recipient:
|
2022-05-29 21:52:25 +02:00
|
|
|
recipient = Stream.objects.get(name="Verona", realm=r).recipient
|
|
|
|
assert recipient is not None
|
|
|
|
return recipient
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2020-02-18 17:13:47 +01:00
|
|
|
def get_recipient_user(r: Realm) -> Recipient:
|
2022-07-19 01:40:34 +02:00
|
|
|
return assert_is_not_none(UserProfile.objects.get(full_name="Iago", realm=r).recipient)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_stream_recipient_type(r: Realm) -> int:
|
|
|
|
return get_recipient_stream(r).type
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-07 17:25:26 +01:00
|
|
|
def get_user_recipient_type(r: Realm) -> int:
|
|
|
|
return get_recipient_user(r).type
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test subscription
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_subscribers(recipient: Recipient) -> set[str]:
|
2018-07-10 21:12:02 +02:00
|
|
|
subscriptions = Subscription.objects.filter(recipient=recipient)
|
|
|
|
users = {sub.user_profile.email for sub in subscriptions}
|
|
|
|
return users
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_stream_subscribers(r: Realm) -> set[str]:
|
2021-12-07 17:25:26 +01:00
|
|
|
return get_subscribers(get_recipient_stream(r))
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_user_subscribers(r: Realm) -> set[str]:
|
2021-12-07 17:25:26 +01:00
|
|
|
return get_subscribers(get_recipient_user(r))
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test custom profile fields
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_custom_profile_field_names(r: Realm) -> set[str]:
|
2018-07-10 21:12:02 +02:00
|
|
|
custom_profile_fields = CustomProfileField.objects.filter(realm=r)
|
|
|
|
custom_profile_field_names = {field.name for field in custom_profile_fields}
|
|
|
|
return custom_profile_field_names
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_custom_profile_with_field_type_user(
|
|
|
|
r: Realm,
|
2024-07-12 02:30:17 +02:00
|
|
|
) -> tuple[set[str], set[str], set[frozenset[str]]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
fields = CustomProfileField.objects.filter(field_type=CustomProfileField.USER, realm=r)
|
2018-07-16 17:15:42 +02:00
|
|
|
|
|
|
|
def get_email(user_id: int) -> str:
|
|
|
|
return UserProfile.objects.get(id=user_id).email
|
|
|
|
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_email_from_value(field_value: CustomProfileFieldValue) -> set[str]:
|
2020-08-07 01:09:47 +02:00
|
|
|
user_id_list = orjson.loads(field_value.value)
|
2018-07-16 17:15:42 +02:00
|
|
|
return {get_email(user_id) for user_id in user_id_list}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def custom_profile_field_values_for(
|
2022-06-23 20:07:19 +02:00
|
|
|
fields: Iterable[CustomProfileField],
|
2024-07-12 02:30:17 +02:00
|
|
|
) -> set[frozenset[str]]:
|
2024-07-14 21:17:13 +02:00
|
|
|
return {
|
|
|
|
frozenset(get_email_from_value(value))
|
|
|
|
for value in CustomProfileFieldValue.objects.filter(field__in=fields)
|
|
|
|
}
|
2018-07-16 17:15:42 +02:00
|
|
|
|
|
|
|
field_names, field_hints = (set() for i in range(2))
|
|
|
|
for field in fields:
|
|
|
|
field_names.add(field.name)
|
|
|
|
field_hints.add(field.hint)
|
|
|
|
|
|
|
|
return (field_hints, field_names, custom_profile_field_values_for(fields))
|
|
|
|
|
2018-07-05 20:08:40 +02:00
|
|
|
# test realmauditlog
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_audit_log_event_type(r: Realm) -> set[int]:
|
2019-02-13 02:22:36 +01:00
|
|
|
realmauditlogs = RealmAuditLog.objects.filter(realm=r).exclude(
|
2023-12-11 23:26:38 +01:00
|
|
|
event_type__in=[
|
2024-09-03 15:58:19 +02:00
|
|
|
AuditLogEventType.REALM_PLAN_TYPE_CHANGED,
|
2024-09-06 13:56:53 +02:00
|
|
|
AuditLogEventType.CHANNEL_CREATED,
|
2024-09-03 16:46:18 +02:00
|
|
|
AuditLogEventType.REALM_IMPORTED,
|
2023-12-11 23:26:38 +01:00
|
|
|
]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-10 21:12:02 +02:00
|
|
|
realmauditlog_event_type = {log.event_type for log in realmauditlogs}
|
|
|
|
return realmauditlog_event_type
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-04 14:05:48 +02:00
|
|
|
def get_group_direct_message(r: Realm) -> str:
|
|
|
|
direct_message_group_hash = get_direct_message_group_hashes(r)
|
2024-07-05 13:13:40 +02:00
|
|
|
direct_message_group_id = DirectMessageGroup.objects.get(
|
|
|
|
huddle_hash=direct_message_group_hash
|
|
|
|
).id
|
2024-07-04 14:05:48 +02:00
|
|
|
direct_message_group_recipient = Recipient.objects.get(
|
|
|
|
type_id=direct_message_group_id, type=Recipient.DIRECT_MESSAGE_GROUP
|
|
|
|
)
|
|
|
|
group_direct_message = Message.objects.get(recipient=direct_message_group_recipient)
|
|
|
|
self.assertEqual(group_direct_message.content, "test group direct message")
|
|
|
|
return group_direct_message.content
|
2018-07-12 17:34:31 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_alertwords(r: Realm) -> set[str]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return {rec.word for rec in AlertWord.objects.filter(realm_id=r.id)}
|
2020-07-16 16:11:34 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_emoji_names(r: Realm) -> set[str]:
|
2021-12-05 14:26:42 +01:00
|
|
|
names = {rec.name for rec in RealmEmoji.objects.filter(realm_id=r.id)}
|
|
|
|
assert "hawaii" in names
|
|
|
|
return names
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_user_statuses(r: Realm) -> set[tuple[str, str, str]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2021-12-05 13:42:04 +01:00
|
|
|
tups = {
|
2022-09-22 12:21:00 +02:00
|
|
|
(rec.user_profile.full_name, rec.emoji_name, rec.status_text)
|
2021-12-05 13:42:04 +01:00
|
|
|
for rec in UserStatus.objects.filter(user_profile__realm_id=r.id)
|
|
|
|
}
|
2022-09-22 12:21:00 +02:00
|
|
|
assert (cordelia.full_name, "hawaii", "in Hawaii") in tups
|
2021-12-05 13:42:04 +01:00
|
|
|
return tups
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_emoji_reactions(r: Realm) -> set[tuple[str, str]]:
|
2021-12-07 18:46:35 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
2021-12-05 14:26:42 +01:00
|
|
|
tups = {
|
|
|
|
(rec.emoji_name, rec.user_profile.full_name)
|
|
|
|
for rec in Reaction.objects.filter(
|
|
|
|
user_profile__realm_id=r.id, reaction_type=Reaction.REALM_EMOJI
|
|
|
|
)
|
|
|
|
}
|
|
|
|
self.assertEqual(tups, {("hawaii", cordelia.full_name)})
|
|
|
|
return tups
|
|
|
|
|
2024-05-10 15:37:43 +02:00
|
|
|
# test onboarding step
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_onboarding_steps(r: Realm) -> set[str]:
|
2021-12-12 14:59:26 +01:00
|
|
|
user_id = get_user_id(r, "King Hamlet")
|
2024-05-10 15:37:43 +02:00
|
|
|
onboarding_steps = set(
|
|
|
|
OnboardingStep.objects.filter(user_id=user_id).values_list(
|
|
|
|
"onboarding_step", flat=True
|
|
|
|
)
|
|
|
|
)
|
|
|
|
return onboarding_steps
|
2018-07-12 16:34:26 +02:00
|
|
|
|
2018-07-14 16:10:45 +02:00
|
|
|
# test muted topics
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_muted_topics(r: Realm) -> set[str]:
|
2021-12-12 14:59:26 +01:00
|
|
|
user_profile_id = get_user_id(r, "King Hamlet")
|
2021-08-02 09:49:56 +02:00
|
|
|
muted_topics = UserTopic.objects.filter(
|
2023-03-12 16:19:42 +01:00
|
|
|
user_profile_id=user_profile_id, visibility_policy=UserTopic.VisibilityPolicy.MUTED
|
2021-08-02 09:49:56 +02:00
|
|
|
)
|
2018-07-14 16:10:45 +02:00
|
|
|
topic_names = {muted_topic.topic_name for muted_topic in muted_topics}
|
|
|
|
return topic_names
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_muted_users(r: Realm) -> set[tuple[str, str, str]]:
|
2021-12-05 13:41:11 +01:00
|
|
|
mute_objects = MutedUser.objects.filter(user_profile__realm=r)
|
|
|
|
muter_tuples = {
|
|
|
|
(
|
|
|
|
mute_object.user_profile.full_name,
|
|
|
|
mute_object.muted_user.full_name,
|
|
|
|
str(mute_object.date_muted),
|
|
|
|
)
|
2021-04-08 21:21:45 +02:00
|
|
|
for mute_object in mute_objects
|
|
|
|
}
|
2021-12-05 13:41:11 +01:00
|
|
|
return muter_tuples
|
2021-04-08 21:21:45 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_user_group_names(r: Realm) -> set[str]:
|
2024-09-25 11:51:28 +02:00
|
|
|
result = set()
|
|
|
|
for group in UserGroup.objects.filter(realm=r):
|
|
|
|
if hasattr(group, "named_user_group"):
|
|
|
|
result.add(group.named_user_group.name)
|
|
|
|
|
|
|
|
return result
|
2021-12-07 17:25:26 +01:00
|
|
|
|
2024-04-16 11:10:31 +02:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_named_user_group_names(r: Realm) -> set[str]:
|
2024-04-16 11:10:31 +02:00
|
|
|
return {group.name for group in NamedUserGroup.objects.filter(realm=r)}
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_user_membership(r: Realm) -> set[str]:
|
2024-04-18 12:23:46 +02:00
|
|
|
usergroup = NamedUserGroup.objects.get(realm=r, name="hamletcharacters")
|
2018-07-12 13:27:12 +02:00
|
|
|
usergroup_membership = UserGroupMembership.objects.filter(user_group=usergroup)
|
|
|
|
users = {membership.user_profile.email for membership in usergroup_membership}
|
|
|
|
return users
|
|
|
|
|
2022-09-13 16:42:25 +02:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_group_group_membership(r: Realm) -> set[str]:
|
2024-04-18 12:23:46 +02:00
|
|
|
usergroup = NamedUserGroup.objects.get(realm=r, name="role:members")
|
2022-09-13 16:42:25 +02:00
|
|
|
group_group_membership = GroupGroupMembership.objects.filter(supergroup=usergroup)
|
2024-04-18 18:59:50 +02:00
|
|
|
subgroups = {
|
|
|
|
membership.subgroup.named_user_group.name for membership in group_group_membership
|
|
|
|
}
|
2022-09-13 16:42:25 +02:00
|
|
|
return subgroups
|
|
|
|
|
2022-09-12 10:06:51 +02:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_user_group_direct_members(r: Realm) -> set[str]:
|
2022-09-12 10:06:51 +02:00
|
|
|
# We already check the members of the group through UserGroupMembership
|
|
|
|
# objects, but we also want to check that the direct_members field is set
|
|
|
|
# correctly since we do not include this in export data.
|
2024-04-18 12:23:46 +02:00
|
|
|
usergroup = NamedUserGroup.objects.get(realm=r, name="hamletcharacters")
|
2022-09-12 10:06:51 +02:00
|
|
|
direct_members = usergroup.direct_members.all()
|
|
|
|
direct_member_emails = {user.email for user in direct_members}
|
|
|
|
return direct_member_emails
|
|
|
|
|
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_user_group_direct_subgroups(r: Realm) -> set[str]:
|
2022-09-12 10:06:51 +02:00
|
|
|
# We already check the subgroups of the group through GroupGroupMembership
|
|
|
|
# objects, but we also want to check that the direct_subgroups field is set
|
|
|
|
# correctly since we do not include this in export data.
|
2024-04-18 12:23:46 +02:00
|
|
|
usergroup = NamedUserGroup.objects.get(realm=r, name="role:members")
|
2022-09-12 10:06:51 +02:00
|
|
|
direct_subgroups = usergroup.direct_subgroups.all()
|
2024-04-18 18:59:50 +02:00
|
|
|
direct_subgroup_names = {group.named_user_group.name for group in direct_subgroups}
|
2022-09-12 10:06:51 +02:00
|
|
|
return direct_subgroup_names
|
|
|
|
|
2023-06-12 13:27:47 +02:00
|
|
|
@getter
|
2023-08-10 20:15:39 +02:00
|
|
|
def get_user_group_can_mention_group_setting(r: Realm) -> str:
|
2024-04-18 12:23:46 +02:00
|
|
|
user_group = NamedUserGroup.objects.get(realm=r, name="hamletcharacters")
|
2024-04-18 18:59:50 +02:00
|
|
|
return user_group.can_mention_group.named_user_group.name
|
2023-06-12 13:27:47 +02:00
|
|
|
|
2018-07-17 19:11:16 +02:00
|
|
|
# test botstoragedata and botconfigdata
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_botstoragedata(r: Realm) -> dict[str, object]:
|
2018-07-17 19:11:16 +02:00
|
|
|
bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
|
|
|
|
bot_storage_data = BotStorageData.objects.get(bot_profile=bot_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
return {"key": bot_storage_data.key, "data": bot_storage_data.value}
|
2018-07-17 19:11:16 +02:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_botconfigdata(r: Realm) -> dict[str, object]:
|
2018-07-17 19:11:16 +02:00
|
|
|
bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
|
|
|
|
bot_config_data = BotConfigData.objects.get(bot_profile=bot_profile)
|
2021-02-12 08:20:45 +01:00
|
|
|
return {"key": bot_config_data.key, "data": bot_config_data.value}
|
2018-07-17 19:11:16 +02:00
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test messages
|
2022-06-23 22:51:32 +02:00
|
|
|
def get_stream_messages(r: Realm) -> QuerySet[Message]:
|
2018-07-10 21:12:02 +02:00
|
|
|
recipient = get_recipient_stream(r)
|
2023-08-30 21:19:37 +02:00
|
|
|
messages = Message.objects.filter(realm_id=r.id, recipient=recipient)
|
2018-07-10 21:12:02 +02:00
|
|
|
return messages
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_stream_topics(r: Realm) -> set[str]:
|
2018-07-10 21:12:02 +02:00
|
|
|
messages = get_stream_messages(r)
|
2024-01-15 12:17:50 +01:00
|
|
|
topic_names = {m.topic_name() for m in messages}
|
|
|
|
return topic_names
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test usermessages
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_usermessages_user(r: Realm) -> set[str]:
|
2021-02-12 08:20:45 +01:00
|
|
|
messages = get_stream_messages(r).order_by("content")
|
2018-07-10 21:12:02 +02:00
|
|
|
usermessage = UserMessage.objects.filter(message=messages[0])
|
|
|
|
usermessage_user = {um.user_profile.email for um in usermessage}
|
|
|
|
return usermessage_user
|
|
|
|
|
2019-05-23 13:58:10 +02:00
|
|
|
# tests to make sure that various data-*-ids in rendered_content
|
|
|
|
# are replaced correctly with the values of the new realm.
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-17 02:06:59 +01:00
|
|
|
def get_user_mention(r: Realm) -> str:
|
2021-02-12 08:19:30 +01:00
|
|
|
mentioned_user = UserProfile.objects.get(
|
|
|
|
delivery_email=self.example_email("hamlet"), realm=r
|
|
|
|
)
|
2020-06-09 00:25:09 +02:00
|
|
|
data_user_id = f'data-user-id="{mentioned_user.id}"'
|
2019-05-23 13:58:10 +02:00
|
|
|
mention_message = get_stream_messages(r).get(rendered_content__contains=data_user_id)
|
|
|
|
return mention_message.content
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-17 02:06:59 +01:00
|
|
|
def get_stream_mention(r: Realm) -> str:
|
2021-02-12 08:20:45 +01:00
|
|
|
mentioned_stream = get_stream("Denmark", r)
|
2020-06-09 00:25:09 +02:00
|
|
|
data_stream_id = f'data-stream-id="{mentioned_stream.id}"'
|
2019-05-28 13:06:48 +02:00
|
|
|
mention_message = get_stream_messages(r).get(rendered_content__contains=data_stream_id)
|
|
|
|
return mention_message.content
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2021-12-17 02:06:59 +01:00
|
|
|
def get_user_group_mention(r: Realm) -> str:
|
2024-04-18 12:23:46 +02:00
|
|
|
user_group = NamedUserGroup.objects.get(realm=r, name="hamletcharacters")
|
2020-06-09 00:25:09 +02:00
|
|
|
data_usergroup_id = f'data-user-group-id="{user_group.id}"'
|
2021-02-12 08:19:30 +01:00
|
|
|
mention_message = get_stream_messages(r).get(
|
|
|
|
rendered_content__contains=data_usergroup_id
|
|
|
|
)
|
2019-05-28 13:47:41 +02:00
|
|
|
return mention_message.content
|
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_userpresence_timestamp(r: Realm) -> set[object]:
|
2020-02-18 14:58:29 +01:00
|
|
|
# It should be sufficient to compare UserPresence timestamps to verify
|
|
|
|
# they got exported/imported correctly.
|
2020-06-11 16:03:47 +02:00
|
|
|
return set(
|
|
|
|
UserPresence.objects.filter(realm=r).values_list(
|
|
|
|
"last_active_time", "last_connected_time"
|
|
|
|
)
|
|
|
|
)
|
2020-02-18 14:58:29 +01:00
|
|
|
|
2021-12-07 17:36:30 +01:00
|
|
|
@getter
|
2024-07-12 02:30:17 +02:00
|
|
|
def get_realm_user_default_values(r: Realm) -> dict[str, object]:
|
2021-06-01 12:55:44 +02:00
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=r)
|
|
|
|
return {
|
|
|
|
"default_language": realm_user_default.default_language,
|
|
|
|
"twenty_four_hour_time": realm_user_default.twenty_four_hour_time,
|
|
|
|
}
|
|
|
|
|
2024-07-22 16:51:07 +02:00
|
|
|
@getter
|
|
|
|
def get_onboarding_usermessages(r: Realm) -> set[tuple[str, Any]]:
|
|
|
|
tups = {
|
|
|
|
(rec.message.content, rec.flags.mask)
|
|
|
|
for rec in OnboardingUserMessage.objects.filter(realm_id=r.id)
|
|
|
|
}
|
|
|
|
self.assertEqual(
|
|
|
|
tups, {("onboarding message", OnboardingUserMessage.flags.starred.mask)}
|
|
|
|
)
|
|
|
|
return tups
|
|
|
|
|
2021-12-07 18:46:35 +01:00
|
|
|
return getters
|
2021-02-14 00:03:40 +01:00
|
|
|
|
2022-08-26 23:14:12 +02:00
|
|
|
def test_import_realm_with_invalid_email_addresses_fails_validation(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(realm)
|
2022-08-26 23:14:12 +02:00
|
|
|
data = read_json("realm.json")
|
|
|
|
|
|
|
|
data["zerver_userprofile"][0]["delivery_email"] = "invalid_email_address"
|
|
|
|
|
|
|
|
output_dir = get_output_dir()
|
|
|
|
full_fn = os.path.join(output_dir, "realm.json")
|
|
|
|
with open(full_fn, "wb") as f:
|
|
|
|
f.write(orjson.dumps(data))
|
|
|
|
|
|
|
|
with self.assertRaises(ValidationError), self.assertLogs(level="INFO"):
|
|
|
|
do_import_realm(output_dir, "test-zulip")
|
|
|
|
|
|
|
|
# Now test a weird case where delivery_email is valid, but .email is not.
|
|
|
|
# Such data should never reasonably get generated, but we should still
|
|
|
|
# be defensive against it (since it can still happen due to bugs or manual editing
|
|
|
|
# of export files in an attempt to get us to import malformed data).
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(realm)
|
2022-08-26 23:14:12 +02:00
|
|
|
data = read_json("realm.json")
|
|
|
|
data["zerver_userprofile"][0]["email"] = "invalid_email_address"
|
|
|
|
|
|
|
|
output_dir = get_output_dir()
|
|
|
|
full_fn = os.path.join(output_dir, "realm.json")
|
|
|
|
with open(full_fn, "wb") as f:
|
|
|
|
f.write(orjson.dumps(data))
|
|
|
|
|
|
|
|
with self.assertRaises(ValidationError), self.assertLogs(level="INFO"):
|
|
|
|
do_import_realm(output_dir, "test-zulip2")
|
|
|
|
|
2021-06-01 12:55:44 +02:00
|
|
|
def test_import_realm_with_no_realm_user_default_table(self) -> None:
|
|
|
|
original_realm = Realm.objects.get(string_id="zulip")
|
|
|
|
|
|
|
|
RealmUserDefault.objects.get(realm=original_realm).delete()
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(original_realm)
|
2021-06-01 12:55:44 +02:00
|
|
|
|
|
|
|
with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
|
2022-08-26 23:19:01 +02:00
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
|
2021-06-01 12:55:44 +02:00
|
|
|
|
|
|
|
self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
|
|
|
|
imported_realm = Realm.objects.get(string_id="test-zulip")
|
|
|
|
|
|
|
|
# A RealmUserDefault row with default values is created if it is not present in
|
|
|
|
# the import data.
|
|
|
|
self.assertTrue(RealmUserDefault.objects.filter(realm=imported_realm).exists())
|
|
|
|
|
|
|
|
realm_user_default = RealmUserDefault.objects.get(realm=imported_realm)
|
|
|
|
self.assertEqual(realm_user_default.default_language, "en")
|
|
|
|
self.assertEqual(realm_user_default.twenty_four_hour_time, False)
|
|
|
|
|
2024-07-16 22:52:01 +02:00
|
|
|
@activate_push_notification_service()
|
2023-12-01 14:52:44 +01:00
|
|
|
def test_import_realm_notify_bouncer(self) -> None:
|
|
|
|
original_realm = Realm.objects.get(string_id="zulip")
|
|
|
|
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(original_realm)
|
2023-12-01 14:52:44 +01:00
|
|
|
|
2024-07-12 02:30:32 +02:00
|
|
|
with (
|
|
|
|
self.settings(BILLING_ENABLED=False),
|
|
|
|
self.assertLogs(level="INFO"),
|
|
|
|
patch("zerver.lib.remote_server.send_to_push_bouncer") as m,
|
|
|
|
):
|
2023-12-09 13:29:59 +01:00
|
|
|
get_response = {
|
|
|
|
"last_realm_count_id": 0,
|
|
|
|
"last_installation_count_id": 0,
|
|
|
|
"last_realmauditlog_id": 0,
|
|
|
|
}
|
|
|
|
|
|
|
|
def mock_send_to_push_bouncer_response( # type: ignore[return]
|
|
|
|
method: str, *args: Any
|
2024-07-12 02:30:23 +02:00
|
|
|
) -> dict[str, int] | None:
|
2023-12-09 13:29:59 +01:00
|
|
|
if method == "GET":
|
|
|
|
return get_response
|
|
|
|
|
|
|
|
m.side_effect = mock_send_to_push_bouncer_response
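# (Sketch of the flow being mocked: the analytics upload first GETs the
# bouncer's last-seen row ids, then POSTs rows newer than those; the zeroed
# counters above therefore make it upload everything for the new realm.)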
|
|
|
|
|
2023-12-11 22:12:22 +01:00
|
|
|
with self.captureOnCommitCallbacks(execute=True):
|
|
|
|
new_realm = do_import_realm(get_output_dir(), "test-zulip")
|
2023-12-01 14:52:44 +01:00
|
|
|
|
|
|
|
self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
|
2023-12-09 13:29:59 +01:00
|
|
|
calls_args_for_assert = m.call_args_list[1][0]
|
2023-12-01 14:52:44 +01:00
|
|
|
self.assertEqual(calls_args_for_assert[0], "POST")
|
|
|
|
self.assertEqual(calls_args_for_assert[1], "server/analytics")
|
|
|
|
self.assertIn(
|
2023-12-09 13:29:59 +01:00
|
|
|
new_realm.id, [realm["id"] for realm in json.loads(m.call_args_list[1][0][2]["realms"])]
|
2023-12-01 14:52:44 +01:00
|
|
|
)
|
|
|
|
|
2024-07-23 21:55:01 +02:00
|
|
|
def test_import_emoji_error(self) -> None:
|
|
|
|
user = self.example_user("hamlet")
|
|
|
|
realm = user.realm
|
|
|
|
|
|
|
|
self.upload_files_for_user(user)
|
|
|
|
self.upload_files_for_realm(user)
|
|
|
|
|
|
|
|
self.export_realm_and_create_auditlog(realm)
|
|
|
|
|
|
|
|
with (
|
|
|
|
self.settings(BILLING_ENABLED=False),
|
|
|
|
self.assertLogs(level="WARNING") as mock_log,
|
|
|
|
patch("zerver.lib.import_realm.upload_emoji_image", side_effect=BadImageError("test")),
|
|
|
|
):
|
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
|
|
|
|
self.assert_length(mock_log.output, 1)
|
|
|
|
self.assertIn("Could not thumbnail emoji image", mock_log.output[0])
|
|
|
|
|
2018-06-06 18:06:16 +02:00
|
|
|
def test_import_files_from_local(self) -> None:
|
2021-12-12 16:32:40 +01:00
|
|
|
user = self.example_user("hamlet")
|
|
|
|
realm = user.realm
|
2021-12-12 21:38:49 +01:00
|
|
|
|
|
|
|
self.upload_files_for_user(user)
|
|
|
|
self.upload_files_for_realm(user)
|
2019-07-19 19:15:23 +02:00
|
|
|
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(realm)
|
2018-06-06 18:06:16 +02:00
|
|
|
|
2021-03-09 18:51:16 +01:00
|
|
|
with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
|
2022-08-26 23:19:01 +02:00
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
|
2021-02-12 08:20:45 +01:00
|
|
|
imported_realm = Realm.objects.get(string_id="test-zulip")
|
2018-06-06 18:06:16 +02:00
|
|
|
|
|
|
|
# Test attachments
|
|
|
|
uploaded_file = Attachment.objects.get(realm=imported_realm)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(b"zulip!", uploaded_file.size)
|
2018-06-06 18:06:16 +02:00
|
|
|
|
2022-05-31 00:59:29 +02:00
|
|
|
assert settings.LOCAL_UPLOADS_DIR is not None
|
2022-12-12 22:02:25 +01:00
|
|
|
assert settings.LOCAL_FILES_DIR is not None
|
|
|
|
assert settings.LOCAL_AVATARS_DIR is not None
|
|
|
|
|
|
|
|
attachment_file_path = os.path.join(settings.LOCAL_FILES_DIR, uploaded_file.path_id)
|
2018-06-06 18:06:16 +02:00
|
|
|
self.assertTrue(os.path.isfile(attachment_file_path))
|
|
|
|
|
2024-07-23 21:55:01 +02:00
|
|
|
test_image_data = read_test_image_file("img.png")
|
|
|
|
self.assertIsNotNone(test_image_data)
|
|
|
|
|
2018-06-06 18:06:16 +02:00
|
|
|
# Test emojis
|
|
|
|
realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
|
|
|
|
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=imported_realm.id,
|
|
|
|
emoji_file_name=realm_emoji.file_name,
|
|
|
|
)
|
2022-12-12 22:02:25 +01:00
|
|
|
emoji_file_path = os.path.join(settings.LOCAL_AVATARS_DIR, emoji_path)
|
2024-07-23 21:55:01 +02:00
|
|
|
with open(emoji_file_path + ".original", "rb") as f:
|
|
|
|
self.assertEqual(f.read(), test_image_data)
|
2018-06-06 18:06:16 +02:00
|
|
|
self.assertTrue(os.path.isfile(emoji_file_path))
|
|
|
|
|
|
|
|
# Test avatars
|
2021-12-12 16:32:40 +01:00
|
|
|
user_profile = UserProfile.objects.get(full_name=user.full_name, realm=imported_realm)
|
2018-06-06 18:06:16 +02:00
|
|
|
avatar_path_id = user_avatar_path(user_profile) + ".original"
|
2022-12-12 22:02:25 +01:00
|
|
|
avatar_file_path = os.path.join(settings.LOCAL_AVATARS_DIR, avatar_path_id)
|
2018-06-06 18:06:16 +02:00
|
|
|
self.assertTrue(os.path.isfile(avatar_file_path))
|
2018-06-06 21:37:40 +02:00
|
|
|
|
2019-07-19 19:15:23 +02:00
|
|
|
# Test realm icon and logo
|
|
|
|
upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)
|
2022-12-12 22:02:25 +01:00
|
|
|
full_upload_path = os.path.join(settings.LOCAL_AVATARS_DIR, upload_path)
|
2019-07-19 19:15:23 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with open(os.path.join(full_upload_path, "icon.original"), "rb") as f:
|
2019-07-19 19:15:23 +02:00
|
|
|
self.assertEqual(f.read(), test_image_data)
|
|
|
|
self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "icon.png")))
|
|
|
|
self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with open(os.path.join(full_upload_path, "logo.original"), "rb") as f:
|
2019-07-19 19:15:23 +02:00
|
|
|
self.assertEqual(f.read(), test_image_data)
|
|
|
|
self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "logo.png")))
|
|
|
|
self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
with open(os.path.join(full_upload_path, "night_logo.original"), "rb") as f:
|
2019-07-19 19:15:23 +02:00
|
|
|
self.assertEqual(f.read(), test_image_data)
|
|
|
|
self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "night_logo.png")))
|
|
|
|
self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)
|
|
|
|
|
2018-06-06 21:37:40 +02:00
|
|
|
@use_s3_backend
|
|
|
|
def test_import_files_from_s3(self) -> None:
|
2018-12-07 18:15:51 +01:00
|
|
|
uploads_bucket, avatar_bucket = create_s3_buckets(
|
2021-02-12 08:19:30 +01:00
|
|
|
settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET
|
|
|
|
)
|
2018-06-06 21:37:40 +02:00
|
|
|
|
2021-12-12 16:32:40 +01:00
|
|
|
user = self.example_user("hamlet")
|
|
|
|
realm = user.realm
|
2019-07-19 19:15:23 +02:00
|
|
|
|
2021-12-12 21:38:49 +01:00
|
|
|
self.upload_files_for_realm(user)
|
|
|
|
self.upload_files_for_user(user)
|
2024-04-17 13:53:32 +02:00
|
|
|
self.export_realm_and_create_auditlog(realm)
|
2021-12-12 21:38:49 +01:00
|
|
|
|
2021-03-09 18:51:16 +01:00
|
|
|
with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
|
2022-08-26 23:19:01 +02:00
|
|
|
do_import_realm(get_output_dir(), "test-zulip")
|
2021-03-09 18:51:16 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
imported_realm = Realm.objects.get(string_id="test-zulip")
|
2021-12-12 17:17:33 +01:00
|
|
|
test_image_data = read_test_image_file("img.png")
|
2018-06-06 21:37:40 +02:00
|
|
|
|
|
|
|
# Test attachments
|
|
|
|
uploaded_file = Attachment.objects.get(realm=imported_realm)
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(b"zulip!", uploaded_file.size)
|
2018-06-06 21:37:40 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
attachment_content = uploads_bucket.Object(uploaded_file.path_id).get()["Body"].read()
|
2018-06-06 21:37:40 +02:00
|
|
|
self.assertEqual(b"zulip!", attachment_content)
|
|
|
|
|
|
|
|
# Test emojis
|
|
|
|
realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
|
|
|
|
emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
|
|
|
|
realm_id=imported_realm.id,
|
|
|
|
emoji_file_name=realm_emoji.file_name,
|
|
|
|
)
|
2024-07-23 21:55:01 +02:00
|
|
|
resized_emoji_key = avatar_bucket.Object(emoji_path)
|
|
|
|
self.assertIsNotNone(resized_emoji_key.get()["Body"].read())
|
|
|
|
self.assertEqual(resized_emoji_key.key, emoji_path)
|
|
|
|
original_emoji_path_id = emoji_path + ".original"
|
|
|
|
original_emoji_key = avatar_bucket.Object(original_emoji_path_id)
|
|
|
|
self.assertEqual(original_emoji_key.get()["Body"].read(), test_image_data)
|
|
|
|
self.assertEqual(original_emoji_key.key, original_emoji_path_id)
|
2018-06-06 21:37:40 +02:00
|
|
|
|
|
|
|
# Test avatars
|
2021-12-12 16:32:40 +01:00
|
|
|
user_profile = UserProfile.objects.get(full_name=user.full_name, realm=imported_realm)
|
2018-06-06 21:37:40 +02:00
|
|
|
avatar_path_id = user_avatar_path(user_profile) + ".original"
|
2018-12-07 17:52:01 +01:00
|
|
|
original_image_key = avatar_bucket.Object(avatar_path_id)
|
2018-06-06 21:37:40 +02:00
|
|
|
self.assertEqual(original_image_key.key, avatar_path_id)
|
2021-02-12 08:20:45 +01:00
|
|
|
image_data = avatar_bucket.Object(avatar_path_id).get()["Body"].read()
|
2018-06-06 21:37:40 +02:00
|
|
|
self.assertEqual(image_data, test_image_data)
|
2018-09-21 05:39:35 +02:00
|
|
|
|
2019-07-19 19:15:23 +02:00
|
|
|
# Test realm icon and logo
|
|
|
|
upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)
|
|
|
|
|
|
|
|
        original_icon_path_id = os.path.join(upload_path, "icon.original")
        original_icon_key = avatar_bucket.Object(original_icon_path_id)
        self.assertEqual(original_icon_key.get()["Body"].read(), test_image_data)
        resized_icon_path_id = os.path.join(upload_path, "icon.png")
        resized_icon_key = avatar_bucket.Object(resized_icon_path_id)
        self.assertEqual(resized_icon_key.key, resized_icon_path_id)
        self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)

        original_logo_path_id = os.path.join(upload_path, "logo.original")
        original_logo_key = avatar_bucket.Object(original_logo_path_id)
        self.assertEqual(original_logo_key.get()["Body"].read(), test_image_data)
        resized_logo_path_id = os.path.join(upload_path, "logo.png")
        resized_logo_key = avatar_bucket.Object(resized_logo_path_id)
        self.assertEqual(resized_logo_key.key, resized_logo_path_id)
        self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)

        night_logo_original_path_id = os.path.join(upload_path, "night_logo.original")
        night_logo_original_key = avatar_bucket.Object(night_logo_original_path_id)
        self.assertEqual(night_logo_original_key.get()["Body"].read(), test_image_data)
        resized_night_logo_path_id = os.path.join(upload_path, "night_logo.png")
        resized_night_logo_key = avatar_bucket.Object(resized_night_logo_path_id)
        self.assertEqual(resized_night_logo_key.key, resized_night_logo_path_id)
        self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

    def test_get_incoming_message_ids(self) -> None:
        import_dir = os.path.join(
            settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "import_fixtures"
        )
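        # With sort_by_date=True, the IDs are ordered by the fixture's
        # date_sent values; with sort_by_date=False, they come back in
        # ascending message-ID order, as the assertions below show.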
        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=True,
        )

        self.assertEqual(message_ids, [888, 999, 555])

        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=False,
        )

        self.assertEqual(message_ids, [555, 888, 999])

    def test_import_of_authentication_methods(self) -> None:
        with self.settings(
            AUTHENTICATION_BACKENDS=(
                "zproject.backends.EmailAuthBackend",
                "zproject.backends.AzureADAuthBackend",
                "zproject.backends.SAMLAuthBackend",
            )
        ):
            realm = get_realm("zulip")
            authentication_methods_dict = realm.authentication_methods_dict()
            for auth_method in authentication_methods_dict:
                authentication_methods_dict[auth_method] = True
            do_set_realm_authentication_methods(
                realm, authentication_methods_dict, acting_user=None
            )

            self.export_realm_and_create_auditlog(realm)

            with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
                do_import_realm(get_output_dir(), "test-zulip")

            imported_realm = Realm.objects.get(string_id="test-zulip")
            self.assertEqual(
                realm.authentication_methods_dict(),
                imported_realm.authentication_methods_dict(),
            )

            self.export_realm_and_create_auditlog(realm)

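            # With BILLING_ENABLED=True, the imported realm starts on the
            # limited plan, so authentication methods restricted to higher
            # plans (AzureAD, SAML) are dropped with a warning.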
            with self.settings(BILLING_ENABLED=True), self.assertLogs(level="WARN") as mock_warn:
                do_import_realm(get_output_dir(), "test-zulip2")

            imported_realm = Realm.objects.get(string_id="test-zulip2")

            self.assertEqual(
                imported_realm.authentication_methods_dict(),
                {"Email": True, "AzureAD": False, "SAML": False},
            )
            self.assertEqual(
                mock_warn.output,
                [
                    "WARNING:root:Dropped restricted authentication method: AzureAD",
                    "WARNING:root:Dropped restricted authentication method: SAML",
                ],
            )

    def test_plan_type(self) -> None:
        user = self.example_user("hamlet")
        realm = user.realm
        do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)

        self.upload_files_for_user(user)
        self.export_realm_and_create_auditlog(realm)

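        # The imported realm's plan type depends on BILLING_ENABLED on the
        # importing server: LIMITED (with its quotas) when billing is
        # enabled, SELF_HOSTED otherwise.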
        with self.settings(BILLING_ENABLED=True), self.assertLogs(level="INFO"):
            imported_realm = do_import_realm(get_output_dir(), "test-zulip-1")
            self.assertEqual(imported_realm.plan_type, Realm.PLAN_TYPE_LIMITED)
            self.assertEqual(imported_realm.max_invites, 100)
            self.assertEqual(imported_realm.upload_quota_gb, 5)
            self.assertEqual(imported_realm.message_visibility_limit, 10000)
            self.assertTrue(
                RealmAuditLog.objects.filter(
                    realm=imported_realm, event_type=AuditLogEventType.REALM_PLAN_TYPE_CHANGED
                ).exists()
            )

        # Importing the same export data twice would cause conflicts on
        # unique fields, so instead re-export the original realm via
        # self.export_realm_and_create_auditlog, which handles this issue.
        self.export_realm_and_create_auditlog(realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            imported_realm = do_import_realm(get_output_dir(), "test-zulip-2")
            self.assertEqual(imported_realm.plan_type, Realm.PLAN_TYPE_SELF_HOSTED)
            self.assertEqual(imported_realm.max_invites, 100)
            self.assertEqual(imported_realm.upload_quota_gb, None)
            self.assertEqual(imported_realm.message_visibility_limit, None)
            self.assertTrue(
                RealmAuditLog.objects.filter(
                    realm=imported_realm, event_type=AuditLogEventType.REALM_PLAN_TYPE_CHANGED
                ).exists()
            )

    def test_system_usergroup_audit_logs(self) -> None:
        realm = get_realm("zulip")
        self.export_realm_and_create_auditlog(realm)

        # Simulate an external export where user groups are missing.
        data = read_json("realm.json")
        data.pop("zerver_usergroup")
        data.pop("zerver_namedusergroup")
        data.pop("zerver_realmauditlog")
        data["zerver_realm"][0]["zulip_update_announcements_level"] = None
        data["zerver_realm"][0]["zulip_update_announcements_stream"] = None

        # User group data is missing, so all of the realm's group-based
        # settings should be None.
        for setting_name in Realm.REALM_PERMISSION_GROUP_SETTINGS:
            data["zerver_realm"][0][setting_name] = None

        with open(export_fn("realm.json"), "wb") as f:
            f.write(orjson.dumps(data))

        with self.assertLogs(level="INFO"):
            imported_realm = do_import_realm(get_output_dir(), "test-zulip-1")

        user_membership_logs = RealmAuditLog.objects.filter(
            realm=imported_realm,
            event_type=AuditLogEventType.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
        ).values_list("modified_user_id", "modified_user_group__name")
        logged_membership_by_user_id = defaultdict(set)
        for user_id, user_group_name in user_membership_logs:
            logged_membership_by_user_id[user_id].add(user_group_name)

        # Make sure that all users get logged as a member in their
        # corresponding system groups.
        for user in UserProfile.objects.filter(realm=imported_realm):
            expected_group_names = {NamedUserGroup.SYSTEM_USER_GROUP_ROLE_MAP[user.role]["name"]}
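            # A member should also appear in the "full members" system
            # group; every member here counts as a full member (the test
            # realm has no join-waiting period configured).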
            if SystemGroups.MEMBERS in expected_group_names:
                expected_group_names.add(SystemGroups.FULL_MEMBERS)
            self.assertSetEqual(logged_membership_by_user_id[user.id], expected_group_names)

    def test_import_realm_with_unapplied_migrations(self) -> None:
        realm = get_realm("zulip")
        with (
            self.assertRaises(Exception) as e,
            self.assertLogs(level="INFO"),
            patch("zerver.lib.export.get_migrations_by_app") as mock_export,
            patch("zerver.lib.import_realm.get_migrations_by_app") as mock_import,
        ):
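            # mock_export controls the migration state recorded in the
            # export's migration_status.json; mock_import controls what the
            # importing server believes it has applied.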
            mock_export.return_value = self.get_applied_migrations_fixture(
                "with_unapplied_migrations.json"
            )
            mock_import.return_value = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            self.export_realm(
                realm,
                export_type=RealmExport.EXPORT_FULL_WITH_CONSENT,
            )
            do_import_realm(get_output_dir(), "test-zulip")

        expected_error_message = self.get_applied_migrations_error_message(
            "unapplied_migrations_error.txt"
        )
        error_message = str(e.exception).strip()
        self.assertEqual(expected_error_message, error_message)

    def test_import_realm_with_extra_migrations(self) -> None:
        realm = get_realm("zulip")
        with (
            self.assertRaises(Exception) as e,
            self.assertLogs(level="INFO"),
            patch("zerver.lib.export.get_migrations_by_app") as mock_export,
            patch("zerver.lib.import_realm.get_migrations_by_app") as mock_import,
        ):
            mock_export.return_value = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            mock_import.return_value = self.get_applied_migrations_fixture(
                "with_unapplied_migrations.json"
            )
            self.export_realm(
                realm,
                export_type=RealmExport.EXPORT_FULL_WITH_CONSENT,
            )
            do_import_realm(get_output_dir(), "test-zulip")

        expected_error_message = self.get_applied_migrations_error_message(
            "extra_migrations_error.txt"
        )
        error_message = str(e.exception).strip()
        self.assertEqual(expected_error_message, error_message)

    def test_import_realm_with_extra_exported_apps(self) -> None:
        realm = get_realm("zulip")
        with (
            self.settings(BILLING_ENABLED=False),
            self.assertLogs(level="WARNING") as mock_log,
            patch("zerver.lib.export.get_migrations_by_app") as mock_export,
            patch("zerver.lib.import_realm.get_migrations_by_app") as mock_import,
        ):
            mock_export.return_value = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            mock_import.return_value = self.get_applied_migrations_fixture("with_missing_apps.json")
            self.export_realm_and_create_auditlog(
                realm,
                export_type=RealmExport.EXPORT_FULL_WITH_CONSENT,
            )
            do_import_realm(get_output_dir(), "test-zulip")

        missing_apps_log = [
            "WARNING:root:Exported realm has 'phonenumber' app installed, but this server does not.",
            "WARNING:root:Exported realm has 'sessions' app installed, but this server does not.",
        ]
        # The log output is sorted because its order is nondeterministic.
        self.assertEqual(sorted(mock_log.output), sorted(missing_apps_log))
        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        imported_realm = Realm.objects.get(string_id="test-zulip")
        self.assertNotEqual(imported_realm.id, realm.id)

    def test_import_realm_with_missing_apps(self) -> None:
        realm = get_realm("zulip")
        with (
            self.settings(BILLING_ENABLED=False),
            self.assertLogs(level="WARNING") as mock_log,
            patch("zerver.lib.export.get_migrations_by_app") as mock_export,
            patch("zerver.lib.import_realm.get_migrations_by_app") as mock_import,
        ):
            mock_export.return_value = self.get_applied_migrations_fixture("with_missing_apps.json")
            mock_import.return_value = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            self.export_realm_and_create_auditlog(
                realm,
                export_type=RealmExport.EXPORT_FULL_WITH_CONSENT,
            )
            do_import_realm(get_output_dir(), "test-zulip")

        missing_apps_log = [
            "WARNING:root:This server has 'phonenumber' app installed, but exported realm does not.",
            "WARNING:root:This server has 'sessions' app installed, but exported realm does not.",
        ]
        self.assertEqual(sorted(mock_log.output), sorted(missing_apps_log))
        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        imported_realm = Realm.objects.get(string_id="test-zulip")
        self.assertNotEqual(imported_realm.id, realm.id)

    def test_check_migration_for_zulip_cloud_realm(self) -> None:
        # This test ensures that `check_migrations_status` correctly handles
        # checking the migrations of a Zulip Cloud-like realm (with zilencer/
        # corporate apps installed) when importing into a self-hosted realm
        # (where these apps are not installed).
        realm = get_realm("zulip")
        with (
            self.settings(BILLING_ENABLED=False),
            self.assertLogs(level="INFO"),
            patch("zerver.lib.export.get_migrations_by_app") as mock_export,
            patch("zerver.lib.import_realm.get_migrations_by_app") as mock_import,
        ):
            mock_export.return_value = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            self_hosted_migrations = self.get_applied_migrations_fixture(
                "with_complete_migrations.json"
            )
            for key in ["zilencer", "corporate"]:
                self_hosted_migrations.pop(key, None)
            mock_import.return_value = self_hosted_migrations
            self.export_realm_and_create_auditlog(
                realm,
                export_type=RealmExport.EXPORT_FULL_WITH_CONSENT,
            )
            do_import_realm(get_output_dir(), "test-zulip")

        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        imported_realm = Realm.objects.get(string_id="test-zulip")
        self.assertNotEqual(imported_realm.id, realm.id)

    def test_import_realm_without_migration_status_file(self) -> None:
        realm = get_realm("zulip")
        with patch("zerver.lib.export.export_migration_status"):
            self.export_realm_and_create_auditlog(realm)

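        # Patching export_migration_status above produced an export with no
        # migration_status.json, which the importer must reject.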
        with self.assertRaises(Exception) as e, self.assertLogs(level="INFO"):
            do_import_realm(
                get_output_dir(),
                "test-zulip",
            )
        expected_error_message = "Missing migration_status.json file! Make sure you're using the same Zulip version as the exported realm."
        self.assertEqual(expected_error_message, str(e.exception))

    def test_import_realm_with_different_stated_zulip_version(self) -> None:
        realm = get_realm("zulip")
        self.export_realm_and_create_auditlog(realm)

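        # The export records the exporting server's ZULIP_VERSION; patching
        # the importer's version to "8.0" forces a major-version mismatch.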
        with (
            patch("zerver.lib.import_realm.ZULIP_VERSION", "8.0"),
            self.assertRaises(Exception) as e,
            self.assertLogs(level="INFO"),
        ):
            do_import_realm(
                get_output_dir(),
                "test-zulip",
            )
        expected_error_message = (
            "Export was generated on a different Zulip major version.\n"
            f"Export={ZULIP_VERSION}\n"
            "Server=8.0"
        )
        self.assertEqual(expected_error_message, str(e.exception))


class SingleUserExportTest(ExportFile):
    def do_files_test(self, is_s3: bool) -> None:
        output_dir = make_export_output_dir()

        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        self.upload_files_for_user(cordelia)
        self.upload_files_for_user(othello, emoji_name="bogus")  # try to pollute export

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

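        # The verify_* helpers check that only Cordelia's files made it into
        # the export; Othello's uploads above must not appear.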
        self.verify_uploads(cordelia, is_s3=is_s3)
        self.verify_avatars(cordelia)
        self.verify_emojis(cordelia, is_s3=is_s3)

    def test_local_files(self) -> None:
        self.do_files_test(is_s3=False)

    @use_s3_backend
    def test_s3_files(self) -> None:
        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)
        self.do_files_test(is_s3=True)

    def test_message_data(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")
        polonius = self.example_user("polonius")

        self.subscribe(cordelia, "Denmark")

        smile_message_id = self.send_stream_message(hamlet, "Denmark", "SMILE!")

        check_add_reaction(
            user_profile=cordelia,
            message_id=smile_message_id,
            emoji_name="smile",
            emoji_code=None,
            reaction_type=None,
        )
        reaction = Reaction.objects.order_by("id").last()
        assert reaction

        # Send a message that Cordelia should not have in the export.
        self.send_stream_message(othello, "Denmark", "bogus")

        hi_stream_message_id = self.send_stream_message(cordelia, "Denmark", "hi stream")
        assert most_recent_usermessage(cordelia).message_id == hi_stream_message_id

        # Try to fool the export again
        self.send_personal_message(othello, hamlet)
        self.send_group_direct_message(othello, [hamlet, polonius])

        hi_hamlet_message_id = self.send_personal_message(cordelia, hamlet, "hi hamlet")

        hi_peeps_message_id = self.send_group_direct_message(
            cordelia, [hamlet, othello], "hi peeps"
        )
        bye_peeps_message_id = self.send_group_direct_message(
            othello, [cordelia, hamlet], "bye peeps"
        )

        bye_hamlet_message_id = self.send_personal_message(cordelia, hamlet, "bye hamlet")

        hi_myself_message_id = self.send_personal_message(cordelia, cordelia, "hi myself")
        bye_stream_message_id = self.send_stream_message(cordelia, "Denmark", "bye stream")

        output_dir = make_export_output_dir()
        cordelia = self.example_user("cordelia")

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        messages = read_json("messages-000001.json")

        direct_message_group_name = (
            "Cordelia, Lear's daughter, King Hamlet, Othello, the Moor of Venice"
        )

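        # The export should contain exactly the messages Cordelia sent, the
        # direct messages she received, and the message she reacted to;
        # Othello's other traffic above must be absent.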
        excerpt = [
            (rec["id"], rec["content"], rec["recipient_name"])
            for rec in messages["zerver_message"][-8:]
        ]
        self.assertEqual(
            excerpt,
            [
                (smile_message_id, "SMILE!", "Denmark"),
                (hi_stream_message_id, "hi stream", "Denmark"),
                (hi_hamlet_message_id, "hi hamlet", hamlet.full_name),
                (hi_peeps_message_id, "hi peeps", direct_message_group_name),
                (bye_peeps_message_id, "bye peeps", direct_message_group_name),
                (bye_hamlet_message_id, "bye hamlet", hamlet.full_name),
                (hi_myself_message_id, "hi myself", cordelia.full_name),
                (bye_stream_message_id, "bye stream", "Denmark"),
            ],
        )

    def test_user_data(self) -> None:
        # We register checkers during test setup, and then we call them at the end.
        checkers = {}

        def checker(f: Callable[[list[Record]], None]) -> Callable[[list[Record]], None]:
            # Every checker function that gets decorated here should be named
            # after one of the tables that we export in the single-user
            # export. The table name is then used by code toward the end of
            # the test to determine which portion of the data from user.json
            # to pass into the checker.
            table_name = f.__name__
            assert table_name not in checkers
            checkers[table_name] = f
            return f

        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        realm = cordelia.realm
        scotland = get_stream("Scotland", realm)
        client = get_client("some_app")
        now = timezone_now()

        @checker
        def zerver_userprofile(records: list[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["id"], cordelia.id)
            self.assertEqual(rec["email"], cordelia.email)
            self.assertEqual(rec["full_name"], cordelia.full_name)

        """
        Try to set up the test data roughly in order of table name, where
        possible, just to make it a bit easier to read the test.
        """

        do_add_alert_words(cordelia, ["pizza"])
        do_add_alert_words(hamlet, ["bogus"])

        @checker
        def zerver_alertword(records: list[Record]) -> None:
            self.assertEqual(records[-1]["word"], "pizza")

        favorite_city = try_add_realm_custom_profile_field(
            realm,
            "Favorite city",
            CustomProfileField.SHORT_TEXT,
        )

        def set_favorite_city(user: UserProfile, city: str) -> None:
            do_update_user_custom_profile_data_if_changed(
                user, [dict(id=favorite_city.id, value=city)]
            )

        set_favorite_city(cordelia, "Seattle")
        set_favorite_city(othello, "Moscow")

        @checker
        def zerver_customprofilefieldvalue(records: list[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["field"], favorite_city.id)
            self.assertEqual(rec["rendered_value"], "<p>Seattle</p>")

        do_mute_user(cordelia, othello)
        do_mute_user(hamlet, cordelia)  # should be ignored

        @checker
        def zerver_muteduser(records: list[Record]) -> None:
            self.assertEqual(records[-1]["muted_user"], othello.id)

        smile_message_id = self.send_stream_message(hamlet, "Denmark")

        check_add_reaction(
            user_profile=cordelia,
            message_id=smile_message_id,
            emoji_name="smile",
            emoji_code=None,
            reaction_type=None,
        )
        reaction = Reaction.objects.order_by("id").last()

        @checker
        def zerver_reaction(records: list[Record]) -> None:
            assert reaction
            self.assertEqual(
                records[-1],
                dict(
                    id=reaction.id,
                    user_profile=cordelia.id,
                    emoji_name="smile",
                    reaction_type="unicode_emoji",
                    emoji_code=reaction.emoji_code,
                    message=smile_message_id,
                ),
            )

        self.subscribe(cordelia, "Scotland")

        create_stream_if_needed(realm, "bogus")
        self.subscribe(othello, "bogus")

        @checker
        def zerver_recipient(records: list[Record]) -> None:
            last_recipient = Recipient.objects.get(id=records[-1]["id"])
            self.assertEqual(last_recipient.type, Recipient.STREAM)
            stream_id = last_recipient.type_id
            self.assertEqual(stream_id, get_stream("Scotland", realm).id)

        @checker
        def zerver_stream(records: list[Record]) -> None:
            streams = {rec["name"] for rec in records}
            self.assertEqual(streams, {"Scotland", "Verona"})

        @checker
        def zerver_subscription(records: list[Record]) -> None:
            last_recipient = Recipient.objects.get(id=records[-1]["recipient"])
            self.assertEqual(last_recipient.type, Recipient.STREAM)
            stream_id = last_recipient.type_id
            self.assertEqual(stream_id, get_stream("Scotland", realm).id)

        UserActivity.objects.create(
            user_profile_id=cordelia.id,
            client_id=client.id,
            query="/some/endpoint",
            count=5,
            last_visit=now,
        )
        UserActivity.objects.create(
            user_profile_id=othello.id,
            client_id=client.id,
            query="/bogus",
            count=20,
            last_visit=now,
        )

        @checker
        def zerver_useractivity(records: list[Record]) -> None:
            (rec,) = records
            self.assertEqual(
                rec,
                dict(
                    client=client.id,
                    count=5,
                    id=rec["id"],
                    last_visit=rec["last_visit"],
                    query="/some/endpoint",
                    user_profile=cordelia.id,
                ),
            )
            self.assertEqual(make_datetime(rec["last_visit"]), now)

        do_update_user_activity_interval(cordelia, now)
        do_update_user_activity_interval(othello, now)

        @checker
        def zerver_useractivityinterval(records: list[Record]) -> None:
            (rec,) = records
            self.assertEqual(rec["user_profile"], cordelia.id)
            self.assertEqual(make_datetime(rec["start"]), now)

        do_update_user_presence(cordelia, client, now, UserPresence.LEGACY_STATUS_ACTIVE_INT)
        do_update_user_presence(othello, client, now, UserPresence.LEGACY_STATUS_IDLE_INT)

        @checker
        def zerver_userpresence(records: list[Record]) -> None:
            self.assertEqual(make_datetime(records[-1]["last_connected_time"]), now)
            self.assertEqual(make_datetime(records[-1]["last_active_time"]), now)

        do_update_user_status(
            user_profile=cordelia,
            away=None,
            status_text="on vacation",
            client_id=client.id,
            emoji_name=None,
            emoji_code=None,
            reaction_type=None,
        )

        do_update_user_status(
            user_profile=othello,
            away=False,
            status_text="at my desk",
            client_id=client.id,
            emoji_name=None,
            emoji_code=None,
            reaction_type=None,
        )

        @checker
        def zerver_userstatus(records: list[Record]) -> None:
            rec = records[-1]
            self.assertEqual(rec["status_text"], "on vacation")

        do_set_user_topic_visibility_policy(
            cordelia,
            scotland,
            "bagpipe music",
            visibility_policy=UserTopic.VisibilityPolicy.MUTED,
        )
        do_set_user_topic_visibility_policy(
            othello, scotland, "nessie", visibility_policy=UserTopic.VisibilityPolicy.MUTED
        )

        @checker
        def zerver_usertopic(records: list[Record]) -> None:
            rec = records[-1]
            self.assertEqual(rec["topic_name"], "bagpipe music")
            self.assertEqual(rec["visibility_policy"], UserTopic.VisibilityPolicy.MUTED)

"""
|
|
|
|
For some tables we don't bother with super realistic test data
|
|
|
|
setup.
|
|
|
|
"""
|
|
|
|
UserCount.objects.create(
|
|
|
|
user=cordelia, realm=realm, property="whatever", value=42, end_time=now
|
|
|
|
)
|
|
|
|
UserCount.objects.create(
|
|
|
|
user=othello, realm=realm, property="bogus", value=999999, end_time=now
|
|
|
|
)
|
|
|
|
|
|
|
|
@checker
|
2024-07-12 02:30:17 +02:00
|
|
|
def analytics_usercount(records: list[Record]) -> None:
|
2021-12-11 19:44:06 +01:00
|
|
|
(rec,) = records
|
|
|
|
self.assertEqual(rec["value"], 42)
|
|
|
|
|
2023-12-01 08:20:48 +01:00
|
|
|
        OnboardingStep.objects.create(user=cordelia, onboarding_step="topics")
        OnboardingStep.objects.create(user=othello, onboarding_step="bogus")

        @checker
        def zerver_onboardingstep(records: list[Record]) -> None:
            self.assertEqual(records[-1]["onboarding_step"], "topics")

"""
|
|
|
|
The zerver_realmauditlog checker basically assumes that
|
|
|
|
we subscribed Cordelia to Scotland.
|
|
|
|
"""
|
|
|
|
|
|
|
|
@checker
|
2024-07-12 02:30:17 +02:00
|
|
|
def zerver_realmauditlog(records: list[Record]) -> None:
|
2021-12-11 19:44:06 +01:00
|
|
|
self.assertEqual(records[-1]["modified_stream"], scotland.id)
|
|
|
|
|
2021-12-11 14:25:52 +01:00
|
|
|
        output_dir = make_export_output_dir()

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        user = read_json("user.json")

        for table_name, f in checkers.items():
            f(user[table_name])

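        # Fail loudly if user.json contains a table that no registered
        # checker covers, so newly exported tables cannot silently skip
        # verification.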
        for table_name in user:
            if table_name not in checkers:
                raise AssertionError(
                    f"""
                    Please create a checker called "{table_name}"
                    to check the user["{table_name}"] data in user.json.

                    Please be thoughtful about where you introduce
                    the new code--if you read the test, the patterns
                    for how to test table data should be clear.
                    Try to mostly keep checkers in alphabetical order.
                    """
                )