import os
from typing import Any, Callable, Dict, FrozenSet, List, Optional, Set, Tuple
from unittest.mock import patch

import orjson
from django.conf import settings
from django.db.models import Q
from django.utils.timezone import now as timezone_now

from zerver.lib import upload
from zerver.lib.actions import (
    do_add_reaction,
    do_change_icon_source,
    do_change_logo_source,
    do_change_plan_type,
    do_create_user,
    do_deactivate_user,
    do_update_user_presence,
)
from zerver.lib.avatar_hash import user_avatar_path
from zerver.lib.bot_config import set_bot_config
from zerver.lib.bot_lib import StateHandler
from zerver.lib.export import do_export_realm, do_export_user, export_usermessages_batch
from zerver.lib.import_realm import do_import_realm, get_incoming_message_ids
from zerver.lib.streams import create_stream_if_needed
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import create_s3_buckets, get_test_image_file, use_s3_backend
from zerver.lib.topic_mutes import add_topic_mute
from zerver.lib.upload import (
    claim_attachment,
    upload_avatar_image,
    upload_emoji_image,
    upload_message_file,
)
from zerver.lib.utils import query_chunker
from zerver.models import (
    AlertWord,
    Attachment,
    BotConfigData,
    BotStorageData,
    CustomProfileField,
    CustomProfileFieldValue,
    Huddle,
    Message,
    MutedTopic,
    Reaction,
    Realm,
    RealmAuditLog,
    RealmEmoji,
    Recipient,
    Stream,
    Subscription,
    UserGroup,
    UserGroupMembership,
    UserHotspot,
    UserMessage,
    UserPresence,
    UserProfile,
    get_active_streams,
    get_client,
    get_huddle_hash,
    get_realm,
    get_stream,
)


class QueryUtilTest(ZulipTestCase):
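    # As exercised by the assertions below, query_chunker takes a list of
    # mutually disjoint querysets and yields lists of rows in ascending id
    # order, recording every id it emits in the caller-supplied
    # id_collector set.  (This summary is inferred from the test itself.)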
    def _create_messages(self) -> None:
        for name in ["cordelia", "hamlet", "iago"]:
            user = self.example_user(name)
            for _ in range(5):
                self.send_personal_message(user, self.example_user("othello"))

    def test_query_chunker(self) -> None:
        self._create_messages()

        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")

        def get_queries() -> List[Any]:
            queries = [
                Message.objects.filter(sender_id=cordelia.id),
                Message.objects.filter(sender_id=hamlet.id),
                Message.objects.exclude(sender_id__in=[cordelia.id, hamlet.id]),
            ]
            return queries

        for query in get_queries():
            # For our test to be meaningful, we want non-empty queries
            # at first
            assert len(list(query)) > 0

        queries = get_queries()

        all_msg_ids: Set[int] = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=20,
        )

        all_row_ids = []
        for chunk in chunker:
            for row in chunk:
                all_row_ids.append(row.id)

        self.assertEqual(all_row_ids, sorted(all_row_ids))
        self.assertEqual(len(all_msg_ids), len(Message.objects.all()))

        # Now just search for cordelia/hamlet. Note that we don't really
        # need the order_by here, but it should be harmless.
        queries = [
            Message.objects.filter(sender_id=cordelia.id).order_by("id"),
            Message.objects.filter(sender_id=hamlet.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=7,  # use a different size
        )
        list(chunker)  # exhaust the iterator
        self.assertEqual(
            len(all_msg_ids),
            len(Message.objects.filter(sender_id__in=[cordelia.id, hamlet.id])),
        )

        # Try just a single query to validate chunking.
        queries = [
            Message.objects.exclude(sender_id=cordelia.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=11,  # use a different size each time
        )
        list(chunker)  # exhaust the iterator
        self.assertEqual(
            len(all_msg_ids),
            len(Message.objects.exclude(sender_id=cordelia.id)),
        )
        self.assertTrue(len(all_msg_ids) > 15)

        # Verify assertions about disjoint-ness.
        queries = [
            Message.objects.exclude(sender_id=cordelia.id),
            Message.objects.filter(sender_id=hamlet.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=13,  # use a different size each time
        )
        with self.assertRaises(AssertionError):
            list(chunker)  # exercise the iterator

        # Try to confuse things with ids part of the query...
        queries = [
            Message.objects.filter(id__lte=10),
            Message.objects.filter(id__gt=10),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=11,  # use a different size each time
        )
        self.assertEqual(len(all_msg_ids), 0)  # until we actually use the iterator
        list(chunker)  # exhaust the iterator
        self.assertEqual(len(all_msg_ids), len(Message.objects.all()))

        # Verify that we can just get the first chunk with a next() call.
        queries = [
            Message.objects.all(),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=10,  # use a different size each time
        )
        first_chunk = next(chunker)
        self.assertEqual(len(first_chunk), 10)
        self.assertEqual(len(all_msg_ids), 10)
        expected_msg = Message.objects.all()[0:10][5]
        actual_msg = first_chunk[5]
        self.assertEqual(actual_msg.content, expected_msg.content)
        self.assertEqual(actual_msg.sender_id, expected_msg.sender_id)


class ImportExportTest(ZulipTestCase):
    def setUp(self) -> None:
        super().setUp()
        self.rm_tree(settings.LOCAL_UPLOADS_DIR)

    def _make_output_dir(self) -> str:
        output_dir = os.path.join(settings.TEST_WORKER_DIR, "test-export")
        self.rm_tree(output_dir)
        os.makedirs(output_dir, exist_ok=True)
        return output_dir

    def _export_realm(
        self,
        realm: Realm,
        exportable_user_ids: Optional[Set[int]] = None,
        consent_message_id: Optional[int] = None,
    ) -> Dict[str, Any]:
        output_dir = self._make_output_dir()
        with patch("zerver.lib.export.create_soft_link"), self.assertLogs(level="INFO"):
            do_export_realm(
                realm=realm,
                output_dir=output_dir,
                threads=0,
                exportable_user_ids=exportable_user_ids,
                consent_message_id=consent_message_id,
            )

        export_usermessages_batch(
            input_path=os.path.join(output_dir, "messages-000001.json.partial"),
            output_path=os.path.join(output_dir, "messages-000001.json"),
            consent_message_id=consent_message_id,
        )
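
        # Larger exports are split across numbered messages-*.json files, so
        # convert the second batch too if it exists; the FileNotFoundError
        # below simply means the export fit in a single batch.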
        try:
            export_usermessages_batch(
                input_path=os.path.join(output_dir, "messages-000002.json.partial"),
                output_path=os.path.join(output_dir, "messages-000002.json"),
                consent_message_id=consent_message_id,
            )
        except FileNotFoundError:
            pass

        def read_file(fn: str) -> Any:
            full_fn = os.path.join(output_dir, fn)
            with open(full_fn, "rb") as f:
                return orjson.loads(f.read())

        result = {}
        result["realm"] = read_file("realm.json")
        result["attachment"] = read_file("attachment.json")
        result["message"] = read_file("messages-000001.json")
        try:
            message = read_file("messages-000002.json")
            result["message"]["zerver_usermessage"].extend(message["zerver_usermessage"])
            result["message"]["zerver_message"].extend(message["zerver_message"])
        except FileNotFoundError:
            pass
        result["uploads_dir"] = os.path.join(output_dir, "uploads")
        result["uploads_dir_records"] = read_file(os.path.join("uploads", "records.json"))
        result["emoji_dir"] = os.path.join(output_dir, "emoji")
        result["emoji_dir_records"] = read_file(os.path.join("emoji", "records.json"))
        result["avatar_dir"] = os.path.join(output_dir, "avatars")
        result["avatar_dir_records"] = read_file(os.path.join("avatars", "records.json"))
        result["realm_icons_dir"] = os.path.join(output_dir, "realm_icons")
        result["realm_icons_dir_records"] = read_file(os.path.join("realm_icons", "records.json"))
        return result

    def _setup_export_files(self, realm: Realm) -> Tuple[str, str, str, bytes]:
        message = Message.objects.all()[0]
        user_profile = message.sender
        url = upload_message_file(
            "dummy.txt", len(b"zulip!"), "text/plain", b"zulip!", user_profile
        )
        attachment_path_id = url.replace("/user_uploads/", "")
        claim_attachment(
            user_profile=user_profile,
            path_id=attachment_path_id,
            message=message,
            is_message_realm_public=True,
        )

        avatar_path_id = user_avatar_path(user_profile)
        original_avatar_path_id = avatar_path_id + ".original"

        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=realm.id,
            emoji_file_name="1.png",
        )

        with get_test_image_file("img.png") as img_file:
            upload_emoji_image(img_file, "1.png", user_profile)
        with get_test_image_file("img.png") as img_file:
            upload_avatar_image(img_file, user_profile, user_profile)

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_icon_image(img_file, user_profile)
        do_change_icon_source(realm, Realm.ICON_UPLOADED)

        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(img_file, user_profile, night=False)
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, False, acting_user=user_profile)
        with get_test_image_file("img.png") as img_file:
            upload.upload_backend.upload_realm_logo_image(img_file, user_profile, night=True)
        do_change_logo_source(realm, Realm.LOGO_UPLOADED, True, acting_user=user_profile)

        with get_test_image_file("img.png") as img_file:
            test_image = img_file.read()
        message.sender.avatar_source = "U"
        message.sender.save()

        realm.refresh_from_db()

        return attachment_path_id, emoji_path, original_avatar_path_id, test_image

    """
    Tests for export
    """

    def test_export_files_from_local(self) -> None:
        realm = Realm.objects.get(string_id="zulip")
        path_id, emoji_path, original_avatar_path_id, test_image = self._setup_export_files(realm)
        full_data = self._export_realm(realm)

        data = full_data["attachment"]
        self.assertEqual(len(data["zerver_attachment"]), 1)
        record = data["zerver_attachment"][0]
        self.assertEqual(record["path_id"], path_id)

        # Test uploads
        fn = os.path.join(full_data["uploads_dir"], path_id)
        with open(fn) as f:
            self.assertEqual(f.read(), "zulip!")
        records = full_data["uploads_dir_records"]
        self.assertEqual(records[0]["path"], path_id)
        self.assertEqual(records[0]["s3_path"], path_id)

        # Test emojis
        fn = os.path.join(full_data["emoji_dir"], emoji_path)
        fn = fn.replace("1.png", "")
        self.assertEqual("1.png", os.listdir(fn)[0])
        records = full_data["emoji_dir_records"]
        self.assertEqual(records[0]["file_name"], "1.png")
        self.assertEqual(records[0]["path"], "2/emoji/images/1.png")
        self.assertEqual(records[0]["s3_path"], "2/emoji/images/1.png")

        # Test realm logo and icon
        records = full_data["realm_icons_dir_records"]
        image_files = set()
        for record in records:
            image_path = os.path.join(full_data["realm_icons_dir"], record["path"])
            if image_path[-9:] == ".original":
                with open(image_path, "rb") as image_file:
                    image_data = image_file.read()
                self.assertEqual(image_data, test_image)
            else:
                self.assertTrue(os.path.exists(image_path))

            image_files.add(os.path.basename(image_path))
        self.assertEqual(
            set(image_files),
            {
                "night_logo.png",
                "logo.original",
                "logo.png",
                "icon.png",
                "night_logo.original",
                "icon.original",
            },
        )

        # Test avatars
        fn = os.path.join(full_data["avatar_dir"], original_avatar_path_id)
        with open(fn, "rb") as fb:
            fn_data = fb.read()
        self.assertEqual(fn_data, test_image)
        records = full_data["avatar_dir_records"]
        record_path = [record["path"] for record in records]
        record_s3_path = [record["s3_path"] for record in records]
        self.assertIn(original_avatar_path_id, record_path)
        self.assertIn(original_avatar_path_id, record_s3_path)

    @use_s3_backend
    def test_export_files_from_s3(self) -> None:
        create_s3_buckets(settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET)

        realm = Realm.objects.get(string_id="zulip")
        (
            attachment_path_id,
            emoji_path,
            original_avatar_path_id,
            test_image,
        ) = self._setup_export_files(realm)
        full_data = self._export_realm(realm)

        data = full_data["attachment"]
        self.assertEqual(len(data["zerver_attachment"]), 1)
        record = data["zerver_attachment"][0]
        self.assertEqual(record["path_id"], attachment_path_id)
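
        # The id fields checked by this helper come from records.json, so
        # these checks verify that ids survived JSON serialization as
        # integers rather than strings.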
        def check_types(user_profile_id: int, realm_id: int) -> None:
            self.assertEqual(type(user_profile_id), int)
            self.assertEqual(type(realm_id), int)

        # Test uploads
        fields = attachment_path_id.split("/")
        fn = os.path.join(full_data["uploads_dir"], os.path.join(fields[0], fields[1], fields[2]))
        with open(fn) as f:
            self.assertEqual(f.read(), "zulip!")
        records = full_data["uploads_dir_records"]
        self.assertEqual(records[0]["path"], os.path.join(fields[0], fields[1], fields[2]))
        self.assertEqual(records[0]["s3_path"], attachment_path_id)
        check_types(records[0]["user_profile_id"], records[0]["realm_id"])

        # Test emojis
        fn = os.path.join(full_data["emoji_dir"], emoji_path)
        fn = fn.replace("1.png", "")
        self.assertIn("1.png", os.listdir(fn))
        records = full_data["emoji_dir_records"]
        self.assertEqual(records[0]["file_name"], "1.png")
        self.assertTrue("last_modified" in records[0])
        self.assertEqual(records[0]["path"], "2/emoji/images/1.png")
        self.assertEqual(records[0]["s3_path"], "2/emoji/images/1.png")
        check_types(records[0]["user_profile_id"], records[0]["realm_id"])

        # Test realm logo and icon
        records = full_data["realm_icons_dir_records"]
        image_files = set()
        for record in records:
            image_path = os.path.join(full_data["realm_icons_dir"], record["s3_path"])
            if image_path[-9:] == ".original":
                with open(image_path, "rb") as image_file:
                    image_data = image_file.read()
                self.assertEqual(image_data, test_image)
            else:
                self.assertTrue(os.path.exists(image_path))

            image_files.add(os.path.basename(image_path))
        self.assertEqual(
            set(image_files),
            {
                "night_logo.png",
                "logo.original",
                "logo.png",
                "icon.png",
                "night_logo.original",
                "icon.original",
            },
        )

        # Test avatars
        fn = os.path.join(full_data["avatar_dir"], original_avatar_path_id)
        with open(fn, "rb") as file:
            fn_data = file.read()
        self.assertEqual(fn_data, test_image)
        records = full_data["avatar_dir_records"]
        record_path = [record["path"] for record in records]
        record_s3_path = [record["s3_path"] for record in records]
        self.assertIn(original_avatar_path_id, record_path)
        self.assertIn(original_avatar_path_id, record_s3_path)
        check_types(records[0]["user_profile_id"], records[0]["realm_id"])

    def test_zulip_realm(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        default_bot = self.example_user("default_bot")
        pm_a_msg_id = self.send_personal_message(self.example_user("AARON"), default_bot)
        pm_b_msg_id = self.send_personal_message(default_bot, self.example_user("iago"))
        pm_c_msg_id = self.send_personal_message(
            self.example_user("othello"), self.example_user("hamlet")
        )

        realm_emoji = RealmEmoji.objects.get(realm=realm)
        realm_emoji.delete()
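        # (The prepopulated realm emoji is removed around the export and
        # restored afterward; its image file is not on disk in this test,
        # since setUp wipes LOCAL_UPLOADS_DIR, so it cannot be exported
        # cleanly.)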
        full_data = self._export_realm(realm)
        realm_emoji.save()

        data = full_data["realm"]
        self.assertEqual(len(data["zerver_userprofile_crossrealm"]), 3)
        self.assertEqual(len(data["zerver_userprofile_mirrordummy"]), 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {"Denmark", "Rome", "Scotland", "Venice", "Verona"},
        )

        exported_alert_words = data["zerver_alertword"]

        # We set up 4 alert words for Hamlet, Cordelia, etc.
        # when we populate the test database.
        num_zulip_users = 10
        self.assertEqual(len(exported_alert_words), num_zulip_users * 4)

        self.assertIn("robotics", {r["word"] for r in exported_alert_words})

        data = full_data["message"]
        um = UserMessage.objects.all()[0]
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)

    def test_export_realm_with_exportable_user_ids(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        user_ids = {iago.id, hamlet.id}

        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        realm_emoji = RealmEmoji.objects.get(realm=realm)
        realm_emoji.delete()
        full_data = self._export_realm(realm, exportable_user_ids=user_ids)
        realm_emoji.save()

        data = full_data["realm"]

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertNotIn("default-bot@zulip.com", exported_user_emails)
        self.assertNotIn(self.example_email("cordelia"), exported_user_emails)

        dummy_user_emails = self.get_set(data["zerver_userprofile_mirrordummy"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), dummy_user_emails)
        self.assertIn(self.example_email("othello"), dummy_user_emails)
        self.assertIn("default-bot@zulip.com", dummy_user_emails)
        self.assertNotIn(self.example_email("iago"), dummy_user_emails)
        self.assertNotIn(self.example_email("hamlet"), dummy_user_emails)

        data = full_data["message"]

        exported_message_ids = self.get_set(data["zerver_message"], "id")
        self.assertNotIn(pm_a_msg_id, exported_message_ids)
        self.assertIn(pm_b_msg_id, exported_message_ids)
        self.assertIn(pm_c_msg_id, exported_message_ids)
        self.assertIn(pm_d_msg_id, exported_message_ids)

    def test_export_realm_with_member_consent(self) -> None:
        realm = Realm.objects.get(string_id="zulip")

        # Create private streams and subscribe users for testing export
        create_stream_if_needed(realm, "Private A", invite_only=True)
        self.subscribe(self.example_user("iago"), "Private A")
        self.subscribe(self.example_user("othello"), "Private A")
        self.send_stream_message(self.example_user("iago"), "Private A", "Hello Stream A")

        create_stream_if_needed(realm, "Private B", invite_only=True)
        self.subscribe(self.example_user("prospero"), "Private B")
        stream_b_message_id = self.send_stream_message(
            self.example_user("prospero"), "Private B", "Hello Stream B"
        )
        self.subscribe(self.example_user("hamlet"), "Private B")

        create_stream_if_needed(realm, "Private C", invite_only=True)
        self.subscribe(self.example_user("othello"), "Private C")
        self.subscribe(self.example_user("prospero"), "Private C")
        stream_c_message_id = self.send_stream_message(
            self.example_user("othello"), "Private C", "Hello Stream C"
        )

        # Create huddles
        self.send_huddle_message(
            self.example_user("iago"), [self.example_user("cordelia"), self.example_user("AARON")]
        )
        huddle_a = Huddle.objects.last()
        self.send_huddle_message(
            self.example_user("ZOE"),
            [self.example_user("hamlet"), self.example_user("AARON"), self.example_user("othello")],
        )
        huddle_b = Huddle.objects.last()

        huddle_c_message_id = self.send_huddle_message(
            self.example_user("AARON"),
            [self.example_user("cordelia"), self.example_user("ZOE"), self.example_user("othello")],
        )

        # Create PMs
        pm_a_msg_id = self.send_personal_message(
            self.example_user("AARON"), self.example_user("othello")
        )
        pm_b_msg_id = self.send_personal_message(
            self.example_user("cordelia"), self.example_user("iago")
        )
        pm_c_msg_id = self.send_personal_message(
            self.example_user("hamlet"), self.example_user("othello")
        )
        pm_d_msg_id = self.send_personal_message(
            self.example_user("iago"), self.example_user("hamlet")
        )

        # Send message advertising export and make users react
        self.send_stream_message(
            self.example_user("othello"),
            "Verona",
            topic_name="Export",
            content="Thumbs up for export",
        )
        message = Message.objects.last()
        consented_user_ids = [self.example_user(user).id for user in ["iago", "hamlet"]]
        do_add_reaction(
            self.example_user("iago"), message, "outbox", "1f4e4", Reaction.UNICODE_EMOJI
        )
        do_add_reaction(
            self.example_user("hamlet"), message, "outbox", "1f4e4", Reaction.UNICODE_EMOJI
        )
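
        # Reacting to the consent message above is what marks iago and
        # hamlet as having consented to the export.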

        realm_emoji = RealmEmoji.objects.get(realm=realm)
        realm_emoji.delete()
        full_data = self._export_realm(realm, consent_message_id=message.id)
        realm_emoji.save()

        data = full_data["realm"]

        self.assertEqual(len(data["zerver_userprofile_crossrealm"]), 3)
        self.assertEqual(len(data["zerver_userprofile_mirrordummy"]), 0)

        exported_user_emails = self.get_set(data["zerver_userprofile"], "delivery_email")
        self.assertIn(self.example_email("cordelia"), exported_user_emails)
        self.assertIn(self.example_email("hamlet"), exported_user_emails)
        self.assertIn(self.example_email("iago"), exported_user_emails)
        self.assertIn(self.example_email("othello"), exported_user_emails)
        self.assertIn("default-bot@zulip.com", exported_user_emails)

        exported_streams = self.get_set(data["zerver_stream"], "name")
        self.assertEqual(
            exported_streams,
            {
                "Denmark",
                "Rome",
                "Scotland",
                "Venice",
                "Verona",
                "Private A",
                "Private B",
                "Private C",
            },
        )

        data = full_data["message"]
        exported_usermessages = UserMessage.objects.filter(
            user_profile__in=[self.example_user("iago"), self.example_user("hamlet")]
        )
        um = exported_usermessages[0]
        self.assertEqual(len(data["zerver_usermessage"]), len(exported_usermessages))
        exported_um = self.find_by_id(data["zerver_usermessage"], um.id)
        self.assertEqual(exported_um["message"], um.message_id)
        self.assertEqual(exported_um["user_profile"], um.user_profile_id)

        exported_message = self.find_by_id(data["zerver_message"], um.message_id)
        self.assertEqual(exported_message["content"], um.message.content)

        public_stream_names = ["Denmark", "Rome", "Scotland", "Venice", "Verona"]
        public_stream_ids = Stream.objects.filter(name__in=public_stream_names).values_list(
            "id", flat=True
        )
        public_stream_recipients = Recipient.objects.filter(
            type_id__in=public_stream_ids, type=Recipient.STREAM
        )
        public_stream_message_ids = Message.objects.filter(
            recipient__in=public_stream_recipients
        ).values_list("id", flat=True)

        # Messages from Private Stream C are not exported since no member gave consent
        private_stream_ids = Stream.objects.filter(name__in=["Private A", "Private B"]).values_list(
            "id", flat=True
        )
        private_stream_recipients = Recipient.objects.filter(
            type_id__in=private_stream_ids, type=Recipient.STREAM
        )
        private_stream_message_ids = Message.objects.filter(
            recipient__in=private_stream_recipients
        ).values_list("id", flat=True)

        pm_recipients = Recipient.objects.filter(
            type_id__in=consented_user_ids, type=Recipient.PERSONAL
        )
        pm_query = Q(recipient__in=pm_recipients) | Q(sender__in=consented_user_ids)
        exported_pm_ids = Message.objects.filter(pm_query).values_list("id", flat=True)

        # Third huddle is not exported since none of the members gave consent
        huddle_recipients = Recipient.objects.filter(
            type_id__in=[huddle_a.id, huddle_b.id], type=Recipient.HUDDLE
        )
        pm_query = Q(recipient__in=huddle_recipients) | Q(sender__in=consented_user_ids)
        exported_huddle_ids = Message.objects.filter(pm_query).values_list("id", flat=True)

        exported_msg_ids = (
            set(public_stream_message_ids)
            | set(private_stream_message_ids)
            | set(exported_pm_ids)
            | set(exported_huddle_ids)
        )
        self.assertEqual(self.get_set(data["zerver_message"], "id"), exported_msg_ids)

        # TODO: This behavior is wrong and should be fixed. The message should not be exported
        # since it was sent before the only consented user iago joined the stream.
        self.assertIn(stream_b_message_id, exported_msg_ids)

        self.assertNotIn(stream_c_message_id, exported_msg_ids)
        self.assertNotIn(huddle_c_message_id, exported_msg_ids)

        self.assertNotIn(pm_a_msg_id, exported_msg_ids)
        self.assertIn(pm_b_msg_id, exported_msg_ids)
        self.assertIn(pm_c_msg_id, exported_msg_ids)
        self.assertIn(pm_d_msg_id, exported_msg_ids)

    def test_export_single_user(self) -> None:
        output_dir = self._make_output_dir()
        cordelia = self.example_user("cordelia")

        with self.assertLogs(level="INFO"):
            do_export_user(cordelia, output_dir)

        def read_file(fn: str) -> Any:
            full_fn = os.path.join(output_dir, fn)
            with open(full_fn, "rb") as f:
                return orjson.loads(f.read())

        messages = read_file("messages-000001.json")
        user = read_file("user.json")

        exported_user_id = self.get_set(user["zerver_userprofile"], "id")
        self.assertEqual(exported_user_id, {cordelia.id})
        exported_user_email = self.get_set(user["zerver_userprofile"], "email")
        self.assertEqual(exported_user_email, {cordelia.email})

        exported_recipient_type_id = self.get_set(user["zerver_recipient"], "type_id")
        self.assertIn(cordelia.id, exported_recipient_type_id)

        exported_stream_id = self.get_set(user["zerver_stream"], "id")
        self.assertIn(list(exported_stream_id)[0], exported_recipient_type_id)

        exported_recipient_id = self.get_set(user["zerver_recipient"], "id")
        exported_subscription_recipient = self.get_set(user["zerver_subscription"], "recipient")
        self.assertEqual(exported_recipient_id, exported_subscription_recipient)

        exported_messages_recipient = self.get_set(messages["zerver_message"], "recipient")
        self.assertIn(list(exported_messages_recipient)[0], exported_recipient_id)

    """
    Tests for import_realm
    """

    def test_import_realm(self) -> None:

        original_realm = Realm.objects.get(string_id="zulip")
        RealmEmoji.objects.get(realm=original_realm).delete()

        # Deactivate a user to ensure such a case is covered.
        do_deactivate_user(self.example_user("aaron"), acting_user=None)

        # data to test import of huddles
        huddle = [
            self.example_user("hamlet"),
            self.example_user("othello"),
        ]
        self.send_huddle_message(
            self.example_user("cordelia"),
            huddle,
            "test huddle message",
        )

        user_mention_message = "@**King Hamlet** Hello"
        self.send_stream_message(self.example_user("iago"), "Verona", user_mention_message)

        stream_mention_message = "Subscribe to #**Denmark**"
        self.send_stream_message(self.example_user("hamlet"), "Verona", stream_mention_message)

        user_group_mention_message = "Hello @*hamletcharacters*"
        self.send_stream_message(self.example_user("othello"), "Verona", user_group_mention_message)

        special_characters_message = "```\n'\n```\n@**Polonius**"
        self.send_stream_message(self.example_user("iago"), "Denmark", special_characters_message)

        sample_user = self.example_user("hamlet")

        # data to test import of hotspots
        UserHotspot.objects.create(
            user=sample_user,
            hotspot="intro_streams",
        )

        # data to test import of muted topic
        stream = get_stream("Verona", original_realm)
        add_topic_mute(
            user_profile=sample_user,
            stream_id=stream.id,
            recipient_id=stream.recipient.id,
            topic_name="Verona2",
        )
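
        # data to test import of user presence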
        do_update_user_presence(
            sample_user, get_client("website"), timezone_now(), UserPresence.ACTIVE
        )

        # data to test import of botstoragedata and botconfigdata
        bot_profile = do_create_user(
            email="bot-1@zulip.com",
            password="test",
            realm=original_realm,
            full_name="bot",
            bot_type=UserProfile.EMBEDDED_BOT,
            bot_owner=sample_user,
            acting_user=None,
        )
        storage = StateHandler(bot_profile)
        storage.put("some key", "some value")

        set_bot_config(bot_profile, "entry 1", "value 1")

        self._export_realm(original_realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")

        # sanity checks

        # test realm
        self.assertTrue(Realm.objects.filter(string_id="test-zulip").exists())
        imported_realm = Realm.objects.get(string_id="test-zulip")
        self.assertNotEqual(imported_realm.id, original_realm.id)
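
        # The import rebuilds every table with fresh ids, so the helper
        # below compares the two realms via stable attributes (emails,
        # stream names, etc.) rather than database ids.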
        def assert_realm_values(f: Callable[[Realm], Any], equal: bool = True) -> None:
            orig_realm_result = f(original_realm)
            imported_realm_result = f(imported_realm)
            # orig_realm_result should be truthy and have some values, otherwise
            # the test is kind of meaningless
            assert orig_realm_result
            if equal:
                self.assertEqual(orig_realm_result, imported_realm_result)
            else:
                self.assertNotEqual(orig_realm_result, imported_realm_result)

        # test users
        assert_realm_values(
            lambda r: {user.email for user in r.get_admin_users_and_bots()},
        )

        assert_realm_values(
            lambda r: {user.email for user in r.get_active_users()},
        )

        # test stream
        assert_realm_values(
            lambda r: {stream.name for stream in get_active_streams(r)},
        )

        # test recipients
        def get_recipient_stream(r: Realm) -> Recipient:
            return Stream.objects.get(name="Verona", realm=r).recipient

        def get_recipient_user(r: Realm) -> Recipient:
            return UserProfile.objects.get(full_name="Iago", realm=r).recipient

        assert_realm_values(lambda r: get_recipient_stream(r).type)
        assert_realm_values(lambda r: get_recipient_user(r).type)

        # test subscription
        def get_subscribers(recipient: Recipient) -> Set[str]:
            subscriptions = Subscription.objects.filter(recipient=recipient)
            users = {sub.user_profile.email for sub in subscriptions}
            return users

        assert_realm_values(
            lambda r: get_subscribers(get_recipient_stream(r)),
        )

        assert_realm_values(
            lambda r: get_subscribers(get_recipient_user(r)),
        )

        # test custom profile fields
        def get_custom_profile_field_names(r: Realm) -> Set[str]:
            custom_profile_fields = CustomProfileField.objects.filter(realm=r)
            custom_profile_field_names = {field.name for field in custom_profile_fields}
            return custom_profile_field_names

        assert_realm_values(get_custom_profile_field_names)

        def get_custom_profile_with_field_type_user(
            r: Realm,
        ) -> Tuple[Set[Any], Set[Any], Set[FrozenSet[str]]]:
            fields = CustomProfileField.objects.filter(field_type=CustomProfileField.USER, realm=r)

            def get_email(user_id: int) -> str:
                return UserProfile.objects.get(id=user_id).email

            def get_email_from_value(field_value: CustomProfileFieldValue) -> Set[str]:
                user_id_list = orjson.loads(field_value.value)
|
|
|
return {get_email(user_id) for user_id in user_id_list}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def custom_profile_field_values_for(
|
|
|
|
fields: List[CustomProfileField],
|
|
|
|
) -> Set[FrozenSet[str]]:
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
user_emails: Set[FrozenSet[str]] = set()
|
2018-07-16 17:15:42 +02:00
|
|
|
for field in fields:
|
|
|
|
values = CustomProfileFieldValue.objects.filter(field=field)
|
|
|
|
for value in values:
|
|
|
|
user_emails.add(frozenset(get_email_from_value(value)))
|
|
|
|
return user_emails
|
|
|
|
|
|
|
|
field_names, field_hints = (set() for i in range(2))
|
|
|
|
for field in fields:
|
|
|
|
field_names.add(field.name)
|
|
|
|
field_hints.add(field.hint)
|
|
|
|
|
|
|
|
return (field_hints, field_names, custom_profile_field_values_for(fields))
|
|
|
|
|
|
|
|
assert_realm_values(get_custom_profile_with_field_type_user)
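
        # For reference, USER-type custom profile field values are stored as
        # a JSON-encoded list of user IDs, which is why get_email_from_value
        # above round-trips through orjson.  A minimal illustration of that
        # encoding (an aside, not one of the original assertions):
        assert orjson.loads(b"[1, 2]") == [1, 2]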

        # test realmauditlog
        def get_realm_audit_log_event_type(r: Realm) -> Set[str]:
            realmauditlogs = RealmAuditLog.objects.filter(realm=r).exclude(
                event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED
            )
            realmauditlog_event_type = {log.event_type for log in realmauditlogs}
            return realmauditlog_event_type

        assert_realm_values(get_realm_audit_log_event_type)
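        # (REALM_PLAN_TYPE_CHANGED is excluded above because importing can
        # itself record a plan-type change for the new realm, which would
        # make the two event-type sets differ for reasons unrelated to data
        # fidelity.)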

        cordelia_full_name = "Cordelia Lear"
        hamlet_full_name = "King Hamlet"
        othello_full_name = "Othello, the Moor of Venice"

        def get_user_id(r: Realm, full_name: str) -> int:
            return UserProfile.objects.get(realm=r, full_name=full_name).id

        # test huddles
        def get_huddle_hashes(r: Realm) -> str:
            user_id_list = [
                get_user_id(r, cordelia_full_name),
                get_user_id(r, hamlet_full_name),
                get_user_id(r, othello_full_name),
            ]

            huddle_hash = get_huddle_hash(user_id_list)
            return huddle_hash

        assert_realm_values(get_huddle_hashes, equal=False)
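
        # Why equal=False: the huddle hash is derived from the member user
        # IDs, and imported users are assigned fresh IDs, so the hashes are
        # expected to differ between the two realms.  A rough sketch of the
        # derivation (an assumption for illustration only; the canonical
        # implementation is zerver.models.get_huddle_hash):
        import hashlib

        def huddle_hash_sketch(user_ids: List[int]) -> str:
            # Hypothetical helper, not used by the assertions above.
            hash_key = ",".join(str(uid) for uid in sorted(user_ids))
            return hashlib.sha1(hash_key.encode()).hexdigest()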

        def get_huddle_message(r: Realm) -> str:
            huddle_hash = get_huddle_hashes(r)
            huddle_id = Huddle.objects.get(huddle_hash=huddle_hash).id
            huddle_recipient = Recipient.objects.get(type_id=huddle_id, type=3)
            huddle_message = Message.objects.get(recipient=huddle_recipient)
            return huddle_message.content

        assert_realm_values(get_huddle_message)
        self.assertEqual(get_huddle_message(imported_realm), "test huddle message")

        # test alertword
        def get_alertwords(r: Realm) -> Set[str]:
            return {rec.word for rec in AlertWord.objects.filter(realm_id=r.id)}

        assert_realm_values(get_alertwords)

        # test userhotspot
        def get_user_hotspots(r: Realm) -> Set[str]:
            user_id = get_user_id(r, hamlet_full_name)
            hotspots = UserHotspot.objects.filter(user_id=user_id)
            user_hotspots = {hotspot.hotspot for hotspot in hotspots}
            return user_hotspots

        assert_realm_values(get_user_hotspots)

        # test muted topics
        def get_muted_topics(r: Realm) -> Set[str]:
            user_profile_id = get_user_id(r, hamlet_full_name)
            muted_topics = MutedTopic.objects.filter(user_profile_id=user_profile_id)
            topic_names = {muted_topic.topic_name for muted_topic in muted_topics}
            return topic_names

        assert_realm_values(get_muted_topics)

        # test usergroups
        assert_realm_values(
            lambda r: {group.name for group in UserGroup.objects.filter(realm=r)},
        )

        def get_user_membership(r: Realm) -> Set[str]:
            usergroup = UserGroup.objects.get(realm=r, name="hamletcharacters")
            usergroup_membership = UserGroupMembership.objects.filter(user_group=usergroup)
            users = {membership.user_profile.email for membership in usergroup_membership}
            return users

        assert_realm_values(get_user_membership)

        # test botstoragedata and botconfigdata
        def get_botstoragedata(r: Realm) -> Dict[str, Any]:
            bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
            bot_storage_data = BotStorageData.objects.get(bot_profile=bot_profile)
            return {"key": bot_storage_data.key, "data": bot_storage_data.value}

        assert_realm_values(get_botstoragedata)

        def get_botconfigdata(r: Realm) -> Dict[str, Any]:
            bot_profile = UserProfile.objects.get(full_name="bot", realm=r)
            bot_config_data = BotConfigData.objects.get(bot_profile=bot_profile)
            return {"key": bot_config_data.key, "data": bot_config_data.value}

        assert_realm_values(get_botconfigdata)

        # test messages
        def get_stream_messages(r: Realm) -> Message:
            recipient = get_recipient_stream(r)
            messages = Message.objects.filter(recipient=recipient)
            return messages

        def get_stream_topics(r: Realm) -> Set[str]:
            messages = get_stream_messages(r)
            topics = {m.topic_name() for m in messages}
            return topics

        assert_realm_values(get_stream_topics)

        # test usermessages
        def get_usermessages_user(r: Realm) -> Set[Any]:
            messages = get_stream_messages(r).order_by("content")
            usermessage = UserMessage.objects.filter(message=messages[0])
            usermessage_user = {um.user_profile.email for um in usermessage}
            return usermessage_user

        assert_realm_values(get_usermessages_user)

        # tests to make sure that various data-*-ids in rendered_content
        # are replaced correctly with the values of the new realm.
        def get_user_mention(r: Realm) -> Set[Any]:
            mentioned_user = UserProfile.objects.get(
                delivery_email=self.example_email("hamlet"), realm=r
            )
            data_user_id = f'data-user-id="{mentioned_user.id}"'
            mention_message = get_stream_messages(r).get(rendered_content__contains=data_user_id)
            return mention_message.content

        assert_realm_values(get_user_mention)

        def get_stream_mention(r: Realm) -> Set[Any]:
            mentioned_stream = get_stream("Denmark", r)
            data_stream_id = f'data-stream-id="{mentioned_stream.id}"'
            mention_message = get_stream_messages(r).get(rendered_content__contains=data_stream_id)
            return mention_message.content

        assert_realm_values(get_stream_mention)

        def get_user_group_mention(r: Realm) -> Set[Any]:
            user_group = UserGroup.objects.get(realm=r, name="hamletcharacters")
            data_usergroup_id = f'data-user-group-id="{user_group.id}"'
            mention_message = get_stream_messages(r).get(
                rendered_content__contains=data_usergroup_id
            )
            return mention_message.content

        assert_realm_values(get_user_group_mention)
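
        # For context: rendered mentions embed numeric IDs in the HTML, e.g.
        # <span class="user-mention" data-user-id="42">@King Hamlet</span>
        # (the "42" here is a made-up ID).  The import rewrites those IDs to
        # the new realm's values, which is what the three lookups above
        # verify by fetching each message via its expected data-*-id.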

        def get_userpresence_timestamp(r: Realm) -> Set[Any]:
            # It should be sufficient to compare UserPresence timestamps to verify
            # they got exported/imported correctly.
            return set(UserPresence.objects.filter(realm=r).values_list("timestamp", flat=True))

        assert_realm_values(get_userpresence_timestamp)

        # test to highlight that bs4, which we use to do data-*-id
        # replacements, sometimes modifies the HTML, e.g. replacing <br>
        # with </br>, &#39; with ', etc.  The modifications don't
        # affect how the browser displays the rendered_content, so we
        # are okay with using bs4 for this.  The lxml package has
        # similar behavior.
        orig_polonius_user = self.example_user("polonius")
        original_msg = Message.objects.get(
            content=special_characters_message, sender__realm=original_realm
        )
        self.assertEqual(
            original_msg.rendered_content,
            '<div class="codehilite"><pre><span></span><code>&#39;\n</code></pre></div>\n'
            f'<p><span class="user-mention" data-user-id="{orig_polonius_user.id}">@Polonius</span></p>',
        )
        imported_polonius_user = UserProfile.objects.get(
            delivery_email=self.example_email("polonius"), realm=imported_realm
        )
        imported_msg = Message.objects.get(
            content=special_characters_message, sender__realm=imported_realm
        )
        self.assertEqual(
            imported_msg.rendered_content,
            '<div class="codehilite"><pre><span></span><code>\'\n</code></pre></div>\n'
            f'<p><span class="user-mention" data-user-id="{imported_polonius_user.id}">@Polonius</span></p>',
        )
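
        # Illustrative sketch of that bs4 entity normalization (an aside,
        # assuming the html.parser backend; not one of the original
        # assertions): &#39; parses to a literal quote and is re-serialized
        # as such.
        from bs4 import BeautifulSoup

        assert str(BeautifulSoup("<p>&#39;</p>", "html.parser")) == "<p>'</p>"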

        # Check recipient_id was generated correctly for the imported users and streams.
        for user_profile in UserProfile.objects.filter(realm=imported_realm):
            self.assertEqual(
                user_profile.recipient_id,
                Recipient.objects.get(type=Recipient.PERSONAL, type_id=user_profile.id).id,
            )
        for stream in Stream.objects.filter(realm=imported_realm):
            self.assertEqual(
                stream.recipient_id,
                Recipient.objects.get(type=Recipient.STREAM, type_id=stream.id).id,
            )

        for huddle_object in Huddle.objects.all():
            # Huddles don't have a realm column, so we just test all Huddles for simplicity.
            self.assertEqual(
                huddle_object.recipient_id,
                Recipient.objects.get(type=Recipient.HUDDLE, type_id=huddle_object.id).id,
            )

        for user_profile in UserProfile.objects.filter(realm=imported_realm):
            # Check that all Subscriptions have the correct is_user_active set.
            self.assertEqual(
                Subscription.objects.filter(
                    user_profile=user_profile, is_user_active=user_profile.is_active
                ).count(),
                Subscription.objects.filter(user_profile=user_profile).count(),
            )
        # Verify that we've actually tested something meaningful instead of a blind import
        # with is_user_active=True used for everything.
        self.assertTrue(Subscription.objects.filter(is_user_active=False).exists())

    def test_import_files_from_local(self) -> None:
        realm = Realm.objects.get(string_id="zulip")
        self._setup_export_files(realm)

        self._export_realm(realm)

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")
        imported_realm = Realm.objects.get(string_id="test-zulip")

        # Test attachments
        uploaded_file = Attachment.objects.get(realm=imported_realm)
        self.assertEqual(len(b"zulip!"), uploaded_file.size)

        attachment_file_path = os.path.join(
            settings.LOCAL_UPLOADS_DIR, "files", uploaded_file.path_id
        )
        self.assertTrue(os.path.isfile(attachment_file_path))

        # Test emojis
        realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=imported_realm.id,
            emoji_file_name=realm_emoji.file_name,
        )
        emoji_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path)
        self.assertTrue(os.path.isfile(emoji_file_path))

        # Test avatars
        user_email = Message.objects.all()[0].sender.email
        user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
        avatar_path_id = user_avatar_path(user_profile) + ".original"
        avatar_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_path_id)
        self.assertTrue(os.path.isfile(avatar_file_path))
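
        # (user_avatar_path yields the user's hashed avatar path; the
        # ".original" suffix names the unresized upload, stored alongside
        # the resized .png variants.)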

        # Test realm icon and logo
        upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)
        full_upload_path = os.path.join(settings.LOCAL_UPLOADS_DIR, upload_path)

        with get_test_image_file("img.png") as f:
            test_image_data = f.read()
        self.assertIsNotNone(test_image_data)

        with open(os.path.join(full_upload_path, "icon.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "icon.png")))
        self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)

        with open(os.path.join(full_upload_path, "logo.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "logo.png")))
        self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)

        with open(os.path.join(full_upload_path, "night_logo.original"), "rb") as f:
            self.assertEqual(f.read(), test_image_data)
        self.assertTrue(os.path.isfile(os.path.join(full_upload_path, "night_logo.png")))
        self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

    @use_s3_backend
    def test_import_files_from_s3(self) -> None:
        uploads_bucket, avatar_bucket = create_s3_buckets(
            settings.S3_AUTH_UPLOADS_BUCKET, settings.S3_AVATAR_BUCKET
        )

        realm = Realm.objects.get(string_id="zulip")
        self._setup_export_files(realm)

        self._export_realm(realm)
        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            do_import_realm(os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip")

        imported_realm = Realm.objects.get(string_id="test-zulip")
        with get_test_image_file("img.png") as f:
            test_image_data = f.read()

        # Test attachments
        uploaded_file = Attachment.objects.get(realm=imported_realm)
        self.assertEqual(len(b"zulip!"), uploaded_file.size)

        attachment_content = uploads_bucket.Object(uploaded_file.path_id).get()["Body"].read()
        self.assertEqual(b"zulip!", attachment_content)

        # Test emojis
        realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=imported_realm.id,
            emoji_file_name=realm_emoji.file_name,
        )
        emoji_key = avatar_bucket.Object(emoji_path)
        self.assertIsNotNone(emoji_key.get()["Body"].read())
        self.assertEqual(emoji_key.key, emoji_path)

        # Test avatars
        user_email = Message.objects.all()[0].sender.email
        user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
        avatar_path_id = user_avatar_path(user_profile) + ".original"
        original_image_key = avatar_bucket.Object(avatar_path_id)
        self.assertEqual(original_image_key.key, avatar_path_id)
        image_data = avatar_bucket.Object(avatar_path_id).get()["Body"].read()
        self.assertEqual(image_data, test_image_data)

        # Test realm icon and logo
        upload_path = upload.upload_backend.realm_avatar_and_logo_path(imported_realm)

        original_icon_path_id = os.path.join(upload_path, "icon.original")
        original_icon_key = avatar_bucket.Object(original_icon_path_id)
        self.assertEqual(original_icon_key.get()["Body"].read(), test_image_data)
        resized_icon_path_id = os.path.join(upload_path, "icon.png")
        resized_icon_key = avatar_bucket.Object(resized_icon_path_id)
        self.assertEqual(resized_icon_key.key, resized_icon_path_id)
        self.assertEqual(imported_realm.icon_source, Realm.ICON_UPLOADED)

        original_logo_path_id = os.path.join(upload_path, "logo.original")
        original_logo_key = avatar_bucket.Object(original_logo_path_id)
        self.assertEqual(original_logo_key.get()["Body"].read(), test_image_data)
        resized_logo_path_id = os.path.join(upload_path, "logo.png")
        resized_logo_key = avatar_bucket.Object(resized_logo_path_id)
        self.assertEqual(resized_logo_key.key, resized_logo_path_id)
        self.assertEqual(imported_realm.logo_source, Realm.LOGO_UPLOADED)

        night_logo_original_path_id = os.path.join(upload_path, "night_logo.original")
        night_logo_original_key = avatar_bucket.Object(night_logo_original_path_id)
        self.assertEqual(night_logo_original_key.get()["Body"].read(), test_image_data)
        resized_night_logo_path_id = os.path.join(upload_path, "night_logo.png")
        resized_night_logo_key = avatar_bucket.Object(resized_night_logo_path_id)
        self.assertEqual(resized_night_logo_key.key, resized_night_logo_path_id)
        self.assertEqual(imported_realm.night_logo_source, Realm.LOGO_UPLOADED)

    def test_get_incoming_message_ids(self) -> None:
        import_dir = os.path.join(
            settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "import_fixtures"
        )
        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=True,
        )

        self.assertEqual(message_ids, [888, 999, 555])

        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=False,
        )

        self.assertEqual(message_ids, [555, 888, 999])
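
        # The two expectations above pin down the contract: with
        # sort_by_date=True, message IDs come back in date order; otherwise
        # in ascending-ID order.  A minimal sketch of that behavior with
        # stand-in rows (hypothetical data, mirroring the fixture's shape):
        rows = [
            {"id": 555, "date_sent": 3},
            {"id": 888, "date_sent": 1},
            {"id": 999, "date_sent": 2},
        ]
        by_date = [row["id"] for row in sorted(rows, key=lambda row: row["date_sent"])]
        assert by_date == [888, 999, 555]
        assert sorted(row["id"] for row in rows) == [555, 888, 999]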

    def test_plan_type(self) -> None:
        realm = get_realm("zulip")
        do_change_plan_type(realm, Realm.LIMITED)

        self._setup_export_files(realm)
        self._export_realm(realm)

        with self.settings(BILLING_ENABLED=True), self.assertLogs(level="INFO"):
            realm = do_import_realm(
                os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip-1"
            )
            self.assertEqual(realm.plan_type, Realm.LIMITED)
            self.assertEqual(realm.max_invites, 100)
            self.assertEqual(realm.upload_quota_gb, 5)
            self.assertEqual(realm.message_visibility_limit, 10000)
            self.assertTrue(
                RealmAuditLog.objects.filter(
                    realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED
                ).exists()
            )

        with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
            realm = do_import_realm(
                os.path.join(settings.TEST_WORKER_DIR, "test-export"), "test-zulip-2"
            )
            self.assertEqual(realm.plan_type, Realm.SELF_HOSTED)
            self.assertEqual(realm.max_invites, 100)
            self.assertEqual(realm.upload_quota_gb, None)
            self.assertEqual(realm.message_visibility_limit, None)
            self.assertTrue(
                RealmAuditLog.objects.filter(
                    realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED
                ).exists()
            )