2016-08-13 20:22:23 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
|
|
|
from django.conf import settings
|
|
|
|
|
|
|
|
import os
|
|
|
|
import shutil
|
|
|
|
import ujson
|
|
|
|
|
2019-02-02 23:53:44 +01:00
|
|
|
from mock import patch
|
2018-07-16 17:15:42 +02:00
|
|
|
from typing import Any, Dict, List, Set, Optional, Tuple, Callable, \
|
|
|
|
FrozenSet
|
2016-08-13 20:22:23 +02:00
|
|
|
|
|
|
|
from zerver.lib.export import (
|
|
|
|
do_export_realm,
|
|
|
|
export_usermessages_batch,
|
2018-05-30 17:09:52 +02:00
|
|
|
do_export_user,
|
2016-08-13 20:22:23 +02:00
|
|
|
)
|
2018-06-04 18:21:58 +02:00
|
|
|
from zerver.lib.import_realm import (
|
|
|
|
do_import_realm,
|
2018-10-16 12:34:47 +02:00
|
|
|
get_incoming_message_ids,
|
2018-06-04 18:21:58 +02:00
|
|
|
)
|
2018-05-30 14:55:30 +02:00
|
|
|
from zerver.lib.avatar_hash import (
|
|
|
|
user_avatar_path,
|
|
|
|
)
|
2016-08-13 20:22:23 +02:00
|
|
|
from zerver.lib.upload import (
|
|
|
|
claim_attachment,
|
2018-03-28 18:14:17 +02:00
|
|
|
upload_message_file,
|
2018-05-26 21:18:54 +02:00
|
|
|
upload_emoji_image,
|
2018-05-30 14:55:30 +02:00
|
|
|
upload_avatar_image,
|
2016-08-13 20:22:23 +02:00
|
|
|
)
|
2016-08-14 18:33:29 +02:00
|
|
|
from zerver.lib.utils import (
|
|
|
|
query_chunker,
|
|
|
|
)
|
2016-11-10 19:30:09 +01:00
|
|
|
from zerver.lib.test_classes import (
|
2016-08-14 18:33:29 +02:00
|
|
|
ZulipTestCase,
|
|
|
|
)
|
2018-05-30 14:55:30 +02:00
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
use_s3_backend,
|
2018-12-07 18:15:51 +01:00
|
|
|
create_s3_buckets,
|
2018-05-30 14:55:30 +02:00
|
|
|
)
|
2016-08-14 18:33:29 +02:00
|
|
|
|
2018-07-14 16:10:45 +02:00
|
|
|
from zerver.lib.topic_mutes import (
|
|
|
|
add_topic_mute,
|
|
|
|
)
|
2018-07-17 19:11:16 +02:00
|
|
|
from zerver.lib.bot_lib import (
|
|
|
|
StateHandler,
|
|
|
|
)
|
|
|
|
from zerver.lib.bot_config import (
|
|
|
|
set_bot_config
|
|
|
|
)
|
|
|
|
from zerver.lib.actions import (
|
|
|
|
do_create_user,
|
|
|
|
)
|
2018-07-14 16:10:45 +02:00
|
|
|
|
2016-08-14 18:33:29 +02:00
|
|
|
from zerver.lib.test_runner import slow
|
|
|
|
|
2016-08-13 20:22:23 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Message,
|
|
|
|
Realm,
|
2018-06-04 18:21:58 +02:00
|
|
|
Stream,
|
|
|
|
UserProfile,
|
|
|
|
Subscription,
|
2018-05-30 14:55:30 +02:00
|
|
|
Attachment,
|
2018-05-26 21:18:54 +02:00
|
|
|
RealmEmoji,
|
2016-08-14 18:33:29 +02:00
|
|
|
Recipient,
|
2016-08-13 20:22:23 +02:00
|
|
|
UserMessage,
|
2018-06-04 18:21:58 +02:00
|
|
|
CustomProfileField,
|
|
|
|
CustomProfileFieldValue,
|
2018-07-05 20:08:40 +02:00
|
|
|
RealmAuditLog,
|
2018-05-25 18:54:22 +02:00
|
|
|
Huddle,
|
2018-07-12 16:34:26 +02:00
|
|
|
UserHotspot,
|
2018-07-14 16:10:45 +02:00
|
|
|
MutedTopic,
|
2018-07-12 13:27:12 +02:00
|
|
|
UserGroup,
|
|
|
|
UserGroupMembership,
|
2018-07-17 19:11:16 +02:00
|
|
|
BotStorageData,
|
|
|
|
BotConfigData,
|
2018-06-04 18:21:58 +02:00
|
|
|
get_active_streams,
|
2018-09-21 05:39:35 +02:00
|
|
|
get_realm,
|
2018-07-14 16:10:45 +02:00
|
|
|
get_stream,
|
2018-06-04 18:21:58 +02:00
|
|
|
get_stream_recipient,
|
|
|
|
get_personal_recipient,
|
2018-05-25 18:54:22 +02:00
|
|
|
get_huddle_hash,
|
2016-08-13 20:22:23 +02:00
|
|
|
)
|
|
|
|
|
2018-05-26 21:18:54 +02:00
|
|
|
from zerver.lib.test_helpers import (
|
|
|
|
get_test_image_file,
|
|
|
|
)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def rm_tree(path: str) -> None:
    """Delete the directory tree rooted at `path`; silently no-op if absent."""
    if not os.path.exists(path):
        return
    shutil.rmtree(path)
|
|
|
|
|
2016-08-14 18:33:29 +02:00
|
|
|
class QueryUtilTest(ZulipTestCase):
    """Tests for zerver.lib.utils.query_chunker."""

    def _create_messages(self) -> None:
        # Seed data: 5 personal messages each from cordelia, hamlet, and
        # iago to othello, so the chunker has several senders to query.
        for email in [self.example_email('cordelia'),
                      self.example_email('hamlet'),
                      self.example_email('iago')]:
            for _ in range(5):
                self.send_personal_message(email, self.example_email('othello'))

    @slow('creates lots of data')
    def test_query_chunker(self) -> None:
        """Exercise query_chunker: global id ordering, id collection,
        various chunk sizes, disjointness assertions, and laziness."""
        self._create_messages()

        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')

        def get_queries() -> List[Any]:
            # Three mutually disjoint queries that together cover all messages.
            queries = [
                Message.objects.filter(sender_id=cordelia.id),
                Message.objects.filter(sender_id=hamlet.id),
                Message.objects.exclude(sender_id__in=[cordelia.id, hamlet.id])
            ]
            return queries

        for query in get_queries():
            # For our test to be meaningful, we want non-empty queries
            # at first
            assert len(list(query)) > 0

        queries = get_queries()

        all_msg_ids = set()  # type: Set[int]
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=20,
        )

        all_row_ids = []
        for chunk in chunker:
            for row in chunk:
                all_row_ids.append(row.id)

        # Rows come back globally sorted by id across all queries, and
        # every message is visited exactly once.
        self.assertEqual(all_row_ids, sorted(all_row_ids))
        self.assertEqual(len(all_msg_ids), len(Message.objects.all()))

        # Now just search for cordelia/hamlet. Note that we don't really
        # need the order_by here, but it should be harmless.
        queries = [
            Message.objects.filter(sender_id=cordelia.id).order_by('id'),
            Message.objects.filter(sender_id=hamlet.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=7,  # use a different size
        )
        list(chunker)  # exhaust the iterator
        self.assertEqual(
            len(all_msg_ids),
            len(Message.objects.filter(sender_id__in=[cordelia.id, hamlet.id]))
        )

        # Try just a single query to validate chunking.
        queries = [
            Message.objects.exclude(sender_id=cordelia.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=11,  # use a different size each time
        )
        list(chunker)  # exhaust the iterator
        self.assertEqual(
            len(all_msg_ids),
            len(Message.objects.exclude(sender_id=cordelia.id))
        )
        # Sanity check that the data set is big enough for chunking to matter.
        self.assertTrue(len(all_msg_ids) > 15)

        # Verify assertions about disjoint-ness.
        # These two queries overlap (hamlet is not cordelia), so the
        # chunker's disjointness assertion should fire.
        queries = [
            Message.objects.exclude(sender_id=cordelia.id),
            Message.objects.filter(sender_id=hamlet.id),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=13,  # use a different size each time
        )
        with self.assertRaises(AssertionError):
            list(chunker)  # exercise the iterator

        # Try to confuse things with ids part of the query...
        queries = [
            Message.objects.filter(id__lte=10),
            Message.objects.filter(id__gt=10),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=11,  # use a different size each time
        )
        # The chunker is lazy: nothing is collected before iteration begins.
        self.assertEqual(len(all_msg_ids), 0)  # until we actually use the iterator
        list(chunker)  # exhaust the iterator
        self.assertEqual(len(all_msg_ids), len(Message.objects.all()))

        # Verify that we can just get the first chunk with a next() call.
        queries = [
            Message.objects.all(),
        ]
        all_msg_ids = set()
        chunker = query_chunker(
            queries=queries,
            id_collector=all_msg_ids,
            chunk_size=10,  # use a different size each time
        )
        first_chunk = next(chunker)  # type: ignore
        self.assertEqual(len(first_chunk), 10)
        self.assertEqual(len(all_msg_ids), 10)
        # Spot-check that a row in the chunk matches the corresponding
        # row from a direct query.
        expected_msg = Message.objects.all()[0:10][5]
        actual_msg = first_chunk[5]
        self.assertEqual(actual_msg.content, expected_msg.content)
        self.assertEqual(actual_msg.sender_id, expected_msg.sender_id)
|
|
|
|
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
class ImportExportTest(ZulipTestCase):
|
2016-08-13 20:22:23 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def setUp(self) -> None:
    # Start every test with a clean local uploads directory so that
    # attachments/avatars/emoji written by a previous test cannot leak
    # into this test's export output.
    rm_tree(settings.LOCAL_UPLOADS_DIR)
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def _make_output_dir(self) -> str:
    """Create (or reset) and return the scratch directory used for exports."""
    export_dir = 'var/test-export'
    rm_tree(export_dir)
    os.makedirs(export_dir, exist_ok=True)
    return export_dir
|
|
|
|
|
2018-03-23 23:42:54 +01:00
|
|
|
def _export_realm(self, realm: Realm, exportable_user_ids: Optional[Set[int]]=None) -> Dict[str, Any]:
    """Run a realm export into a scratch directory and return the parsed data.

    Patches out logging and soft-link creation, runs do_export_realm
    single-threaded (threads=0), converts the partial usermessages file
    into messages-000001.json, and returns a dict with the parsed JSON
    tables plus the on-disk uploads/emoji/avatars directories and their
    records.json contents.
    """
    output_dir = self._make_output_dir()
    with patch('logging.info'), patch('zerver.lib.export.create_soft_link'):
        do_export_realm(
            realm=realm,
            output_dir=output_dir,
            threads=0,
            exportable_user_ids=exportable_user_ids,
        )
        # TODO: Process the second partial file, which can be created
        # for certain edge cases.
        export_usermessages_batch(
            input_path=os.path.join(output_dir, 'messages-000001.json.partial'),
            output_path=os.path.join(output_dir, 'messages-000001.json')
        )

    def read_file(fn: str) -> Any:
        # Parse one JSON artifact from the export directory.
        full_fn = os.path.join(output_dir, fn)
        with open(full_fn) as f:
            return ujson.load(f)

    result = {}
    result['realm'] = read_file('realm.json')
    result['attachment'] = read_file('attachment.json')
    result['message'] = read_file('messages-000001.json')
    result['uploads_dir'] = os.path.join(output_dir, 'uploads')
    result['uploads_dir_records'] = read_file(os.path.join('uploads', 'records.json'))
    result['emoji_dir'] = os.path.join(output_dir, 'emoji')
    result['emoji_dir_records'] = read_file(os.path.join('emoji', 'records.json'))
    result['avatar_dir'] = os.path.join(output_dir, 'avatars')
    result['avatar_dir_records'] = read_file(os.path.join('avatars', 'records.json'))
    return result
|
|
|
|
|
2018-06-06 16:55:03 +02:00
|
|
|
def _setup_export_files(self) -> Tuple[str, str, str, bytes]:
    """Create one attachment, one realm emoji, and one avatar for export tests.

    Returns (attachment_path_id, emoji_path, original_avatar_path_id,
    test_image_bytes) so callers can verify the exported files and
    records match what was uploaded.
    """
    realm = Realm.objects.get(string_id='zulip')
    message = Message.objects.all()[0]
    user_profile = message.sender
    url = upload_message_file(u'dummy.txt', len(b'zulip!'), u'text/plain', b'zulip!', user_profile)
    attachment_path_id = url.replace('/user_uploads/', '')
    claim_attachment(
        user_profile=user_profile,
        path_id=attachment_path_id,
        message=message,
        is_message_realm_public=True
    )
    avatar_path_id = user_avatar_path(user_profile)
    original_avatar_path_id = avatar_path_id + ".original"

    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=realm.id,
        emoji_file_name='1.png',
    )

    with get_test_image_file('img.png') as img_file:
        upload_emoji_image(img_file, '1.png', user_profile)
    with get_test_image_file('img.png') as img_file:
        upload_avatar_image(img_file, user_profile, user_profile)
    # Fix: read the reference image via a context manager; the original
    # used open(...).read(), leaking the file handle.
    with open(get_test_image_file('img.png').name, 'rb') as f:
        test_image = f.read()
    message.sender.avatar_source = 'U'
    message.sender.save()

    return attachment_path_id, emoji_path, original_avatar_path_id, test_image
|
|
|
|
|
|
|
|
"""
|
|
|
|
Tests for export
|
|
|
|
"""
|
|
|
|
|
|
|
|
def test_export_files_from_local(self) -> None:
    """Verify exported uploads/emoji/avatars and their records.json
    entries when using the local-disk uploads backend."""
    realm = Realm.objects.get(string_id='zulip')
    path_id, emoji_path, original_avatar_path_id, test_image = self._setup_export_files()
    full_data = self._export_realm(realm)

    data = full_data['attachment']
    self.assertEqual(len(data['zerver_attachment']), 1)
    record = data['zerver_attachment'][0]
    self.assertEqual(record['path_id'], path_id)

    # Test uploads
    fn = os.path.join(full_data['uploads_dir'], path_id)
    with open(fn) as f:
        self.assertEqual(f.read(), 'zulip!')
    records = full_data['uploads_dir_records']
    self.assertEqual(records[0]['path'], path_id)
    self.assertEqual(records[0]['s3_path'], path_id)

    # Test emojis
    fn = os.path.join(full_data['emoji_dir'], emoji_path)
    fn = fn.replace('1.png', '')
    self.assertEqual('1.png', os.listdir(fn)[0])
    records = full_data['emoji_dir_records']
    self.assertEqual(records[0]['file_name'], '1.png')
    self.assertEqual(records[0]['path'], '1/emoji/images/1.png')
    self.assertEqual(records[0]['s3_path'], '1/emoji/images/1.png')

    # Test avatars
    fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id)
    # Fix: read via a context manager; the original leaked the file
    # handle with open(fn, 'rb').read().
    with open(fn, 'rb') as f:
        fn_data = f.read()
    self.assertEqual(fn_data, test_image)
    records = full_data['avatar_dir_records']
    record_path = [record['path'] for record in records]
    record_s3_path = [record['s3_path'] for record in records]
    self.assertIn(original_avatar_path_id, record_path)
    self.assertIn(original_avatar_path_id, record_s3_path)
|
2018-05-30 14:55:30 +02:00
|
|
|
|
|
|
|
@use_s3_backend
def test_export_files_from_s3(self) -> None:
    """Verify exported uploads/emoji/avatars and their records.json
    entries when using the (mocked) S3 uploads backend."""
    create_s3_buckets(
        settings.S3_AUTH_UPLOADS_BUCKET,
        settings.S3_AVATAR_BUCKET)

    realm = Realm.objects.get(string_id='zulip')
    attachment_path_id, emoji_path, original_avatar_path_id, test_image = self._setup_export_files()
    full_data = self._export_realm(realm)

    data = full_data['attachment']
    self.assertEqual(len(data['zerver_attachment']), 1)
    record = data['zerver_attachment'][0]
    self.assertEqual(record['path_id'], attachment_path_id)

    def check_variable_type(user_profile_id: int, realm_id: int) -> None:
        # The ids in records.json must be ints, not strings.
        self.assertEqual(type(user_profile_id), int)
        self.assertEqual(type(realm_id), int)

    # Test uploads
    fields = attachment_path_id.split('/')
    fn = os.path.join(full_data['uploads_dir'], os.path.join(fields[0], fields[1], fields[2]))
    with open(fn) as f:
        self.assertEqual(f.read(), 'zulip!')
    records = full_data['uploads_dir_records']
    self.assertEqual(records[0]['path'], os.path.join(fields[0], fields[1], fields[2]))
    self.assertEqual(records[0]['s3_path'], attachment_path_id)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test emojis
    fn = os.path.join(full_data['emoji_dir'], emoji_path)
    fn = fn.replace('1.png', '')
    self.assertIn('1.png', os.listdir(fn))
    records = full_data['emoji_dir_records']
    self.assertEqual(records[0]['file_name'], '1.png')
    self.assertTrue('last_modified' in records[0])
    self.assertEqual(records[0]['path'], '1/emoji/images/1.png')
    self.assertEqual(records[0]['s3_path'], '1/emoji/images/1.png')
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])

    # Test avatars
    fn = os.path.join(full_data['avatar_dir'], original_avatar_path_id)
    # Fix: read via a context manager; the original leaked the file
    # handle with open(fn, 'rb').read().
    with open(fn, 'rb') as f:
        fn_data = f.read()
    self.assertEqual(fn_data, test_image)
    records = full_data['avatar_dir_records']
    record_path = [record['path'] for record in records]
    record_s3_path = [record['s3_path'] for record in records]
    self.assertIn(original_avatar_path_id, record_path)
    self.assertIn(original_avatar_path_id, record_s3_path)
    check_variable_type(records[0]['user_profile_id'], records[0]['realm_id'])
|
2018-05-30 14:55:30 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_zulip_realm(self) -> None:
    """Check the contents of a full realm export, both with all users
    exportable and with exportable_user_ids restricted."""
    realm = Realm.objects.get(string_id='zulip')
    # Temporarily remove the realm emoji before exporting, restoring it
    # afterward so later tests still see it.
    realm_emoji = RealmEmoji.objects.get(realm=realm)
    realm_emoji.delete()
    full_data = self._export_realm(realm)
    realm_emoji.save()

    data = full_data['realm']
    # With no user restriction, there should be no cross-realm or
    # mirror-dummy user rows in the export.
    self.assertEqual(len(data['zerver_userprofile_crossrealm']), 0)
    self.assertEqual(len(data['zerver_userprofile_mirrordummy']), 0)

    def get_set(table: str, field: str) -> Set[str]:
        # Collect the distinct values of `field` across rows of `table`.
        values = set(r[field] for r in data[table])
        # print('set(%s)' % sorted(values))
        return values

    def find_by_id(table: str, db_id: int) -> Dict[str, Any]:
        # Return the unique row in `table` whose id equals db_id.
        return [
            r for r in data[table]
            if r['id'] == db_id][0]

    exported_user_emails = get_set('zerver_userprofile', 'email')
    self.assertIn(self.example_email('cordelia'), exported_user_emails)
    self.assertIn('default-bot@zulip.com', exported_user_emails)
    self.assertIn('emailgateway@zulip.com', exported_user_emails)

    exported_streams = get_set('zerver_stream', 'name')
    self.assertEqual(
        exported_streams,
        set([u'Denmark', u'Rome', u'Scotland', u'Venice', u'Verona'])
    )

    data = full_data['message']
    um = UserMessage.objects.all()[0]
    exported_um = find_by_id('zerver_usermessage', um.id)
    self.assertEqual(exported_um['message'], um.message_id)
    self.assertEqual(exported_um['user_profile'], um.user_profile_id)

    exported_message = find_by_id('zerver_message', um.message_id)
    self.assertEqual(exported_message['content'], um.message.content)

    # TODO, extract get_set/find_by_id, so we can split this test up

    # Now, restrict users
    cordelia = self.example_user('cordelia')
    hamlet = self.example_user('hamlet')
    user_ids = set([cordelia.id, hamlet.id])

    # Same delete/export/restore dance for the realm emoji as above.
    realm_emoji = RealmEmoji.objects.get(realm=realm)
    realm_emoji.delete()
    full_data = self._export_realm(realm, exportable_user_ids=user_ids)
    realm_emoji.save()

    data = full_data['realm']
    exported_user_emails = get_set('zerver_userprofile', 'email')
    self.assertIn(self.example_email('cordelia'), exported_user_emails)
    self.assertIn(self.example_email('hamlet'), exported_user_emails)
    self.assertNotIn('default-bot@zulip.com', exported_user_emails)
    self.assertNotIn(self.example_email('iago'), exported_user_emails)

    # Users excluded from the export appear as mirror dummies instead.
    dummy_user_emails = get_set('zerver_userprofile_mirrordummy', 'email')
    self.assertIn(self.example_email('iago'), dummy_user_emails)
    self.assertNotIn(self.example_email('cordelia'), dummy_user_emails)
|
2018-05-30 17:09:52 +02:00
|
|
|
|
|
|
|
def test_export_single_user(self) -> None:
    """Check that a single-user export (do_export_user) writes consistent
    user.json and messages files for that user only."""
    output_dir = self._make_output_dir()
    cordelia = self.example_user('cordelia')

    with patch('logging.info'):
        do_export_user(cordelia, output_dir)

    def read_file(fn: str) -> Any:
        # Parse one JSON artifact from the export directory.
        full_fn = os.path.join(output_dir, fn)
        with open(full_fn) as f:
            return ujson.load(f)

    def get_set(data: List[Dict[str, Any]], field: str) -> Set[str]:
        # Collect the distinct values of `field` across the given rows.
        values = set(r[field] for r in data)
        # print('set(%s)' % sorted(values))
        return values

    messages = read_file('messages-000001.json')
    user = read_file('user.json')

    # Only cordelia herself should appear in the userprofile table.
    exported_user_id = get_set(user['zerver_userprofile'], 'id')
    self.assertEqual(exported_user_id, set([cordelia.id]))
    exported_user_email = get_set(user['zerver_userprofile'], 'email')
    self.assertEqual(exported_user_email, set([cordelia.email]))

    exported_recipient_type_id = get_set(user['zerver_recipient'], 'type_id')
    self.assertIn(cordelia.id, exported_recipient_type_id)

    exported_stream_id = get_set(user['zerver_stream'], 'id')
    self.assertIn(list(exported_stream_id)[0], exported_recipient_type_id)

    # Every exported recipient should correspond to one of cordelia's
    # subscriptions, and vice versa.
    exported_recipient_id = get_set(user['zerver_recipient'], 'id')
    exported_subscription_recipient = get_set(user['zerver_subscription'], 'recipient')
    self.assertEqual(exported_recipient_id, exported_subscription_recipient)

    exported_messages_recipient = get_set(messages['zerver_message'], 'recipient')
    self.assertIn(list(exported_messages_recipient)[0], exported_recipient_id)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
"""
|
|
|
|
Tests for import_realm
|
|
|
|
"""
|
|
|
|
def test_import_realm(self) -> None:
|
|
|
|
|
|
|
|
original_realm = Realm.objects.get(string_id='zulip')
|
|
|
|
RealmEmoji.objects.get(realm=original_realm).delete()
|
2018-05-25 18:54:22 +02:00
|
|
|
# data to test import of huddles
|
|
|
|
huddle = [
|
|
|
|
self.example_email('hamlet'),
|
|
|
|
self.example_email('othello')
|
|
|
|
]
|
|
|
|
self.send_huddle_message(
|
|
|
|
self.example_email('cordelia'), huddle, 'test huddle message'
|
|
|
|
)
|
|
|
|
|
2018-07-12 16:34:26 +02:00
|
|
|
# data to test import of hotspots
|
2018-07-14 16:10:45 +02:00
|
|
|
sample_user = self.example_user('hamlet')
|
|
|
|
|
2018-07-12 16:34:26 +02:00
|
|
|
UserHotspot.objects.create(
|
2018-07-14 16:10:45 +02:00
|
|
|
user=sample_user, hotspot='intro_streams'
|
2018-07-12 16:34:26 +02:00
|
|
|
)
|
|
|
|
|
2018-07-14 16:10:45 +02:00
|
|
|
# data to test import of muted topic
|
|
|
|
stream = get_stream(u'Verona', original_realm)
|
|
|
|
add_topic_mute(
|
|
|
|
user_profile=sample_user,
|
|
|
|
stream_id=stream.id,
|
|
|
|
recipient_id=get_stream_recipient(stream.id).id,
|
|
|
|
topic_name=u'Verona2')
|
|
|
|
|
2018-07-17 19:11:16 +02:00
|
|
|
# data to test import of botstoragedata and botconfigdata
|
|
|
|
bot_profile = do_create_user(
|
|
|
|
email="bot-1@zulip.com",
|
|
|
|
password="test",
|
|
|
|
realm=original_realm,
|
|
|
|
full_name="bot",
|
|
|
|
short_name="bot",
|
|
|
|
bot_type=UserProfile.EMBEDDED_BOT,
|
|
|
|
bot_owner=sample_user)
|
|
|
|
storage = StateHandler(bot_profile)
|
|
|
|
storage.put('some key', 'some value')
|
|
|
|
|
|
|
|
set_bot_config(bot_profile, 'entry 1', 'value 1')
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
self._export_realm(original_realm)
|
|
|
|
|
|
|
|
with patch('logging.info'):
|
2018-12-13 08:19:29 +01:00
|
|
|
with self.settings(BILLING_ENABLED=False):
|
|
|
|
do_import_realm('var/test-export', 'test-zulip')
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# sanity checks
|
|
|
|
|
|
|
|
# test realm
|
|
|
|
self.assertTrue(Realm.objects.filter(string_id='test-zulip').exists())
|
|
|
|
imported_realm = Realm.objects.get(string_id='test-zulip')
|
|
|
|
self.assertNotEqual(imported_realm.id, original_realm.id)
|
|
|
|
|
2018-07-12 17:34:31 +02:00
|
|
|
def assert_realm_values(f: Callable[[Realm], Any], equal: bool=True) -> None:
|
2018-07-10 21:12:02 +02:00
|
|
|
orig_realm_result = f(original_realm)
|
|
|
|
imported_realm_result = f(imported_realm)
|
2018-07-19 15:52:09 +02:00
|
|
|
# orig_realm_result should be truthy and have some values, otherwise
|
|
|
|
# the test is kind of meaningless
|
|
|
|
assert(orig_realm_result)
|
2018-07-12 17:34:31 +02:00
|
|
|
if equal:
|
|
|
|
self.assertEqual(orig_realm_result, imported_realm_result)
|
|
|
|
else:
|
|
|
|
self.assertNotEqual(orig_realm_result, imported_realm_result)
|
2018-07-10 21:12:02 +02:00
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test users
|
2018-07-10 21:12:02 +02:00
|
|
|
assert_realm_values(
|
|
|
|
lambda r: {user.email for user in r.get_admin_users()}
|
|
|
|
)
|
|
|
|
|
|
|
|
assert_realm_values(
|
|
|
|
lambda r: {user.email for user in r.get_active_users()}
|
|
|
|
)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# test stream
|
2018-07-10 21:12:02 +02:00
|
|
|
assert_realm_values(
|
|
|
|
lambda r: {stream.name for stream in get_active_streams(r)}
|
|
|
|
)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# test recipients
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_recipient_stream(r: Realm) -> Stream:
|
2018-07-10 21:12:02 +02:00
|
|
|
return get_stream_recipient(
|
|
|
|
Stream.objects.get(name='Verona', realm=r).id
|
|
|
|
)
|
|
|
|
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_recipient_user(r: Realm) -> UserProfile:
|
2018-07-10 21:12:02 +02:00
|
|
|
return get_personal_recipient(
|
|
|
|
UserProfile.objects.get(full_name='Iago', realm=r).id
|
|
|
|
)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
2018-07-10 21:12:02 +02:00
|
|
|
assert_realm_values(lambda r: get_recipient_stream(r).type)
|
|
|
|
assert_realm_values(lambda r: get_recipient_user(r).type)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# test subscription
|
2018-07-10 21:12:02 +02:00
|
|
|
def get_subscribers(recipient: Recipient) -> Set[str]:
|
|
|
|
subscriptions = Subscription.objects.filter(recipient=recipient)
|
|
|
|
users = {sub.user_profile.email for sub in subscriptions}
|
|
|
|
return users
|
|
|
|
|
|
|
|
assert_realm_values(
|
|
|
|
lambda r: get_subscribers(get_recipient_stream(r))
|
|
|
|
)
|
|
|
|
|
|
|
|
assert_realm_values(
|
|
|
|
lambda r: get_subscribers(get_recipient_user(r))
|
|
|
|
)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# test custom profile fields
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_custom_profile_field_names(r: Realm) -> Set[str]:
|
2018-07-10 21:12:02 +02:00
|
|
|
custom_profile_fields = CustomProfileField.objects.filter(realm=r)
|
|
|
|
custom_profile_field_names = {field.name for field in custom_profile_fields}
|
|
|
|
return custom_profile_field_names
|
|
|
|
|
|
|
|
assert_realm_values(get_custom_profile_field_names)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
2018-07-16 17:15:42 +02:00
|
|
|
def get_custom_profile_with_field_type_user(r: Realm) -> Tuple[Set[Any],
|
|
|
|
Set[Any],
|
|
|
|
Set[FrozenSet[str]]]:
|
|
|
|
fields = CustomProfileField.objects.filter(
|
|
|
|
field_type=CustomProfileField.USER,
|
|
|
|
realm=r)
|
|
|
|
|
|
|
|
def get_email(user_id: int) -> str:
|
|
|
|
return UserProfile.objects.get(id=user_id).email
|
|
|
|
|
|
|
|
def get_email_from_value(field_value: CustomProfileFieldValue) -> Set[str]:
|
|
|
|
user_id_list = ujson.loads(field_value.value)
|
|
|
|
return {get_email(user_id) for user_id in user_id_list}
|
|
|
|
|
|
|
|
def custom_profile_field_values_for(fields: List[CustomProfileField]) -> Set[FrozenSet[str]]:
|
|
|
|
user_emails = set() # type: Set[FrozenSet[str]]
|
|
|
|
for field in fields:
|
|
|
|
values = CustomProfileFieldValue.objects.filter(field=field)
|
|
|
|
for value in values:
|
|
|
|
user_emails.add(frozenset(get_email_from_value(value)))
|
|
|
|
return user_emails
|
|
|
|
|
|
|
|
field_names, field_hints = (set() for i in range(2))
|
|
|
|
for field in fields:
|
|
|
|
field_names.add(field.name)
|
|
|
|
field_hints.add(field.hint)
|
|
|
|
|
|
|
|
return (field_hints, field_names, custom_profile_field_values_for(fields))
|
|
|
|
|
|
|
|
assert_realm_values(get_custom_profile_with_field_type_user)
|
|
|
|
|
2018-07-05 20:08:40 +02:00
|
|
|
# test realmauditlog
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_realm_audit_log_event_type(r: Realm) -> Set[str]:
|
2019-02-13 02:22:36 +01:00
|
|
|
realmauditlogs = RealmAuditLog.objects.filter(realm=r).exclude(
|
|
|
|
event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED)
|
2018-07-10 21:12:02 +02:00
|
|
|
realmauditlog_event_type = {log.event_type for log in realmauditlogs}
|
|
|
|
return realmauditlog_event_type
|
|
|
|
|
|
|
|
assert_realm_values(get_realm_audit_log_event_type)
|
2018-07-05 20:08:40 +02:00
|
|
|
|
2018-05-25 18:54:22 +02:00
|
|
|
# test huddles
|
2018-07-12 17:34:31 +02:00
|
|
|
def get_huddle_hashes(r: str) -> str:
|
|
|
|
short_names = ['cordelia', 'hamlet', 'othello']
|
|
|
|
user_id_list = [UserProfile.objects.get(realm=r, short_name=name).id for name in short_names]
|
|
|
|
huddle_hash = get_huddle_hash(user_id_list)
|
|
|
|
return huddle_hash
|
|
|
|
|
|
|
|
assert_realm_values(get_huddle_hashes, equal=False)
|
|
|
|
|
|
|
|
def get_huddle_message(r: str) -> str:
|
|
|
|
huddle_hash = get_huddle_hashes(r)
|
|
|
|
huddle_id = Huddle.objects.get(huddle_hash=huddle_hash).id
|
|
|
|
huddle_recipient = Recipient.objects.get(type_id=huddle_id, type=3)
|
|
|
|
huddle_message = Message.objects.get(recipient=huddle_recipient)
|
|
|
|
return huddle_message.content
|
|
|
|
|
|
|
|
assert_realm_values(get_huddle_message)
|
|
|
|
self.assertEqual(get_huddle_message(imported_realm), 'test huddle message')
|
2018-05-25 18:54:22 +02:00
|
|
|
|
2018-07-12 16:34:26 +02:00
|
|
|
# test userhotspot
|
|
|
|
def get_user_hotspots(r: str) -> Set[str]:
|
|
|
|
user_profile = UserProfile.objects.get(realm=r, short_name='hamlet')
|
|
|
|
hotspots = UserHotspot.objects.filter(user=user_profile)
|
|
|
|
user_hotspots = {hotspot.hotspot for hotspot in hotspots}
|
|
|
|
return user_hotspots
|
|
|
|
|
|
|
|
assert_realm_values(get_user_hotspots)
|
|
|
|
|
2018-07-14 16:10:45 +02:00
|
|
|
# test muted topics
|
|
|
|
def get_muted_topics(r: Realm) -> Set[str]:
    # The set of topic names hamlet has muted in realm `r`; compared
    # across the exported and imported realms.
    hamlet = UserProfile.objects.get(realm=r, short_name='hamlet')
    rows = MutedTopic.objects.filter(user_profile=hamlet)
    return {row.topic_name for row in rows}
|
|
|
|
|
|
|
|
assert_realm_values(get_muted_topics)
|
|
|
|
|
2018-07-12 13:27:12 +02:00
|
|
|
# test usergroups
|
|
|
|
assert_realm_values(
|
|
|
|
lambda r: {group.name for group in UserGroup.objects.filter(realm=r)}
|
|
|
|
)
|
|
|
|
|
|
|
|
def get_user_membership(r: Realm) -> Set[str]:
    # NOTE(review): annotation fixed from `str` -- callers pass a Realm,
    # and the UserGroup lookup below filters by realm=r.
    # Returns the emails of the members of the 'hamletcharacters' group.
    usergroup = UserGroup.objects.get(realm=r, name='hamletcharacters')
    usergroup_membership = UserGroupMembership.objects.filter(user_group=usergroup)
    users = {membership.user_profile.email for membership in usergroup_membership}
    return users
|
|
|
|
|
|
|
|
assert_realm_values(get_user_membership)
|
|
|
|
|
2018-07-17 19:11:16 +02:00
|
|
|
# test botstoragedata and botconfigdata
|
|
|
|
def get_botstoragedata(r: Realm) -> Dict[str, Any]:
    # The single BotStorageData row for the test bot, flattened to a
    # plain dict so assert_realm_values can compare realms directly.
    bot = UserProfile.objects.get(full_name="bot", realm=r)
    storage = BotStorageData.objects.get(bot_profile=bot)
    return dict(key=storage.key, data=storage.value)
|
|
|
|
|
|
|
|
assert_realm_values(get_botstoragedata)
|
|
|
|
|
|
|
|
def get_botconfigdata(r: Realm) -> Dict[str, Any]:
    # The single BotConfigData row for the test bot, flattened to a
    # plain dict so assert_realm_values can compare realms directly.
    bot = UserProfile.objects.get(full_name="bot", realm=r)
    config = BotConfigData.objects.get(bot_profile=bot)
    return dict(key=config.key, data=config.value)
|
|
|
|
|
|
|
|
assert_realm_values(get_botconfigdata)
|
|
|
|
|
2018-06-04 18:21:58 +02:00
|
|
|
# test messages
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_stream_messages(r: Realm) -> Any:
    # NOTE(review): return annotation fixed from `Message` -- this returns a
    # QuerySet of Message rows, not a single Message; annotated as Any since
    # the file does not import a QuerySet type.
    recipient = get_recipient_stream(r)
    messages = Message.objects.filter(recipient=recipient)
    return messages
|
|
|
|
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_stream_topics(r: Realm) -> Set[str]:
    # Topic names appearing on the exported stream's messages.
    return {message.topic_name() for message in get_stream_messages(r)}
|
|
|
|
|
|
|
|
assert_realm_values(get_stream_topics)
|
2018-06-04 18:21:58 +02:00
|
|
|
|
|
|
|
# test usermessages
|
2018-07-12 17:38:06 +02:00
|
|
|
def get_usermessages_user(r: Realm) -> Set[Any]:
    # Emails of the users holding a UserMessage row for the first stream
    # message; ordering by content makes both realms pick the same message.
    first_message = get_stream_messages(r).order_by('content')[0]
    rows = UserMessage.objects.filter(message=first_message)
    return {row.user_profile.email for row in rows}
|
|
|
|
|
|
|
|
assert_realm_values(get_usermessages_user)
|
2018-06-06 18:06:16 +02:00
|
|
|
|
|
|
|
def test_import_files_from_local(self) -> None:
    """Export the zulip realm, re-import it, and verify that attachment,
    emoji, and avatar files all landed on the local-uploads backend."""
    source_realm = Realm.objects.get(string_id='zulip')
    self._setup_export_files()
    self._export_realm(source_realm)

    with patch('logging.info'):
        do_import_realm('var/test-export', 'test-zulip')
    imported_realm = Realm.objects.get(string_id='test-zulip')

    # Attachment: the one uploaded file must exist on disk with the
    # fixture payload's size.
    attachment = Attachment.objects.get(realm=imported_realm)
    self.assertEqual(len(b'zulip!'), attachment.size)
    attachment_path = os.path.join(settings.LOCAL_UPLOADS_DIR, 'files', attachment.path_id)
    self.assertTrue(os.path.isfile(attachment_path))

    # Realm emoji: its image must be present under the avatars directory.
    emoji = RealmEmoji.objects.get(realm=imported_realm)
    emoji_relative_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=imported_realm.id,
        emoji_file_name=emoji.file_name,
    )
    self.assertTrue(os.path.isfile(
        os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_relative_path)))

    # Avatar: the ".original" image for a message sender must be present.
    sender_email = Message.objects.all()[0].sender.email
    sender = UserProfile.objects.get(email=sender_email, realm=imported_realm)
    original_avatar_path = user_avatar_path(sender) + ".original"
    self.assertTrue(os.path.isfile(
        os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", original_avatar_path)))
|
2018-06-06 21:37:40 +02:00
|
|
|
|
|
|
|
@use_s3_backend
def test_import_files_from_s3(self) -> None:
    """Export the zulip realm, re-import it, and verify that attachment,
    emoji, and avatar files all landed in the right S3 buckets.

    Fixes: the fixture image was read via a bare open(...).read(),
    leaking the file handle; it is now read inside a `with` block.
    """
    uploads_bucket, avatar_bucket = create_s3_buckets(
        settings.S3_AUTH_UPLOADS_BUCKET,
        settings.S3_AVATAR_BUCKET)

    realm = Realm.objects.get(string_id='zulip')
    self._setup_export_files()
    self._export_realm(realm)
    with patch('logging.info'):
        do_import_realm('var/test-export', 'test-zulip')
    imported_realm = Realm.objects.get(string_id='test-zulip')
    with open(get_test_image_file('img.png').name, 'rb') as f:
        test_image_data = f.read()

    # Test attachments: one uploaded file, right size, right S3 content.
    uploaded_file = Attachment.objects.get(realm=imported_realm)
    self.assertEqual(len(b'zulip!'), uploaded_file.size)

    attachment_content = uploads_bucket.get_key(uploaded_file.path_id).get_contents_as_string()
    self.assertEqual(b"zulip!", attachment_content)

    # Test emojis: the emoji image must exist in the avatar bucket.
    realm_emoji = RealmEmoji.objects.get(realm=imported_realm)
    emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
        realm_id=imported_realm.id,
        emoji_file_name=realm_emoji.file_name,
    )
    emoji_key = avatar_bucket.get_key(emoji_path)
    self.assertIsNotNone(emoji_key)
    self.assertEqual(emoji_key.key, emoji_path)

    # Test avatars: the ".original" image must match the fixture bytes.
    user_email = Message.objects.all()[0].sender.email
    user_profile = UserProfile.objects.get(email=user_email, realm=imported_realm)
    avatar_path_id = user_avatar_path(user_profile) + ".original"
    original_image_key = avatar_bucket.get_key(avatar_path_id)
    self.assertEqual(original_image_key.key, avatar_path_id)
    image_data = original_image_key.get_contents_as_string()
    self.assertEqual(image_data, test_image_data)
|
2018-09-21 05:39:35 +02:00
|
|
|
|
2018-10-16 12:34:47 +02:00
|
|
|
def test_get_incoming_message_ids(self) -> None:
    """get_incoming_message_ids orders the fixture's message ids by date
    when sort_by_date=True, and by id otherwise."""
    import_dir = os.path.join(settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "import_fixtures")
    cases = [
        (True, [888, 999, 555]),   # date order
        (False, [555, 888, 999]),  # id order
    ]
    for sort_by_date, expected_ids in cases:
        message_ids = get_incoming_message_ids(
            import_dir=import_dir,
            sort_by_date=sort_by_date,
        )
        self.assertEqual(message_ids, expected_ids)
|
|
|
|
|
2018-09-21 05:39:35 +02:00
|
|
|
def test_plan_type(self) -> None:
    """Importing a realm resets its plan_type: LIMITED when billing is
    enabled, SELF_HOSTED when it is not, logging a plan-type change.

    Fixes: the plan_type checks used `self.assertTrue(a, b)`, where the
    second argument is just the failure *message* -- the assertion passed
    for any truthy plan_type.  Use assertEqual to actually compare.
    """
    realm = get_realm('zulip')
    realm.plan_type = Realm.STANDARD
    realm.save(update_fields=['plan_type'])

    self._setup_export_files()
    self._export_realm(realm)

    with patch('logging.info'):
        with self.settings(BILLING_ENABLED=True):
            realm = do_import_realm('var/test-export', 'test-zulip-1')
            self.assertEqual(realm.plan_type, Realm.LIMITED)
            self.assertTrue(RealmAuditLog.objects.filter(
                realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED).exists())
        with self.settings(BILLING_ENABLED=False):
            realm = do_import_realm('var/test-export', 'test-zulip-2')
            self.assertEqual(realm.plan_type, Realm.SELF_HOSTED)
            self.assertTrue(RealmAuditLog.objects.filter(
                realm=realm, event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED).exists())
|