import filecmp
import os
from typing import Any, Dict, List
from unittest.mock import call, patch

import orjson

from zerver.data_import.import_util import SubscriberHandler
from zerver.data_import.mattermost import (
    build_reactions,
    check_user_in_team,
    convert_channel_data,
    convert_huddle_data,
    convert_user_data,
    create_username_to_user_mapping,
    do_convert_data,
    generate_huddle_name,
    get_mentioned_user_ids,
    label_mirror_dummy_users,
    mattermost_data_file_to_dict,
    process_user,
    reset_mirror_dummy_users,
    write_emoticon_data,
)
from zerver.data_import.mattermost_user import UserHandler
from zerver.data_import.sequencer import IdMapper
from zerver.lib.emoji import name_to_codepoint
from zerver.lib.import_realm import do_import_realm
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import Message, Reaction, Recipient, UserProfile, get_realm, get_user


class MatterMostImporter(ZulipTestCase):
    def test_mattermost_data_file_to_dict(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        self.assertEqual(len(mattermost_data), 7)

        self.assertEqual(mattermost_data["version"], [1])

        self.assertEqual(len(mattermost_data["team"]), 2)
        self.assertEqual(mattermost_data["team"][0]["name"], "gryffindor")

        self.assertEqual(len(mattermost_data["channel"]), 5)
        self.assertEqual(mattermost_data["channel"][0]["name"], "gryffindor-common-room")
        self.assertEqual(mattermost_data["channel"][0]["team"], "gryffindor")

        self.assertEqual(len(mattermost_data["user"]), 5)
        self.assertEqual(mattermost_data["user"][1]["username"], "harry")
        self.assertEqual(len(mattermost_data["user"][1]["teams"]), 1)

        self.assertEqual(len(mattermost_data["post"]["channel_post"]), 20)
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["team"], "gryffindor")
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["channel"], "dumbledores-army")
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["user"], "harry")
        self.assertEqual(len(mattermost_data["post"]["channel_post"][0]["replies"]), 1)

        self.assertEqual(len(mattermost_data["emoji"]), 2)
        self.assertEqual(mattermost_data["emoji"][0]["name"], "peerdium")

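        # The direct_channel fixture variant additionally contains direct
        # messages and group direct messages between users.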
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures/direct_channel")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)

        self.assertEqual(len(mattermost_data["post"]["channel_post"]), 4)
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["team"], "gryffindor")
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["channel"], "gryffindor-common-room")
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["user"], "ron")
        self.assertEqual(mattermost_data["post"]["channel_post"][0]["replies"], None)

        self.assertEqual(len(mattermost_data["post"]["direct_post"]), 7)
        self.assertEqual(mattermost_data["post"]["direct_post"][0]["user"], "ron")
        self.assertEqual(mattermost_data["post"]["direct_post"][0]["replies"], None)
        self.assertEqual(mattermost_data["post"]["direct_post"][0]["message"], "hey harry")
        self.assertEqual(mattermost_data["post"]["direct_post"][0]["channel_members"], ["ron", "harry"])

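    # process_user() converts a single Mattermost user record into a Zulip
    # user dict, assigning user ids via the shared IdMapper.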
    def test_process_user(self) -> None:
        user_id_mapper = IdMapper()
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        harry_dict = username_to_user["harry"]
        harry_dict["is_mirror_dummy"] = False

        realm_id = 3

        team_name = "gryffindor"
        user = process_user(harry_dict, realm_id, team_name, user_id_mapper)
        self.assertEqual(user["avatar_source"], 'G')
        self.assertEqual(user["delivery_email"], "harry@zulip.com")
        self.assertEqual(user["email"], "harry@zulip.com")
        self.assertEqual(user["full_name"], "Harry Potter")
        self.assertEqual(user["id"], 1)
        self.assertEqual(user["is_active"], True)
        self.assertEqual(user["role"], UserProfile.ROLE_REALM_OWNER)
        self.assertEqual(user["is_mirror_dummy"], False)
        self.assertEqual(user["realm"], 3)
        self.assertEqual(user["short_name"], "harry")
        self.assertEqual(user["timezone"], "UTC")

        # A user with a `null` team value shouldn't be an admin.
        harry_dict["teams"] = None
        user = process_user(harry_dict, realm_id, team_name, user_id_mapper)
        self.assertEqual(user["role"], UserProfile.ROLE_MEMBER)

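        # A user marked as a mirror dummy should be imported as inactive.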
team_name = "slytherin"
|
2019-09-25 15:56:34 +02:00
|
|
|
snape_dict = username_to_user["snape"]
|
2019-04-04 13:16:02 +02:00
|
|
|
snape_dict["is_mirror_dummy"] = True
|
|
|
|
user = process_user(snape_dict, realm_id, team_name, user_id_mapper)
|
|
|
|
self.assertEqual(user["avatar_source"], 'G')
|
|
|
|
self.assertEqual(user["delivery_email"], "snape@zulip.com")
|
|
|
|
self.assertEqual(user["email"], "snape@zulip.com")
|
|
|
|
self.assertEqual(user["full_name"], "Severus Snape")
|
|
|
|
self.assertEqual(user["id"], 2)
|
|
|
|
self.assertEqual(user["is_active"], False)
|
2019-10-05 02:35:07 +02:00
|
|
|
self.assertEqual(user["role"], UserProfile.ROLE_MEMBER)
|
2019-04-04 13:16:02 +02:00
|
|
|
self.assertEqual(user["is_mirror_dummy"], True)
|
|
|
|
self.assertEqual(user["realm"], 3)
|
|
|
|
self.assertEqual(user["short_name"], "snape")
|
|
|
|
self.assertEqual(user["timezone"], "UTC")
|
|
|
|
|
|
|
|
    def test_convert_user_data(self) -> None:
        user_id_mapper = IdMapper()
        realm_id = 3
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        team_name = "gryffindor"
        user_handler = UserHandler()
        convert_user_data(user_handler, user_id_mapper, username_to_user, realm_id, team_name)
        self.assertEqual(len(user_handler.get_all_users()), 2)
        self.assertTrue(user_id_mapper.has("harry"))
        self.assertTrue(user_id_mapper.has("ron"))
        self.assertEqual(user_handler.get_user(user_id_mapper.get("harry"))["full_name"], "Harry Potter")
        self.assertEqual(user_handler.get_user(user_id_mapper.get("ron"))["full_name"], "Ron Weasley")

        team_name = "slytherin"
        user_handler = UserHandler()
        convert_user_data(user_handler, user_id_mapper, username_to_user, realm_id, team_name)
        self.assertEqual(len(user_handler.get_all_users()), 3)
        self.assertTrue(user_id_mapper.has("malfoy"))
        self.assertTrue(user_id_mapper.has("pansy"))
        self.assertTrue(user_id_mapper.has("snape"))

        team_name = "gryffindor"
        # Snape is a mirror dummy user in Harry's team.
        label_mirror_dummy_users(2, team_name, mattermost_data, username_to_user)
        user_handler = UserHandler()
        convert_user_data(user_handler, user_id_mapper, username_to_user, realm_id, team_name)
        self.assertEqual(len(user_handler.get_all_users()), 3)
        self.assertTrue(user_id_mapper.has("snape"))

        team_name = "slytherin"
        user_handler = UserHandler()
        convert_user_data(user_handler, user_id_mapper, username_to_user, realm_id, team_name)
        self.assertEqual(len(user_handler.get_all_users()), 3)

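    # convert_channel_data() creates a Zulip stream for each Mattermost channel
    # in the team and records each stream's subscribers.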
    def test_convert_channel_data(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        user_handler = UserHandler()
        subscriber_handler = SubscriberHandler()
        stream_id_mapper = IdMapper()
        user_id_mapper = IdMapper()
        team_name = "gryffindor"

        convert_user_data(
            user_handler=user_handler,
            user_id_mapper=user_id_mapper,
            user_data_map=username_to_user,
            realm_id=3,
            team_name=team_name,
        )

        zerver_stream = convert_channel_data(
            channel_data=mattermost_data["channel"],
            user_data_map=username_to_user,
            subscriber_handler=subscriber_handler,
            stream_id_mapper=stream_id_mapper,
            user_id_mapper=user_id_mapper,
            realm_id=3,
            team_name=team_name,
        )

        self.assertEqual(len(zerver_stream), 3)

        self.assertEqual(zerver_stream[0]["name"], "Gryffindor common room")
        self.assertEqual(zerver_stream[0]["invite_only"], False)
        self.assertEqual(zerver_stream[0]["description"], "A place for talking about Gryffindor common room")
        self.assertEqual(zerver_stream[0]["rendered_description"], "")
        self.assertEqual(zerver_stream[0]["realm"], 3)

        self.assertEqual(zerver_stream[1]["name"], "Gryffindor quidditch team")
        self.assertEqual(zerver_stream[1]["invite_only"], False)
        self.assertEqual(zerver_stream[1]["description"], "A place for talking about Gryffindor quidditch team")
        self.assertEqual(zerver_stream[1]["rendered_description"], "")
        self.assertEqual(zerver_stream[1]["realm"], 3)

        self.assertEqual(zerver_stream[2]["name"], "Dumbledores army")
        self.assertEqual(zerver_stream[2]["invite_only"], True)
        self.assertEqual(zerver_stream[2]["description"], "A place for talking about Dumbledores army")
        self.assertEqual(zerver_stream[2]["rendered_description"], "")
        self.assertEqual(zerver_stream[2]["realm"], 3)

        self.assertTrue(stream_id_mapper.has("gryffindor-common-room"))
        self.assertTrue(stream_id_mapper.has("gryffindor-quidditch-team"))
        self.assertTrue(stream_id_mapper.has("dumbledores-army"))

        # TODO: Add ginny
        ron_id = user_id_mapper.get("ron")
        harry_id = user_id_mapper.get("harry")
        self.assertEqual({ron_id, harry_id}, {1, 2})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("gryffindor-common-room")), {ron_id, harry_id})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("gryffindor-quidditch-team")), {ron_id, harry_id})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("dumbledores-army")), {ron_id, harry_id})

        # Converting channel data when a user's `teams` value is `null`.
        username_to_user["ron"].update(teams=None)
        zerver_stream = convert_channel_data(
            channel_data=mattermost_data["channel"],
            user_data_map=username_to_user,
            subscriber_handler=subscriber_handler,
            stream_id_mapper=stream_id_mapper,
            user_id_mapper=user_id_mapper,
            realm_id=3,
            team_name=team_name,
        )
        harry_id = user_id_mapper.get("harry")
        self.assertIn(harry_id, {1, 2})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("gryffindor-common-room")), {harry_id})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("gryffindor-quidditch-team")), {harry_id})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("dumbledores-army")), {harry_id})

        team_name = "slytherin"
        zerver_stream = convert_channel_data(
            channel_data=mattermost_data["channel"],
            user_data_map=username_to_user,
            subscriber_handler=subscriber_handler,
            stream_id_mapper=stream_id_mapper,
            user_id_mapper=user_id_mapper,
            realm_id=4,
            team_name=team_name,
        )

        malfoy_id = user_id_mapper.get("malfoy")
        pansy_id = user_id_mapper.get("pansy")
        snape_id = user_id_mapper.get("snape")
        self.assertEqual({malfoy_id, pansy_id, snape_id}, {3, 4, 5})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("slytherin-common-room")), {malfoy_id, pansy_id, snape_id})
        self.assertEqual(subscriber_handler.get_users(stream_id=stream_id_mapper.get("slytherin-quidditch-team")), {malfoy_id, pansy_id})

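    # convert_huddle_data() converts Mattermost group direct message channels
    # into Zulip huddles.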
    def test_convert_huddle_data(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures/direct_channel")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        user_handler = UserHandler()
        subscriber_handler = SubscriberHandler()
        huddle_id_mapper = IdMapper()
        user_id_mapper = IdMapper()
        team_name = "gryffindor"

        convert_user_data(
            user_handler=user_handler,
            user_id_mapper=user_id_mapper,
            user_data_map=username_to_user,
            realm_id=3,
            team_name=team_name,
        )

        zerver_huddle = convert_huddle_data(
            huddle_data=mattermost_data["direct_channel"],
            user_data_map=username_to_user,
            subscriber_handler=subscriber_handler,
            huddle_id_mapper=huddle_id_mapper,
            user_id_mapper=user_id_mapper,
            realm_id=3,
            team_name=team_name,
        )

        self.assertEqual(len(zerver_huddle), 1)
        huddle_members = mattermost_data["direct_channel"][1]["members"]
        huddle_name = generate_huddle_name(huddle_members)

        self.assertTrue(huddle_id_mapper.has(huddle_name))
        self.assertEqual(subscriber_handler.get_users(huddle_id=huddle_id_mapper.get(huddle_name)), {1, 2, 3})

    def test_write_emoticon_data(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        output_dir = self.make_import_output_dir("mattermost")

        with self.assertLogs(level="INFO"):
            zerver_realm_emoji = write_emoticon_data(
                realm_id=3,
                custom_emoji_data=mattermost_data["emoji"],
                data_dir=self.fixture_file_name("", "mattermost_fixtures"),
                output_dir=output_dir,
            )

        self.assertEqual(len(zerver_realm_emoji), 2)
        self.assertEqual(zerver_realm_emoji[0]["file_name"], "peerdium")
        self.assertEqual(zerver_realm_emoji[0]["realm"], 3)
        self.assertEqual(zerver_realm_emoji[0]["deactivated"], False)

        self.assertEqual(zerver_realm_emoji[1]["file_name"], "tick")
        self.assertEqual(zerver_realm_emoji[1]["realm"], 3)
        self.assertEqual(zerver_realm_emoji[1]["deactivated"], False)

        records_file = os.path.join(output_dir, "emoji", "records.json")
        with open(records_file, "rb") as f:
            records_json = orjson.loads(f.read())

        self.assertEqual(records_json[0]["file_name"], "peerdium")
        self.assertEqual(records_json[0]["realm_id"], 3)
        exported_emoji_path = self.fixture_file_name(mattermost_data["emoji"][0]["image"], "mattermost_fixtures")
        self.assertTrue(filecmp.cmp(records_json[0]["path"], exported_emoji_path))

        self.assertEqual(records_json[1]["file_name"], "tick")
        self.assertEqual(records_json[1]["realm_id"], 3)
        exported_emoji_path = self.fixture_file_name(mattermost_data["emoji"][1]["image"], "mattermost_fixtures")
        self.assertTrue(filecmp.cmp(records_json[1]["path"], exported_emoji_path))

    def test_get_mentioned_user_ids(self) -> None:
        user_id_mapper = IdMapper()
        harry_id = user_id_mapper.get("harry")

        raw_message = {
            "content": "Hello @harry",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [harry_id])

        raw_message = {
            "content": "Hello",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [])

        raw_message = {
            "content": "@harry How are you?",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [harry_id])

        raw_message = {
            "content": "@harry @ron Where are you folks?",
        }
        ron_id = user_id_mapper.get("ron")
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [harry_id, ron_id])

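        # Tokens that look like email addresses should not be treated as
        # mentions.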
        raw_message = {
            "content": "@harry.com How are you?",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [])

        raw_message = {
            "content": "hello@harry.com How are you?",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [])

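        # Usernames containing trailing underscores or periods are matched,
        # provided such a user actually exists.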
harry_id = user_id_mapper.get("harry_")
|
|
|
|
raw_message = {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"content": "Hello @harry_",
|
2019-04-04 13:16:02 +02:00
|
|
|
}
|
|
|
|
ids = get_mentioned_user_ids(raw_message, user_id_mapper)
|
|
|
|
self.assertEqual(list(ids), [harry_id])
|
|
|
|
|
|
|
|
harry_id = user_id_mapper.get("harry.")
|
|
|
|
raw_message = {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"content": "Hello @harry.",
|
2019-04-04 13:16:02 +02:00
|
|
|
}
|
|
|
|
ids = get_mentioned_user_ids(raw_message, user_id_mapper)
|
|
|
|
self.assertEqual(list(ids), [harry_id])
|
|
|
|
|
|
|
|
harry_id = user_id_mapper.get("ha_rry.")
|
|
|
|
raw_message = {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"content": "Hello @ha_rry.",
|
2019-04-04 13:16:02 +02:00
|
|
|
}
|
|
|
|
ids = get_mentioned_user_ids(raw_message, user_id_mapper)
|
|
|
|
self.assertEqual(list(ids), [harry_id])
|
|
|
|
|
|
|
|
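        # Trailing punctuation is captured as part of the candidate username,
        # so "@ron." and "@ron_" do not match the existing "ron" user.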
        ron_id = user_id_mapper.get("ron")
        raw_message = {
            "content": "Hello @ron.",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [])

        raw_message = {
            "content": "Hello @ron_",
        }
        ids = get_mentioned_user_ids(raw_message, user_id_mapper)
        self.assertEqual(list(ids), [])

    def test_check_user_in_team(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        harry = username_to_user["harry"]
        self.assertTrue(check_user_in_team(harry, "gryffindor"))
        self.assertFalse(check_user_in_team(harry, "slytherin"))

        snape = username_to_user["snape"]
        self.assertFalse(check_user_in_team(snape, "gryffindor"))
        self.assertTrue(check_user_in_team(snape, "slytherin"))

        snape.update(teams=None)
        self.assertFalse(check_user_in_team(snape, "slytherin"))

    def test_label_mirror_dummy_users(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)
        username_to_user = create_username_to_user_mapping(mattermost_data["user"])
        reset_mirror_dummy_users(username_to_user)

        label_mirror_dummy_users(
            num_teams=2,
            team_name="gryffindor",
            mattermost_data=mattermost_data,
            username_to_user=username_to_user,
        )
        self.assertFalse(username_to_user["harry"]["is_mirror_dummy"])
        self.assertFalse(username_to_user["ron"]["is_mirror_dummy"])
        self.assertFalse(username_to_user["malfoy"]["is_mirror_dummy"])

        # Snape is a mirror dummy since the user sent a message in gryffindor
        # and then left the team.
        self.assertTrue(username_to_user["snape"]["is_mirror_dummy"])

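    # build_reactions() should handle both custom realm emoji ("tick") and
    # unicode emoji ("smile", "world_map") reactions.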
    def test_build_reactions(self) -> None:
        fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures")
        mattermost_data = mattermost_data_file_to_dict(fixture_file_name)

        total_reactions: List[Dict[str, Any]] = []

        reactions = [
            {"user": "harry", "create_at": 1553165521410, "emoji_name": "tick"},
            {"user": "ron", "create_at": 1553166530805, "emoji_name": "smile"},
            {"user": "ron", "create_at": 1553166540953, "emoji_name": "world_map"},
            {"user": "harry", "create_at": 1553166540957, "emoji_name": "world_map"},
        ]

        with self.assertLogs(level="INFO"):
            zerver_realmemoji = write_emoticon_data(
                realm_id=3,
                custom_emoji_data=mattermost_data["emoji"],
                data_dir=self.fixture_file_name("", "mattermost_fixtures"),
                output_dir=self.make_import_output_dir("mattermost"),
            )

        # Make sure tick is present in fixture data
        self.assertEqual(zerver_realmemoji[1]["name"], "tick")
        tick_emoji_code = zerver_realmemoji[1]["id"]

        user_id_mapper = IdMapper()
        harry_id = user_id_mapper.get("harry")
        ron_id = user_id_mapper.get("ron")

        build_reactions(
            realm_id=3,
            total_reactions=total_reactions,
            reactions=reactions,
            message_id=5,
            user_id_mapper=user_id_mapper,
            zerver_realmemoji=zerver_realmemoji,
        )

        smile_emoji_code = name_to_codepoint["smile"]
        world_map_emoji_code = name_to_codepoint["world_map"]

        self.assertEqual(len(total_reactions), 4)
        self.assertEqual(self.get_set(total_reactions, "reaction_type"), {Reaction.REALM_EMOJI, Reaction.UNICODE_EMOJI})
        self.assertEqual(self.get_set(total_reactions, "emoji_name"), {"tick", "smile", "world_map"})
        self.assertEqual(self.get_set(total_reactions, "emoji_code"), {tick_emoji_code, smile_emoji_code,
                                                                       world_map_emoji_code})
        self.assertEqual(self.get_set(total_reactions, "user_profile"), {harry_id, ron_id})
        self.assertEqual(len(self.get_set(total_reactions, "id")), 4)
        self.assertEqual(len(self.get_set(total_reactions, "message")), 1)

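    # Helpers for inspecting the per-team output directories produced by
    # do_convert_data().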
    def team_output_dir(self, output_dir: str, team_name: str) -> str:
        return os.path.join(output_dir, team_name)

    def read_file(self, team_output_dir: str, output_file: str) -> Any:
        full_path = os.path.join(team_output_dir, output_file)
        with open(full_path, "rb") as f:
            return orjson.loads(f.read())

    def test_do_convert_data(self) -> None:
        mattermost_data_dir = self.fixture_file_name("", "mattermost_fixtures")
        output_dir = self.make_import_output_dir("mattermost")

        with patch('builtins.print') as mock_print, self.assertLogs(level='WARNING') as warn_log:
            do_convert_data(
                mattermost_data_dir=mattermost_data_dir,
                output_dir=output_dir,
                masking_content=False,
            )
            self.assertEqual(mock_print.mock_calls, [
                call('Generating data for', 'gryffindor'),
                call('Generating data for', 'slytherin')
            ])
        self.assertEqual(warn_log.output, [
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
        ])

        harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'avatars')), True)
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'emoji')), True)
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'attachment.json')), True)

        realm = self.read_file(harry_team_output_dir, 'realm.json')

        self.assertEqual('Organization imported from Mattermost!',
                         realm['zerver_realm'][0]['description'])

        exported_user_ids = self.get_set(realm['zerver_userprofile'], 'id')
        exported_user_full_names = self.get_set(realm['zerver_userprofile'], 'full_name')
        self.assertEqual({'Harry Potter', 'Ron Weasley', 'Severus Snape'}, exported_user_full_names)

        exported_user_emails = self.get_set(realm['zerver_userprofile'], 'email')
        self.assertEqual({'harry@zulip.com', 'ron@zulip.com', 'snape@zulip.com'}, exported_user_emails)

        self.assertEqual(len(realm['zerver_stream']), 3)
        exported_stream_names = self.get_set(realm['zerver_stream'], 'name')
        self.assertEqual(exported_stream_names, {'Gryffindor common room', 'Gryffindor quidditch team', 'Dumbledores army'})
        self.assertEqual(self.get_set(realm['zerver_stream'], 'realm'), {realm['zerver_realm'][0]['id']})
        self.assertEqual(self.get_set(realm['zerver_stream'], 'deactivated'), {False})

        self.assertEqual(len(realm['zerver_defaultstream']), 0)

        exported_recipient_ids = self.get_set(realm['zerver_recipient'], 'id')
        self.assertEqual(len(exported_recipient_ids), 6)
        exported_recipient_types = self.get_set(realm['zerver_recipient'], 'type')
        self.assertEqual(exported_recipient_types, {1, 2})
        exported_recipient_type_ids = self.get_set(realm['zerver_recipient'], 'type_id')
        self.assertEqual(len(exported_recipient_type_ids), 3)

        exported_subscription_userprofile = self.get_set(realm['zerver_subscription'], 'user_profile')
        self.assertEqual(len(exported_subscription_userprofile), 3)
        exported_subscription_recipients = self.get_set(realm['zerver_subscription'], 'recipient')
        self.assertEqual(len(exported_subscription_recipients), 6)

        messages = self.read_file(harry_team_output_dir, 'messages-000001.json')

        exported_messages_id = self.get_set(messages['zerver_message'], 'id')
        self.assertIn(messages['zerver_message'][0]['sender'], exported_user_ids)
        self.assertIn(messages['zerver_message'][0]['recipient'], exported_recipient_ids)
        self.assertIn(messages['zerver_message'][0]['content'], 'harry joined the channel.\n\n')

        exported_usermessage_userprofiles = self.get_set(messages['zerver_usermessage'], 'user_profile')
        self.assertEqual(len(exported_usermessage_userprofiles), 3)
        exported_usermessage_messages = self.get_set(messages['zerver_usermessage'], 'message')
        self.assertEqual(exported_usermessage_messages, exported_messages_id)

        with self.assertLogs(level="INFO"):
            do_import_realm(
                import_dir=harry_team_output_dir,
                subdomain='gryffindor',
            )

        realm = get_realm('gryffindor')

        self.assertFalse(get_user("harry@zulip.com", realm).is_mirror_dummy)
        self.assertFalse(get_user("ron@zulip.com", realm).is_mirror_dummy)
        self.assertTrue(get_user("snape@zulip.com", realm).is_mirror_dummy)

        messages = Message.objects.filter(sender__realm=realm)
        for message in messages:
            self.assertIsNotNone(message.rendered_content)

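    # The direct_channel fixture contains a single team, so huddles and
    # personal messages get converted as well.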
    def test_do_convert_data_with_direct_messages(self) -> None:
        mattermost_data_dir = self.fixture_file_name("direct_channel", "mattermost_fixtures")
        output_dir = self.make_import_output_dir("mattermost")

        with patch('builtins.print') as mock_print, self.assertLogs(level="INFO"):
            do_convert_data(
                mattermost_data_dir=mattermost_data_dir,
                output_dir=output_dir,
                masking_content=False,
            )
            self.assertEqual(mock_print.mock_calls, [
                call('Generating data for', 'gryffindor'),
            ])

        harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'avatars')), True)
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'emoji')), True)
        self.assertEqual(os.path.exists(os.path.join(harry_team_output_dir, 'attachment.json')), True)

        realm = self.read_file(harry_team_output_dir, 'realm.json')

        self.assertEqual('Organization imported from Mattermost!',
                         realm['zerver_realm'][0]['description'])

        exported_user_ids = self.get_set(realm['zerver_userprofile'], 'id')
        exported_user_full_names = self.get_set(realm['zerver_userprofile'], 'full_name')
        self.assertEqual({'Harry Potter', 'Ron Weasley', 'Ginny Weasley', 'Tom Riddle'}, exported_user_full_names)

        exported_user_emails = self.get_set(realm['zerver_userprofile'], 'email')
        self.assertEqual({'harry@zulip.com', 'ron@zulip.com', 'ginny@zulip.com', 'voldemort@zulip.com'}, exported_user_emails)

        self.assertEqual(len(realm['zerver_stream']), 3)
        exported_stream_names = self.get_set(realm['zerver_stream'], 'name')
        self.assertEqual(exported_stream_names, {'Gryffindor common room', 'Gryffindor quidditch team', 'Dumbledores army'})
        self.assertEqual(self.get_set(realm['zerver_stream'], 'realm'), {realm['zerver_realm'][0]['id']})
        self.assertEqual(self.get_set(realm['zerver_stream'], 'deactivated'), {False})

        self.assertEqual(len(realm['zerver_defaultstream']), 0)

        exported_recipient_ids = self.get_set(realm['zerver_recipient'], 'id')
        self.assertEqual(len(exported_recipient_ids), 8)
        exported_recipient_types = self.get_set(realm['zerver_recipient'], 'type')
        self.assertEqual(exported_recipient_types, {1, 2, 3})
        exported_recipient_type_ids = self.get_set(realm['zerver_recipient'], 'type_id')
        self.assertEqual(len(exported_recipient_type_ids), 4)

        exported_subscription_userprofile = self.get_set(realm['zerver_subscription'], 'user_profile')
        self.assertEqual(len(exported_subscription_userprofile), 4)
        exported_subscription_recipients = self.get_set(realm['zerver_subscription'], 'recipient')
        self.assertEqual(len(exported_subscription_recipients), 8)

        messages = self.read_file(harry_team_output_dir, 'messages-000001.json')

        exported_messages_id = self.get_set(messages['zerver_message'], 'id')
        self.assertIn(messages['zerver_message'][0]['sender'], exported_user_ids)
        self.assertIn(messages['zerver_message'][0]['recipient'], exported_recipient_ids)
        self.assertIn(messages['zerver_message'][0]['content'], 'ron joined the channel.\n\n')

        exported_usermessage_userprofiles = self.get_set(messages['zerver_usermessage'], 'user_profile')
        self.assertEqual(len(exported_usermessage_userprofiles), 3)
        exported_usermessage_messages = self.get_set(messages['zerver_usermessage'], 'message')
        self.assertEqual(exported_usermessage_messages, exported_messages_id)

        with self.assertLogs(level="INFO"):
            do_import_realm(
                import_dir=harry_team_output_dir,
                subdomain='gryffindor',
            )

        realm = get_realm('gryffindor')

        messages = Message.objects.filter(sender__realm=realm)
        for message in messages:
            self.assertIsNotNone(message.rendered_content)
        self.assertEqual(len(messages), 11)

        stream_messages = messages.filter(recipient__type=Recipient.STREAM).order_by("date_sent")
        stream_recipients = stream_messages.values_list("recipient", flat=True)
        self.assertEqual(len(stream_messages), 4)
        self.assertEqual(len(set(stream_recipients)), 2)
        self.assertEqual(stream_messages[0].sender.email, "ron@zulip.com")
        self.assertEqual(stream_messages[0].content, "ron joined the channel.\n\n")

        huddle_messages = messages.filter(recipient__type=Recipient.HUDDLE).order_by("date_sent")
        huddle_recipients = huddle_messages.values_list("recipient", flat=True)
        self.assertEqual(len(huddle_messages), 3)
        self.assertEqual(len(set(huddle_recipients)), 1)
        self.assertEqual(huddle_messages[0].sender.email, "ginny@zulip.com")
        self.assertEqual(huddle_messages[0].content, "Who is going to Hogsmeade this weekend?\n\n")

        personal_messages = messages.filter(recipient__type=Recipient.PERSONAL).order_by("date_sent")
        personal_recipients = personal_messages.values_list("recipient", flat=True)
        self.assertEqual(len(personal_messages), 4)
        self.assertEqual(len(set(personal_recipients)), 3)
        self.assertEqual(personal_messages[0].sender.email, "ron@zulip.com")
        self.assertEqual(personal_messages[0].content, "hey harry\n\n")

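    # With masking_content=True, the converted message text should be replaced
    # by "x" placeholders.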
    def test_do_convert_data_with_masking(self) -> None:
        mattermost_data_dir = self.fixture_file_name("", "mattermost_fixtures")
        output_dir = self.make_import_output_dir("mattermost")

        with patch('builtins.print') as mock_print, self.assertLogs(level='WARNING') as warn_log:
            do_convert_data(
                mattermost_data_dir=mattermost_data_dir,
                output_dir=output_dir,
                masking_content=True,
            )
            self.assertEqual(mock_print.mock_calls, [
                call('Generating data for', 'gryffindor'),
                call('Generating data for', 'slytherin')
            ])
        self.assertEqual(warn_log.output, [
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
        ])

        harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")
        messages = self.read_file(harry_team_output_dir, 'messages-000001.json')

        self.assertIn(messages['zerver_message'][0]['content'], 'xxxxx xxxxxx xxx xxxxxxx.\n\n')

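    # The converted data should also import cleanly into a database that
    # already contains other realms.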
    def test_import_data_to_existing_database(self) -> None:
        mattermost_data_dir = self.fixture_file_name("", "mattermost_fixtures")
        output_dir = self.make_import_output_dir("mattermost")

        with patch('builtins.print') as mock_print, self.assertLogs(level='WARNING') as warn_log:
            do_convert_data(
                mattermost_data_dir=mattermost_data_dir,
                output_dir=output_dir,
                masking_content=True,
            )
            self.assertEqual(mock_print.mock_calls, [
                call('Generating data for', 'gryffindor'),
                call('Generating data for', 'slytherin')
            ])
        self.assertEqual(warn_log.output, [
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
            'WARNING:root:Skipping importing huddles and PMs since there are multiple teams in the export',
        ])

        harry_team_output_dir = self.team_output_dir(output_dir, "gryffindor")

        with self.assertLogs(level="INFO"):
            do_import_realm(
                import_dir=harry_team_output_dir,
                subdomain='gryffindor',
            )

        realm = get_realm('gryffindor')

        realm_users = UserProfile.objects.filter(realm=realm)
        messages = Message.objects.filter(sender__in=realm_users)
        for message in messages:
            self.assertIsNotNone(message.rendered_content)