2020-06-11 00:54:34 +02:00
|
|
|
import os
|
|
|
|
import shutil
|
2020-10-24 09:33:54 +02:00
|
|
|
from io import BytesIO
|
2021-06-09 00:15:45 +02:00
|
|
|
from typing import Any, Dict, Iterator, List, Set, Tuple
|
2020-06-11 00:54:34 +02:00
|
|
|
from unittest import mock
|
2021-09-27 09:17:52 +02:00
|
|
|
from unittest.mock import ANY
|
|
|
|
from urllib.parse import parse_qs, urlparse
|
2020-06-11 00:54:34 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2021-06-09 00:15:45 +02:00
|
|
|
import responses
|
2018-01-26 15:33:22 +01:00
|
|
|
from django.conf import settings
|
|
|
|
from django.utils.timezone import now as timezone_now
|
2021-06-09 00:15:45 +02:00
|
|
|
from requests.models import PreparedRequest
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.data_import.import_util import (
|
2021-07-16 22:11:10 +02:00
|
|
|
ZerverFieldsT,
|
2020-06-11 00:54:34 +02:00
|
|
|
build_defaultstream,
|
|
|
|
build_recipient,
|
|
|
|
build_subscription,
|
|
|
|
build_usermessages,
|
|
|
|
build_zerver_realm,
|
|
|
|
)
|
|
|
|
from zerver.data_import.sequencer import NEXT_ID
|
2018-08-01 00:18:04 +02:00
|
|
|
from zerver.data_import.slack import (
|
2018-12-17 02:26:51 +01:00
|
|
|
AddedChannelsT,
|
2019-07-02 07:41:51 +02:00
|
|
|
AddedMPIMsT,
|
2019-06-28 10:34:14 +02:00
|
|
|
DMMembersT,
|
data_import: Fix bot email address de-duplication.
4815f6e28b2e99e799c3b60dac5cb1f19fa31b8e tried to de-duplicate bot
email addresses, but instead caused duplicates to crash:
```
Traceback (most recent call last):
File "./manage.py", line 157, in <module>
execute_from_command_line(sys.argv)
File "./manage.py", line 122, in execute_from_command_line
utility.execute()
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/__init__.py", line 413, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/base.py", line 354, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/base.py", line 398, in execute
output = self.handle(*args, **options)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/management/commands/convert_slack_data.py", line 59, in handle
do_convert_data(path, output_dir, token, threads=num_threads)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 1320, in do_convert_data
) = slack_workspace_to_realm(
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 141, in slack_workspace_to_realm
) = users_to_zerver_userprofile(slack_data_dir, user_list, realm_id, int(NOW), domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 248, in users_to_zerver_userprofile
email = get_user_email(user, domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 406, in get_user_email
return SlackBotEmail.get_email(user["profile"], domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 85, in get_email
email_prefix += cls.duplicate_email_count[email]
TypeError: can only concatenate str (not "int") to str
```
Fix the stringification, make it case-insensitive, append with a dash
for readability, and add tests for all of the above.
2022-03-31 02:21:57 +02:00
|
|
|
SlackBotEmail,
|
2020-06-11 00:54:34 +02:00
|
|
|
channel_message_to_zerver_message,
|
|
|
|
channels_to_zerver_stream,
|
|
|
|
convert_slack_workspace_messages,
|
|
|
|
do_convert_data,
|
|
|
|
fetch_shared_channel_users,
|
|
|
|
get_admin,
|
|
|
|
get_guest,
|
|
|
|
get_message_sending_user,
|
|
|
|
get_owner,
|
|
|
|
get_slack_api_data,
|
|
|
|
get_subscription,
|
|
|
|
get_user_timezone,
|
|
|
|
process_message_files,
|
2021-09-16 09:37:49 +02:00
|
|
|
slack_emoji_name_to_codepoint,
|
2020-06-11 00:54:34 +02:00
|
|
|
slack_workspace_to_realm,
|
|
|
|
users_to_zerver_userprofile,
|
2018-02-08 21:42:27 +01:00
|
|
|
)
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.import_realm import do_import_realm
|
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2022-01-13 23:24:16 +01:00
|
|
|
from zerver.lib.test_helpers import read_test_image_file
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.topic import EXPORT_TOPIC_NAME
|
2022-09-29 19:56:30 +02:00
|
|
|
from zerver.models import Message, Realm, RealmAuditLog, Recipient, UserProfile, get_realm
|
2018-02-08 21:42:27 +01:00
|
|
|
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2018-02-08 21:42:27 +01:00
|
|
|
def remove_folder(path: str) -> None:
    """Delete the directory tree at *path*; a missing path is a no-op."""
    try:
        shutil.rmtree(path)
    except FileNotFoundError:
        # Nothing to clean up; mirrors the old existence pre-check.
        pass
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-09 00:15:45 +02:00
|
|
|
def request_callback(request: PreparedRequest) -> Tuple[int, Dict[str, str], bytes]:
    """Fake the subset of the Slack Web API used by these tests.

    Registered with the `responses` library as a GET callback; returns a
    `(status_code, headers, body)` tuple.  Only `users.list`, `users.info`,
    and `team.info` are recognized; every other URL gets a 404.  A request
    must carry the `Bearer xoxp-valid-token` Authorization header, otherwise
    a Slack-style `invalid_auth` error body is returned with HTTP 200 (Slack
    reports API errors in the JSON payload, not the status code).
    """
    valid_endpoint = False
    endpoints = [
        "https://slack.com/api/users.list",
        "https://slack.com/api/users.info",
        "https://slack.com/api/team.info",
    ]
    for endpoint in endpoints:
        if request.url and endpoint in request.url:
            valid_endpoint = True
            break
    if not valid_endpoint:
        # Unknown endpoint: plain HTTP 404 with an empty body.
        return (404, {}, b"")

    if request.headers.get("Authorization") != "Bearer xoxp-valid-token":
        return (200, {}, orjson.dumps({"ok": False, "error": "invalid_auth"}))

    if request.url == "https://slack.com/api/users.list":
        # The test only checks that the "members" key is extracted, so a
        # placeholder string stands in for the real member list.
        return (200, {}, orjson.dumps({"ok": True, "members": "user_data"}))

    # users.info and team.info pass their arguments as query parameters.
    query_from_url = str(urlparse(request.url).query)
    qs = parse_qs(query_from_url)
    if request.url and "https://slack.com/api/users.info" in request.url:
        # Known shared-channel users and the foreign team each belongs to.
        user2team_dict = {
            "U061A3E0G": "T6LARQE2Z",
            "U061A8H1G": "T7KJRQE8Y",
            "U8X25EBAB": "T5YFFM2QY",
        }
        try:
            user_id = qs["user"][0]
            team_id = user2team_dict[user_id]
        except KeyError:
            # Missing "user" parameter or unknown user id both map to the
            # same Slack error.
            return (200, {}, orjson.dumps({"ok": False, "error": "user_not_found"}))
        return (200, {}, orjson.dumps({"ok": True, "user": {"id": user_id, "team_id": team_id}}))

    # Else, https://slack.com/api/team.info
    team_not_found: Tuple[int, Dict[str, str], bytes] = (
        200,
        {},
        orjson.dumps({"ok": False, "error": "team_not_found"}),
    )
    try:
        team_id = qs["team"][0]
    except KeyError:
        # No "team" query parameter supplied.
        return team_not_found

    # Known foreign teams and their Slack subdomains.
    team_dict = {
        "T6LARQE2Z": "foreignteam1",
        "T7KJRQE8Y": "foreignteam2",
    }
    try:
        team_domain = team_dict[team_id]
    except KeyError:
        return team_not_found
    return (200, {}, orjson.dumps({"ok": True, "team": {"id": team_id, "domain": team_domain}}))
|
2021-03-04 15:58:20 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-01-26 15:33:22 +01:00
|
|
|
class SlackImporter(ZulipTestCase):
|
2021-06-09 00:15:45 +02:00
|
|
|
    @responses.activate
    def test_get_slack_api_data(self) -> None:
        """Exercise get_slack_api_data against the mocked Slack API:
        happy paths for users.list/users.info, Slack-reported errors
        (user_not_found, team_not_found, invalid_auth), a missing token,
        and a plain HTTP 404."""
        token = "xoxp-valid-token"

        # Users list
        slack_user_list_url = "https://slack.com/api/users.list"
        responses.add_callback(responses.GET, slack_user_list_url, callback=request_callback)
        self.assertEqual(
            get_slack_api_data(slack_user_list_url, "members", token=token), "user_data"
        )

        # Users info
        slack_users_info_url = "https://slack.com/api/users.info"
        user_id = "U8X25EBAB"
        responses.add_callback(responses.GET, slack_users_info_url, callback=request_callback)
        self.assertEqual(
            get_slack_api_data(slack_users_info_url, "user", token=token, user=user_id),
            {"id": user_id, "team_id": "T5YFFM2QY"},
        )
        # Should error if the required user argument is not specified
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_users_info_url, "user", token=token)
        self.assertEqual(invalid.exception.args, ("Error accessing Slack API: user_not_found",))
        # Should error if the required user is not found
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_users_info_url, "user", token=token, user="idontexist")
        self.assertEqual(invalid.exception.args, ("Error accessing Slack API: user_not_found",))

        # Team info
        slack_team_info_url = "https://slack.com/api/team.info"
        responses.add_callback(responses.GET, slack_team_info_url, callback=request_callback)
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_team_info_url, "team", token=token, team="wedontexist")
        self.assertEqual(invalid.exception.args, ("Error accessing Slack API: team_not_found",))
        # Should error if the required user argument is not specified
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_team_info_url, "team", token=token)
        self.assertEqual(invalid.exception.args, ("Error accessing Slack API: team_not_found",))

        # A wrong token is rejected via Slack's in-body "invalid_auth" error.
        token = "xoxp-invalid-token"
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_user_list_url, "members", token=token)
        self.assertEqual(invalid.exception.args, ("Error accessing Slack API: invalid_auth",))

        # Omitting the token entirely fails before any request is made.
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(slack_user_list_url, "members")
        self.assertEqual(invalid.exception.args, ("Slack token missing in kwargs",))

        # An unknown endpoint yields HTTP 404 from the mock (see
        # request_callback), which surfaces as an HTTP error.
        token = "xoxp-status404"
        wrong_url = "https://slack.com/api/wrong"
        responses.add_callback(responses.GET, wrong_url, callback=request_callback)
        with self.assertRaises(Exception) as invalid:
            get_slack_api_data(wrong_url, "members", token=token)
        self.assertEqual(invalid.exception.args, ("HTTP error accessing the Slack API.",))
|
2018-03-29 14:38:11 +02:00
|
|
|
|
2018-01-26 15:33:22 +01:00
|
|
|
def test_build_zerver_realm(self) -> None:
|
|
|
|
realm_id = 2
|
|
|
|
realm_subdomain = "test-realm"
|
|
|
|
time = float(timezone_now().timestamp())
|
2021-02-12 08:19:30 +01:00
|
|
|
test_realm: List[Dict[str, Any]] = build_zerver_realm(
|
2021-02-12 08:20:45 +01:00
|
|
|
realm_id, realm_subdomain, time, "Slack"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-01-26 15:33:22 +01:00
|
|
|
test_zerver_realm_dict = test_realm[0]
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(test_zerver_realm_dict["id"], realm_id)
|
|
|
|
self.assertEqual(test_zerver_realm_dict["string_id"], realm_subdomain)
|
|
|
|
self.assertEqual(test_zerver_realm_dict["name"], realm_subdomain)
|
|
|
|
self.assertEqual(test_zerver_realm_dict["date_created"], time)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2020-06-08 21:01:42 +02:00
|
|
|
def test_get_owner(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
user_data = [
|
2021-02-12 08:20:45 +01:00
|
|
|
{"is_owner": False, "is_primary_owner": False},
|
|
|
|
{"is_owner": True, "is_primary_owner": False},
|
|
|
|
{"is_owner": False, "is_primary_owner": True},
|
|
|
|
{"is_owner": True, "is_primary_owner": True},
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2020-06-08 21:01:42 +02:00
|
|
|
self.assertEqual(get_owner(user_data[0]), False)
|
|
|
|
self.assertEqual(get_owner(user_data[1]), True)
|
|
|
|
self.assertEqual(get_owner(user_data[2]), True)
|
|
|
|
self.assertEqual(get_owner(user_data[3]), True)
|
|
|
|
|
2018-02-06 21:02:23 +01:00
|
|
|
def test_get_admin(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
user_data = [{"is_admin": True}, {"is_admin": False}]
|
2018-02-06 21:02:23 +01:00
|
|
|
self.assertEqual(get_admin(user_data[0]), True)
|
2020-06-08 21:01:42 +02:00
|
|
|
self.assertEqual(get_admin(user_data[1]), False)
|
2018-02-06 21:02:23 +01:00
|
|
|
|
2019-10-31 13:18:49 +01:00
|
|
|
def test_get_guest(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
user_data = [
|
2021-02-12 08:20:45 +01:00
|
|
|
{"is_restricted": False, "is_ultra_restricted": False},
|
|
|
|
{"is_restricted": True, "is_ultra_restricted": False},
|
|
|
|
{"is_restricted": False, "is_ultra_restricted": True},
|
|
|
|
{"is_restricted": True, "is_ultra_restricted": True},
|
2021-02-12 08:19:30 +01:00
|
|
|
]
|
2019-10-31 13:18:49 +01:00
|
|
|
self.assertEqual(get_guest(user_data[0]), False)
|
|
|
|
self.assertEqual(get_guest(user_data[1]), True)
|
|
|
|
self.assertEqual(get_guest(user_data[2]), True)
|
|
|
|
self.assertEqual(get_guest(user_data[3]), True)
|
|
|
|
|
2018-01-26 15:33:22 +01:00
|
|
|
def test_get_timezone(self) -> None:
|
2018-07-02 21:59:33 +02:00
|
|
|
user_chicago_timezone = {"tz": "America/Chicago"}
|
2018-01-26 15:33:22 +01:00
|
|
|
user_timezone_none = {"tz": None}
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
user_no_timezone: Dict[str, Any] = {}
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2018-07-02 21:59:33 +02:00
|
|
|
self.assertEqual(get_user_timezone(user_chicago_timezone), "America/Chicago")
|
2018-01-26 15:33:22 +01:00
|
|
|
self.assertEqual(get_user_timezone(user_timezone_none), "America/New_York")
|
|
|
|
self.assertEqual(get_user_timezone(user_no_timezone), "America/New_York")
|
|
|
|
|
2019-08-08 19:39:26 +02:00
|
|
|
    @mock.patch("zerver.data_import.slack.get_data_file")
    @mock.patch("zerver.data_import.slack.get_messages_iterator")
    @responses.activate
    def test_fetch_shared_channel_users(
        self, messages_mock: mock.Mock, data_file_mock: mock.Mock
    ) -> None:
        """Users seen only in shared channels or messages must be appended
        to `users` as mirror dummies, annotated with their foreign team's
        domain fetched via the mocked users.info/team.info endpoints."""
        users = [{"id": "U061A1R2R"}, {"id": "U061A5N1G"}, {"id": "U064KUGRJ"}]
        # First get_data_file call returns channels, second returns MPIMs.
        data_file_mock.side_effect = [
            [
                {"name": "general", "members": ["U061A1R2R", "U061A5N1G"]},
                {"name": "sharedchannel", "members": ["U061A1R2R", "U061A3E0G"]},
            ],
            [],
        ]
        # U061A8H1G appears only as a message sender, not in the user list.
        messages_mock.return_value = [
            {"user": "U061A1R2R"},
            {"user": "U061A5N1G"},
            {"user": "U061A8H1G"},
        ]
        # Users info
        slack_users_info_url = "https://slack.com/api/users.info"
        responses.add_callback(responses.GET, slack_users_info_url, callback=request_callback)
        # Team info
        slack_team_info_url = "https://slack.com/api/team.info"
        responses.add_callback(responses.GET, slack_team_info_url, callback=request_callback)
        slack_data_dir = self.fixture_file_name("", type="slack_fixtures")
        fetch_shared_channel_users(users, slack_data_dir, "xoxp-valid-token")

        # Normal users
        self.assert_length(users, 5)
        self.assertEqual(users[0]["id"], "U061A1R2R")
        self.assertEqual(users[0]["is_mirror_dummy"], False)
        self.assertFalse("team_domain" in users[0])
        self.assertEqual(users[1]["id"], "U061A5N1G")
        self.assertEqual(users[2]["id"], "U064KUGRJ")

        # Shared channel users
        # We need to do this because the outcome order of `users` list is
        # not deterministic.
        fourth_fifth = [users[3], users[4]]
        fourth_fifth.sort(key=lambda x: x["id"])
        self.assertEqual(fourth_fifth[0]["id"], "U061A3E0G")
        self.assertEqual(fourth_fifth[0]["team_domain"], "foreignteam1")
        self.assertEqual(fourth_fifth[0]["is_mirror_dummy"], True)
        self.assertEqual(fourth_fifth[1]["id"], "U061A8H1G")
        self.assertEqual(fourth_fifth[1]["team_domain"], "foreignteam2")
        self.assertEqual(fourth_fifth[1]["is_mirror_dummy"], True)
|
2019-08-08 19:39:26 +02:00
|
|
|
|
2018-08-01 00:18:04 +02:00
|
|
|
    @mock.patch("zerver.data_import.slack.get_data_file")
    def test_users_to_zerver_userprofile(self, mock_get_data_file: mock.Mock) -> None:
        """End-to-end check of users_to_zerver_userprofile on a fixture of
        eight Slack users covering every role (primary owner, owner, admin,
        member, guest, ultra-restricted guest), a bot, and a mirror-dummy
        user from a foreign (shared-channel) team.  Verifies the generated
        UserProfile rows, the Slack-id -> Zulip-id mapping, avatar records,
        and the custom-profile-field extraction."""
        custom_profile_field_user1 = {
            "Xf06054BBB": {"value": "random1"},
            "Xf023DSCdd": {"value": "employee"},
        }
        custom_profile_field_user2 = {
            "Xf06054BBB": {"value": "random2"},
            "Xf023DSCdd": {"value": "employer"},
        }
        user_data = [
            {
                "id": "U08RGD1RD",
                "team_id": "T5YFFM2QY",
                "name": "john",
                "deleted": False,
                "is_mirror_dummy": False,
                "real_name": "John Doe",
                "profile": {
                    "image_32": "",
                    "email": "jon@gmail.com",
                    "avatar_hash": "hash",
                    "phone": "+1-123-456-77-868",
                    "fields": custom_profile_field_user1,
                },
            },
            {
                "id": "U0CBK5KAT",
                "team_id": "T5YFFM2QY",
                "is_admin": True,
                "is_bot": False,
                "is_owner": True,
                "is_primary_owner": True,
                "name": "Jane",
                "real_name": "Jane Doe",
                "deleted": False,
                "is_mirror_dummy": False,
                "profile": {
                    "image_32": "https://secure.gravatar.com/avatar/random.png",
                    "fields": custom_profile_field_user2,
                    "email": "jane@foo.com",
                    "avatar_hash": "hash",
                },
            },
            {
                "id": "U09TYF5Sk",
                "team_id": "T5YFFM2QY",
                "name": "Bot",
                "real_name": "Bot",
                "is_bot": True,
                "deleted": False,
                "is_mirror_dummy": False,
                "profile": {
                    "image_32": "https://secure.gravatar.com/avatar/random1.png",
                    "skype": "test_skype_name",
                    "email": "bot1@zulipchat.com",
                    "avatar_hash": "hash",
                },
            },
            {
                # Mirror-dummy user from a foreign team (shared channel).
                "id": "UHSG7OPQN",
                "team_id": "T6LARQE2Z",
                "name": "matt.perry",
                "color": "9d8eee",
                "is_bot": False,
                "is_app_user": False,
                "is_mirror_dummy": True,
                "team_domain": "foreignteam",
                "profile": {
                    "image_32": "https://secure.gravatar.com/avatar/random6.png",
                    "avatar_hash": "hash",
                    "first_name": "Matt",
                    "last_name": "Perry",
                    "real_name": "Matt Perry",
                    "display_name": "matt.perry",
                    "team": "T6LARQE2Z",
                },
            },
            {
                # Single-channel guest.
                "id": "U8VAHEVUY",
                "team_id": "T5YFFM2QY",
                "name": "steviejacob34",
                "real_name": "Steve Jacob",
                "is_admin": False,
                "is_owner": False,
                "is_primary_owner": False,
                "is_restricted": True,
                "is_ultra_restricted": False,
                "is_bot": False,
                "is_mirror_dummy": False,
                "profile": {
                    "email": "steviejacob34@yahoo.com",
                    "avatar_hash": "hash",
                    "image_32": "https://secure.gravatar.com/avatar/random6.png",
                },
            },
            {
                # Ultra-restricted guest.
                "id": "U8X25EBAB",
                "team_id": "T5YFFM2QY",
                "name": "pratikweb_0",
                "real_name": "Pratik",
                "is_admin": False,
                "is_owner": False,
                "is_primary_owner": False,
                "is_restricted": True,
                "is_ultra_restricted": True,
                "is_bot": False,
                "is_mirror_dummy": False,
                "profile": {
                    "email": "pratik@mit.edu",
                    "avatar_hash": "hash",
                    "image_32": "https://secure.gravatar.com/avatar/random.png",
                },
            },
            {
                # Non-primary owner.
                "id": "U015J7JSE",
                "team_id": "T5YFFM2QY",
                "name": "georgesm27",
                "real_name": "George",
                "is_admin": True,
                "is_owner": True,
                "is_primary_owner": False,
                "is_restricted": False,
                "is_ultra_restricted": False,
                "is_bot": False,
                "is_mirror_dummy": False,
                "profile": {
                    "email": "george@yahoo.com",
                    "avatar_hash": "hash",
                    "image_32": "https://secure.gravatar.com/avatar/random5.png",
                },
            },
            {
                # Administrator (not owner).
                "id": "U1RDFEC80",
                "team_id": "T5YFFM2QY",
                "name": "daniel.smith",
                "real_name": "Daniel Smith",
                "is_admin": True,
                "is_owner": False,
                "is_primary_owner": False,
                "is_restricted": False,
                "is_ultra_restricted": False,
                "is_bot": False,
                "is_mirror_dummy": False,
                "profile": {
                    "email": "daniel@gmail.com",
                    "avatar_hash": "hash",
                    "image_32": "https://secure.gravatar.com/avatar/random7.png",
                },
            },
        ]

        mock_get_data_file.return_value = user_data
        # As user with slack_id 'U0CBK5KAT' is the primary owner, that user should be imported first
        # and hence has zulip_id = 1
        test_slack_user_id_to_zulip_user_id = {
            "U08RGD1RD": 1,
            "U0CBK5KAT": 0,
            "U09TYF5Sk": 2,
            "UHSG7OPQN": 3,
            "U8VAHEVUY": 4,
            "U8X25EBAB": 5,
            "U015J7JSE": 6,
            "U1RDFEC80": 7,
        }
        slack_data_dir = "./random_path"
        timestamp = int(timezone_now().timestamp())
        mock_get_data_file.return_value = user_data

        with self.assertLogs(level="INFO"):
            (
                zerver_userprofile,
                avatar_list,
                slack_user_id_to_zulip_user_id,
                customprofilefield,
                customprofilefield_value,
            ) = users_to_zerver_userprofile(slack_data_dir, user_data, 1, timestamp, "test_domain")

        # Test custom profile fields
        self.assertEqual(customprofilefield[0]["field_type"], 1)
        self.assertEqual(customprofilefield[3]["name"], "skype")
        cpf_name = {cpf["name"] for cpf in customprofilefield}
        self.assertIn("phone", cpf_name)
        self.assertIn("skype", cpf_name)
        cpf_name.remove("phone")
        cpf_name.remove("skype")
        # Unrecognized Slack field ids get a generated placeholder name.
        for name in cpf_name:
            self.assertTrue(name.startswith("Slack custom field "))

        self.assert_length(customprofilefield_value, 6)
        self.assertEqual(customprofilefield_value[0]["field"], 0)
        self.assertEqual(customprofilefield_value[0]["user_profile"], 1)
        self.assertEqual(customprofilefield_value[3]["user_profile"], 0)
        self.assertEqual(customprofilefield_value[5]["value"], "test_skype_name")

        # test that the primary owner should always be imported first
        self.assertDictEqual(slack_user_id_to_zulip_user_id, test_slack_user_id_to_zulip_user_id)
        self.assert_length(avatar_list, 8)

        self.assert_length(zerver_userprofile, 8)

        # Index 0: plain member John Doe.
        self.assertEqual(zerver_userprofile[0]["is_staff"], False)
        self.assertEqual(zerver_userprofile[0]["is_bot"], False)
        self.assertEqual(zerver_userprofile[0]["is_active"], True)
        self.assertEqual(zerver_userprofile[0]["is_mirror_dummy"], False)
        self.assertEqual(zerver_userprofile[0]["role"], UserProfile.ROLE_MEMBER)
        self.assertEqual(zerver_userprofile[0]["enable_desktop_notifications"], True)
        self.assertEqual(zerver_userprofile[0]["email"], "jon@gmail.com")
        self.assertEqual(zerver_userprofile[0]["full_name"], "John Doe")

        # Index 1: primary owner Jane Doe.
        self.assertEqual(
            zerver_userprofile[1]["id"], test_slack_user_id_to_zulip_user_id["U0CBK5KAT"]
        )
        self.assertEqual(zerver_userprofile[1]["role"], UserProfile.ROLE_REALM_OWNER)
        self.assertEqual(zerver_userprofile[1]["is_staff"], False)
        self.assertEqual(zerver_userprofile[1]["is_active"], True)
        # NOTE(review): this re-checks index 0, not index 1 — possibly a
        # copy-paste slip for zerver_userprofile[1]; confirm intent.
        self.assertEqual(zerver_userprofile[0]["is_mirror_dummy"], False)

        # Index 2: bot account.
        self.assertEqual(
            zerver_userprofile[2]["id"], test_slack_user_id_to_zulip_user_id["U09TYF5Sk"]
        )
        self.assertEqual(zerver_userprofile[2]["is_bot"], True)
        self.assertEqual(zerver_userprofile[2]["is_active"], True)
        self.assertEqual(zerver_userprofile[2]["is_mirror_dummy"], False)
        self.assertEqual(zerver_userprofile[2]["email"], "bot1@zulipchat.com")
        self.assertEqual(zerver_userprofile[2]["bot_type"], 1)
        self.assertEqual(zerver_userprofile[2]["avatar_source"], "U")

        # Index 3: foreign-team mirror dummy (deactivated, synthetic email).
        self.assertEqual(
            zerver_userprofile[3]["id"], test_slack_user_id_to_zulip_user_id["UHSG7OPQN"]
        )
        self.assertEqual(zerver_userprofile[3]["role"], UserProfile.ROLE_MEMBER)
        self.assertEqual(zerver_userprofile[3]["is_staff"], False)
        self.assertEqual(zerver_userprofile[3]["is_active"], False)
        self.assertEqual(zerver_userprofile[3]["email"], "matt.perry@foreignteam.slack.com")
        self.assertEqual(zerver_userprofile[3]["realm"], 1)
        self.assertEqual(zerver_userprofile[3]["full_name"], "Matt Perry")
        self.assertEqual(zerver_userprofile[3]["is_mirror_dummy"], True)
        self.assertEqual(zerver_userprofile[3]["can_forge_sender"], False)

        # Index 4: restricted user becomes a guest.
        self.assertEqual(
            zerver_userprofile[4]["id"], test_slack_user_id_to_zulip_user_id["U8VAHEVUY"]
        )
        self.assertEqual(zerver_userprofile[4]["role"], UserProfile.ROLE_GUEST)
        self.assertEqual(zerver_userprofile[4]["is_staff"], False)
        self.assertEqual(zerver_userprofile[4]["is_active"], True)
        self.assertEqual(zerver_userprofile[4]["is_mirror_dummy"], False)

        # Index 5: ultra-restricted user also becomes a guest.
        self.assertEqual(
            zerver_userprofile[5]["id"], test_slack_user_id_to_zulip_user_id["U8X25EBAB"]
        )
        self.assertEqual(zerver_userprofile[5]["role"], UserProfile.ROLE_GUEST)
        self.assertEqual(zerver_userprofile[5]["is_staff"], False)
        self.assertEqual(zerver_userprofile[5]["is_active"], True)
        self.assertEqual(zerver_userprofile[5]["is_mirror_dummy"], False)

        # Index 6: non-primary owner still maps to realm owner.
        self.assertEqual(
            zerver_userprofile[6]["id"], test_slack_user_id_to_zulip_user_id["U015J7JSE"]
        )
        self.assertEqual(zerver_userprofile[6]["role"], UserProfile.ROLE_REALM_OWNER)
        self.assertEqual(zerver_userprofile[6]["is_staff"], False)
        self.assertEqual(zerver_userprofile[6]["is_active"], True)
        self.assertEqual(zerver_userprofile[6]["is_mirror_dummy"], False)

        # Index 7: admin maps to realm administrator.
        self.assertEqual(
            zerver_userprofile[7]["id"], test_slack_user_id_to_zulip_user_id["U1RDFEC80"]
        )
        self.assertEqual(zerver_userprofile[7]["role"], UserProfile.ROLE_REALM_ADMINISTRATOR)
        self.assertEqual(zerver_userprofile[7]["is_staff"], False)
        self.assertEqual(zerver_userprofile[7]["is_active"], True)
        self.assertEqual(zerver_userprofile[7]["is_mirror_dummy"], False)
|
2020-06-08 21:01:42 +02:00
|
|
|
|
2018-02-12 23:26:52 +01:00
|
|
|
def test_build_defaultstream(self) -> None:
|
2018-01-26 15:33:22 +01:00
|
|
|
realm_id = 1
|
|
|
|
stream_id = 1
|
2018-08-03 21:19:47 +02:00
|
|
|
default_channel_general = build_defaultstream(realm_id, stream_id, 1)
|
2021-02-12 08:20:45 +01:00
|
|
|
test_default_channel = {"stream": 1, "realm": 1, "id": 1}
|
2018-01-26 15:33:22 +01:00
|
|
|
self.assertDictEqual(test_default_channel, default_channel_general)
|
2018-08-03 21:19:47 +02:00
|
|
|
default_channel_general = build_defaultstream(realm_id, stream_id, 1)
|
2021-02-12 08:20:45 +01:00
|
|
|
test_default_channel = {"stream": 1, "realm": 1, "id": 1}
|
2018-01-26 15:33:22 +01:00
|
|
|
self.assertDictEqual(test_default_channel, default_channel_general)
|
|
|
|
|
|
|
|
def test_build_pm_recipient_sub_from_user(self) -> None:
|
|
|
|
zulip_user_id = 3
|
|
|
|
recipient_id = 5
|
|
|
|
subscription_id = 7
|
2018-08-02 00:35:02 +02:00
|
|
|
sub = build_subscription(recipient_id, zulip_user_id, subscription_id)
|
|
|
|
recipient = build_recipient(zulip_user_id, recipient_id, Recipient.PERSONAL)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(recipient["id"], sub["recipient"])
|
|
|
|
self.assertEqual(recipient["type_id"], sub["user_profile"])
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(recipient["type"], Recipient.PERSONAL)
|
|
|
|
self.assertEqual(recipient["type_id"], 3)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(sub["recipient"], 5)
|
|
|
|
self.assertEqual(sub["id"], 7)
|
|
|
|
self.assertEqual(sub["active"], True)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
|
|
|
def test_build_subscription(self) -> None:
|
|
|
|
channel_members = ["U061A1R2R", "U061A3E0G", "U061A5N1G", "U064KUGRJ"]
|
2021-02-12 08:19:30 +01:00
|
|
|
slack_user_id_to_zulip_user_id = {
|
|
|
|
"U061A1R2R": 1,
|
|
|
|
"U061A3E0G": 8,
|
|
|
|
"U061A5N1G": 7,
|
|
|
|
"U064KUGRJ": 5,
|
|
|
|
}
|
2018-02-12 23:26:52 +01:00
|
|
|
subscription_id_count = 0
|
2018-01-26 15:33:22 +01:00
|
|
|
recipient_id = 12
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
zerver_subscription: List[Dict[str, Any]] = []
|
2021-02-12 08:19:30 +01:00
|
|
|
final_subscription_id = get_subscription(
|
|
|
|
channel_members,
|
|
|
|
zerver_subscription,
|
|
|
|
recipient_id,
|
|
|
|
slack_user_id_to_zulip_user_id,
|
|
|
|
subscription_id_count,
|
|
|
|
)
|
2018-01-26 15:33:22 +01:00
|
|
|
# sanity checks
|
2018-03-07 14:07:28 +01:00
|
|
|
self.assertEqual(final_subscription_id, 4)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_subscription[0]["recipient"], 12)
|
|
|
|
self.assertEqual(zerver_subscription[0]["id"], 0)
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_subscription[0]["user_profile"],
|
2021-02-12 08:19:30 +01:00
|
|
|
slack_user_id_to_zulip_user_id[channel_members[0]],
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_subscription[2]["user_profile"],
|
2021-02-12 08:19:30 +01:00
|
|
|
slack_user_id_to_zulip_user_id[channel_members[2]],
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_subscription[3]["id"], 3)
|
|
|
|
self.assertEqual(zerver_subscription[1]["recipient"], zerver_subscription[3]["recipient"])
|
|
|
|
self.assertEqual(zerver_subscription[1]["pin_to_top"], False)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2019-07-01 14:34:56 +02:00
|
|
|
    def test_channels_to_zerver_stream(self) -> None:
        """End-to-end check of channels_to_zerver_stream against the slack_fixtures data.

        Verifies the returned channel/MPIM/DM mappings, the recipient-name ->
        recipient-id map, and the realm dict's subscriptions, recipients,
        streams, huddles, and default streams.
        """
        slack_user_id_to_zulip_user_id = {
            "U061A1R2R": 1,
            "U061A3E0G": 8,
            "U061A5N1G": 7,
            "U064KUGRJ": 5,
        }
        zerver_userprofile = [{"id": 1}, {"id": 8}, {"id": 7}, {"id": 5}]
        realm_id = 3

        # The import code logs progress at INFO level; capture it so the test
        # output stays quiet and the logging is asserted to happen.
        with self.assertLogs(level="INFO"):
            (
                realm,
                added_channels,
                added_mpims,
                dm_members,
                slack_recipient_name_to_zulip_recipient_id,
            ) = channels_to_zerver_stream(
                self.fixture_file_name("", "slack_fixtures"),
                realm_id,
                {"zerver_userpresence": []},
                slack_user_id_to_zulip_user_id,
                zerver_userprofile,
            )

        # Expected values mirror the contents of the slack_fixtures directory:
        # channel/MPIM names map to (Slack id, Zulip stream/huddle id) tuples.
        test_added_channels = {
            "sharedchannel": ("C061A0HJG", 3),
            "general": ("C061A0YJG", 1),
            "general1": ("C061A0YJP", 2),
            "random": ("C061A0WJG", 0),
        }
        test_added_mpims = {
            "mpdm-user9--user2--user10-1": ("G9HBG2A5D", 0),
            "mpdm-user6--user7--user4-1": ("G6H1Z0ZPS", 1),
            "mpdm-user4--user1--user5-1": ("G6N944JPL", 2),
        }
        # DM ids map to the pair of Slack user ids in the conversation.
        test_dm_members = {
            "DJ47BL849": ("U061A1R2R", "U061A5N1G"),
            "DHX1UP7EG": ("U061A5N1G", "U064KUGRJ"),
            "DK8HSJDHS": ("U061A1R2R", "U064KUGRJ"),
            "DRS3PSLDK": ("U064KUGRJ", "U064KUGRJ"),
        }
        # Every user, channel, and MPIM name should appear as a recipient name.
        slack_recipient_names = (
            set(slack_user_id_to_zulip_user_id.keys())
            | set(test_added_channels.keys())
            | set(test_added_mpims.keys())
        )

        self.assertDictEqual(test_added_channels, added_channels)
        # zerver defaultstream already tested in helper functions.
        # Note that the `random` stream is archived and thus should
        # not be created as a DefaultStream.
        self.assertEqual(realm["zerver_defaultstream"], [{"id": 0, "realm": 3, "stream": 1}])

        self.assertDictEqual(test_added_mpims, added_mpims)
        self.assertDictEqual(test_dm_members, dm_members)

        # We can't do an assertDictEqual since during the construction of personal
        # recipients, slack_user_id_to_zulip_user_id are iterated in different order in Python 3.5 and 3.6.
        self.assertEqual(
            set(slack_recipient_name_to_zulip_recipient_id.keys()), slack_recipient_names
        )
        self.assertEqual(set(slack_recipient_name_to_zulip_recipient_id.values()), set(range(11)))

        # functioning of zerver subscriptions are already tested in the helper functions
        # This is to check the concatenation of the output lists from the helper functions
        # subscriptions for stream
        zerver_subscription = realm["zerver_subscription"]
        zerver_recipient = realm["zerver_recipient"]
        zerver_stream = realm["zerver_stream"]

        self.assertEqual(self.get_set(zerver_subscription, "recipient"), set(range(11)))
        self.assertEqual(self.get_set(zerver_subscription, "user_profile"), {1, 5, 7, 8})

        # Every recipient id must be subscribed to at least once.
        self.assertEqual(
            self.get_set(zerver_recipient, "id"), self.get_set(zerver_subscription, "recipient")
        )
        self.assertEqual(self.get_set(zerver_recipient, "type_id"), {0, 1, 2, 3, 5, 7, 8})
        self.assertEqual(self.get_set(zerver_recipient, "type"), {1, 2, 3})

        # stream mapping
        self.assertEqual(zerver_stream[0]["name"], "random")
        self.assertEqual(zerver_stream[0]["deactivated"], True)
        self.assertEqual(zerver_stream[0]["description"], "no purpose")
        self.assertEqual(zerver_stream[0]["invite_only"], False)
        self.assertEqual(zerver_stream[0]["history_public_to_subscribers"], True)
        self.assertEqual(zerver_stream[0]["realm"], realm_id)
        self.assertEqual(zerver_stream[2]["id"], test_added_channels[zerver_stream[2]["name"]][1])

        self.assertEqual(self.get_set(realm["zerver_huddle"], "id"), {0, 1, 2})
        self.assertEqual(realm["zerver_userpresence"], [])
|
2019-07-02 07:41:51 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
    @mock.patch(
        "zerver.data_import.slack.users_to_zerver_userprofile", return_value=[[], [], {}, [], []]
    )
    @mock.patch(
        "zerver.data_import.slack.channels_to_zerver_stream",
        return_value=[{"zerver_stream": []}, {}, {}, {}, {}],
    )
    def test_slack_workspace_to_realm(
        self, mock_channels_to_zerver_stream: mock.Mock, mock_users_to_zerver_userprofile: mock.Mock
    ) -> None:
        """Check slack_workspace_to_realm's realm skeleton with its helpers mocked out.

        Both helper functions are patched to return empty structures, so this
        test only validates the wiring: the arguments passed to the helpers
        and the shape of the realm dicts that come back.
        """
        realm_id = 1
        user_list: List[Dict[str, Any]] = []
        (
            realm,
            slack_user_id_to_zulip_user_id,
            slack_recipient_name_to_zulip_recipient_id,
            added_channels,
            added_mpims,
            dm_members,
            avatar_list,
            em,
        ) = slack_workspace_to_realm(
            "testdomain", realm_id, user_list, "test-realm", "./random_path", {}
        )
        test_zerver_realmdomain = [
            {"realm": realm_id, "allow_subdomains": False, "domain": "testdomain", "id": realm_id}
        ]
        # Functioning already tests in helper functions
        self.assertEqual(slack_user_id_to_zulip_user_id, {})
        self.assertEqual(added_channels, {})
        self.assertEqual(added_mpims, {})
        self.assertEqual(slack_recipient_name_to_zulip_recipient_id, {})
        self.assertEqual(avatar_list, [])

        # The realm dict under construction is the third positional argument
        # handed to channels_to_zerver_stream; inspect it via the mock.
        mock_channels_to_zerver_stream.assert_called_once_with("./random_path", 1, ANY, {}, [])
        passed_realm = mock_channels_to_zerver_stream.call_args_list[0][0][2]
        zerver_realmdomain = passed_realm["zerver_realmdomain"]
        self.assertListEqual(zerver_realmdomain, test_zerver_realmdomain)
        self.assertEqual(
            passed_realm["zerver_realm"][0]["description"], "Organization imported from Slack!"
        )
        self.assertEqual(passed_realm["zerver_userpresence"], [])
        self.assert_length(passed_realm.keys(), 16)

        # With the helpers mocked to empty values, the returned realm contains
        # only empty per-table lists.
        self.assertEqual(realm["zerver_stream"], [])
        self.assertEqual(realm["zerver_userprofile"], [])
        self.assertEqual(realm["zerver_realmemoji"], [])
        self.assertEqual(realm["zerver_customprofilefield"], [])
        self.assertEqual(realm["zerver_customprofilefieldvalue"], [])
        self.assert_length(realm.keys(), 5)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
|
|
|
def test_get_message_sending_user(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
message_with_file = {"subtype": "file", "type": "message", "file": {"user": "U064KUGRJ"}}
|
2021-04-25 22:54:23 +02:00
|
|
|
message_without_file = {"subtype": "file", "type": "message", "user": "U064KUGRJ"}
|
2018-01-26 15:33:22 +01:00
|
|
|
|
|
|
|
user_file = get_message_sending_user(message_with_file)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(user_file, "U064KUGRJ")
|
2018-01-26 15:33:22 +01:00
|
|
|
user_without_file = get_message_sending_user(message_without_file)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(user_without_file, "U064KUGRJ")
|
2018-01-26 15:33:22 +01:00
|
|
|
|
|
|
|
    def test_build_zerver_message(self) -> None:
        """Check build_usermessages creates one UserMessage per subscriber of the recipient.

        Only recipient 2's subscribers ({3, 7, 15, 16}) should receive rows;
        the mentioned user (7) additionally gets the mentioned flag bit set.
        Ids continue from the global "user_message" sequencer.
        """
        zerver_usermessage: List[Dict[str, Any]] = []

        # recipient_id -> set of user_ids
        subscriber_map = {
            2: {3, 7, 15, 16},  # these we care about
            4: {12},
            6: {19, 21},
        }

        recipient_id = 2
        mentioned_user_ids = [7]
        message_id = 9

        # Snapshot the sequencer so the ids below can be asserted relative to it.
        um_id = NEXT_ID("user_message")

        build_usermessages(
            zerver_usermessage=zerver_usermessage,
            subscriber_map=subscriber_map,
            recipient_id=recipient_id,
            mentioned_user_ids=mentioned_user_ids,
            message_id=message_id,
            is_private=False,
        )

        # flags_mask 1 == read; ids are sequential after the snapshot.
        self.assertEqual(zerver_usermessage[0]["id"], um_id + 1)
        self.assertEqual(zerver_usermessage[0]["message"], message_id)
        self.assertEqual(zerver_usermessage[0]["flags_mask"], 1)

        self.assertEqual(zerver_usermessage[1]["id"], um_id + 2)
        self.assertEqual(zerver_usermessage[1]["message"], message_id)
        self.assertEqual(zerver_usermessage[1]["user_profile"], 7)
        self.assertEqual(zerver_usermessage[1]["flags_mask"], 9)  # mentioned

        self.assertEqual(zerver_usermessage[2]["id"], um_id + 3)
        self.assertEqual(zerver_usermessage[2]["message"], message_id)

        self.assertEqual(zerver_usermessage[3]["id"], um_id + 4)
        self.assertEqual(zerver_usermessage[3]["message"], message_id)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
@mock.patch("zerver.data_import.slack.build_usermessages", return_value=(2, 4))
|
2018-08-03 21:11:47 +02:00
|
|
|
def test_channel_message_to_zerver_message(self, mock_build_usermessage: mock.Mock) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
user_data = [
|
|
|
|
{"id": "U066MTL5U", "name": "john doe", "deleted": False, "real_name": "John"},
|
|
|
|
{"id": "U061A5N1G", "name": "jane doe", "deleted": False, "real_name": "Jane"},
|
|
|
|
{"id": "U061A1R2R", "name": "jon", "deleted": False, "real_name": "Jon"},
|
|
|
|
]
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2019-08-12 13:44:07 +02:00
|
|
|
slack_user_id_to_zulip_user_id = {"U066MTL5U": 5, "U061A5N1G": 24, "U061A1R2R": 43}
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2018-03-25 13:42:04 +02:00
|
|
|
reactions = [{"name": "grinning", "users": ["U061A5N1G"], "count": 1}]
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
all_messages: List[Dict[str, Any]] = [
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
|
|
|
"text": "<@U066MTL5U> has joined the channel",
|
|
|
|
"subtype": "channel_join",
|
|
|
|
"user": "U066MTL5U",
|
|
|
|
"ts": "1434139102.000002",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "<@U061A5N1G>: hey!",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"ts": "1437868294.000006",
|
|
|
|
"has_image": True,
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"reactions": reactions,
|
|
|
|
"ts": "1439868294.000006",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "without a user",
|
|
|
|
"user": None, # this message will be ignored as it has no user
|
|
|
|
"ts": "1239868294.000006",
|
|
|
|
"channel_name": "general",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "<http://journals.plos.org/plosone/article>",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"ts": "1463868370.000008",
|
|
|
|
"channel_name": "general",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "added bot",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"subtype": "bot_add",
|
|
|
|
"ts": "1433868549.000010",
|
|
|
|
"channel_name": "general",
|
|
|
|
},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
# This message will be ignored since it has no user and file is None.
|
|
|
|
# See #9217 for the situation; likely file uploads on archived channels
|
2021-02-12 08:19:30 +01:00
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"upload": False,
|
|
|
|
"file": None,
|
|
|
|
"text": "A file was shared",
|
|
|
|
"channel_name": "general",
|
|
|
|
"type": "message",
|
|
|
|
"ts": "1433868549.000011",
|
|
|
|
"subtype": "file_share",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random test",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"ts": "1433868669.000012",
|
|
|
|
"channel_name": "general",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "Hello everyone",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"type": "message",
|
|
|
|
"ts": "1433868669.000015",
|
|
|
|
"mpim_name": "mpdm-user9--user2--user10-1",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "Who is watching the World Cup",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"type": "message",
|
|
|
|
"ts": "1433868949.000015",
|
|
|
|
"mpim_name": "mpdm-user6--user7--user4-1",
|
|
|
|
},
|
|
|
|
{
|
2021-02-12 08:20:45 +01:00
|
|
|
"client_msg_id": "998d9229-35aa-424f-8d87-99e00df27dc9",
|
|
|
|
"type": "message",
|
|
|
|
"text": "Who is coming for camping this weekend?",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"ts": "1553607595.000700",
|
|
|
|
"pm_name": "DHX1UP7EG",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"client_msg_id": "998d9229-35aa-424f-8d87-99e00df27dc9",
|
|
|
|
"type": "message",
|
|
|
|
"text": "<@U061A5N1G>: Are you in Kochi?",
|
|
|
|
"user": "U066MTL5U",
|
|
|
|
"ts": "1553607595.000700",
|
|
|
|
"pm_name": "DJ47BL849",
|
|
|
|
},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
]
|
2018-02-25 09:54:53 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
slack_recipient_name_to_zulip_recipient_id = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"random": 2,
|
|
|
|
"general": 1,
|
|
|
|
"mpdm-user9--user2--user10-1": 5,
|
|
|
|
"mpdm-user6--user7--user4-1": 6,
|
|
|
|
"U066MTL5U": 7,
|
|
|
|
"U061A5N1G": 8,
|
|
|
|
"U061A1R2R": 8,
|
2021-02-12 08:19:30 +01:00
|
|
|
}
|
|
|
|
dm_members = {
|
2021-02-12 08:20:45 +01:00
|
|
|
"DJ47BL849": ("U066MTL5U", "U061A5N1G"),
|
|
|
|
"DHX1UP7EG": ("U061A5N1G", "U061A1R2R"),
|
2021-02-12 08:19:30 +01:00
|
|
|
}
|
2018-01-26 15:33:22 +01:00
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
zerver_usermessage: List[Dict[str, Any]] = []
|
2020-09-02 08:14:51 +02:00
|
|
|
subscriber_map: Dict[int, Set[int]] = {}
|
2021-02-12 08:20:45 +01:00
|
|
|
added_channels: Dict[str, Tuple[str, int]] = {"random": ("c5", 1), "general": ("c6", 2)}
|
2018-10-28 18:55:32 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
(
|
|
|
|
zerver_message,
|
|
|
|
zerver_usermessage,
|
|
|
|
attachment,
|
|
|
|
uploads,
|
|
|
|
reaction,
|
|
|
|
) = channel_message_to_zerver_message(
|
|
|
|
1,
|
|
|
|
user_data,
|
|
|
|
slack_user_id_to_zulip_user_id,
|
|
|
|
slack_recipient_name_to_zulip_recipient_id,
|
|
|
|
all_messages,
|
|
|
|
[],
|
|
|
|
subscriber_map,
|
|
|
|
added_channels,
|
|
|
|
dm_members,
|
2021-02-12 08:20:45 +01:00
|
|
|
"domain",
|
2021-02-12 08:19:30 +01:00
|
|
|
set(),
|
2021-08-13 05:48:16 +02:00
|
|
|
convert_slack_threads=False,
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-01-26 15:33:22 +01:00
|
|
|
# functioning already tested in helper function
|
|
|
|
self.assertEqual(zerver_usermessage, [])
|
|
|
|
# subtype: channel_join is filtered
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(zerver_message, 9)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2018-02-26 10:03:48 +01:00
|
|
|
self.assertEqual(uploads, [])
|
2018-02-26 10:16:34 +01:00
|
|
|
self.assertEqual(attachment, [])
|
2018-02-26 10:03:48 +01:00
|
|
|
|
2018-03-25 13:42:04 +02:00
|
|
|
# Test reactions
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(reaction[0]["user_profile"], 24)
|
|
|
|
self.assertEqual(reaction[0]["emoji_name"], reactions[0]["name"])
|
2018-03-25 13:42:04 +02:00
|
|
|
|
2018-01-26 15:33:22 +01:00
|
|
|
# Message conversion already tested in tests.test_slack_message_conversion
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_message[0]["content"], "@**Jane**: hey!")
|
|
|
|
self.assertEqual(zerver_message[0]["has_link"], False)
|
|
|
|
self.assertEqual(zerver_message[2]["content"], "http://journals.plos.org/plosone/article")
|
|
|
|
self.assertEqual(zerver_message[2]["has_link"], True)
|
|
|
|
self.assertEqual(zerver_message[5]["has_link"], False)
|
|
|
|
self.assertEqual(zerver_message[7]["has_link"], False)
|
|
|
|
|
|
|
|
self.assertEqual(zerver_message[3][EXPORT_TOPIC_NAME], "imported from Slack")
|
|
|
|
self.assertEqual(zerver_message[3]["content"], "/me added bot")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[4]["recipient"], slack_recipient_name_to_zulip_recipient_id["general"]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_message[2][EXPORT_TOPIC_NAME], "imported from Slack")
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[1]["recipient"], slack_recipient_name_to_zulip_recipient_id["random"]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[5]["recipient"],
|
|
|
|
slack_recipient_name_to_zulip_recipient_id["mpdm-user9--user2--user10-1"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[6]["recipient"],
|
|
|
|
slack_recipient_name_to_zulip_recipient_id["mpdm-user6--user7--user4-1"],
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[7]["recipient"], slack_recipient_name_to_zulip_recipient_id["U061A5N1G"]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message[7]["recipient"], slack_recipient_name_to_zulip_recipient_id["U061A5N1G"]
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_message[3]["id"], zerver_message[0]["id"] + 3)
|
|
|
|
self.assertEqual(zerver_message[4]["id"], zerver_message[0]["id"] + 4)
|
|
|
|
self.assertEqual(zerver_message[5]["id"], zerver_message[0]["id"] + 5)
|
|
|
|
self.assertEqual(zerver_message[7]["id"], zerver_message[0]["id"] + 7)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIsNone(zerver_message[3]["rendered_content"])
|
|
|
|
self.assertEqual(zerver_message[0]["has_image"], False)
|
|
|
|
self.assertEqual(zerver_message[0]["date_sent"], float(all_messages[1]["ts"]))
|
|
|
|
self.assertEqual(zerver_message[2]["rendered_content_version"], 1)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(zerver_message[0]["sender"], 43)
|
|
|
|
self.assertEqual(zerver_message[3]["sender"], 24)
|
|
|
|
self.assertEqual(zerver_message[5]["sender"], 43)
|
|
|
|
self.assertEqual(zerver_message[6]["sender"], 24)
|
|
|
|
self.assertEqual(zerver_message[7]["sender"], 43)
|
|
|
|
self.assertEqual(zerver_message[8]["sender"], 5)
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-08-13 05:48:16 +02:00
|
|
|
@mock.patch("zerver.data_import.slack.build_usermessages", return_value=(2, 4))
|
|
|
|
def test_channel_message_to_zerver_message_with_threads(
|
|
|
|
self, mock_build_usermessage: mock.Mock
|
|
|
|
) -> None:
|
|
|
|
user_data = [
|
|
|
|
{"id": "U066MTL5U", "name": "john doe", "deleted": False, "real_name": "John"},
|
|
|
|
{"id": "U061A5N1G", "name": "jane doe", "deleted": False, "real_name": "Jane"},
|
|
|
|
{"id": "U061A1R2R", "name": "jon", "deleted": False, "real_name": "Jon"},
|
|
|
|
]
|
|
|
|
|
|
|
|
slack_user_id_to_zulip_user_id = {"U066MTL5U": 5, "U061A5N1G": 24, "U061A1R2R": 43}
|
|
|
|
|
|
|
|
all_messages: List[Dict[str, Any]] = [
|
|
|
|
{
|
|
|
|
"text": "<@U066MTL5U> has joined the channel",
|
|
|
|
"subtype": "channel_join",
|
|
|
|
"user": "U066MTL5U",
|
|
|
|
"ts": "1434139102.000002",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "<@U061A5N1G>: hey!",
|
|
|
|
"user": "U061A1R2R",
|
|
|
|
"ts": "1437868294.000006",
|
|
|
|
"has_image": True,
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"ts": "1439868294.000006",
|
|
|
|
# Thread!
|
|
|
|
"thread_ts": "1434139102.000002",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"ts": "1439868294.000007",
|
|
|
|
"thread_ts": "1434139102.000002",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"ts": "1439868294.000008",
|
|
|
|
# A different Thread!
|
|
|
|
"thread_ts": "1439868294.000008",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
{
|
|
|
|
"text": "random",
|
|
|
|
"user": "U061A5N1G",
|
|
|
|
"ts": "1439868295.000008",
|
|
|
|
# Another different Thread!
|
|
|
|
"thread_ts": "1439868295.000008",
|
|
|
|
"channel_name": "random",
|
|
|
|
},
|
|
|
|
]
|
|
|
|
|
|
|
|
slack_recipient_name_to_zulip_recipient_id = {
|
|
|
|
"random": 2,
|
|
|
|
"general": 1,
|
|
|
|
}
|
|
|
|
dm_members: DMMembersT = {}
|
|
|
|
|
|
|
|
zerver_usermessage: List[Dict[str, Any]] = []
|
|
|
|
subscriber_map: Dict[int, Set[int]] = {}
|
|
|
|
added_channels: Dict[str, Tuple[str, int]] = {"random": ("c5", 1), "general": ("c6", 2)}
|
|
|
|
|
|
|
|
(
|
|
|
|
zerver_message,
|
|
|
|
zerver_usermessage,
|
|
|
|
attachment,
|
|
|
|
uploads,
|
|
|
|
reaction,
|
|
|
|
) = channel_message_to_zerver_message(
|
|
|
|
1,
|
|
|
|
user_data,
|
|
|
|
slack_user_id_to_zulip_user_id,
|
|
|
|
slack_recipient_name_to_zulip_recipient_id,
|
|
|
|
all_messages,
|
|
|
|
[],
|
|
|
|
subscriber_map,
|
|
|
|
added_channels,
|
|
|
|
dm_members,
|
|
|
|
"domain",
|
|
|
|
set(),
|
|
|
|
convert_slack_threads=True,
|
|
|
|
)
|
|
|
|
# functioning already tested in helper function
|
|
|
|
self.assertEqual(zerver_usermessage, [])
|
|
|
|
# subtype: channel_join is filtered
|
|
|
|
self.assert_length(zerver_message, 5)
|
|
|
|
|
|
|
|
self.assertEqual(uploads, [])
|
|
|
|
self.assertEqual(attachment, [])
|
|
|
|
|
|
|
|
# Message conversion already tested in tests.test_slack_message_conversion
|
|
|
|
self.assertEqual(zerver_message[0]["content"], "@**Jane**: hey!")
|
|
|
|
self.assertEqual(zerver_message[0]["has_link"], False)
|
|
|
|
self.assertEqual(zerver_message[1]["content"], "random")
|
|
|
|
self.assertEqual(zerver_message[1][EXPORT_TOPIC_NAME], "2015-06-12 Slack thread 1")
|
|
|
|
self.assertEqual(zerver_message[2][EXPORT_TOPIC_NAME], "2015-06-12 Slack thread 1")
|
|
|
|
# A new thread with a different date from 2015-06-12, starts the counter from 1.
|
|
|
|
self.assertEqual(zerver_message[3][EXPORT_TOPIC_NAME], "2015-08-18 Slack thread 1")
|
|
|
|
# A new thread with a different timestamp, but the same date as 2015-08-18, starts the
|
|
|
|
# counter from 2.
|
|
|
|
self.assertEqual(zerver_message[4][EXPORT_TOPIC_NAME], "2015-08-18 Slack thread 2")
|
|
|
|
self.assertEqual(
|
|
|
|
zerver_message[1]["recipient"], slack_recipient_name_to_zulip_recipient_id["random"]
|
|
|
|
)
|
|
|
|
|
2018-08-01 00:18:04 +02:00
|
|
|
@mock.patch("zerver.data_import.slack.channel_message_to_zerver_message")
|
2018-12-05 04:13:47 +01:00
|
|
|
@mock.patch("zerver.data_import.slack.get_messages_iterator")
|
2021-02-12 08:19:30 +01:00
|
|
|
def test_convert_slack_workspace_messages(
|
|
|
|
self, mock_get_messages_iterator: mock.Mock, mock_message: mock.Mock
|
|
|
|
) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
output_dir = os.path.join(settings.TEST_WORKER_DIR, "test-slack-import")
|
2019-07-06 02:07:56 +02:00
|
|
|
os.makedirs(output_dir, exist_ok=True)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
added_channels: Dict[str, Tuple[str, int]] = {"random": ("c5", 1), "general": ("c6", 2)}
|
2018-12-17 02:26:51 +01:00
|
|
|
|
2018-06-17 19:16:42 +02:00
|
|
|
time = float(timezone_now().timestamp())
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_message = [{"id": 1, "ts": time}, {"id": 5, "ts": time}]
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def fake_get_messages_iter(
|
|
|
|
slack_data_dir: str,
|
|
|
|
added_channels: AddedChannelsT,
|
|
|
|
added_mpims: AddedMPIMsT,
|
|
|
|
dm_members: DMMembersT,
|
|
|
|
) -> Iterator[ZerverFieldsT]:
|
2018-12-17 02:26:51 +01:00
|
|
|
import copy
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-12-17 02:26:51 +01:00
|
|
|
return iter(copy.deepcopy(zerver_message))
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
realm: Dict[str, Any] = {"zerver_subscription": []}
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
user_list: List[Dict[str, Any]] = []
|
2018-06-13 20:15:35 +02:00
|
|
|
reactions = [{"name": "grinning", "users": ["U061A5N1G"], "count": 1}]
|
2020-05-09 00:10:17 +02:00
|
|
|
attachments: List[Dict[str, Any]] = []
|
|
|
|
uploads: List[Dict[str, Any]] = []
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
zerver_usermessage = [{"id": 3}, {"id": 5}, {"id": 6}, {"id": 9}]
|
2018-01-26 15:33:22 +01:00
|
|
|
|
2018-12-17 02:26:51 +01:00
|
|
|
mock_get_messages_iterator.side_effect = fake_get_messages_iter
|
2021-02-12 08:19:30 +01:00
|
|
|
mock_message.side_effect = [
|
|
|
|
[zerver_message[:1], zerver_usermessage[:2], attachments, uploads, reactions[:1]],
|
|
|
|
[zerver_message[1:2], zerver_usermessage[2:5], attachments, uploads, reactions[1:1]],
|
|
|
|
]
|
2020-07-24 17:22:12 +02:00
|
|
|
|
|
|
|
with self.assertLogs(level="INFO"):
|
|
|
|
# Hacky: We should include a zerver_userprofile, not the empty []
|
|
|
|
test_reactions, uploads, zerver_attachment = convert_slack_workspace_messages(
|
2021-02-12 08:20:45 +01:00
|
|
|
"./random_path",
|
2021-02-12 08:19:30 +01:00
|
|
|
user_list,
|
|
|
|
2,
|
|
|
|
{},
|
|
|
|
{},
|
|
|
|
added_channels,
|
|
|
|
{},
|
|
|
|
{},
|
|
|
|
realm,
|
|
|
|
[],
|
|
|
|
[],
|
2021-02-12 08:20:45 +01:00
|
|
|
"domain",
|
2021-02-12 08:19:30 +01:00
|
|
|
output_dir=output_dir,
|
2021-08-13 05:48:16 +02:00
|
|
|
convert_slack_threads=False,
|
2021-02-12 08:19:30 +01:00
|
|
|
chunk_size=1,
|
|
|
|
)
|
2020-07-24 17:22:12 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
messages_file_1 = os.path.join(output_dir, "messages-000001.json")
|
2018-06-17 19:16:42 +02:00
|
|
|
self.assertTrue(os.path.exists(messages_file_1))
|
2021-02-12 08:20:45 +01:00
|
|
|
messages_file_2 = os.path.join(output_dir, "messages-000002.json")
|
2018-06-17 19:16:42 +02:00
|
|
|
self.assertTrue(os.path.exists(messages_file_2))
|
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
with open(messages_file_1, "rb") as f:
|
|
|
|
message_json = orjson.loads(f.read())
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(message_json["zerver_message"], zerver_message[:1])
|
|
|
|
self.assertEqual(message_json["zerver_usermessage"], zerver_usermessage[:2])
|
2018-06-13 20:15:35 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
with open(messages_file_2, "rb") as f:
|
|
|
|
message_json = orjson.loads(f.read())
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertEqual(message_json["zerver_message"], zerver_message[1:2])
|
|
|
|
self.assertEqual(message_json["zerver_usermessage"], zerver_usermessage[2:5])
|
2018-06-17 19:16:42 +02:00
|
|
|
|
2018-06-13 20:15:35 +02:00
|
|
|
self.assertEqual(test_reactions, reactions)
|
2018-02-08 21:42:27 +01:00
|
|
|
|
2020-01-24 15:00:18 +01:00
|
|
|
@mock.patch("zerver.data_import.slack.requests.get")
|
2021-02-12 08:19:30 +01:00
|
|
|
@mock.patch("zerver.data_import.slack.process_uploads", return_value=[])
|
|
|
|
@mock.patch("zerver.data_import.slack.build_attachment", return_value=[])
|
2018-08-01 00:18:04 +02:00
|
|
|
@mock.patch("zerver.data_import.slack.build_avatar_url")
|
|
|
|
@mock.patch("zerver.data_import.slack.build_avatar")
|
|
|
|
@mock.patch("zerver.data_import.slack.get_slack_api_data")
|
2021-02-12 08:19:30 +01:00
|
|
|
def test_slack_import_to_existing_database(
|
|
|
|
self,
|
|
|
|
mock_get_slack_api_data: mock.Mock,
|
|
|
|
mock_build_avatar_url: mock.Mock,
|
|
|
|
mock_build_avatar: mock.Mock,
|
|
|
|
mock_process_uploads: mock.Mock,
|
|
|
|
mock_attachment: mock.Mock,
|
|
|
|
mock_requests_get: mock.Mock,
|
|
|
|
) -> None:
|
|
|
|
test_slack_dir = os.path.join(
|
|
|
|
settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "slack_fixtures"
|
|
|
|
)
|
2018-03-08 14:30:16 +01:00
|
|
|
test_slack_zip_file = os.path.join(test_slack_dir, "test_slack_importer.zip")
|
|
|
|
test_slack_unzipped_file = os.path.join(test_slack_dir, "test_slack_importer")
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
test_realm_subdomain = "test-slack-import"
|
2018-03-09 18:03:38 +01:00
|
|
|
output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-slack-importer-data")
|
2021-02-12 08:20:45 +01:00
|
|
|
token = "xoxp-valid-token"
|
2018-02-08 21:42:27 +01:00
|
|
|
|
2018-03-08 14:30:16 +01:00
|
|
|
# If the test fails, the 'output_dir' would not be deleted and hence it would give an
|
|
|
|
# error when we run the tests next time, as 'do_convert_data' expects an empty 'output_dir'
|
|
|
|
# hence we remove it before running 'do_convert_data'
|
2019-04-04 12:03:54 +02:00
|
|
|
self.rm_tree(output_dir)
|
2018-03-08 14:30:16 +01:00
|
|
|
# Also the unzipped data file should be removed if the test fails at 'do_convert_data'
|
2019-04-04 12:03:54 +02:00
|
|
|
self.rm_tree(test_slack_unzipped_file)
|
2018-03-08 14:30:16 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
user_data_fixture = orjson.loads(self.fixture_data("user_data.json", type="slack_fixtures"))
|
|
|
|
team_info_fixture = orjson.loads(self.fixture_data("team_info.json", type="slack_fixtures"))
|
2021-02-12 08:19:30 +01:00
|
|
|
mock_get_slack_api_data.side_effect = [
|
2021-02-12 08:20:45 +01:00
|
|
|
user_data_fixture["members"],
|
2021-02-12 08:19:30 +01:00
|
|
|
{},
|
|
|
|
team_info_fixture["team"],
|
|
|
|
]
|
2022-01-13 23:24:16 +01:00
|
|
|
mock_requests_get.return_value.raw = BytesIO(read_test_image_file("img.png"))
|
2018-02-08 21:42:27 +01:00
|
|
|
|
2022-08-26 23:14:12 +02:00
|
|
|
with self.assertLogs(level="INFO"), self.settings(EXTERNAL_HOST="zulip.example.com"):
|
|
|
|
# We need to mock EXTERNAL_HOST to be a valid domain because Slack's importer
|
|
|
|
# uses it to generate email addresses for users without an email specified.
|
2020-07-24 17:22:12 +02:00
|
|
|
do_convert_data(test_slack_zip_file, output_dir, token)
|
|
|
|
|
2018-02-08 21:42:27 +01:00
|
|
|
self.assertTrue(os.path.exists(output_dir))
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertTrue(os.path.exists(output_dir + "/realm.json"))
|
2018-02-08 21:42:27 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
realm_icons_path = os.path.join(output_dir, "realm_icons")
|
|
|
|
realm_icon_records_path = os.path.join(realm_icons_path, "records.json")
|
2020-01-24 15:00:18 +01:00
|
|
|
|
|
|
|
self.assertTrue(os.path.exists(realm_icon_records_path))
|
2020-08-07 01:09:47 +02:00
|
|
|
with open(realm_icon_records_path, "rb") as f:
|
|
|
|
records = orjson.loads(f.read())
|
2021-05-17 05:41:32 +02:00
|
|
|
self.assert_length(records, 2)
|
2020-01-24 15:00:18 +01:00
|
|
|
self.assertEqual(records[0]["path"], "0/icon.original")
|
|
|
|
self.assertTrue(os.path.exists(os.path.join(realm_icons_path, records[0]["path"])))
|
|
|
|
|
|
|
|
self.assertEqual(records[1]["path"], "0/icon.png")
|
|
|
|
self.assertTrue(os.path.exists(os.path.join(realm_icons_path, records[1]["path"])))
|
|
|
|
|
2018-02-08 21:42:27 +01:00
|
|
|
# test import of the converted slack data into an existing database
|
2020-07-24 17:22:12 +02:00
|
|
|
with self.settings(BILLING_ENABLED=False), self.assertLogs(level="INFO"):
|
2018-12-13 08:19:29 +01:00
|
|
|
do_import_realm(output_dir, test_realm_subdomain)
|
2018-07-05 21:28:21 +02:00
|
|
|
realm = get_realm(test_realm_subdomain)
|
|
|
|
self.assertTrue(realm.name, test_realm_subdomain)
|
2020-01-24 15:00:18 +01:00
|
|
|
self.assertEqual(realm.icon_source, Realm.ICON_UPLOADED)
|
2018-07-05 21:28:21 +02:00
|
|
|
|
|
|
|
# test RealmAuditLog
|
|
|
|
realmauditlog = RealmAuditLog.objects.filter(realm=realm)
|
|
|
|
realmauditlog_event_type = {log.event_type for log in realmauditlog}
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
|
|
|
realmauditlog_event_type,
|
2021-04-20 12:29:19 +02:00
|
|
|
{
|
|
|
|
RealmAuditLog.SUBSCRIPTION_CREATED,
|
|
|
|
RealmAuditLog.REALM_PLAN_TYPE_CHANGED,
|
|
|
|
RealmAuditLog.REALM_CREATED,
|
|
|
|
},
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2018-07-05 21:28:21 +02:00
|
|
|
|
2022-09-29 19:56:30 +02:00
|
|
|
self.assertEqual(Message.objects.filter(realm=realm).count(), 82)
|
|
|
|
|
2023-04-16 21:53:22 +02:00
|
|
|
# All auth backends are enabled initially.
|
|
|
|
for name, enabled in realm.authentication_methods_dict().items():
|
|
|
|
self.assertTrue(enabled)
|
|
|
|
|
2018-02-08 21:42:27 +01:00
|
|
|
Realm.objects.filter(name=test_realm_subdomain).delete()
|
|
|
|
|
|
|
|
remove_folder(output_dir)
|
|
|
|
self.assertFalse(os.path.exists(output_dir))
|
2018-10-28 16:48:52 +01:00
|
|
|
|
|
|
|
def test_message_files(self) -> None:
    """Verify process_message_files on a message with two attached files.

    The image hosted on files.slack.com is re-uploaded (producing an
    attachment and an uploads_list record), while the externally hosted zip
    is only linked; the returned content contains both links.
    """
    alice_id = 7
    alice = {
        "id": alice_id,
        "profile": {
            "email": "alice@example.com",
        },
    }
    files = [
        {
            # Hosted on Slack -> should be re-uploaded into Zulip.
            "url_private": "https://files.slack.com/apple.png",
            "title": "Apple",
            "name": "apple.png",
            "mimetype": "image/png",
            "timestamp": 9999,
            "created": 8888,
            "size": 3000000,
        },
        {
            # External host -> should be left as a plain link.
            "url_private": "https://example.com/banana.zip",
            "title": "banana",
        },
    ]
    message = {
        "user": alice_id,
        "files": files,
    }
    domain_name = "example.com"
    realm_id = 5
    message_id = 99
    slack_user_id = "alice"
    users = [alice]
    slack_user_id_to_zulip_user_id = {
        "alice": alice_id,
    }

    zerver_attachment: List[Dict[str, Any]] = []
    uploads_list: List[Dict[str, Any]] = []

    info = process_message_files(
        message=message,
        domain_name=domain_name,
        realm_id=realm_id,
        message_id=message_id,
        slack_user_id=slack_user_id,
        users=users,
        slack_user_id_to_zulip_user_id=slack_user_id_to_zulip_user_id,
        zerver_attachment=zerver_attachment,
        uploads_list=uploads_list,
    )
    # Only the Slack-hosted file becomes an attachment/upload.
    self.assert_length(zerver_attachment, 1)
    self.assert_length(uploads_list, 1)

    image_path = zerver_attachment[0]["path_id"]
    expected_content = (
        f"[Apple](/user_uploads/{image_path})\n[banana](https://example.com/banana.zip)"
    )
    self.assertEqual(info["content"], expected_content)

    self.assertTrue(info["has_link"])
    self.assertTrue(info["has_image"])

    self.assertEqual(uploads_list[0]["s3_path"], image_path)
    self.assertEqual(uploads_list[0]["realm_id"], realm_id)
    self.assertEqual(uploads_list[0]["user_profile_email"], "alice@example.com")
|
data_import: Fix bot email address de-duplication.
4815f6e28b2e99e799c3b60dac5cb1f19fa31b8e tried to de-duplicate bot
email addresses, but instead caused duplicates to crash:
```
Traceback (most recent call last):
File "./manage.py", line 157, in <module>
execute_from_command_line(sys.argv)
File "./manage.py", line 122, in execute_from_command_line
utility.execute()
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/__init__.py", line 413, in execute
self.fetch_command(subcommand).run_from_argv(self.argv)
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/base.py", line 354, in run_from_argv
self.execute(*args, **cmd_options)
File "/srv/zulip-venv-cache/56ac6adf406011a100282dd526d03537be84d23e/zulip-py3-venv/lib/python3.8/site-packages/django/core/management/base.py", line 398, in execute
output = self.handle(*args, **options)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/management/commands/convert_slack_data.py", line 59, in handle
do_convert_data(path, output_dir, token, threads=num_threads)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 1320, in do_convert_data
) = slack_workspace_to_realm(
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 141, in slack_workspace_to_realm
) = users_to_zerver_userprofile(slack_data_dir, user_list, realm_id, int(NOW), domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 248, in users_to_zerver_userprofile
email = get_user_email(user, domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 406, in get_user_email
return SlackBotEmail.get_email(user["profile"], domain_name)
File "/home/zulip/deployments/2022-03-16-22-25-42/zerver/data_import/slack.py", line 85, in get_email
email_prefix += cls.duplicate_email_count[email]
TypeError: can only concatenate str (not "int") to str
```
Fix the stringification, make it case-insensitive, append with a dash
for readability, and add tests for all of the above.
2022-03-31 02:21:57 +02:00
|
|
|
|
|
|
|
def test_bot_duplicates(self) -> None:
    """Bot emails derived from duplicate names get a numeric suffix,
    and results are cached per bot_id so the same bot keeps its email."""
    cases = [
        ({"real_name_normalized": "Real Bot", "bot_id": "foo"}, "real-bot@example.com"),
        # SlackBotEmail keeps state -- doing it again appends a "2", "3", etc
        ({"real_name_normalized": "Real Bot", "bot_id": "bar"}, "real-bot-2@example.com"),
        ({"real_name_normalized": "Real Bot", "bot_id": "baz"}, "real-bot-3@example.com"),
        # But caches based on the bot_id
        ({"real_name_normalized": "Real Bot", "bot_id": "foo"}, "real-bot@example.com"),
        # A different name source ("first_name") is normalized the same way
        ({"first_name": "Other Name", "bot_id": "other"}, "othername-bot@example.com"),
    ]
    # Order matters: SlackBotEmail mutates module-level state across calls.
    for profile, expected_email in cases:
        self.assertEqual(SlackBotEmail.get_email(profile, "example.com"), expected_email)
|
2021-09-16 09:37:49 +02:00
|
|
|
|
|
|
|
def test_slack_emoji_name_to_codepoint(self) -> None:
    """Spot-check the Slack emoji name -> Unicode codepoint lookup table."""
    expected_codepoints = {
        "thinking_face": "1f914",
        "tophat": "1f3a9",
        "dog2": "1f415",
        "dog": "1f436",
    }
    for emoji_name, codepoint in expected_codepoints.items():
        self.assertEqual(slack_emoji_name_to_codepoint[emoji_name], codepoint)
|
2023-01-27 21:02:08 +01:00
|
|
|
|
|
|
|
@mock.patch("zerver.data_import.slack.requests.get")
|
|
|
|
@mock.patch("zerver.data_import.slack.process_uploads", return_value=[])
|
|
|
|
@mock.patch("zerver.data_import.slack.build_attachment", return_value=[])
|
|
|
|
@mock.patch("zerver.data_import.slack.build_avatar_url")
|
|
|
|
@mock.patch("zerver.data_import.slack.build_avatar")
|
|
|
|
@mock.patch("zerver.data_import.slack.get_slack_api_data")
|
|
|
|
def test_slack_import_unicode_filenames(
|
|
|
|
self,
|
|
|
|
mock_get_slack_api_data: mock.Mock,
|
|
|
|
mock_build_avatar_url: mock.Mock,
|
|
|
|
mock_build_avatar: mock.Mock,
|
|
|
|
mock_process_uploads: mock.Mock,
|
|
|
|
mock_attachment: mock.Mock,
|
|
|
|
mock_requests_get: mock.Mock,
|
|
|
|
) -> None:
|
|
|
|
test_slack_dir = os.path.join(
|
|
|
|
settings.DEPLOY_ROOT, "zerver", "tests", "fixtures", "slack_fixtures"
|
|
|
|
)
|
|
|
|
test_slack_zip_file = os.path.join(test_slack_dir, "test_unicode_slack_importer.zip")
|
|
|
|
test_slack_unzipped_file = os.path.join(test_slack_dir, "test_unicode_slack_importer")
|
|
|
|
output_dir = os.path.join(settings.DEPLOY_ROOT, "var", "test-unicode-slack-importer-data")
|
|
|
|
token = "xoxp-valid-token"
|
|
|
|
|
|
|
|
# If the test fails, the 'output_dir' would not be deleted and hence it would give an
|
|
|
|
# error when we run the tests next time, as 'do_convert_data' expects an empty 'output_dir'
|
|
|
|
# hence we remove it before running 'do_convert_data'
|
|
|
|
self.rm_tree(output_dir)
|
|
|
|
# Also the unzipped data file should be removed if the test fails at 'do_convert_data'
|
|
|
|
self.rm_tree(test_slack_unzipped_file)
|
|
|
|
|
|
|
|
user_data_fixture = orjson.loads(
|
|
|
|
self.fixture_data("unicode_user_data.json", type="slack_fixtures")
|
|
|
|
)
|
|
|
|
team_info_fixture = orjson.loads(
|
|
|
|
self.fixture_data("unicode_team_info.json", type="slack_fixtures")
|
|
|
|
)
|
|
|
|
mock_get_slack_api_data.side_effect = [
|
|
|
|
user_data_fixture["members"],
|
|
|
|
{},
|
|
|
|
team_info_fixture["team"],
|
|
|
|
]
|
|
|
|
mock_requests_get.return_value.raw = BytesIO(read_test_image_file("img.png"))
|
|
|
|
|
|
|
|
with self.assertLogs(level="INFO"), self.settings(EXTERNAL_HOST="zulip.example.com"):
|
|
|
|
# We need to mock EXTERNAL_HOST to be a valid domain because Slack's importer
|
|
|
|
# uses it to generate email addresses for users without an email specified.
|
|
|
|
do_convert_data(test_slack_zip_file, output_dir, token)
|