import os
import json
import ujson
import hashlib
import sys
import argparse
import shutil
import subprocess
import re
import logging
import random
import requests

from django.conf import settings
from django.db import connection
from django.utils.timezone import now as timezone_now
from django.forms.models import model_to_dict
from typing import Any, Dict, List, Optional, Tuple
from zerver.forms import check_subdomain_available
from zerver.models import Reaction, RealmEmoji, Realm, UserProfile
from zerver.lib.slack_message_conversion import convert_to_zulip_markdown, \
    get_user_full_name
from zerver.lib.parallel import run_parallel
from zerver.lib.avatar_hash import user_avatar_path_from_ids
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS as stream_colors
from zerver.lib.upload import random_name, sanitize_name
from zerver.lib.export import MESSAGE_BATCH_CHUNK_SIZE
from zerver.lib.emoji import NAME_TO_CODEPOINT_PATH

# stubs
ZerverFieldsT = Dict[str, Any]
AddedUsersT = Dict[str, int]
AddedChannelsT = Dict[str, Tuple[str, int]]
AddedRecipientsT = Dict[str, int]
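
# Illustrative note (added; the ids shown are made up): the Added*T maps carry
# Slack -> Zulip id translations between conversion passes.  For example,
# added_users might look like {'U061A5N1G': 1} (Slack user id -> Zulip user id)
# and added_channels like {'general': ('C061EG9SL', 0)}
# (channel name -> (Slack channel id, Zulip stream id)).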

def rm_tree(path: str) -> None:
    if os.path.exists(path):
        shutil.rmtree(path)

def slack_workspace_to_realm(domain_name: str, realm_id: int, user_list: List[ZerverFieldsT],
                             realm_subdomain: str, slack_data_dir: str,
                             custom_emoji_list: ZerverFieldsT) -> Tuple[ZerverFieldsT, AddedUsersT,
                                                                        AddedRecipientsT,
                                                                        AddedChannelsT,
                                                                        List[ZerverFieldsT],
                                                                        ZerverFieldsT]:
    """
    Returns:
    1. realm, converted Realm data
    2. added_users, which is a dictionary to map from slack user id to zulip user id
    3. added_recipient, which is a dictionary to map from channel name to zulip recipient_id
    4. added_channels, which is a dictionary to map from channel name to channel id, zulip stream_id
    5. avatars, which is a list to map avatars to zulip avatar records.json
    6. emoji_url_map, which maps emoji name to its slack url
    """
    NOW = float(timezone_now().timestamp())

    zerver_realm = build_zerver_realm(realm_id, realm_subdomain, NOW)

    realm = dict(zerver_client=[{"name": "populate_db", "id": 1},
                                {"name": "website", "id": 2},
                                {"name": "API", "id": 3}],
                 zerver_userpresence=[],  # shows last logged in data, which is not available in slack
                 zerver_userprofile_mirrordummy=[],
                 zerver_realmdomain=[{"realm": realm_id,
                                      "allow_subdomains": False,
                                      "domain": domain_name,
                                      "id": realm_id}],
                 zerver_useractivity=[],
                 zerver_realm=zerver_realm,
                 zerver_huddle=[],
                 zerver_userprofile_crossrealm=[],
                 zerver_useractivityinterval=[],
                 zerver_realmfilter=[])

    zerver_userprofile, avatars, added_users, zerver_customprofilefield, \
        zerver_customprofilefield_value = users_to_zerver_userprofile(slack_data_dir, user_list,
                                                                      realm_id, int(NOW), domain_name)
    channels_to_zerver_stream_fields = channels_to_zerver_stream(slack_data_dir,
                                                                 realm_id,
                                                                 added_users,
                                                                 zerver_userprofile)
    zerver_realmemoji, emoji_url_map = build_realmemoji(custom_emoji_list, realm_id)
    realm['zerver_realmemoji'] = zerver_realmemoji

    # See https://zulipchat.com/help/set-default-streams-for-new-users
    # for documentation on zerver_defaultstream
    realm['zerver_userprofile'] = zerver_userprofile

    # Custom profile fields
    realm['zerver_customprofilefield'] = zerver_customprofilefield
    realm['zerver_customprofilefieldvalue'] = zerver_customprofilefield_value

    realm['zerver_defaultstream'] = channels_to_zerver_stream_fields[0]
    realm['zerver_stream'] = channels_to_zerver_stream_fields[1]
    realm['zerver_subscription'] = channels_to_zerver_stream_fields[3]
    realm['zerver_recipient'] = channels_to_zerver_stream_fields[4]
    added_channels = channels_to_zerver_stream_fields[2]
    added_recipient = channels_to_zerver_stream_fields[5]

    return realm, added_users, added_recipient, added_channels, avatars, emoji_url_map

def build_zerver_realm(realm_id: int, realm_subdomain: str,
                       time: float) -> List[ZerverFieldsT]:
    realm = Realm(id=realm_id, date_created=time,
                  name=realm_subdomain, string_id=realm_subdomain,
                  description="Organization imported from Slack!")
    auth_methods = [[flag[0], flag[1]] for flag in realm.authentication_methods]
    realm_dict = model_to_dict(realm, exclude='authentication_methods')
    realm_dict['authentication_methods'] = auth_methods
    return [realm_dict]
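
# Note (added for clarity): authentication_methods is handled separately above because
# it is a bitfield on the Realm model; iterating it yields (method_name, enabled) pairs,
# which serialize to JSON cleanly as [name, enabled] lists, unlike the raw field value.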

def build_realmemoji(custom_emoji_list: ZerverFieldsT,
                     realm_id: int) -> Tuple[List[ZerverFieldsT],
                                             ZerverFieldsT]:
    zerver_realmemoji = []
    emoji_url_map = {}
    emoji_id = 0
    for emoji_name, url in custom_emoji_list.items():
        if 'emoji.slack-edge.com' in url:
            # Some of the emojis we get from the api have invalid links
            # this is to prevent errors related to them
            realmemoji = dict(
                name=emoji_name,
                id=emoji_id,
                author=None,
                realm=realm_id,
                file_name=os.path.basename(url),
                deactivated=False)
            emoji_url_map[emoji_name] = url
            zerver_realmemoji.append(realmemoji)
            emoji_id += 1
    return zerver_realmemoji, emoji_url_map
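
# For reference (an assumption about Slack's emoji.list response, not from the original
# file): custom_emoji_list maps emoji name to image URL, e.g.
# {'bowtie': 'https://emoji.slack-edge.com/T0.../bowtie/46...png', 'shipit': 'alias:squirrel'};
# alias entries have no slack-edge URL, so the 'emoji.slack-edge.com' check above also
# filters them out.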

def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int,
                                timestamp: Any, domain_name: str) -> Tuple[List[ZerverFieldsT],
                                                                           List[ZerverFieldsT],
                                                                           AddedUsersT,
                                                                           List[ZerverFieldsT],
                                                                           List[ZerverFieldsT]]:
    """
    Returns:
    1. zerver_userprofile, which is a list of user profiles
    2. avatar_list, which is a list to map avatars to zulip avatar records.json
    3. added_users, which is a dictionary to map from slack user id to zulip
       user id
    4. zerver_customprofilefield, which is a list of all custom profile fields
    5. zerver_customprofilefield_values, which is a list of the custom profile field
       values of the users
    """
    logging.info('######### IMPORTING USERS STARTED #########\n')
    zerver_userprofile = []
    zerver_customprofilefield = []  # type: List[ZerverFieldsT]
    zerver_customprofilefield_values = []  # type: List[ZerverFieldsT]
    avatar_list = []  # type: List[ZerverFieldsT]
    added_users = {}

    # The user data we get from the slack api does not contain custom profile data
    # Hence we get it from the slack zip file
    slack_data_file_user_list = get_data_file(slack_data_dir + '/users.json')

    # To map user id with the custom profile fields of the corresponding user
    slack_user_custom_field_map = {}  # type: ZerverFieldsT
    # To store custom fields corresponding to their ids
    custom_field_map = {}  # type: ZerverFieldsT

    for user in slack_data_file_user_list:
        process_slack_custom_fields(user, slack_user_custom_field_map)

    # We have only one primary owner in slack, see link
    # https://get.slack.help/hc/en-us/articles/201912948-Owners-and-Administrators
    # This is to import the primary owner first from all the users
    user_id_count = custom_field_id_count = customprofilefield_id = 0
    primary_owner_id = user_id_count
    user_id_count += 1
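
    # Illustrative note (added): Zulip user ids are assigned sequentially during the
    # conversion, with id 0 reserved for Slack's single primary owner, so the primary
    # owner gets the first id regardless of its position in the user list; everyone
    # else gets 1, 2, 3, ... in iteration order.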

    for user in users:
        slack_user_id = user['id']

        if user.get('is_primary_owner', False):
            user_id = primary_owner_id
        else:
            user_id = user_id_count

        # email
        email = get_user_email(user, domain_name)

        # avatar
        # ref: https://chat.zulip.org/help/change-your-avatar
        avatar_url = build_avatar_url(slack_user_id, user['team_id'],
                                      user['profile']['avatar_hash'])
        build_avatar(user_id, realm_id, email, avatar_url, timestamp, avatar_list)

        # check if user is the admin
        realm_admin = get_admin(user)

        # timezone
        timezone = get_user_timezone(user)

        # Check for custom profile fields
        if slack_user_id in slack_user_custom_field_map:
            # For processing the fields
            custom_field_map, customprofilefield_id = build_customprofile_field(
                zerver_customprofilefield, slack_user_custom_field_map[slack_user_id],
                customprofilefield_id, realm_id, custom_field_map)
            # Store the custom field values for the corresponding user
            custom_field_id_count = build_customprofilefields_values(
                custom_field_map, slack_user_custom_field_map[slack_user_id], user_id,
                custom_field_id_count, zerver_customprofilefield_values)

        userprofile = UserProfile(
            full_name=get_user_full_name(user),
            short_name=user['name'],
            is_active=not user['deleted'],
            id=user_id,
            email=email,
            avatar_source='U',
            is_bot=user.get('is_bot', False),
            pointer=-1,
            is_realm_admin=realm_admin,
            bot_type=1 if user.get('is_bot', False) else None,
            date_joined=timestamp,
            timezone=timezone,
            last_login=timestamp)
        userprofile_dict = model_to_dict(userprofile)
        # Set realm id separately as the corresponding realm is not yet a Realm model instance
        userprofile_dict['realm'] = realm_id

        zerver_userprofile.append(userprofile_dict)
        added_users[slack_user_id] = user_id
        if not user.get('is_primary_owner', False):
            user_id_count += 1

        logging.info(u"{} -> {}".format(user['name'], userprofile_dict['email']))

    process_customprofilefields(zerver_customprofilefield, zerver_customprofilefield_values)
    logging.info('######### IMPORTING USERS FINISHED #########\n')
    return zerver_userprofile, avatar_list, added_users, zerver_customprofilefield, \
        zerver_customprofilefield_values

def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields: ZerverFieldsT,
                              customprofilefield_id: int, realm_id: int,
                              custom_field_map: ZerverFieldsT) -> Tuple[ZerverFieldsT, int]:
    # The name of the custom profile field is not provided in the slack data
    # Hash keys of the fields are provided
    # Reference: https://api.slack.com/methods/users.profile.set
    for field, value in fields.items():
        if field not in custom_field_map:
            slack_custom_fields = ['phone', 'skype']
            if field in slack_custom_fields:
                field_name = field
            else:
                field_name = ("slack custom field %s" % str(customprofilefield_id + 1))
            customprofilefield = dict(
                id=customprofilefield_id,
                realm=realm_id,
                name=field_name,
                field_type=1  # For now this is defaulted to 'SHORT_TEXT'
                              # Processing is done in the function 'process_customprofilefields'
            )
            custom_field_map[field] = customprofilefield_id
            customprofilefield_id += 1
            customprofile_field.append(customprofilefield)
    return custom_field_map, customprofilefield_id

def process_slack_custom_fields(user: ZerverFieldsT,
                                slack_user_custom_field_map: ZerverFieldsT) -> None:
    slack_user_custom_field_map[user['id']] = {}
    if user['profile'].get('fields'):
        slack_user_custom_field_map[user['id']] = user['profile']['fields']

    slack_custom_fields = ['phone', 'skype']
    for field in slack_custom_fields:
        if field in user['profile']:
            slack_user_custom_field_map[user['id']][field] = {'value': user['profile'][field]}

def build_customprofilefields_values(custom_field_map: ZerverFieldsT, fields: ZerverFieldsT,
                                     user_id: int, custom_field_id: int,
                                     custom_field_values: List[ZerverFieldsT]) -> int:
    for field, value in fields.items():
        custom_field_value = dict(
            id=custom_field_id,
            user_profile=user_id,
            field=custom_field_map[field],
            value=value['value'])
        custom_field_values.append(custom_field_value)
        custom_field_id += 1
    return custom_field_id

def process_customprofilefields(customprofilefield: List[ZerverFieldsT],
                                customprofilefield_value: List[ZerverFieldsT]) -> None:
    # Process the field types by checking all field values
    for field in customprofilefield:
        for field_value in customprofilefield_value:
            if field_value['field'] == field['id'] and len(field_value['value']) > 50:
                field['field_type'] = 2  # corresponding to Long text
                break

def get_user_email(user: ZerverFieldsT, domain_name: str) -> str:
    if 'email' in user['profile']:
        return user['profile']['email']
    if 'bot_id' in user['profile']:
        if 'real_name_normalized' in user['profile']:
            slack_bot_name = user['profile']['real_name_normalized']
        elif 'first_name' in user['profile']:
            slack_bot_name = user['profile']['first_name']
        else:
            raise AssertionError("Could not identify bot type")
        return slack_bot_name.replace("Bot", "").replace(" ", "") + "-bot@%s" % (domain_name,)
    # TODO: Do we need this fallback case at all?
    return (hashlib.sha256(user['real_name'].encode()).hexdigest() +
            "@%s" % (domain_name,))

def build_avatar_url(slack_user_id: str, team_id: str, avatar_hash: str) -> str:
    avatar_url = "https://ca.slack-edge.com/{}-{}-{}".format(team_id, slack_user_id,
                                                             avatar_hash)
    return avatar_url
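
# Note (added for clarity): this is the public Slack CDN URL for a workspace avatar,
# built from the team id, user id, and avatar hash found in users.json; a size suffix
# such as '-512' can be appended to request a specific resolution, which is what
# process_avatars() below relies on.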

def build_avatar(zulip_user_id: int, realm_id: int, email: str, avatar_url: str,
                 timestamp: Any, avatar_list: List[ZerverFieldsT]) -> None:
    avatar = dict(
        path=avatar_url,  # Save slack's url here, which is used later while processing
        realm_id=realm_id,
        content_type=None,
        user_profile_id=zulip_user_id,
        last_modified=timestamp,
        user_profile_email=email,
        s3_path="",
        size="")
    avatar_list.append(avatar)

def get_admin(user: ZerverFieldsT) -> bool:
    admin = user.get('is_admin', False)
    owner = user.get('is_owner', False)
    primary_owner = user.get('is_primary_owner', False)

    if admin or owner or primary_owner:
        return True
    return False

def get_user_timezone(user: ZerverFieldsT) -> str:
    _default_timezone = "America/New_York"
    timezone = user.get("tz", _default_timezone)
    if timezone is None or '/' not in timezone:
        timezone = _default_timezone
    return timezone

def channels_to_zerver_stream(slack_data_dir: str, realm_id: int, added_users: AddedUsersT,
                              zerver_userprofile: List[ZerverFieldsT]) -> Tuple[List[ZerverFieldsT],
                                                                                List[ZerverFieldsT],
                                                                                AddedChannelsT,
                                                                                List[ZerverFieldsT],
                                                                                List[ZerverFieldsT],
                                                                                AddedRecipientsT]:
    """
    Returns:
    1. zerver_defaultstream, which is a list of the default streams
    2. zerver_stream, which is a list of all streams
    3. added_channels, which is a dictionary to map from channel name to channel id, zulip stream_id
    4. zerver_subscription, which is a list of the subscriptions
    5. zerver_recipient, which is a list of the recipients
    6. added_recipient, which is a dictionary to map from channel name to zulip recipient_id
    """
    logging.info('######### IMPORTING CHANNELS STARTED #########\n')
    channels = get_data_file(slack_data_dir + '/channels.json')

    added_channels = {}
    added_recipient = {}

    zerver_stream = []
    zerver_subscription = []  # type: List[ZerverFieldsT]
    zerver_recipient = []
    zerver_defaultstream = []

    stream_id_count = subscription_id_count = recipient_id_count = defaultstream_id = 0

    for channel in channels:
        # slack_channel_id = channel['id']

        # map Slack's topic and purpose content into Zulip's stream description.
        # WARN This mapping is lossy since the topic.creator, topic.last_set,
        # purpose.creator, purpose.last_set fields are not preserved.
        description = channel["purpose"]["value"]
        stream_id = stream_id_count
        recipient_id = recipient_id_count

        # construct the stream object and append it to zerver_stream
        stream = dict(
            realm=realm_id,
            name=channel["name"],
            deactivated=channel["is_archived"],
            description=description,
            invite_only=False,  # TODO: private channels are not
                                # exported with Slack's standard plan;
                                # so this field is always false
            date_created=float(channel["created"]),
            id=stream_id)

        # construct defaultstream object
        # slack has the default channels 'general' and 'random',
        # to which every user is subscribed
        default_channels = ['general', 'random']  # Slack specific
        if channel['name'] in default_channels:
            defaultstream = build_defaultstream(channel['name'], realm_id, stream_id,
                                                defaultstream_id)
            zerver_defaultstream.append(defaultstream)
            defaultstream_id += 1

        zerver_stream.append(stream)
        added_channels[stream['name']] = (channel['id'], stream_id)

        # construct the recipient object and append it to zerver_recipient
        # type 1: private
        # type 2: stream
        # type 3: huddle
        recipient = dict(
            type_id=stream_id,
            id=recipient_id,
            type=2)
        zerver_recipient.append(recipient)
        added_recipient[stream['name']] = recipient_id
        # TODO add recipients for private messages and huddles

        # construct the subscription object and append it to zerver_subscription
        subscription_id_count = build_subscription(channel['members'], zerver_subscription,
                                                   recipient_id, added_users,
                                                   subscription_id_count)
        # TODO add zerver_subscription entries which correspond to
        # huddle type recipients
        # For huddles:
        # sub['recipient']=recipient['id'] where recipient['type_id']=added_users[member]

        stream_id_count += 1
        recipient_id_count += 1
        logging.info(u"{} -> created".format(channel['name']))

        # TODO map Slack's pins to Zulip's stars
        # There is the security model that Slack's pins are known to the team owner,
        # as evident from where they are stored (in the channel data):
        # "pins": [
        #     {
        #         "id": "1444755381.000003",
        #         "type": "C",
        #         "user": "U061A5N1G",
        #         "owner": "U061A5N1G",
        #         "created": "1444755463"
        #     }
        # ],

    for user in zerver_userprofile:
        zulip_user_id = user['id']
        # this maps the recipients and subscriptions
        # related to private messages
        recipient_id = recipient_id_count
        subscription_id = subscription_id_count

        recipient, sub = build_pm_recipient_sub_from_user(zulip_user_id, recipient_id,
                                                          subscription_id)
        zerver_recipient.append(recipient)
        zerver_subscription.append(sub)
        subscription_id_count += 1
        recipient_id_count += 1

    logging.info('######### IMPORTING STREAMS FINISHED #########\n')
    return zerver_defaultstream, zerver_stream, added_channels, zerver_subscription, \
        zerver_recipient, added_recipient

def build_defaultstream(channel_name: str, realm_id: int, stream_id: int,
                        defaultstream_id: int) -> ZerverFieldsT:
    defaultstream = dict(
        stream=stream_id,
        realm=realm_id,
        id=defaultstream_id)
    return defaultstream

def build_pm_recipient_sub_from_user(zulip_user_id: int, recipient_id: int,
                                     subscription_id: int) -> Tuple[ZerverFieldsT,
                                                                    ZerverFieldsT]:
    recipient = dict(
        type_id=zulip_user_id,
        id=recipient_id,
        type=1)

    sub = dict(
        recipient=recipient_id,
        notifications=False,
        color=random.choice(stream_colors),
        desktop_notifications=True,
        pin_to_top=False,
        in_home_view=True,
        active=True,
        user_profile=zulip_user_id,
        id=subscription_id)

    return recipient, sub

def build_subscription(channel_members: List[str], zerver_subscription: List[ZerverFieldsT],
                       recipient_id: int, added_users: AddedUsersT,
                       subscription_id: int) -> int:
    for member in channel_members:
        sub = dict(
            recipient=recipient_id,
            notifications=False,
            color=random.choice(stream_colors),
            desktop_notifications=True,
            pin_to_top=False,
            in_home_view=True,
            active=True,
            user_profile=added_users[member],
            id=subscription_id)
        # The recipient corresponds to a stream for stream-readable messages.
        zerver_subscription.append(sub)
        subscription_id += 1
    return subscription_id
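
# Note (added for clarity): build_subscription() turns every member id in a Slack
# channel's 'members' list into one Zulip Subscription row pointing at that stream's
# Recipient; the random color is picked from Zulip's standard stream color palette
# (STREAM_ASSIGNMENT_COLORS, imported above as stream_colors).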

def convert_slack_workspace_messages(slack_data_dir: str, users: List[ZerverFieldsT], realm_id: int,
                                     added_users: AddedUsersT, added_recipient: AddedRecipientsT,
                                     added_channels: AddedChannelsT, realm: ZerverFieldsT,
                                     zerver_realmemoji: List[ZerverFieldsT], domain_name: str,
                                     output_dir: str,
                                     chunk_size: int=MESSAGE_BATCH_CHUNK_SIZE) -> Tuple[List[ZerverFieldsT],
                                                                                        List[ZerverFieldsT],
                                                                                        List[ZerverFieldsT]]:
    """
    Returns:
    1. reactions, which is a list of the reactions
    2. uploads, which is a list of uploads to be mapped in uploads records.json
    3. attachment, which is a list of the attachments
    """
    all_messages = get_all_messages(slack_data_dir, added_channels)

    # we sort the messages according to the timestamp so that they are
    # imported in the proper date order
    all_messages = sorted(all_messages, key=lambda message: message['ts'])

    logging.info('######### IMPORTING MESSAGES STARTED #########\n')

    total_reactions = []  # type: List[ZerverFieldsT]
    total_attachments = []  # type: List[ZerverFieldsT]
    total_uploads = []  # type: List[ZerverFieldsT]

    message_id = usermessage_id = reaction_id = attachment_id = 0
    id_list = (message_id, usermessage_id, reaction_id, attachment_id)

    # The messages are stored in batches
    low_index = 0
    upper_index = low_index + chunk_size
    dump_file_id = 1
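
    # Illustrative note (added): each pass of the loop below converts a window of
    # chunk_size messages (all_messages[low_index:upper_index]) and writes it to its
    # own messages-000001.json, messages-000002.json, ... file, so the converted output
    # for a large Slack history never has to be built as a single blob; only the running
    # id counters and the reaction/upload/attachment lists are carried across batches.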

    while True:
        message_data = all_messages[low_index:upper_index]
        if len(message_data) == 0:
            break
        zerver_message, zerver_usermessage, attachment, uploads, \
            reactions, id_list = channel_message_to_zerver_message(
                realm_id, users, added_users, added_recipient, message_data,
                zerver_realmemoji, realm['zerver_subscription'], added_channels,
                id_list, domain_name)

        message_json = dict(
            zerver_message=zerver_message,
            zerver_usermessage=zerver_usermessage)

        message_file = "/messages-%06d.json" % (dump_file_id,)
        logging.info("Writing Messages to %s\n" % (output_dir + message_file))
        create_converted_data_files(message_json, output_dir, message_file)

        total_reactions += reactions
        total_attachments += attachment
        total_uploads += uploads

        low_index = upper_index
        upper_index = chunk_size + low_index
        dump_file_id += 1

    logging.info('######### IMPORTING MESSAGES FINISHED #########\n')
    return total_reactions, total_uploads, total_attachments

def get_all_messages(slack_data_dir: str, added_channels: AddedChannelsT) -> List[ZerverFieldsT]:
    all_messages = []  # type: List[ZerverFieldsT]
    for channel_name in added_channels.keys():
        channel_dir = os.path.join(slack_data_dir, channel_name)
        json_names = os.listdir(channel_dir)
        for json_name in json_names:
            message_dir = os.path.join(channel_dir, json_name)
            messages = get_data_file(message_dir)
            for message in messages:
                # To give every message the channel information
                message['channel_name'] = channel_name
            all_messages += messages
    return all_messages
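
# For reference (an assumption about the Slack export layout, not from the original
# file): a Slack export zip unpacks to users.json, channels.json, and one directory per
# channel, where each directory holds one JSON file of messages per day of history
# (e.g. general/2018-01-05.json); get_all_messages() above simply concatenates all of
# those per-day files, tagging each message with its channel name.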

def channel_message_to_zerver_message(realm_id: int, users: List[ZerverFieldsT],
                                      added_users: AddedUsersT,
                                      added_recipient: AddedRecipientsT,
                                      all_messages: List[ZerverFieldsT],
                                      zerver_realmemoji: List[ZerverFieldsT],
                                      zerver_subscription: List[ZerverFieldsT],
                                      added_channels: AddedChannelsT,
                                      id_list: Tuple[int, int, int, int],
                                      domain_name: str) -> Tuple[List[ZerverFieldsT],
                                                                 List[ZerverFieldsT],
                                                                 List[ZerverFieldsT],
                                                                 List[ZerverFieldsT],
                                                                 List[ZerverFieldsT],
                                                                 Tuple[int, int, int, int]]:
    """
    Returns:
    1. zerver_message, which is a list of the messages
    2. zerver_usermessage, which is a list of the usermessages
    3. zerver_attachment, which is a list of the attachments
    4. uploads_list, which is a list of uploads to be mapped in uploads records.json
    5. reaction_list, which is a list of all user reactions
    6. id_list, which is a tuple of max ids of messages, usermessages, reactions and attachments
    """
    message_id_count, usermessage_id_count, reaction_id_count, attachment_id_count = id_list
    zerver_message = []
    zerver_usermessage = []  # type: List[ZerverFieldsT]
    uploads_list = []  # type: List[ZerverFieldsT]
    zerver_attachment = []  # type: List[ZerverFieldsT]
    reaction_list = []  # type: List[ZerverFieldsT]

    # For unicode emoji
    with open(NAME_TO_CODEPOINT_PATH) as fp:
        name_to_codepoint = ujson.load(fp)

    for message in all_messages:
        user = get_message_sending_user(message)
        if not user:
            # Ignore messages without user names
            # These are sometimes produced by Slack
            continue
        if message.get('subtype') in [
                # Zulip doesn't have a pinned_item concept
                "pinned_item",
                "unpinned_item",
                # Slack's channel join/leave notices are spammy
                "channel_join",
                "channel_leave",
                "channel_name"
        ]:
            continue

        has_attachment = has_image = False
        try:
            content, mentioned_users_id, has_link = convert_to_zulip_markdown(
                message['text'], users, added_channels, added_users)
        except Exception:
            print("Slack message unexpectedly missing text representation:")
            print(json.dumps(message, indent=4))
            continue
        rendered_content = None

        recipient_id = added_recipient[message['channel_name']]
        message_id = message_id_count

        # Process message reactions
        if 'reactions' in message.keys():
            reaction_id_count = build_reactions(reaction_list, message['reactions'], added_users,
                                                message_id, reaction_id_count, name_to_codepoint,
                                                zerver_realmemoji)

        # Process different subtypes of slack messages
        if 'subtype' in message.keys():
            subtype = message['subtype']
            # Subtypes which have only the action in the message should
            # be rendered with '/me' in the content initially
            # For example "sh_room_created" has the message 'started a call'
            # which should be displayed as '/me started a call'
            if subtype in ["bot_add", "sh_room_created", "me_message"]:
                content = ('/me %s' % (content,))

            # For attachments with a slack download link
            elif subtype == "file_share" and 'files.slack.com' in message['file']['url_private']:
                fileinfo = message['file']

                has_attachment = has_link = True
                has_image = True if 'image' in fileinfo['mimetype'] else False

                file_user = [iterate_user for iterate_user in users
                             if message['user'] == iterate_user['id']]
                file_user_email = get_user_email(file_user[0], domain_name)

                s3_path, content = get_attachment_path_and_content(fileinfo, realm_id)

                # construct attachments
                build_uploads(added_users[user], realm_id, file_user_email, fileinfo, s3_path,
                              uploads_list)

                attachment_id = attachment_id_count
                build_zerver_attachment(realm_id, message_id, attachment_id, added_users[user],
                                        fileinfo, s3_path, zerver_attachment)
                attachment_id_count += 1

            # For attachments whose link is not hosted on slack
            # Example: Google drive integration
            elif subtype == "file_share":
                fileinfo = message['file']
                has_link = True
                if 'title' in fileinfo:
                    file_name = fileinfo['title']
                else:
                    file_name = fileinfo['name']
                content = '[%s](%s)' % (file_name, fileinfo['url_private'])

        # construct message
        zulip_message = dict(
            sending_client=1,
            rendered_content_version=1,  # This is Zulip-specific
            has_image=has_image,
            subject='imported from slack',  # This is Zulip-specific
            pub_date=float(message['ts']),
            id=message_id,
            has_attachment=has_attachment,  # attachment will be posted in the subsequent message;
                                            # this is how Slack does it, i.e. less like email
            edit_history=None,
            sender=added_users[user],  # map slack id to zulip id
            content=content,
            rendered_content=rendered_content,  # slack doesn't cache this
            recipient=recipient_id,
            last_edit_time=None,
            has_link=has_link)
        zerver_message.append(zulip_message)

        # construct usermessages
        usermessage_id_count = build_zerver_usermessage(
            zerver_usermessage, usermessage_id_count, zerver_subscription,
            recipient_id, mentioned_users_id, message_id)

        message_id_count += 1

    id_list = (message_id_count, usermessage_id_count,
               reaction_id_count, attachment_id_count)
    return zerver_message, zerver_usermessage, zerver_attachment, uploads_list, \
        reaction_list, id_list

def get_attachment_path_and_content(fileinfo: ZerverFieldsT, realm_id: int) -> Tuple[str,
                                                                                     str]:
    # Should be kept in sync with its equivalent in zerver/lib/uploads in the function
    # 'upload_message_file'
    s3_path = "/".join([
        str(realm_id),
        'SlackImportAttachment',  # This is a special placeholder which should be kept
                                  # in sync with 'exports.py' function 'import_message_data'
        format(random.randint(0, 255), 'x'),
        random_name(18),
        sanitize_name(fileinfo['name'])
    ])
    attachment_path = ('/user_uploads/%s' % (s3_path))
    content = '[%s](%s)' % (fileinfo['title'], attachment_path)

    return s3_path, content
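
# Illustrative note (added; the random components shown are made up): for realm_id 1 and
# a Slack file whose name and title are both "notes.pdf", s3_path might come out as
# "1/SlackImportAttachment/3f/OoLpRmB7dQ9tKxYzAb/notes.pdf", and the returned content
# would be the Markdown link
# "[notes.pdf](/user_uploads/1/SlackImportAttachment/3f/OoLpRmB7dQ9tKxYzAb/notes.pdf)".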

def build_reactions(reaction_list: List[ZerverFieldsT], reactions: List[ZerverFieldsT],
                    added_users: AddedUsersT, message_id: int, reaction_id: int,
                    name_to_codepoint: ZerverFieldsT,
                    zerver_realmemoji: List[ZerverFieldsT]) -> int:
    realmemoji = {}
    for realm_emoji in zerver_realmemoji:
        realmemoji[realm_emoji['name']] = realm_emoji['id']

    # For the unicode emoji codes, we use the equivalent of the function
    # 'emoji_name_to_emoji_code' in 'zerver/lib/emoji' here
    for slack_reaction in reactions:
        emoji_name = slack_reaction['name']
        # Check in unicode emoji
        if emoji_name in name_to_codepoint:
            emoji_code = name_to_codepoint[emoji_name]
            reaction_type = Reaction.UNICODE_EMOJI
        # Check in realm emoji
        elif emoji_name in realmemoji:
            emoji_code = realmemoji[emoji_name]
            reaction_type = Reaction.REALM_EMOJI
        else:
            continue

        for user in slack_reaction['users']:
            reaction = dict(
                id=reaction_id,
                emoji_code=emoji_code,
                emoji_name=emoji_name,
                message=message_id,
                reaction_type=reaction_type,
                user_profile=added_users[user])
            reaction_id += 1
            reaction_list.append(reaction)
    return reaction_id

def build_uploads(user_id: int, realm_id: int, email: str, fileinfo: ZerverFieldsT, s3_path: str,
                  uploads_list: List[ZerverFieldsT]) -> None:
    upload = dict(
        path=fileinfo['url_private'],  # Save slack's url here, which is used later while processing
        realm_id=realm_id,
        content_type=None,
        user_profile_id=user_id,
        last_modified=fileinfo['timestamp'],
        user_profile_email=email,
        s3_path=s3_path,
        size=fileinfo['size'])
    uploads_list.append(upload)

def build_zerver_attachment(realm_id: int, message_id: int, attachment_id: int,
                            user_id: int, fileinfo: ZerverFieldsT, s3_path: str,
                            zerver_attachment: List[ZerverFieldsT]) -> None:
    attachment = dict(
        owner=user_id,
        messages=[message_id],
        id=attachment_id,
        size=fileinfo['size'],
        create_time=fileinfo['created'],
        is_realm_public=True,  # is always true for stream messages
        path_id=s3_path,
        realm=realm_id,
        file_name=fileinfo['name'])
    zerver_attachment.append(attachment)

def get_message_sending_user(message: ZerverFieldsT) -> Optional[str]:
    if 'user' in message:
        return message['user']
    if message.get('file'):
        return message['file'].get('user')
    return None

def build_zerver_usermessage(zerver_usermessage: List[ZerverFieldsT], usermessage_id: int,
                             zerver_subscription: List[ZerverFieldsT], recipient_id: int,
                             mentioned_users_id: List[int], message_id: int) -> int:
    for subscription in zerver_subscription:
        if subscription['recipient'] == recipient_id:
            flags_mask = 1  # For read
            if subscription['user_profile'] in mentioned_users_id:
                flags_mask = 9  # For read and mentioned
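
            # Note (added for clarity): flags_mask is a bitmask over UserMessage flags;
            # here 1 is the 'read' bit and 8 is the 'mentioned' bit, so 9 == 1 | 8 marks
            # the imported message as both read and mentioning this user.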

            usermessage = dict(
                user_profile=subscription['user_profile'],
                id=usermessage_id,
                flags_mask=flags_mask,
                message=message_id)
            usermessage_id += 1
            zerver_usermessage.append(usermessage)
    return usermessage_id

def do_convert_data(slack_zip_file: str, output_dir: str, token: str, threads: int=6) -> None:
    # Subdomain is set by the user while running the import command
    realm_subdomain = ""
    realm_id = 0
    domain_name = settings.EXTERNAL_HOST

    slack_data_dir = slack_zip_file.replace('.zip', '')
    if not os.path.exists(slack_data_dir):
        os.makedirs(slack_data_dir)

    os.makedirs(output_dir, exist_ok=True)
    # output directory should be empty initially
    if os.listdir(output_dir):
        raise Exception('Output directory should be empty!')

    subprocess.check_call(['unzip', '-q', slack_zip_file, '-d', slack_data_dir])
    # with zipfile.ZipFile(slack_zip_file, 'r') as zip_ref:
    #     zip_ref.extractall(slack_data_dir)

    # We get the user data from the legacy token method of the slack api, which is
    # deprecated, but we use it because the user email data is provided only by this method
    user_list = get_slack_api_data(token, "https://slack.com/api/users.list", "members")
    # Get custom emoji from slack api
    custom_emoji_list = get_slack_api_data(token, "https://slack.com/api/emoji.list", "emoji")

    realm, added_users, added_recipient, added_channels, avatar_list, \
        emoji_url_map = slack_workspace_to_realm(domain_name, realm_id, user_list,
                                                 realm_subdomain,
                                                 slack_data_dir, custom_emoji_list)

    reactions, uploads_list, zerver_attachment = convert_slack_workspace_messages(
        slack_data_dir, user_list, realm_id, added_users, added_recipient, added_channels,
        realm, realm['zerver_realmemoji'], domain_name, output_dir)

    # Move zerver_reaction to the realm.json file
    realm['zerver_reaction'] = reactions

    emoji_folder = os.path.join(output_dir, 'emoji')
    os.makedirs(emoji_folder, exist_ok=True)
    emoji_records = process_emojis(realm['zerver_realmemoji'], emoji_folder, emoji_url_map, threads)

    avatar_folder = os.path.join(output_dir, 'avatars')
    avatar_realm_folder = os.path.join(avatar_folder, str(realm_id))
    os.makedirs(avatar_realm_folder, exist_ok=True)
    avatar_records = process_avatars(avatar_list, avatar_folder, realm_id, threads)

    uploads_folder = os.path.join(output_dir, 'uploads')
    os.makedirs(os.path.join(uploads_folder, str(realm_id)), exist_ok=True)
    uploads_records = process_uploads(uploads_list, uploads_folder, threads)
    attachment = {"zerver_attachment": zerver_attachment}

    # IO realm.json
    create_converted_data_files(realm, output_dir, '/realm.json')
    # IO emoji records
    create_converted_data_files(emoji_records, output_dir, '/emoji/records.json')
    # IO avatar records
    create_converted_data_files(avatar_records, output_dir, '/avatars/records.json')
    # IO uploads records
    create_converted_data_files(uploads_records, output_dir, '/uploads/records.json')
    # IO attachments records
    create_converted_data_files(attachment, output_dir, '/attachment.json')

    # remove slack dir
    rm_tree(slack_data_dir)
    subprocess.check_call(["tar", "-czf", output_dir + '.tar.gz', output_dir, '-P'])

    logging.info('######### DATA CONVERSION FINISHED #########\n')
    logging.info("Zulip data dump created at %s" % (output_dir,))

def process_emojis(zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str,
                   emoji_url_map: ZerverFieldsT, threads: int) -> List[ZerverFieldsT]:
    """
    This function gets the custom emojis and saves them in the output emoji folder
    """
    def get_emojis(upload: List[str]) -> int:
        slack_emoji_url = upload[0]
        emoji_path = upload[1]
        upload_emoji_path = os.path.join(emoji_dir, emoji_path)

        response = requests.get(slack_emoji_url, stream=True)
        os.makedirs(os.path.dirname(upload_emoji_path), exist_ok=True)
        with open(upload_emoji_path, 'wb') as emoji_file:
            shutil.copyfileobj(response.raw, emoji_file)
        return 0

    emoji_records = []
    upload_emoji_list = []
    logging.info('######### GETTING EMOJIS #########\n')
    logging.info('DOWNLOADING EMOJIS .......\n')
    for emoji in zerver_realmemoji:
        slack_emoji_url = emoji_url_map[emoji['name']]
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=emoji['realm'],
            emoji_file_name=emoji['name'])

        upload_emoji_list.append([slack_emoji_url, emoji_path])

        emoji_record = dict(emoji)
        emoji_record['path'] = emoji_path
        emoji_record['s3_path'] = emoji_path
        emoji_record['realm_id'] = emoji_record['realm']
        emoji_record.pop('realm')

        emoji_records.append(emoji_record)

    # Run the downloads in parallel
    output = []
    for (status, job) in run_parallel(get_emojis, upload_emoji_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING EMOJIS FINISHED #########\n')
    return emoji_records

def process_avatars(avatar_list: List[ZerverFieldsT], avatar_dir: str,
                    realm_id: int, threads: int) -> List[ZerverFieldsT]:
    """
    This function gets the avatar of size 512 px and saves it in the
    user's avatar directory with both the extensions
    '.png' and '.original'
    """
    def get_avatar(avatar_upload_list: List[str]) -> int:
        # get avatar of size 512
        slack_avatar_url = avatar_upload_list[0]
        image_path = avatar_upload_list[1]
        original_image_path = avatar_upload_list[2]
        response = requests.get(slack_avatar_url + '-512', stream=True)
        with open(image_path, 'wb') as image_file:
            shutil.copyfileobj(response.raw, image_file)
        shutil.copy(image_path, original_image_path)
        return 0

    logging.info('######### GETTING AVATARS #########\n')
    logging.info('DOWNLOADING AVATARS .......\n')
    avatar_original_list = []
    avatar_upload_list = []
    for avatar in avatar_list:
        avatar_hash = user_avatar_path_from_ids(avatar['user_profile_id'], realm_id)
        slack_avatar_url = avatar['path']
        avatar_original = dict(avatar)

        image_path = ('%s/%s.png' % (avatar_dir, avatar_hash))
        original_image_path = ('%s/%s.original' % (avatar_dir, avatar_hash))

        avatar_upload_list.append([slack_avatar_url, image_path, original_image_path])

        # We don't add the size field here in avatar's records.json,
        # since the metadata is not needed on the import end, and we
        # don't have it until we've downloaded the files anyway.
        avatar['path'] = image_path
        avatar['s3_path'] = image_path

        avatar_original['path'] = original_image_path
        avatar_original['s3_path'] = original_image_path
        avatar_original_list.append(avatar_original)

    # Run the downloads in parallel
    output = []
    for (status, job) in run_parallel(get_avatar, avatar_upload_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING AVATARS FINISHED #########\n')
    return avatar_list + avatar_original_list

def process_uploads(upload_list: List[ZerverFieldsT], upload_dir: str,
                    threads: int) -> List[ZerverFieldsT]:
    """
    This function gets the uploads and saves them in the realm's upload directory
    """
    def get_uploads(upload: List[str]) -> int:
        upload_url = upload[0]
        upload_path = upload[1]
        upload_path = os.path.join(upload_dir, upload_path)

        response = requests.get(upload_url, stream=True)
        os.makedirs(os.path.dirname(upload_path), exist_ok=True)
        with open(upload_path, 'wb') as upload_file:
            shutil.copyfileobj(response.raw, upload_file)
        return 0

    logging.info('######### GETTING ATTACHMENTS #########\n')
    logging.info('DOWNLOADING ATTACHMENTS .......\n')
    upload_url_list = []
    for upload in upload_list:
        upload_url = upload['path']
        upload_s3_path = upload['s3_path']
        upload_url_list.append([upload_url, upload_s3_path])
        upload['path'] = upload_s3_path

    # Run the downloads in parallel
    output = []
    for (status, job) in run_parallel(get_uploads, upload_url_list, threads=threads):
        output.append(job)

    logging.info('######### GETTING ATTACHMENTS FINISHED #########\n')
    return upload_list

def get_data_file(path: str) -> Any:
    # Use a with-block so the file handle is closed promptly
    with open(path) as json_file:
        data = json.load(json_file)
    return data

def get_slack_api_data(token: str, slack_api_url: str, get_param: str) -> Any:
    data = requests.get('%s?token=%s' % (slack_api_url, token))
    if data.status_code == requests.codes.ok:
        if 'error' in data.json():
            raise Exception('Enter a valid token!')
        json_data = data.json()[get_param]
        return json_data
    else:
        raise Exception('Something went wrong. Please try again!')
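
# For reference (an assumption about the Slack Web API, not from the original file):
# successful responses look like {"ok": true, "members": [...]} for users.list and
# {"ok": true, "emoji": {...}} for emoji.list, while failures look like
# {"ok": false, "error": "invalid_auth"}, which is what the 'error' check above detects.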

def create_converted_data_files(data: Any, output_dir: str, file_path: str) -> None:
    output_file = output_dir + file_path
    # Use a with-block so the file handle is closed promptly
    with open(output_file, 'w') as converted_file:
        json.dump(data, converted_file, indent=4)