2018-08-02 15:24:34 +02:00
|
|
|
import logging
|
2019-07-28 01:04:47 +02:00
|
|
|
import multiprocessing
|
2018-08-03 19:15:01 +02:00
|
|
|
import os
|
2020-06-11 00:54:34 +02:00
|
|
|
import random
|
|
|
|
import shutil
|
2019-07-28 01:04:47 +02:00
|
|
|
from functools import partial
|
|
|
|
from typing import AbstractSet, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, TypeVar
|
2018-08-02 15:24:34 +02:00
|
|
|
|
2020-08-07 01:09:47 +02:00
|
|
|
import orjson
|
2020-06-11 00:54:34 +02:00
|
|
|
import requests
|
2018-08-01 01:01:55 +02:00
|
|
|
from django.forms.models import model_to_dict
|
2021-02-16 00:44:06 +01:00
|
|
|
from typing_extensions import Protocol
|
2018-08-01 01:01:55 +02:00
|
|
|
|
2018-10-23 21:58:51 +02:00
|
|
|
from zerver.data_import.sequencer import NEXT_ID
|
2018-08-02 00:35:02 +02:00
|
|
|
from zerver.lib.actions import STREAM_ASSIGNMENT_COLORS as stream_colors
|
2018-08-02 15:24:34 +02:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_path_from_ids
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.models import (
|
|
|
|
Attachment,
|
|
|
|
Huddle,
|
|
|
|
Message,
|
|
|
|
Realm,
|
|
|
|
RealmEmoji,
|
|
|
|
Recipient,
|
|
|
|
Stream,
|
|
|
|
Subscription,
|
|
|
|
UserProfile,
|
|
|
|
)
|
2018-08-01 01:01:55 +02:00
|
|
|
|
|
|
|
# stubs
# Loose row type for the dicts that populate the export/import JSON tables;
# keys are model field names, values are already-serialized field values.
ZerverFieldsT = Dict[str, Any]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-03-20 13:36:09 +01:00
|
|
|
class SubscriberHandler:
    """Record and look up subscriber user ids, keyed by stream or huddle id."""

    def __init__(self) -> None:
        # stream id -> set of subscribed user ids
        self.stream_info: Dict[int, Set[int]] = {}
        # huddle id -> set of participating user ids
        self.huddle_info: Dict[int, Set[int]] = {}

    def set_info(
        self,
        users: Set[int],
        stream_id: Optional[int] = None,
        huddle_id: Optional[int] = None,
    ) -> None:
        """Store the subscriber set for one stream or one huddle."""
        if stream_id is None and huddle_id is None:
            raise AssertionError("stream_id or huddle_id is required")
        if stream_id is not None:
            self.stream_info[stream_id] = users
        else:
            assert huddle_id is not None
            self.huddle_info[huddle_id] = users

    def get_users(
        self, stream_id: Optional[int] = None, huddle_id: Optional[int] = None
    ) -> Set[int]:
        """Return the previously stored subscriber set (KeyError if unknown)."""
        if stream_id is not None:
            return self.stream_info[stream_id]
        if huddle_id is not None:
            return self.huddle_info[huddle_id]
        raise AssertionError("stream_id or huddle_id is required")
|
2019-03-20 13:36:09 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_zerver_realm(
    realm_id: int, realm_subdomain: str, time: float, other_product: str
) -> List[ZerverFieldsT]:
    """Build the one-row zerver_realm table for an import tarball."""
    realm = Realm(
        id=realm_id,
        date_created=time,
        name=realm_subdomain,
        string_id=realm_subdomain,
        description=f"Organization imported from {other_product}!",
    )
    # The authentication_methods bitfield doesn't survive model_to_dict;
    # serialize it separately as [[method_name, enabled], ...] pairs.
    realm_dict = model_to_dict(realm, exclude="authentication_methods")
    realm_dict["authentication_methods"] = [
        [flag[0], flag[1]] for flag in realm.authentication_methods
    ]
    return [realm_dict]
|
|
|
|
|
|
|
|
|
|
|
|
def build_user_profile(
    avatar_source: str,
    date_joined: Any,
    delivery_email: str,
    email: str,
    full_name: str,
    id: int,
    is_active: bool,
    role: int,
    is_mirror_dummy: bool,
    realm_id: int,
    short_name: str,
    timezone: Optional[str],
) -> ZerverFieldsT:
    """Build one zerver_userprofile row (as a dict) for the import tarball."""
    obj = UserProfile(
        avatar_source=avatar_source,
        date_joined=date_joined,
        delivery_email=delivery_email,
        email=email,
        full_name=full_name,
        id=id,
        is_mirror_dummy=is_mirror_dummy,
        is_active=is_active,
        role=role,
        realm_id=realm_id,
        timezone=timezone,
    )
    dct = model_to_dict(obj)

    # Even though short_name is no longer in the Zulip UserProfile, it's
    # helpful to have it in our import dictionaries for legacy reasons.
    # (This was previously a bare triple-quoted string, which is a no-op
    # statement rather than a docstring; a comment is the right tool.)
    dct["short_name"] = short_name
    return dct
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_avatar(
    zulip_user_id: int,
    realm_id: int,
    email: str,
    avatar_url: str,
    timestamp: Any,
    avatar_list: List[ZerverFieldsT],
) -> None:
    """Append one avatar record to avatar_list (mutated in place)."""
    record = dict(
        path=avatar_url,  # Save original avatar URL here, which is downloaded later
        realm_id=realm_id,
        content_type=None,
        user_profile_id=zulip_user_id,
        last_modified=timestamp,
        user_profile_email=email,
        s3_path="",
        size="",
    )
    avatar_list.append(record)
|
2018-08-02 00:35:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-23 16:53:09 +02:00
|
|
|
def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, Set[int]]:
    """Map recipient_id -> set of subscribed user_profile ids.

    This can be convenient for building up UserMessage
    rows.
    """
    subscriber_map: Dict[int, Set[int]] = {}
    for sub in zerver_subscription:
        # setdefault replaces the manual "if key not in dict" dance.
        subscriber_map.setdefault(sub["recipient"], set()).add(sub["user_profile"])
    return subscriber_map
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def make_user_messages(
    zerver_message: List[ZerverFieldsT],
    subscriber_map: Dict[int, Set[int]],
    is_pm_data: bool,
    mention_map: Dict[int, Set[int]],
) -> List[ZerverFieldsT]:
    """Build UserMessage rows for every recipient of every message.

    The sender always gets a row, even without a subscription.
    """
    zerver_usermessage = []

    for message in zerver_message:
        message_id = message["id"]
        sender_id = message["sender"]
        mentioned = mention_map[message_id]
        # Union with the sender: they see their own message regardless
        # of subscription state.
        audience = subscriber_map.get(message["recipient"], set()) | {sender_id}

        for user_id in audience:
            zerver_usermessage.append(
                build_user_message(
                    user_id=user_id,
                    message_id=message_id,
                    is_private=is_pm_data,
                    is_mentioned=user_id in mentioned,
                )
            )

    return zerver_usermessage
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_subscription(recipient_id: int, user_id: int, subscription_id: int) -> ZerverFieldsT:
    """Build one zerver_subscription row with a randomly chosen stream color."""
    sub = Subscription(color=random.choice(stream_colors), id=subscription_id)
    # The FK fields are excluded and re-added as plain ids for the export.
    sub_dict = model_to_dict(sub, exclude=["user_profile", "recipient_id"])
    sub_dict["user_profile"] = user_id
    sub_dict["recipient"] = recipient_id
    return sub_dict
|
2018-08-02 00:35:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-23 23:56:14 +02:00
|
|
|
def build_public_stream_subscriptions(
    zerver_userprofile: List[ZerverFieldsT],
    zerver_recipient: List[ZerverFieldsT],
    zerver_stream: List[ZerverFieldsT],
) -> List[ZerverFieldsT]:
    """
    This function was only used for HipChat, but it may apply to
    future conversions.  We often didn't get full subscriber data in
    the HipChat export, so this function just autosubscribes all
    users to every public stream.  This returns a list of Subscription
    dicts.
    """
    public_stream_ids = {
        stream["id"] for stream in zerver_stream if not stream["invite_only"]
    }

    public_stream_recipient_ids = {
        recipient["id"]
        for recipient in zerver_recipient
        if recipient["type"] == Recipient.STREAM
        and recipient["type_id"] in public_stream_ids
    }

    user_ids = [user["id"] for user in zerver_userprofile]

    # Every user gets a subscription to every public stream.
    return [
        build_subscription(
            recipient_id=recipient_id,
            user_id=user_id,
            subscription_id=NEXT_ID("subscription"),
        )
        for recipient_id in public_stream_recipient_ids
        for user_id in user_ids
    ]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-16 00:44:06 +01:00
|
|
|
class GetUsers(Protocol):
    """Callback signature for looking up subscriber user ids.

    Callers pass exactly one of stream_id or huddle_id; compare
    SubscriberHandler.get_users, which satisfies this protocol.
    """

    def __call__(self, stream_id: int = ..., huddle_id: int = ...) -> Set[int]:
        ...
|
|
|
|
|
|
|
|
|
2019-01-10 01:17:54 +01:00
|
|
|
def build_stream_subscriptions(
    get_users: GetUsers,
    zerver_recipient: List[ZerverFieldsT],
    zerver_stream: List[ZerverFieldsT],
) -> List[ZerverFieldsT]:
    """Build Subscription rows for all streams, using get_users for membership."""
    stream_ids = {stream["id"] for stream in zerver_stream}

    # recipient_id -> stream_id
    recipient_map = {
        recipient["id"]: recipient["type_id"]
        for recipient in zerver_recipient
        if recipient["type"] == Recipient.STREAM
        and recipient["type_id"] in stream_ids
    }

    return [
        build_subscription(
            recipient_id=recipient_id,
            user_id=user_id,
            subscription_id=NEXT_ID("subscription"),
        )
        for recipient_id, stream_id in recipient_map.items()
        for user_id in get_users(stream_id=stream_id)
    ]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-09-25 15:46:05 +02:00
|
|
|
def build_huddle_subscriptions(
    get_users: GetUsers,
    zerver_recipient: List[ZerverFieldsT],
    zerver_huddle: List[ZerverFieldsT],
) -> List[ZerverFieldsT]:
    """Build Subscription rows for all huddles, using get_users for membership."""
    huddle_ids = {huddle["id"] for huddle in zerver_huddle}

    # recipient_id -> huddle_id (the old comment said "stream_id", but a
    # HUDDLE recipient's type_id points at a huddle)
    recipient_map = {
        recipient["id"]: recipient["type_id"]
        for recipient in zerver_recipient
        if recipient["type"] == Recipient.HUDDLE
        and recipient["type_id"] in huddle_ids
    }

    subscriptions: List[ZerverFieldsT] = []

    for recipient_id, huddle_id in recipient_map.items():
        user_ids = get_users(huddle_id=huddle_id)
        for user_id in user_ids:
            subscription = build_subscription(
                recipient_id=recipient_id,
                user_id=user_id,
                subscription_id=NEXT_ID("subscription"),
            )
            subscriptions.append(subscription)

    return subscriptions
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-23 23:56:14 +02:00
|
|
|
def build_personal_subscriptions(zerver_recipient: List[ZerverFieldsT]) -> List[ZerverFieldsT]:
    """Build the self-subscription row for every PERSONAL recipient."""
    # For a PERSONAL recipient, type_id is the user's own id, so each
    # user is subscribed to their own personal recipient.
    return [
        build_subscription(
            recipient_id=recipient["id"],
            user_id=recipient["type_id"],
            subscription_id=NEXT_ID("subscription"),
        )
        for recipient in zerver_recipient
        if recipient["type"] == Recipient.PERSONAL
    ]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-02 00:35:02 +02:00
|
|
|
def build_recipient(type_id: int, recipient_id: int, type: int) -> ZerverFieldsT:
    """Build one zerver_recipient row pointing at a user, stream, or huddle."""
    recipient = Recipient(
        type_id=type_id,  # stream id
        id=recipient_id,
        type=type,
    )
    return model_to_dict(recipient)
|
2018-08-02 15:24:34 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_recipients(
    zerver_userprofile: Iterable[ZerverFieldsT],
    zerver_stream: Iterable[ZerverFieldsT],
    zerver_huddle: Iterable[ZerverFieldsT] = (),
) -> List[ZerverFieldsT]:
    """
    This function was only used for the HipChat import, but it may be
    required for future conversions.  The Slack and Gitter conversions do it
    more tightly integrated with creating other objects.
    """
    recipients: List[ZerverFieldsT] = []

    def add_recipients(rows: Iterable[ZerverFieldsT], type: int) -> None:
        # Each row (user, stream, or huddle) gets one Recipient of the
        # given type, with a freshly allocated recipient id.
        for row in rows:
            recipient = Recipient(
                type_id=row["id"],
                id=NEXT_ID("recipient"),
                type=type,
            )
            recipients.append(model_to_dict(recipient))

    # Previously three copy-pasted loops; now one helper.  The default for
    # zerver_huddle is a tuple rather than the mutable-default [].
    add_recipients(zerver_userprofile, Recipient.PERSONAL)
    add_recipients(zerver_stream, Recipient.STREAM)
    add_recipients(zerver_huddle, Recipient.HUDDLE)

    return recipients
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_realm(
    zerver_realm: List[ZerverFieldsT], realm_id: int, domain_name: str
) -> ZerverFieldsT:
    """Assemble the top-level realm.json structure around a zerver_realm row.

    Most tables start out empty here; converters fill in the ones the
    source product provides data for.
    """
    return dict(
        zerver_client=[
            {"name": "populate_db", "id": 1},
            {"name": "website", "id": 2},
            {"name": "API", "id": 3},
        ],
        zerver_customprofilefield=[],
        zerver_customprofilefieldvalue=[],
        zerver_userpresence=[],  # shows last logged in data, which is not available
        zerver_userprofile_mirrordummy=[],
        zerver_realmdomain=[
            {"realm": realm_id, "allow_subdomains": False, "domain": domain_name, "id": realm_id}
        ],
        zerver_useractivity=[],
        zerver_realm=zerver_realm,
        zerver_huddle=[],
        zerver_userprofile_crossrealm=[],
        zerver_useractivityinterval=[],
        zerver_reaction=[],
        zerver_realmemoji=[],
        zerver_realmfilter=[],
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_usermessages(
    zerver_usermessage: List[ZerverFieldsT],
    subscriber_map: Dict[int, Set[int]],
    recipient_id: int,
    mentioned_user_ids: List[int],
    message_id: int,
    is_private: bool,
    long_term_idle: AbstractSet[int] = frozenset(),
) -> Tuple[int, int]:
    """Append UserMessage rows for one message to zerver_usermessage.

    Rows for long-term-idle users are skipped, unless the user was
    mentioned or the message is private.  The default for long_term_idle
    is a frozenset to avoid a mutable default argument.

    Returns (user_messages_created, user_messages_skipped).
    """
    user_ids = subscriber_map.get(recipient_id, set())

    user_messages_created = 0
    user_messages_skipped = 0
    if user_ids:
        # sorted() for deterministic row order across runs.
        for user_id in sorted(user_ids):
            is_mentioned = user_id in mentioned_user_ids

            # these users are long-term idle; skip their rows to keep the
            # export small, except where the message would matter to them
            if not is_mentioned and not is_private and user_id in long_term_idle:
                user_messages_skipped += 1
                continue
            user_messages_created += 1

            usermessage = build_user_message(
                user_id=user_id,
                message_id=message_id,
                is_private=is_private,
                is_mentioned=is_mentioned,
            )

            zerver_usermessage.append(usermessage)
    return (user_messages_created, user_messages_skipped)
|
2018-08-03 21:11:47 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_user_message(
    user_id: int, message_id: int, is_private: bool, is_mentioned: bool
) -> ZerverFieldsT:
    """Build one zerver_usermessage row with the appropriate flags bitmask."""
    flags_mask = 1  # For read
    if is_mentioned:
        flags_mask |= 8  # For mentioned
    if is_private:
        flags_mask |= 2048  # For is_private

    return dict(
        id=NEXT_ID("user_message"),
        user_profile=user_id,
        message=message_id,
        flags_mask=flags_mask,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_defaultstream(realm_id: int, stream_id: int, defaultstream_id: int) -> ZerverFieldsT:
    """Build one zerver_defaultstream row."""
    return {"stream": stream_id, "realm": realm_id, "id": defaultstream_id}
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_stream(
    date_created: Any,
    realm_id: int,
    name: str,
    description: str,
    stream_id: int,
    deactivated: bool = False,
    invite_only: bool = False,
) -> ZerverFieldsT:
    """Build one zerver_stream row as a dict."""
    stream = Stream(
        name=name,
        deactivated=deactivated,
        # Stream descriptions are single-line; flatten any newlines.
        description=description.replace("\n", " "),
        # We don't set rendered_description here; it'll be added on import
        date_created=date_created,
        invite_only=invite_only,
        id=stream_id,
    )
    # The realm FK is serialized as a plain id for the export.
    stream_dict = model_to_dict(stream, exclude=["realm"])
    stream_dict["realm"] = realm_id
    return stream_dict
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-07-02 07:41:51 +02:00
|
|
|
def build_huddle(huddle_id: int) -> ZerverFieldsT:
    """Build one zerver_huddle row as a dict."""
    return model_to_dict(Huddle(id=huddle_id))
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_message(
    topic_name: str,
    date_sent: float,
    message_id: int,
    content: str,
    rendered_content: Optional[str],
    user_id: int,
    recipient_id: int,
    has_image: bool = False,
    has_link: bool = False,
    has_attachment: bool = True,
) -> ZerverFieldsT:
    """Serialize one Message row for the export files.

    Foreign keys (sender, sending_client, recipient) are exported as
    plain integer IDs; sending_client is hard-coded to 1.
    """
    message = Message(
        id=message_id,
        date_sent=date_sent,
        content=content,
        rendered_content=rendered_content,
        rendered_content_version=1,  # this is Zulip specific
        has_image=has_image,
        has_link=has_link,
        has_attachment=has_attachment,
    )
    # Topics go through the model's topic API rather than a plain field.
    message.set_topic_name(topic_name)

    message_dict = model_to_dict(
        message, exclude=["recipient", "sender", "sending_client"]
    )
    message_dict["sender"] = user_id
    message_dict["sending_client"] = 1
    message_dict["recipient"] = recipient_id
    return message_dict
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_attachment(
    realm_id: int,
    message_ids: Set[int],
    user_id: int,
    fileinfo: ZerverFieldsT,
    s3_path: str,
    zerver_attachment: List[ZerverFieldsT],
) -> None:
    """
    Append one serialized Attachment row to zerver_attachment (which is
    mutated in place).

    The 'fileinfo' dictionary must contain 'size', 'created' (created
    time) and 'name' (filename).
    """
    record = Attachment(
        id=NEXT_ID("attachment"),
        size=fileinfo["size"],
        create_time=fileinfo["created"],
        is_realm_public=True,
        path_id=s3_path,
        file_name=fileinfo["name"],
    )

    # Foreign keys and the messages M2M link are exported as plain IDs.
    serialized = model_to_dict(record, exclude=["owner", "messages", "realm"])
    serialized["owner"] = user_id
    serialized["messages"] = list(message_ids)
    serialized["realm"] = realm_id
    zerver_attachment.append(serialized)
|
2018-08-03 22:33:41 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-07-28 01:04:47 +02:00
|
|
|
def get_avatar(avatar_dir: str, size_url_suffix: str, avatar_upload_item: List[str]) -> None:
    """Download one avatar into avatar_dir.

    avatar_upload_item is [avatar_url, png_relative_path, original_relative_path];
    the downloaded bytes are stored under both relative paths.
    """
    avatar_url = avatar_upload_item[0]

    image_path = os.path.join(avatar_dir, avatar_upload_item[1])
    original_image_path = os.path.join(avatar_dir, avatar_upload_item[2])

    # Fail loudly on HTTP errors rather than silently saving an error
    # page as the avatar (the original code never checked the status);
    # the context manager releases the connection.  A raised exception
    # here is caught and logged by wrapping_function, so one bad avatar
    # doesn't abort the whole batch.
    with requests.get(avatar_url + size_url_suffix, stream=True) as response:
        response.raise_for_status()
        with open(image_path, "wb") as image_file:
            shutil.copyfileobj(response.raw, image_file)
    shutil.copy(image_path, original_image_path)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def process_avatars(
    avatar_list: List[ZerverFieldsT],
    avatar_dir: str,
    realm_id: int,
    threads: int,
    size_url_suffix: str = "",
) -> List[ZerverFieldsT]:
    """Download user avatars and prepare their records.json metadata.

    Each record in avatar_list is rewritten in place to point at the
    downloaded '<hash>.png' file, and a companion record pointing at
    '<hash>.original' is built for every avatar; the combined list of
    both kinds of records is returned.

    Required parameters:

    1. avatar_list: List of avatars to be mapped in avatars records.json file
    2. avatar_dir: Folder where the downloaded avatars are saved
    3. realm_id: Realm ID.

    We use this for Slack and Gitter conversions, where avatars need to be
    downloaded.  For simpler conversions see write_avatar_png.
    """
    logging.info("######### GETTING AVATARS #########\n")
    logging.info("DOWNLOADING AVATARS .......\n")

    originals: List[ZerverFieldsT] = []
    download_jobs: List[List[str]] = []
    for record in avatar_list:
        avatar_hash = user_avatar_path_from_ids(record["user_profile_id"], realm_id)
        source_url = record["path"]
        original_record = dict(record)

        png_path = f"{avatar_hash}.png"
        original_path = f"{avatar_hash}.original"
        download_jobs.append([source_url, png_path, original_path])

        # We don't add the size field here in avatar's records.json,
        # since the metadata is not needed on the import end, and we
        # don't have it until we've downloaded the files anyway.
        record["path"] = png_path
        record["s3_path"] = png_path

        original_record["path"] = original_path
        original_record["s3_path"] = original_path
        originals.append(original_record)

    # Fetch everything in parallel worker processes.
    run_parallel_wrapper(
        partial(get_avatar, avatar_dir, size_url_suffix), download_jobs, threads=threads
    )

    logging.info("######### GETTING AVATARS FINISHED #########\n")
    return avatar_list + originals
|
2018-08-03 19:15:01 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def write_avatar_png(avatar_folder: str, realm_id: int, user_id: int, bits: bytes) -> ZerverFieldsT:
    """
    Use this function for conversions like HipChat where
    the bits for the .png file come in something like
    a users.json file, and where we don't have to
    fetch avatar images externally.
    """
    avatar_hash = user_avatar_path_from_ids(
        user_profile_id=user_id,
        realm_id=realm_id,
    )
    target_path = os.path.join(avatar_folder, avatar_hash + ".original")

    with open(target_path, "wb") as out:
        out.write(bits)

    # Metadata that eventually goes in records.json.
    return dict(
        path=target_path,
        s3_path=target_path,
        realm_id=realm_id,
        user_profile_id=user_id,
        # We only write the .original file; ask the importer to do the thumbnailing.
        importer_should_thumbnail=True,
    )
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Generic type of the per-item payloads handed to worker functions by
# run_parallel_wrapper / wrapping_function below.
ListJobData = TypeVar("ListJobData")
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
|
2019-07-28 01:04:47 +02:00
|
|
|
def wrapping_function(f: Callable[[ListJobData], None], item: ListJobData) -> None:
    """Invoke f(item), logging (rather than propagating) any exception.

    Used as the per-item wrapper by run_parallel_wrapper so that a single
    failing item (e.g. one bad download) doesn't abort the whole batch.
    """
    try:
        f(item)
    except Exception:
        # Include the item and a stack trace so failed items can be retried.
        logging.exception("Error processing item: %s", item, stack_info=True)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def run_parallel_wrapper(
    f: Callable[[ListJobData], None], full_items: List[ListJobData], threads: int = 6
) -> None:
    """Apply f to every item of full_items in a pool of worker processes.

    (Despite the parameter name, `threads` is the size of a
    multiprocessing pool.)  Items are dispatched unordered; exceptions
    raised by f are logged, not re-raised, via wrapping_function.
    Progress is logged every 1000 completed items.
    """
    logging.info("Distributing %s items across %s threads", len(full_items), threads)

    job = partial(wrapping_function, f)
    with multiprocessing.Pool(threads) as pool:
        for count, _ in enumerate(pool.imap_unordered(job, full_items), 1):
            if count % 1000 == 0:
                logging.info("Finished %s items", count)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-07-28 01:04:47 +02:00
|
|
|
def get_uploads(upload_dir: str, upload: List[str]) -> None:
    """Download one attachment.

    upload is [source_url, relative_destination_path]; the destination's
    parent directories are created as needed under upload_dir.
    """
    upload_url = upload[0]
    upload_path = os.path.join(upload_dir, upload[1])

    os.makedirs(os.path.dirname(upload_path), exist_ok=True)
    # Fail loudly on HTTP errors rather than silently writing an error
    # page to disk (the original code never checked the status); the
    # context manager releases the connection.  A raised exception here
    # is caught and logged by wrapping_function.
    with requests.get(upload_url, stream=True) as response:
        response.raise_for_status()
        with open(upload_path, "wb") as upload_file:
            shutil.copyfileobj(response.raw, upload_file)
|
2018-12-01 01:15:55 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def process_uploads(
    upload_list: List[ZerverFieldsT], upload_dir: str, threads: int
) -> List[ZerverFieldsT]:
    """Download the attachments and rewrite their records to local paths.

    Each record's "path" initially holds the source URL; it is replaced
    in place by the record's "s3_path" while the file is queued for
    download into upload_dir.  Returns the (mutated) upload_list.

    Required parameters:

    1. upload_list: List of uploads to be mapped in uploads records.json file
    2. upload_dir: Folder where the downloaded uploads are saved
    """
    logging.info("######### GETTING ATTACHMENTS #########\n")
    logging.info("DOWNLOADING ATTACHMENTS .......\n")

    download_jobs = []
    for record in upload_list:
        source_url = record["path"]
        dest_path = record["s3_path"]
        download_jobs.append([source_url, dest_path])
        record["path"] = dest_path

    # Fetch everything in parallel worker processes.
    run_parallel_wrapper(partial(get_uploads, upload_dir), download_jobs, threads=threads)

    logging.info("######### GETTING ATTACHMENTS FINISHED #########\n")
    return upload_list
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def build_realm_emoji(realm_id: int, name: str, id: int, file_name: str) -> ZerverFieldsT:
    """Serialize one custom-emoji (RealmEmoji) row for the export files."""
    emoji = RealmEmoji(
        id=id,
        realm_id=realm_id,
        name=name,
        file_name=file_name,
    )
    return model_to_dict(emoji)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-07-28 01:04:47 +02:00
|
|
|
def get_emojis(emoji_dir: str, upload: List[str]) -> None:
    """Download one custom emoji.

    upload is [source_url, relative_destination_path]; the destination's
    parent directories are created as needed under emoji_dir.
    """
    emoji_url = upload[0]
    upload_emoji_path = os.path.join(emoji_dir, upload[1])

    os.makedirs(os.path.dirname(upload_emoji_path), exist_ok=True)
    # Fail loudly on HTTP errors rather than silently saving an error
    # page as the emoji (the original code never checked the status);
    # the context manager releases the connection.  A raised exception
    # here is caught and logged by wrapping_function.
    with requests.get(emoji_url, stream=True) as response:
        response.raise_for_status()
        with open(upload_emoji_path, "wb") as emoji_file:
            shutil.copyfileobj(response.raw, emoji_file)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def process_emojis(
    zerver_realmemoji: List[ZerverFieldsT],
    emoji_dir: str,
    emoji_url_map: ZerverFieldsT,
    threads: int,
) -> List[ZerverFieldsT]:
    """Download all custom emoji and build their records.json entries.

    Required parameters:

    1. zerver_realmemoji: List of all RealmEmoji objects to be imported
    2. emoji_dir: Folder where the downloaded emojis are saved
    3. emoji_url_map: Maps emoji name to its url
    """
    logging.info("######### GETTING EMOJIS #########\n")
    logging.info("DOWNLOADING EMOJIS .......\n")

    records: List[ZerverFieldsT] = []
    download_jobs: List[List[str]] = []
    for emoji in zerver_realmemoji:
        source_url = emoji_url_map[emoji["name"]]
        dest_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=emoji["realm"], emoji_file_name=emoji["name"]
        )
        download_jobs.append([source_url, dest_path])

        record = dict(emoji)
        record["path"] = dest_path
        record["s3_path"] = dest_path
        # records.json uses realm_id in place of the model's realm key.
        record["realm_id"] = record.pop("realm")
        records.append(record)

    # Fetch everything in parallel worker processes.
    run_parallel_wrapper(partial(get_emojis, emoji_dir), download_jobs, threads=threads)

    logging.info("######### GETTING EMOJIS FINISHED #########\n")
    return records
|
2018-10-13 16:41:18 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-10-13 16:41:18 +02:00
|
|
|
def create_converted_data_files(data: Any, output_dir: str, file_path: str) -> None:
    """Write data as indented JSON to output_dir + file_path, creating any
    missing parent directories.

    NOTE(review): file_path is string-concatenated, not os.path.join'ed —
    callers presumably pass a path beginning with "/"; confirm before
    changing this.
    """
    destination = output_dir + file_path
    os.makedirs(os.path.dirname(destination), exist_ok=True)
    with open(destination, "wb") as json_file:
        json_file.write(orjson.dumps(data, option=orjson.OPT_INDENT_2))
|