2019-02-28 21:19:47 +01:00
|
|
|
# This is the main code for the `./manage.py export` data export tool.
|
|
|
|
# User docs: https://zulip.readthedocs.io/en/latest/production/export-and-import.html
|
|
|
|
#
|
|
|
|
# Most developers will interact with this primarily when they add a
|
|
|
|
# new table to the schema, in which case they likely need to (1) add
|
|
|
|
# it to the lists in `ALL_ZULIP_TABLES` and similar data structures and
|
|
|
|
# (2) if it doesn't belong in EXCLUDED_TABLES, add a Config object for
|
|
|
|
# it to get_realm_config.
|
2016-04-05 00:27:37 +02:00
|
|
|
import datetime
|
|
|
|
from boto.s3.connection import S3Connection
|
2018-12-07 17:12:09 +01:00
|
|
|
from boto.s3.key import Key # for mypy
|
2018-05-31 19:13:56 +02:00
|
|
|
from django.apps import apps
|
2016-04-05 00:27:37 +02:00
|
|
|
from django.conf import settings
|
|
|
|
from django.forms.models import model_to_dict
|
2017-04-15 04:09:56 +02:00
|
|
|
from django.utils.timezone import make_aware as timezone_make_aware
|
2017-04-15 04:08:10 +02:00
|
|
|
from django.utils.timezone import is_naive as timezone_is_naive
|
2016-08-09 02:19:29 +02:00
|
|
|
import glob
|
2016-04-05 00:27:37 +02:00
|
|
|
import logging
|
|
|
|
import os
|
|
|
|
import ujson
|
|
|
|
import subprocess
|
|
|
|
import tempfile
|
2018-07-18 23:50:16 +02:00
|
|
|
import shutil
|
|
|
|
from scripts.lib.zulip_tools import overwrite_symlink
|
2018-04-23 23:28:27 +02:00
|
|
|
from zerver.lib.avatar_hash import user_avatar_path_from_ids
|
2019-01-05 01:15:49 +01:00
|
|
|
from analytics.models import RealmCount, UserCount, StreamCount
|
2016-04-05 00:27:37 +02:00
|
|
|
from zerver.models import UserProfile, Realm, Client, Huddle, Stream, \
|
2018-05-26 18:25:50 +02:00
|
|
|
UserMessage, Subscription, Message, RealmEmoji, RealmFilter, Reaction, \
|
2017-03-31 16:20:07 +02:00
|
|
|
RealmDomain, Recipient, DefaultStream, get_user_profile_by_id, \
|
2018-05-23 08:50:11 +02:00
|
|
|
UserPresence, UserActivity, UserActivityInterval, CustomProfileField, \
|
2018-07-05 18:18:40 +02:00
|
|
|
CustomProfileFieldValue, get_display_recipient, Attachment, get_system_bot, \
|
2018-07-11 19:57:15 +02:00
|
|
|
RealmAuditLog, UserHotspot, MutedTopic, Service, UserGroup, \
|
2018-07-17 18:58:43 +02:00
|
|
|
UserGroupMembership, BotStorageData, BotConfigData
|
2019-07-10 02:12:34 +02:00
|
|
|
import zerver.lib.upload
|
2017-12-15 13:34:48 +01:00
|
|
|
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, \
|
2019-02-02 23:53:34 +01:00
|
|
|
Union
|
2016-08-09 04:31:26 +02:00
|
|
|
|
|
|
|
# Custom mypy types follow:
Record = Dict[str, Any]  # one exported database row, keyed by column name
TableName = str
TableData = Dict[TableName, List[Record]]  # maps table name -> exported rows
Field = str
Path = str
Context = Dict[str, Any]  # per-export state passed to custom fetch/post-process hooks
FilterArgs = Dict[str, Any]  # extra kwargs merged into Django .filter() calls
IdSource = Tuple[TableName, Field]  # (table, field) to pull foreign-key ids from
SourceFilter = Callable[[Record], bool]

# These next two types are callbacks, which mypy does not
# support well, because PEP 484 says "using callbacks
# with keyword arguments is not perceived as a common use case."
# CustomFetch = Callable[[TableData, Config, Context], None]
# PostProcessData = Callable[[TableData, Config, Context], None]
CustomFetch = Any # TODO: make more specific, see above
PostProcessData = Any # TODO: make more specific

# The keys of our MessageOutput variables are normally
# List[Record], but when we write partials, we can get
# lists of integers or a single integer.
# TODO: This could maybe be improved using TypedDict?
MessageOutput = Dict[str, Union[List[Record], List[int], int]]

# Batch size for paginating the large message tables (see
# MESSAGE_TABLES, which are exported in chunked files).
MESSAGE_BATCH_CHUNK_SIZE = 1000
|
|
|
|
|
2018-05-31 19:34:54 +02:00
|
|
|
# Every database table in a Zulip server's schema.
# sanity_check_output() asserts that this set exactly matches the
# tables Django's model registry declares, so any new model must be
# added here (and classified in one of the sets below).
ALL_ZULIP_TABLES = {
    'analytics_fillstate',
    'analytics_installationcount',
    'analytics_realmcount',
    'analytics_streamcount',
    'analytics_usercount',
    'otp_static_staticdevice',
    'otp_static_statictoken',
    'otp_totp_totpdevice',
    'social_auth_association',
    'social_auth_code',
    'social_auth_nonce',
    'social_auth_partial',
    'social_auth_usersocialauth',
    'two_factor_phonedevice',
    'zerver_archivedattachment',
    'zerver_archivedattachment_messages',
    'zerver_archivedmessage',
    'zerver_archivedusermessage',
    'zerver_attachment',
    'zerver_attachment_messages',
    'zerver_archivedreaction',
    'zerver_archivedsubmessage',
    'zerver_archivetransaction',
    'zerver_botconfigdata',
    'zerver_botstoragedata',
    'zerver_client',
    'zerver_customprofilefield',
    'zerver_customprofilefieldvalue',
    'zerver_defaultstream',
    'zerver_defaultstreamgroup',
    'zerver_defaultstreamgroup_streams',
    'zerver_emailchangestatus',
    'zerver_huddle',
    'zerver_message',
    'zerver_missedmessageemailaddress',
    'zerver_multiuseinvite',
    'zerver_multiuseinvite_streams',
    'zerver_preregistrationuser',
    'zerver_preregistrationuser_streams',
    'zerver_pushdevicetoken',
    'zerver_reaction',
    'zerver_realm',
    'zerver_realmauditlog',
    'zerver_realmdomain',
    'zerver_realmemoji',
    'zerver_realmfilter',
    'zerver_recipient',
    'zerver_scheduledemail',
    'zerver_scheduledemail_users',
    'zerver_scheduledmessage',
    'zerver_service',
    'zerver_stream',
    'zerver_submessage',
    'zerver_subscription',
    'zerver_useractivity',
    'zerver_useractivityinterval',
    'zerver_usergroup',
    'zerver_usergroupmembership',
    'zerver_userhotspot',
    'zerver_usermessage',
    'zerver_userpresence',
    'zerver_userprofile',
    'zerver_userprofile_groups',
    'zerver_userprofile_user_permissions',
    'zerver_userstatus',
    'zerver_mutedtopic',
}
|
2016-08-11 20:27:26 +02:00
|
|
|
|
2019-02-28 21:19:47 +01:00
|
|
|
# This set contains those database tables that we expect to not be
# included in the export.  This tool does validation to ensure that
# every table in the database is either exported or listed here, to
# ensure we never accidentally fail to export a table.
NON_EXPORTED_TABLES = {
    # These invitation/confirmation flow tables don't make sense to
    # export, since invitations links will be broken by the server URL
    # change anyway:
    'zerver_emailchangestatus',
    'zerver_multiuseinvite',
    'zerver_multiuseinvite_streams',
    'zerver_preregistrationuser',
    'zerver_preregistrationuser_streams',

    # Missed message addresses are low value to export since
    # missed-message email addresses include the server's hostname and
    # expire after a few days.
    'zerver_missedmessageemailaddress',

    # When switching servers, clients will need to re-login and
    # reregister for push notifications anyway.
    'zerver_pushdevicetoken',

    # We don't use these generated Django tables
    'zerver_userprofile_groups',
    'zerver_userprofile_user_permissions',

    # These are used for scheduling future activity; it could make
    # sense to export, but is relatively low value.
    'zerver_scheduledemail',
    'zerver_scheduledemail_users',
    'zerver_scheduledmessage',

    # These tables are related to a user's 2FA authentication
    # configuration, which will need to be re-setup on the new server.
    'two_factor_phonedevice',
    'otp_static_staticdevice',
    'otp_static_statictoken',
    'otp_totp_totpdevice',

    # These archive tables should not be exported (they are to support
    # restoring content accidentally deleted due to software bugs in
    # the retention policy feature)
    'zerver_archivedmessage',
    'zerver_archivedusermessage',
    'zerver_archivedattachment',
    'zerver_archivedattachment_messages',
    'zerver_archivedreaction',
    'zerver_archivedsubmessage',
    'zerver_archivetransaction',

    # Social auth tables are not needed post-export, since we don't
    # use any of this state outside of a direct authentication flow.
    'social_auth_association',
    'social_auth_code',
    'social_auth_nonce',
    'social_auth_partial',
    'social_auth_usersocialauth',

    # We will likely never want to migrate this table, since it's a
    # total of all the realmcount values on the server.  Might need to
    # recompute it after a fillstate import.
    'analytics_installationcount',

    # Fillstate will require some cleverness to do the right partial export.
    'analytics_fillstate',

    # These are for unfinished features; we'll want to add them to the
    # export before they reach full production status.
    'zerver_defaultstreamgroup',
    'zerver_defaultstreamgroup_streams',
    'zerver_submessage',

    # This is low priority, since users can easily just reset themselves to away.
    'zerver_userstatus',

    # For any tables listed below here, it's a bug that they are not present in the export.
}
|
|
|
|
|
|
|
|
IMPLICIT_TABLES = {
    # ManyToMany relationships are exported implicitly when importing
    # the parent table.
    'zerver_attachment_messages',
}

# Attachments get their own export pass (see user docs), separate from
# the main realm data.
ATTACHMENT_TABLES = {
    'zerver_attachment',
}

MESSAGE_TABLES = {
    # message tables get special treatment, because they're by far our
    # largest tables and need to be paginated.
    'zerver_message',
    'zerver_usermessage',
    # zerver_reaction belongs here, since it's added late because it
    # has a foreign key into the Message table.
    'zerver_reaction',
}

# These get their own file as analytics data can be quite large and
# would otherwise make realm.json unpleasant to manually inspect
ANALYTICS_TABLES = {
    'analytics_realmcount',
    'analytics_streamcount',
    'analytics_usercount',
}

# This data structure lists all the Django DateTimeField fields in the
# data model.  These are converted to floats during the export process
# via floatify_datetime_fields, and back during the import process.
#
# TODO: This data structure could likely eventually be replaced by
# inspecting the corresponding Django models
DATE_FIELDS = {
    'zerver_attachment': ['create_time'],
    'zerver_message': ['last_edit_time', 'date_sent'],
    'zerver_realm': ['date_created'],
    'zerver_stream': ['date_created'],
    'zerver_useractivity': ['last_visit'],
    'zerver_useractivityinterval': ['start', 'end'],
    'zerver_userpresence': ['timestamp'],
    'zerver_userprofile': ['date_joined', 'last_login', 'last_reminder'],
    'zerver_realmauditlog': ['event_time'],
    'zerver_userhotspot': ['timestamp'],
    'analytics_installationcount': ['end_time'],
    'analytics_realmcount': ['end_time'],
    'analytics_usercount': ['end_time'],
    'analytics_streamcount': ['end_time'],
} # type: Dict[TableName, List[Field]]
|
2016-08-09 16:35:43 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def sanity_check_output(data: TableData) -> None:
    """Validate the export's table coverage against the live schema.

    Asserts that the export tool's declared table sets exactly account
    for every model Django knows about, then warns about any table
    that should have been exported but is missing from `data`.
    """
    # Gather every model (including auto-created M2M through tables)
    # from each Django app that makes up a Zulip server.
    zulip_app_labels = [
        'analytics',
        'django_otp',
        'otp_static',
        'otp_totp',
        'social_django',
        'two_factor',
        'zerver',
    ]
    target_models = [
        model
        for app_label in zulip_app_labels
        for model in apps.get_app_config(app_label).get_models(include_auto_created=True)
    ]
    all_tables_db = {model._meta.db_table for model in target_models}

    # These assertion statements will fire when we add a new database
    # table that is not included in Zulip's data exports.  Generally,
    # you can add your new table to `ALL_ZULIP_TABLES` and
    # `NON_EXPORTED_TABLES` during early work on a new feature so that
    # CI passes.
    #
    # We'll want to make sure we handle it for exports before
    # releasing the new feature, but doing so correctly requires some
    # expertise on this export system.
    assert ALL_ZULIP_TABLES == all_tables_db
    assert NON_EXPORTED_TABLES.issubset(ALL_ZULIP_TABLES)
    assert IMPLICIT_TABLES.issubset(ALL_ZULIP_TABLES)
    assert ATTACHMENT_TABLES.issubset(ALL_ZULIP_TABLES)
    assert ANALYTICS_TABLES.issubset(ALL_ZULIP_TABLES)

    # Whatever remains after removing every special-cased category is
    # expected to appear directly in the exported data.
    expected_tables = set(ALL_ZULIP_TABLES)
    expected_tables -= NON_EXPORTED_TABLES
    expected_tables -= IMPLICIT_TABLES
    expected_tables -= MESSAGE_TABLES
    expected_tables -= ATTACHMENT_TABLES
    expected_tables -= ANALYTICS_TABLES

    for table in expected_tables:
        if table not in data:
            logging.warning('??? NO DATA EXPORTED FOR TABLE %s!!!' % (table,))
|
2016-08-09 16:35:43 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def write_data_to_file(output_file: Path, data: Any) -> None:
    """Serialize `data` as indented JSON and write it to `output_file`."""
    serialized = ujson.dumps(data, indent=4)
    with open(output_file, "w") as output:
        output.write(serialized)
|
|
|
|
|
2018-03-23 23:53:31 +01:00
|
|
|
def make_raw(query: Any, exclude: Optional[List[Field]]=None) -> List[Record]:
    '''
    Takes a Django query and returns a JSONable list
    of dictionaries corresponding to the database rows.
    '''
    records = []
    for instance in query:
        record = model_to_dict(instance, exclude=exclude)
        # In Django 1.11.5, model_to_dict evaluates the QuerySet of a
        # many-to-many field into a list of model instances.  We need
        # a list of primary keys instead, so extract the ids here.
        for m2m_field in instance._meta.many_to_many:
            related_instances = record[m2m_field.name]
            record[m2m_field.name] = [related.id for related in related_instances]
        records.append(record)

    return records
|
2016-08-09 17:30:52 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def floatify_datetime_fields(data: TableData, table: TableName) -> None:
    """Convert the datetime columns of `table`'s rows to UNIX timestamps.

    Mutates data[table] in place: each column listed in
    DATE_FIELDS[table] is replaced by a float of seconds since the
    UNIX epoch (UTC).  None values are left untouched; naive datetimes
    are logged and coerced to timezone-aware before conversion.
    """
    for item in data[table]:
        for field in DATE_FIELDS[table]:
            orig_dt = item[field]
            if orig_dt is None:
                continue
            if timezone_is_naive(orig_dt):
                # Bug fix: the original call was
                # logging.warning("Naive datetime:", item) -- a lazy
                # logging argument with no %s placeholder in the
                # format string, which makes the logging machinery
                # raise a string-formatting error instead of logging
                # the offending row.
                logging.warning("Naive datetime: %s", item)
                dt = timezone_make_aware(orig_dt)
            else:
                dt = orig_dt
            # Normalize to naive UTC, then diff against the epoch to
            # produce the float timestamp.
            utc_naive = dt.replace(tzinfo=None) - dt.utcoffset()
            item[field] = (utc_naive - datetime.datetime(1970, 1, 1)).total_seconds()
|
2016-04-05 00:27:37 +02:00
|
|
|
|
2017-11-05 11:37:41 +01:00
|
|
|
class Config:
    '''A Config object configures a single table for exporting (and, maybe
    some day importing as well.  This configuration defines what
    process needs to be followed to correctly extract the set of
    objects to export.

    You should never mutate Config objects as part of the export;
    instead use the data to determine how you populate other
    data structures.

    There are parent/children relationships between Config objects.
    The parent should be instantiated first.  The child will
    append itself to the parent's list of children.

    '''

    def __init__(self, table: Optional[str]=None,
                 model: Optional[Any]=None,
                 normal_parent: Optional['Config']=None,
                 virtual_parent: Optional['Config']=None,
                 filter_args: Optional[FilterArgs]=None,
                 custom_fetch: Optional[CustomFetch]=None,
                 custom_tables: Optional[List[TableName]]=None,
                 post_process_data: Optional[PostProcessData]=None,
                 concat_and_destroy: Optional[List[TableName]]=None,
                 id_source: Optional[IdSource]=None,
                 source_filter: Optional[SourceFilter]=None,
                 parent_key: Optional[Field]=None,
                 use_all: bool=False,
                 is_seeded: bool=False,
                 exclude: Optional[List[Field]]=None) -> None:
        # Every Config must declare at least one table to export,
        # either via `table` or the `custom_tables` list.
        assert table or custom_tables
        self.table = table
        self.model = model
        self.normal_parent = normal_parent
        self.virtual_parent = virtual_parent
        self.filter_args = filter_args
        self.parent_key = parent_key
        self.use_all = use_all
        self.is_seeded = is_seeded
        self.exclude = exclude
        self.custom_fetch = custom_fetch
        self.custom_tables = custom_tables
        self.post_process_data = post_process_data
        self.concat_and_destroy = concat_and_destroy
        self.id_source = id_source
        self.source_filter = source_filter
        self.children = []  # type: List[Config]

        # Only a normal_parent becomes self.parent; a virtual_parent
        # provides ordering/data dependencies but no parent link.
        if normal_parent is not None:
            self.parent = normal_parent  # type: Optional[Config]
        else:
            self.parent = None

        if virtual_parent is not None and normal_parent is not None:
            raise AssertionError('''
                If you specify a normal_parent, please
                do not create a virtual_parent.
                ''')

        if normal_parent is not None:
            normal_parent.children.append(self)
        elif virtual_parent is not None:
            virtual_parent.children.append(self)
        elif not is_seeded:
            # Bug fix: this condition previously read
            # `elif is_seeded is None:`, which can never be true since
            # is_seeded is a bool defaulting to False -- the check was
            # dead code.  The message below states the intended
            # contract: any non-seeded Config must have a parent.
            raise AssertionError('''
                You must specify a parent if you are
                not using is_seeded.
                ''')

        if self.id_source is not None:
            # id_source configs pull ids out of a previously fetched
            # table, so they depend on that table having been fetched
            # first; the virtual_parent expresses that ordering.
            if self.virtual_parent is None:
                raise AssertionError('''
                    You must specify a virtual_parent if you are
                    using id_source.''')
            if self.id_source[0] != self.virtual_parent.table:
                raise AssertionError('''
                    Configuration error.  To populate %s, you
                    want data from %s, but that differs from
                    the table name of your virtual parent (%s),
                    which suggests you many not have set up
                    the ordering correctly.  You may simply
                    need to assign a virtual_parent, or there
                    may be deeper issues going on.''' % (
                    self.table,
                    self.id_source[0],
                    self.virtual_parent.table))
|
2016-08-11 21:13:02 +02:00
|
|
|
|
|
|
|
|
2018-03-23 23:53:31 +01:00
|
|
|
def export_from_config(response: TableData, config: Config, seed_object: Optional[Any]=None,
                       context: Optional[Context]=None) -> None:
    """Recursively export one Config node (and its children) into `response`.

    Exactly one fetch strategy applies per Config: is_seeded (use the
    provided seed_object), custom_fetch, concat_and_destroy, use_all,
    normal_parent (filter by parent ids), or id_source (filter by ids
    collected from a previously exported table).  Fetched rows are
    converted via make_raw, datetime columns are floatified, and then
    every child Config is exported in declaration order.
    """
    table = config.table
    parent = config.parent
    model = config.model

    if context is None:
        context = {}

    if table:
        exported_tables = [table]
    else:
        if config.custom_tables is None:
            raise AssertionError('''
                You must specify config.custom_tables if you
                are not specifying config.table''')
        exported_tables = config.custom_tables

    for t in exported_tables:
        # Lazy %-args: let logging do the interpolation only if the
        # record is actually emitted.
        logging.info('Exporting via export_from_config: %s', t)

    rows = None
    if config.is_seeded:
        # The caller supplies the root object (e.g. the Realm) directly.
        rows = [seed_object]

    elif config.custom_fetch:
        # The custom fetcher is responsible for populating response itself.
        config.custom_fetch(
            response=response,
            config=config,
            context=context
        )
        if config.custom_tables:
            for t in config.custom_tables:
                if t not in response:
                    raise AssertionError('Custom fetch failed to populate %s' % (t,))

    elif config.concat_and_destroy:
        # When we concat_and_destroy, we are working with
        # temporary "tables" that are lists of records that
        # should already be ready to export.
        data = []  # type: List[Record]
        for t in config.concat_and_destroy:
            data += response[t]
            del response[t]
            logging.info('Deleted temporary %s', t)
        assert table is not None
        response[table] = data

    elif config.use_all:
        assert model is not None
        query = model.objects.all()
        rows = list(query)

    elif config.normal_parent:
        # In this mode, our current model is figuratively Article,
        # and normal_parent is figuratively Blog, and
        # now we just need to get all the articles
        # contained by the blogs.
        model = config.model
        assert parent is not None
        assert parent.table is not None
        assert config.parent_key is not None
        parent_ids = [r['id'] for r in response[parent.table]]
        filter_parms = {config.parent_key: parent_ids}  # type: Dict[str, Any]
        if config.filter_args is not None:
            filter_parms.update(config.filter_args)
        assert model is not None
        query = model.objects.filter(**filter_parms)
        rows = list(query)

    elif config.id_source:
        # In this mode, we are the figurative Blog, and we now
        # need to look at the current response to get all the
        # blog ids from the Article rows we fetched previously.
        model = config.model
        assert model is not None
        # This will be a tuple of the form ('zerver_article', 'blog').
        (child_table, field) = config.id_source
        child_rows = response[child_table]
        if config.source_filter:
            child_rows = [r for r in child_rows if config.source_filter(r)]
        lookup_ids = [r[field] for r in child_rows]
        filter_parms = dict(id__in=lookup_ids)
        if config.filter_args:
            filter_parms.update(config.filter_args)
        query = model.objects.filter(**filter_parms)
        rows = list(query)

    # Post-process rows (which won't apply to custom fetches/concats)
    if rows is not None:
        assert table is not None  # Hint for mypy
        response[table] = make_raw(rows, exclude=config.exclude)
        if table in DATE_FIELDS:
            floatify_datetime_fields(response, table)

    if config.post_process_data:
        config.post_process_data(
            response=response,
            config=config,
            context=context
        )

    # Now walk our children.  It's extremely important to respect
    # the order of children here.
    for child_config in config.children:
        export_from_config(
            response=response,
            config=child_config,
            context=context,
        )
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_realm_config() -> Config:
    # This function generates the main Config object that defines how
    # to do a full-realm export of a single realm from a Zulip server.

    # The realm is the root of the Config tree; is_seeded=True means
    # the caller supplies the Realm object as seed_object.
    realm_config = Config(
        table='zerver_realm',
        is_seeded=True
    )

    # Tables that hang directly off the realm.
    Config(
        table='zerver_defaultstream',
        model=DefaultStream,
        normal_parent=realm_config,
        parent_key='realm_id__in',
    )

    Config(
        table='zerver_customprofilefield',
        model=CustomProfileField,
        normal_parent=realm_config,
        parent_key='realm_id__in',
    )

    Config(
        table='zerver_realmemoji',
        model=RealmEmoji,
        normal_parent=realm_config,
        parent_key='realm_id__in',
    )

    Config(
        table='zerver_realmdomain',
        model=RealmDomain,
        normal_parent=realm_config,
        parent_key='realm_id__in',
    )

    Config(
        table='zerver_realmfilter',
        model=RealmFilter,
        normal_parent=realm_config,
        parent_key='realm_id__in',
    )

    # Clients are server-global, so we export all of them.
    Config(
        table='zerver_client',
        model=Client,
        virtual_parent=realm_config,
        use_all=True
    )

    user_profile_config = Config(
        custom_tables=[
            'zerver_userprofile',
            'zerver_userprofile_mirrordummy',
        ],
        # set table for children who treat us as normal parent
        table='zerver_userprofile',
        virtual_parent=realm_config,
        custom_fetch=fetch_user_profile,
    )

    user_groups_config = Config(
        table='zerver_usergroup',
        model=UserGroup,
        normal_parent=realm_config,
        parent_key='realm__in',
    )

    Config(
        table='zerver_usergroupmembership',
        model=UserGroupMembership,
        normal_parent=user_groups_config,
        parent_key='user_group__in',
    )

    Config(
        custom_tables=[
            'zerver_userprofile_crossrealm',
        ],
        virtual_parent=user_profile_config,
        custom_fetch=fetch_user_profile_cross_realm,
    )

    # Per-user tables, filtered by the exported user profiles.
    Config(
        table='zerver_userpresence',
        model=UserPresence,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_customprofilefieldvalue',
        model=CustomProfileFieldValue,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_useractivity',
        model=UserActivity,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_useractivityinterval',
        model=UserActivityInterval,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_realmauditlog',
        model=RealmAuditLog,
        normal_parent=user_profile_config,
        parent_key='modified_user__in',
    )

    Config(
        table='zerver_userhotspot',
        model=UserHotspot,
        normal_parent=user_profile_config,
        parent_key='user__in',
    )

    Config(
        table='zerver_mutedtopic',
        model=MutedTopic,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_service',
        model=Service,
        normal_parent=user_profile_config,
        parent_key='user_profile__in',
    )

    Config(
        table='zerver_botstoragedata',
        model=BotStorageData,
        normal_parent=user_profile_config,
        parent_key='bot_profile__in',
    )

    Config(
        table='zerver_botconfigdata',
        model=BotConfigData,
        normal_parent=user_profile_config,
        parent_key='bot_profile__in',
    )

    # Some of these tables are intermediate "tables" that we
    # create only for the export.  Think of them as similar to views.

    # Personal (1:1) message subscriptions and their recipients.
    user_subscription_config = Config(
        table='_user_subscription',
        model=Subscription,
        normal_parent=user_profile_config,
        filter_args={'recipient__type': Recipient.PERSONAL},
        parent_key='user_profile__in',
    )

    Config(
        table='_user_recipient',
        model=Recipient,
        virtual_parent=user_subscription_config,
        id_source=('_user_subscription', 'recipient'),
    )

    # Stream subscriptions -> their recipients -> the streams themselves.
    stream_subscription_config = Config(
        table='_stream_subscription',
        model=Subscription,
        normal_parent=user_profile_config,
        filter_args={'recipient__type': Recipient.STREAM},
        parent_key='user_profile__in',
    )

    stream_recipient_config = Config(
        table='_stream_recipient',
        model=Recipient,
        virtual_parent=stream_subscription_config,
        id_source=('_stream_subscription', 'recipient'),
    )

    Config(
        table='zerver_stream',
        model=Stream,
        virtual_parent=stream_recipient_config,
        id_source=('_stream_recipient', 'type_id'),
        source_filter=lambda r: r['type'] == Recipient.STREAM,
        exclude=['email_token'],
        post_process_data=sanity_check_stream_data
    )

    # Huddles (group private messages) are fetched with custom logic.
    Config(
        custom_tables=[
            '_huddle_recipient',
            '_huddle_subscription',
            'zerver_huddle',
        ],
        normal_parent=user_profile_config,
        custom_fetch=fetch_huddle_objects,
    )

    # Now build permanent tables from our temp tables.
    Config(
        table='zerver_recipient',
        virtual_parent=user_profile_config,
        concat_and_destroy=[
            '_user_recipient',
            '_stream_recipient',
            '_huddle_recipient',
        ],
    )

    Config(
        table='zerver_subscription',
        virtual_parent=user_profile_config,
        concat_and_destroy=[
            '_user_subscription',
            '_stream_subscription',
            '_huddle_subscription',
        ]
    )

    return realm_config
|
2016-08-11 02:39:21 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def sanity_check_stream_data(response: TableData, config: Config, context: Context) -> None:
    """Verify that the exported zerver_stream rows exactly match the
    streams that actually exist in the realm.

    Raises AssertionError (after printing diagnostics) on any mismatch.
    Skipped entirely when `exportable_user_ids` restricts the export,
    since stream discovery is too complex to cross-check in that mode.
    """
    if context['exportable_user_ids'] is not None:
        # If we restrict which user ids are exportable,
        # the way that we find # streams is a little too
        # complex to have a sanity check.
        return

    # Set comprehensions instead of set([...]) around list comprehensions.
    actual_streams = {stream.name for stream in Stream.objects.filter(
        realm=response["zerver_realm"][0]['id'])}
    streams_in_response = {stream['name'] for stream in response['zerver_stream']}

    # Truthiness of the set difference replaces `len(...) > 0`.
    if streams_in_response - actual_streams:
        print("Error: Streams not present in the realm were exported:")
        print("  ", streams_in_response - actual_streams)
        print("This is likely due to a bug in the export tool.")
        raise AssertionError("Aborting! Please investigate.")
    if actual_streams - streams_in_response:
        print("Error: Some streams present in the realm were not exported:")
        print("  ", actual_streams - streams_in_response)
        print("Usually, this is caused by a stream having been created that never had subscribers.")
        print("(Due to a bug elsewhere in Zulip, not in the export tool)")
        raise AssertionError("Aborting! Please investigate.")
|
2016-08-12 20:46:49 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fetch_user_profile(response: TableData, config: Config, context: Context) -> None:
    """Fetch the realm's UserProfile rows, splitting them into normal
    users and mirror-dummy users.

    Users outside `exportable_user_ids` (when that restriction is set)
    are demoted to inactive mirror-dummy users before the split.
    """
    realm = context['realm']
    exportable_user_ids = context['exportable_user_ids']

    # password and api_key are secrets and must never leave the server.
    user_rows = make_raw(
        list(UserProfile.objects.filter(realm_id=realm.id)),
        exclude=['password', 'api_key'],
    )

    normal_rows = []  # type: List[Record]
    dummy_rows = []  # type: List[Record]

    for user_row in user_rows:
        if exportable_user_ids is not None:
            if user_row['id'] not in exportable_user_ids:
                # Convert non-exportable users to
                # inactive is_mirror_dummy users.
                user_row['is_mirror_dummy'] = True
                user_row['is_active'] = False
            else:
                assert not user_row['is_mirror_dummy']

        bucket = dummy_rows if user_row['is_mirror_dummy'] else normal_rows
        bucket.append(user_row)

    response['zerver_userprofile'] = normal_rows
    response['zerver_userprofile_mirrordummy'] = dummy_rows
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fetch_user_profile_cross_realm(response: TableData, config: Config, context: Context) -> None:
    """Record the system bots that may have sent cross-realm messages
    into this realm, as zerver_userprofile_crossrealm rows."""
    realm = context['realm']
    response['zerver_userprofile_crossrealm'] = []

    if realm.string_id == settings.SYSTEM_BOT_REALM:
        # The system bots live in this realm, so they are already
        # covered by the ordinary user export.
        return

    bot_emails = [
        settings.NOTIFICATION_BOT,
        settings.EMAIL_GATEWAY_BOT,
        settings.WELCOME_BOT,
    ]
    for bot_email in bot_emails:
        bot_user = get_system_bot(bot_email)
        recipient_id = Recipient.objects.get(type_id=bot_user.id, type=Recipient.PERSONAL).id
        response['zerver_userprofile_crossrealm'].append(dict(
            email=bot_user.email,
            id=bot_user.id,
            recipient_id=recipient_id,
        ))
|
2016-08-11 02:39:21 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fetch_attachment_data(response: TableData, realm_id: int, message_ids: Set[int]) -> None:
    """Fetch the realm's Attachment rows and prune each row's `messages`
    list down to the message ids we actually exported; attachments left
    with no surviving messages are dropped entirely."""
    filter_args = {'realm_id': realm_id}
    query = Attachment.objects.filter(**filter_args)
    response['zerver_attachment'] = make_raw(list(query))
    floatify_datetime_fields(response, 'zerver_attachment')

    '''
    We usually export most messages for the realm, but not
    quite ALL messages for the realm. So, we need to
    clean up our attachment data to have correct
    values for response['zerver_attachment'][<n>]['messages'].
    '''
    for row in response['zerver_attachment']:
        # Renamed from the typo'd `filterer_message_ids`; also sorted()
        # accepts a set directly, so the intermediate list() was redundant.
        filtered_message_ids = set(row['messages']).intersection(message_ids)
        row['messages'] = sorted(filtered_message_ids)

    '''
    Attachments can be connected to multiple messages, although
    it's most common to have just one message. Regardless,
    if none of those message(s) survived the filtering above
    for a particular attachment, then we won't export the
    attachment row.
    '''
    response['zerver_attachment'] = [
        row for row in response['zerver_attachment']
        if row['messages']]
|
|
|
|
|
2018-05-26 18:25:50 +02:00
|
|
|
def fetch_reaction_data(response: TableData, message_ids: Set[int]) -> None:
    """Fetch the Reaction rows attached to the exported messages."""
    reactions = Reaction.objects.filter(message_id__in=list(message_ids))
    response['zerver_reaction'] = make_raw(list(reactions))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fetch_huddle_objects(response: TableData, config: Config, context: Context) -> None:
    """Populate the temp tables _huddle_recipient and _huddle_subscription
    plus zerver_huddle, keeping only huddles whose members all belong to
    this realm; huddles with any cross-realm member are dropped entirely.
    """

    realm = context['realm']
    assert config.parent is not None
    assert config.parent.table is not None
    # Users already collected by the parent config (the realm's users).
    user_profile_ids = set(r['id'] for r in response[config.parent.table])

    # First we get all huddles involving someone in the realm.
    realm_huddle_subs = Subscription.objects.select_related("recipient").filter(
        recipient__type=Recipient.HUDDLE, user_profile__in=user_profile_ids)
    realm_huddle_recipient_ids = set(sub.recipient_id for sub in realm_huddle_subs)

    # Mark all Huddles whose recipient ID contains a cross-realm user.
    unsafe_huddle_recipient_ids = set()
    for sub in Subscription.objects.select_related().filter(recipient__in=realm_huddle_recipient_ids):
        if sub.user_profile.realm != realm:
            # In almost every case the other realm will be zulip.com
            unsafe_huddle_recipient_ids.add(sub.recipient_id)

    # Now filter down to just those huddles that are entirely within the realm.
    #
    # This is important for ensuring that the User objects needed
    # to import it on the other end exist (since we're only
    # exporting the users from this realm), at the cost of losing
    # some of these cross-realm messages.
    huddle_subs = [sub for sub in realm_huddle_subs if sub.recipient_id not in unsafe_huddle_recipient_ids]
    huddle_recipient_ids = set(sub.recipient_id for sub in huddle_subs)
    huddle_ids = set(sub.recipient.type_id for sub in huddle_subs)

    huddle_subscription_dicts = make_raw(huddle_subs)
    huddle_recipients = make_raw(Recipient.objects.filter(id__in=huddle_recipient_ids))

    # Temp tables; later merged into zerver_recipient/zerver_subscription
    # by the concat_and_destroy configs in get_realm_config.
    response['_huddle_recipient'] = huddle_recipients
    response['_huddle_subscription'] = huddle_subscription_dicts
    response['zerver_huddle'] = make_raw(Huddle.objects.filter(id__in=huddle_ids))
|
2016-08-11 01:21:53 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def fetch_usermessages(realm: Realm,
                       message_ids: Set[int],
                       user_profile_ids: Set[int],
                       message_filename: Path,
                       consent_message_id: Optional[int]=None) -> List[Record]:
    """Return UserMessage rows (as dicts) for the given messages,
    restricted to the given users and, when consent_message_id is set,
    further restricted to the users who consented."""
    # UserMessage export security rule: You can export UserMessages
    # for the messages you exported for the users in your realm.
    query = UserMessage.objects.filter(user_profile__realm=realm,
                                       message_id__in=message_ids)
    if consent_message_id is not None:
        user_profile_ids = user_profile_ids & get_consented_user_ids(consent_message_id)

    chunk = []
    for um in query:
        if um.user_profile_id not in user_profile_ids:
            continue
        um_dict = model_to_dict(um)
        # Serialize the flags bitfield as its integer mask.
        um_dict['flags_mask'] = um.flags.mask
        del um_dict['flags']
        chunk.append(um_dict)
    logging.info("Fetched UserMessages for %s" % (message_filename,))
    return chunk
|
|
|
|
|
2019-05-10 14:28:38 +02:00
|
|
|
def export_usermessages_batch(input_path: Path, output_path: Path,
                              consent_message_id: Optional[int]=None) -> None:
    """As part of the system for doing parallel exports, this runs on one
    batch of Message objects and adds the corresponding UserMessage
    objects. (This is called by the export_usermessage_batch
    management command)."""
    with open(input_path, "r") as input_file:
        output = ujson.loads(input_file.read())

    # Pop the bookkeeping fields the parent process stashed for us, so
    # they do not end up in the written shard.
    user_profile_ids = set(output.pop('zerver_userprofile_ids'))
    realm = Realm.objects.get(id=output.pop('realm_id'))
    message_ids = {item['id'] for item in output['zerver_message']}

    output['zerver_usermessage'] = fetch_usermessages(
        realm, message_ids, user_profile_ids, output_path, consent_message_id)
    write_message_export(output_path, output)
    os.unlink(input_path)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def write_message_export(message_filename: Path, output: MessageOutput) -> None:
    """Serialize one message shard to disk and log the destination."""
    write_data_to_file(data=output, output_file=message_filename)
    logging.info("Dumped to %s" % (message_filename,))
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_partial_message_files(realm: Realm,
                                 response: TableData,
                                 chunk_size: int=MESSAGE_BATCH_CHUNK_SIZE,
                                 output_dir: Optional[Path]=None,
                                 public_only: bool=False,
                                 consent_message_id: Optional[int]=None) -> Set[int]:
    """Write the realm's Message rows to messages-NNNNNN.json.partial
    shard files in output_dir and return the set of all exported message
    ids.  The .partial files are later fleshed out with UserMessage data
    by parallel worker processes (see export_usermessages_batch).

    public_only limits the export to public-stream messages;
    consent_message_id limits it to data users consented to share.
    """
    if output_dir is None:
        output_dir = tempfile.mkdtemp(prefix="zulip-export")

    def get_ids(records: List[Record]) -> Set[int]:
        # Collect the 'id' column of a list of row dicts.
        return set(x['id'] for x in records)

    # Basic security rule: You can export everything either...
    #   - sent by someone in your exportable_user_ids
    #        OR
    #   - received by someone in your exportable_user_ids (which
    #     equates to a recipient object we are exporting)
    #
    # TODO: In theory, you should be able to export messages in
    # cross-realm PM threads; currently, this only exports cross-realm
    # messages received by your realm that were sent by Zulip system
    # bots (e.g. emailgateway, notification-bot).

    # Here, "we" and "us" refers to the inner circle of users who
    # were specified as being allowed to be exported.  "Them"
    # refers to other users.
    user_ids_for_us = get_ids(
        response['zerver_userprofile']
    )
    ids_of_our_possible_senders = get_ids(
        response['zerver_userprofile'] +
        response['zerver_userprofile_mirrordummy'] +
        response['zerver_userprofile_crossrealm'])

    consented_user_ids = set()  # type: Set[int]
    if consent_message_id is not None:
        consented_user_ids = get_consented_user_ids(consent_message_id)

    # Decide which Recipient objects' messages we may export.
    if public_only:
        recipient_streams = Stream.objects.filter(realm=realm, invite_only=False)
        recipient_ids = Recipient.objects.filter(
            type=Recipient.STREAM, type_id__in=recipient_streams).values_list("id", flat=True)
        recipient_ids_for_us = get_ids(response['zerver_recipient']) & set(recipient_ids)
    elif consent_message_id is not None:
        # Public streams plus everything consenting users are subscribed to.
        public_streams = Stream.objects.filter(realm=realm, invite_only=False)
        public_stream_recipient_ids = Recipient.objects.filter(
            type=Recipient.STREAM, type_id__in=public_streams).values_list("id", flat=True)

        consented_recipient_ids = Subscription.objects.filter(user_profile__id__in=consented_user_ids). \
            values_list("recipient_id", flat=True)

        recipient_ids = set(public_stream_recipient_ids) | set(consented_recipient_ids)
        recipient_ids_for_us = get_ids(response['zerver_recipient']) & recipient_ids
    else:
        recipient_ids_for_us = get_ids(response['zerver_recipient'])
        # For a full export, we have implicit consent for all users in the export.
        consented_user_ids = user_ids_for_us

    if public_only:
        messages_we_received = Message.objects.filter(
            sender__in=ids_of_our_possible_senders,
            recipient__in=recipient_ids_for_us,
        ).order_by('id')

        # For the public stream export, we only need the messages those streams received.
        message_queries = [
            messages_we_received,
        ]
    else:
        # We capture most messages here: Messages that were sent by
        # anyone in the export and received by any of the users who we
        # have consent to export.
        messages_we_received = Message.objects.filter(
            sender__in=ids_of_our_possible_senders,
            recipient__in=recipient_ids_for_us,
        ).order_by('id')

        # The above query is missing some messages that consenting
        # users have access to, namely, PMs sent by one of the users
        # in our export to another user (since the only subscriber to
        # a Recipient object for Recipient.PERSONAL is the recipient,
        # not the sender).  The `consented_user_ids` list has
        # precisely those users whose Recipient.PERSONAL recipient ID
        # was already present in recipient_ids_for_us above.
        ids_of_non_exported_possible_recipients = ids_of_our_possible_senders - consented_user_ids

        recipients_for_them = Recipient.objects.filter(
            type=Recipient.PERSONAL,
            type_id__in=ids_of_non_exported_possible_recipients).values("id")
        recipient_ids_for_them = get_ids(recipients_for_them)

        messages_we_sent_to_them = Message.objects.filter(
            sender__in=consented_user_ids,
            recipient__in=recipient_ids_for_them,
        ).order_by('id')

        message_queries = [
            messages_we_received,
            messages_we_sent_to_them,
        ]

    all_message_ids = set()  # type: Set[int]
    dump_file_id = 1

    # Each query continues the shard numbering where the previous left off.
    for message_query in message_queries:
        dump_file_id = write_message_partial_for_query(
            realm=realm,
            message_query=message_query,
            dump_file_id=dump_file_id,
            all_message_ids=all_message_ids,
            output_dir=output_dir,
            user_profile_ids=user_ids_for_us,
            chunk_size=chunk_size,
        )

    return all_message_ids
|
|
|
|
|
2017-12-14 10:32:15 +01:00
|
|
|
def write_message_partial_for_query(realm: Realm, message_query: Any, dump_file_id: int,
                                    all_message_ids: Set[int], output_dir: Path,
                                    user_profile_ids: Set[int],
                                    chunk_size: int=MESSAGE_BATCH_CHUNK_SIZE) -> int:
    """Page through message_query in id order, writing chunks of up to
    chunk_size messages to numbered .partial shard files.

    Mutates all_message_ids in place (asserting no overlap with messages
    already exported by an earlier query) and returns the next unused
    dump_file_id so a subsequent query can continue the numbering.
    """
    min_id = -1

    while True:
        # Keyset pagination: filter on id > last seen id rather than
        # OFFSET, which would get slow on realms with many messages.
        actual_query = message_query.filter(id__gt=min_id)[0:chunk_size]
        message_chunk = make_raw(actual_query)
        message_ids = set(m['id'] for m in message_chunk)
        assert len(message_ids.intersection(all_message_ids)) == 0

        all_message_ids.update(message_ids)

        if len(message_chunk) == 0:
            break

        # Figure out the name of our shard file.
        message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
        message_filename += '.partial'
        logging.info("Fetched Messages for %s" % (message_filename,))

        # Clean up our messages.
        table_data = {}  # type: TableData
        table_data['zerver_message'] = message_chunk
        floatify_datetime_fields(table_data, 'zerver_message')

        # Build up our output for the .partial file, which needs
        # a list of user_profile_ids to search for (as well as
        # the realm id).
        output = {}  # type: MessageOutput
        output['zerver_message'] = table_data['zerver_message']
        output['zerver_userprofile_ids'] = list(user_profile_ids)
        output['realm_id'] = realm.id

        # And write the data.
        write_message_export(message_filename, output)
        min_id = max(message_ids)
        dump_file_id += 1

    return dump_file_id
|
2016-04-05 00:27:37 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_uploads_and_avatars(realm: Realm, output_dir: Path) -> None:
    """Export the realm's file uploads, avatars, and custom emoji into
    uploads/, avatars/, and emoji/ subdirectories of output_dir, reading
    from local storage or S3 depending on deployment settings."""
    uploads_output_dir = os.path.join(output_dir, 'uploads')
    avatars_output_dir = os.path.join(output_dir, 'avatars')
    emoji_output_dir = os.path.join(output_dir, 'emoji')

    # Use a distinct loop variable: the original reused `output_dir`,
    # shadowing the parameter.  exist_ok=True also replaces the racy
    # exists()-then-makedirs() check.
    for dir_path in (uploads_output_dir, avatars_output_dir, emoji_output_dir):
        os.makedirs(dir_path, exist_ok=True)

    if settings.LOCAL_UPLOADS_DIR:
        # Small installations and developers will usually just store files locally.
        export_uploads_from_local(realm,
                                  local_dir=os.path.join(settings.LOCAL_UPLOADS_DIR, "files"),
                                  output_dir=uploads_output_dir)
        export_avatars_from_local(realm,
                                  local_dir=os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars"),
                                  output_dir=avatars_output_dir)
        # Custom emoji live under the local avatars directory too.
        export_emoji_from_local(realm,
                                local_dir=os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars"),
                                output_dir=emoji_output_dir)
    else:
        # Some bigger installations will have their data stored on S3.
        export_files_from_s3(realm,
                             settings.S3_AVATAR_BUCKET,
                             output_dir=avatars_output_dir,
                             processing_avatars=True)
        export_files_from_s3(realm,
                             settings.S3_AUTH_UPLOADS_BUCKET,
                             output_dir=uploads_output_dir)
        export_files_from_s3(realm,
                             settings.S3_AVATAR_BUCKET,
                             output_dir=emoji_output_dir,
                             processing_emoji=True)
|
2016-08-11 14:48:52 +02:00
|
|
|
|
2018-12-07 17:12:09 +01:00
|
|
|
def _check_key_metadata(email_gateway_bot: Optional[UserProfile],
                        key: Key, processing_avatars: bool,
                        realm: Realm, user_ids: Set[int]) -> None:
    """Sanity-check the S3 metadata of `key` before exporting it.

    Raises AssertionError if the object claims a different realm_id
    (unless uploaded by the email gateway bot, which legitimately sends
    cross-realm), if an avatar's owner is missing or not among the
    realm's user_ids, or if a non-avatar object lacks realm_id metadata.
    """
    # Helper function for export_files_from_s3
    if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
        if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
            raise AssertionError("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
        # Email gateway bot sends messages, potentially including attachments, cross-realm.
        print("File uploaded by email gateway bot: %s / %s" % (key.name, key.metadata))
    elif processing_avatars:
        if 'user_profile_id' not in key.metadata:
            raise AssertionError("Missing user_profile_id in key metadata: %s" % (key.metadata,))
        if int(key.metadata['user_profile_id']) not in user_ids:
            raise AssertionError("Wrong user_profile_id in key metadata: %s" % (key.metadata,))
    elif 'realm_id' not in key.metadata:
        raise AssertionError("Missing realm_id in key metadata: %s" % (key.metadata,))
|
|
|
|
|
2018-12-07 17:18:09 +01:00
|
|
|
def _get_exported_s3_record(
        bucket_name: str,
        key: Key,
        processing_avatars: bool,
        processing_emoji: bool) -> Dict[str, Union[str, int]]:
    """Build the records.json entry for one S3 object, merging in the
    object's S3 user metadata and normalizing the id fields to ints.

    NOTE(review): processing_avatars is currently unused in this body;
    presumably kept for signature symmetry with the sibling helpers.
    """
    # Helper function for export_files_from_s3
    record = dict(s3_path=key.name, bucket=bucket_name,
                  size=key.size, last_modified=key.last_modified,
                  content_type=key.content_type, md5=key.md5)
    record.update(key.metadata)

    if processing_emoji:
        # Emoji records are looked up by file name on import.
        record['file_name'] = os.path.basename(key.name)

    # A few early avatars don't have 'realm_id' on the object; fix their metadata
    user_profile = get_user_profile_by_id(record['user_profile_id'])
    if 'realm_id' not in record:
        record['realm_id'] = user_profile.realm_id
    record['user_profile_email'] = user_profile.email

    # Fix the record ids
    record['user_profile_id'] = int(record['user_profile_id'])
    record['realm_id'] = int(record['realm_id'])

    return record
|
|
|
|
|
2018-12-07 17:26:48 +01:00
|
|
|
def _save_s3_object_to_file(
        key: Key,
        output_dir: str,
        processing_avatars: bool,
        processing_emoji: bool) -> None:
    """Download one S3 object into output_dir, preserving its key path.

    Helper function for export_files_from_s3.  For regular uploads
    (neither avatars nor emoji), keys must have the standard 3-part
    `realm_id/random/filename` layout; anything else is treated as
    suspicious and aborts the export.
    """
    # Both branches of the original computed the identical filename; only
    # the format validation differs, so run just that conditionally.
    if not (processing_avatars or processing_emoji):
        fields = key.name.split('/')
        if len(fields) != 3:
            raise AssertionError("Suspicious key with invalid format %s" % (key.name,))

    filename = os.path.join(output_dir, key.name)
    dirname = os.path.dirname(filename)
    # exist_ok=True replaces the racy exists()-then-makedirs() check.
    os.makedirs(dirname, exist_ok=True)
    key.get_contents_to_filename(filename)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path,
                         processing_avatars: bool=False,
                         processing_emoji: bool=False) -> None:
    """Download the realm's objects from the given S3 bucket into
    output_dir and write a records.json manifest describing them.

    processing_avatars / processing_emoji select the listing strategy
    and per-object handling for those special buckets.
    """
    conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
    bucket = conn.get_bucket(bucket_name, validate=True)
    records = []

    logging.info("Downloading uploaded files from %s" % (bucket_name,))

    avatar_hash_values = set()
    user_ids = set()
    if processing_avatars:
        # Avatars are filtered by hash below, so list the whole bucket
        # while collecting the expected hash paths for the realm's users.
        bucket_list = bucket.list()
        for user_profile in UserProfile.objects.filter(realm=realm):
            avatar_path = user_avatar_path_from_ids(user_profile.id, realm.id)
            avatar_hash_values.add(avatar_path)
            avatar_hash_values.add(avatar_path + ".original")
            user_ids.add(user_profile.id)
    # NOTE(review): this is `if`/`else`, not `elif`, so when
    # processing_avatars is True (and processing_emoji False) the `else`
    # branch re-assigns bucket_list with a realm-id prefix listing,
    # overriding the unprefixed listing above — confirm avatar keys are
    # realm-prefixed (user_avatar_path_from_ids suggests so), otherwise
    # this should be `elif`.
    if processing_emoji:
        bucket_list = bucket.list(prefix="%s/emoji/images/" % (realm.id,))
    else:
        bucket_list = bucket.list(prefix="%s/" % (realm.id,))

    if settings.EMAIL_GATEWAY_BOT is not None:
        email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT)  # type: Optional[UserProfile]
    else:
        email_gateway_bot = None

    count = 0
    for bkey in bucket_list:
        # Skip avatar objects that don't belong to this realm's users.
        if processing_avatars and bkey.name not in avatar_hash_values:
            continue
        key = bucket.get_key(bkey.name)

        # This can happen if an email address has moved realms
        _check_key_metadata(email_gateway_bot, key, processing_avatars, realm, user_ids)
        record = _get_exported_s3_record(bucket_name, key, processing_avatars, processing_emoji)

        record['path'] = key.name
        _save_s3_object_to_file(key, output_dir, processing_avatars, processing_emoji)

        records.append(record)
        count += 1

        # Periodic progress logging for large buckets.
        if (count % 100 == 0):
            logging.info("Finished %s" % (count,))

    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_uploads_from_local(realm: Realm, local_dir: Path, output_dir: Path) -> None:
    """Copy every file attachment for the realm out of local storage
    into output_dir and write a records.json manifest describing them."""
    records = []
    count = 0
    for attachment in Attachment.objects.filter(realm_id=realm.id):
        src = os.path.join(local_dir, attachment.path_id)
        dst = os.path.join(output_dir, attachment.path_id)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copy2(src, dst)
        file_stat = os.stat(src)
        records.append(dict(realm_id=attachment.realm_id,
                            user_profile_id=attachment.owner.id,
                            user_profile_email=attachment.owner.email,
                            s3_path=attachment.path_id,
                            path=attachment.path_id,
                            size=file_stat.st_size,
                            last_modified=file_stat.st_mtime,
                            content_type=None))

        count += 1
        # Periodic progress logging for realms with many attachments.
        if count % 100 == 0:
            logging.info("Finished %s" % (count,))
    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_avatars_from_local(realm: Realm, local_dir: Path, output_dir: Path) -> None:
    """Copy avatar image files for the realm's users (plus the system
    bots) out of local storage and write a records.json manifest."""
    records = []
    count = 0

    users = list(UserProfile.objects.filter(realm=realm))
    users += [
        get_system_bot(settings.NOTIFICATION_BOT),
        get_system_bot(settings.EMAIL_GATEWAY_BOT),
        get_system_bot(settings.WELCOME_BOT),
    ]
    for user in users:
        if user.avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
            # Gravatar users have no locally stored avatar file.
            continue

        avatar_path = user_avatar_path_from_ids(user.id, realm.id)
        # The glob picks up both the resized file and the ".original" file.
        for local_path in glob.glob(os.path.join(local_dir, avatar_path + '.*')):
            logging.info('Copying avatar file for user %s from %s' % (
                user.email, local_path))
            fn = os.path.relpath(local_path, local_dir)
            output_path = os.path.join(output_dir, fn)
            os.makedirs(str(os.path.dirname(output_path)), exist_ok=True)
            shutil.copy2(str(local_path), str(output_path))
            file_stat = os.stat(local_path)
            records.append(dict(realm_id=realm.id,
                                user_profile_id=user.id,
                                user_profile_email=user.email,
                                s3_path=fn,
                                path=fn,
                                size=file_stat.st_size,
                                last_modified=file_stat.st_mtime,
                                content_type=None))

            count += 1
            # Periodic progress logging for realms with many users.
            if count % 100 == 0:
                logging.info("Finished %s" % (count,))

    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
|
|
|
|
|
2018-05-26 21:18:54 +02:00
|
|
|
def export_emoji_from_local(realm: Realm, local_dir: Path, output_dir: Path) -> None:
    """Copy the realm's custom emoji image files out of local storage
    and write a records.json manifest describing them."""
    records = []
    count = 0
    for realm_emoji in RealmEmoji.objects.filter(realm_id=realm.id):
        emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
            realm_id=realm.id,
            emoji_file_name=realm_emoji.file_name
        )
        src = os.path.join(local_dir, emoji_path)
        dst = os.path.join(output_dir, emoji_path)
        os.makedirs(os.path.dirname(dst), exist_ok=True)
        shutil.copy2(src, dst)
        # Realm Emoji author is optional.
        author = realm_emoji.author
        author_id = author.id if author else None
        records.append(dict(realm_id=realm.id,
                            author=author_id,
                            path=emoji_path,
                            s3_path=emoji_path,
                            file_name=realm_emoji.file_name,
                            name=realm_emoji.name,
                            deactivated=realm_emoji.deactivated))

        count += 1
        # Periodic progress logging for realms with many emoji.
        if count % 100 == 0:
            logging.info("Finished %s" % (count,))
    with open(os.path.join(output_dir, "records.json"), "w") as records_file:
        ujson.dump(records, records_file, indent=4)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
    """Write stats.txt into output_dir, summarizing the row counts of
    every exported JSON file (realm, attachments, analytics, message
    shards) plus the avatar/upload records.json manifests."""
    stats_file = os.path.join(output_dir, 'stats.txt')
    realm_file = os.path.join(output_dir, 'realm.json')
    attachment_file = os.path.join(output_dir, 'attachment.json')
    analytics_file = os.path.join(output_dir, 'analytics.json')
    message_files = glob.glob(os.path.join(output_dir, 'messages-*.json'))
    fns = sorted([analytics_file] + [attachment_file] + message_files + [realm_file])

    logging.info('Writing stats file: %s\n' % (stats_file,))
    with open(stats_file, 'w') as f:
        # Table-style files: one "count table_name" line per table.
        for fn in fns:
            f.write(os.path.basename(fn) + '\n')
            # Renamed the file handle from the misleading `filename`.
            with open(fn, 'r') as json_file:
                data = ujson.loads(json_file.read())
            for k in sorted(data):
                f.write('%5d %s\n' % (len(data[k]), k))
            f.write('\n')

        avatar_file = os.path.join(output_dir, 'avatars/records.json')
        uploads_file = os.path.join(output_dir, 'uploads/records.json')

        # Manifest files: a single record count each.
        for fn in [avatar_file, uploads_file]:
            f.write(fn + '\n')
            with open(fn, 'r') as json_file:
                data = ujson.loads(json_file.read())
            f.write('%5d records\n' % (len(data),))
            f.write('\n')
|
2016-04-05 00:27:37 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_export_realm(realm: Realm, output_dir: Path, threads: int,
                    exportable_user_ids: Optional[Set[int]]=None,
                    public_only: bool=False,
                    consent_message_id: Optional[int]=None) -> str:
    """Export all of a realm's data to JSON files under output_dir.

    Orchestrates the full-realm export: database tables via the Config
    system, uploaded files/avatars, messages (partially in parallel
    subprocesses), analytics, and attachments; then packs everything
    into a tarball and returns the tarball's path.

    Args:
        realm: the realm to export.
        output_dir: directory to write the export files into; the
            tarball is written next to it as <output_dir>.tar.gz.
        threads: number of parallel subprocesses for UserMessage export.
        exportable_user_ids: if set, restricts which users' data is
            exported (passed through to the Config machinery).
        public_only: if True, only public data is exported.
        consent_message_id: message whose reactions mark users as having
            consented to a "partial member consent" export.
    """
    response = {}  # type: TableData

    # We need at least one thread running to export
    # UserMessage rows. The management command should
    # enforce this for us.
    if not settings.TEST_SUITE:
        assert threads >= 1

    realm_config = get_realm_config()

    # Publish an "export in progress" symlink so operators can find
    # the running export; replaced by the "done" link at the end.
    create_soft_link(source=output_dir, in_progress=True)

    logging.info("Exporting data from get_realm_config()...")
    export_from_config(
        response=response,
        config=realm_config,
        seed_object=realm,
        context=dict(realm=realm, exportable_user_ids=exportable_user_ids)
    )
    logging.info('...DONE with get_realm_config() data')

    sanity_check_output(response)

    logging.info("Exporting uploaded files and avatars")
    export_uploads_and_avatars(realm, output_dir)

    # We (sort of) export zerver_message rows here. We write
    # them to .partial files that are subsequently fleshed out
    # by parallel processes to add in zerver_usermessage data.
    # This is for performance reasons, of course. Some installations
    # have millions of messages.
    logging.info("Exporting .partial files messages")
    message_ids = export_partial_message_files(realm, response, output_dir=output_dir,
                                               public_only=public_only,
                                               consent_message_id=consent_message_id)
    logging.info('%d messages were exported' % (len(message_ids),))

    # zerver_reaction (restricted to the exported message ids)
    zerver_reaction = {}  # type: TableData
    fetch_reaction_data(response=zerver_reaction, message_ids=message_ids)
    response.update(zerver_reaction)

    # Write realm data
    export_file = os.path.join(output_dir, "realm.json")
    write_data_to_file(output_file=export_file, data=response)
    logging.info('Writing realm data to %s' % (export_file,))

    # Write analytics data
    export_analytics_tables(realm=realm, output_dir=output_dir)

    # zerver_attachment
    export_attachment_table(realm=realm, output_dir=output_dir, message_ids=message_ids)

    # Start parallel jobs to export the UserMessage objects.
    launch_user_message_subprocesses(threads=threads, output_dir=output_dir,
                                     consent_message_id=consent_message_id)

    logging.info("Finished exporting %s" % (realm.string_id,))
    create_soft_link(source=output_dir, in_progress=False)

    do_write_stats_file_for_realm_export(output_dir)

    # We need to change back to the current working directory after writing
    # the tarball to the output directory, otherwise the state is compromised
    # for our unit tests.
    reset_dir = os.getcwd()
    tarball_path = output_dir.rstrip('/') + '.tar.gz'
    os.chdir(os.path.dirname(output_dir))
    subprocess.check_call(["tar", "-czf", tarball_path, os.path.basename(output_dir)])
    os.chdir(reset_dir)
    return tarball_path
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_attachment_table(realm: Realm, output_dir: Path, message_ids: Set[int]) -> None:
    # Dump the realm's zerver_attachment rows (restricted to the
    # exported message ids) into attachment.json.
    output_file = os.path.join(output_dir, "attachment.json")
    attachment_data = {}  # type: TableData
    fetch_attachment_data(response=attachment_data, realm_id=realm.id,
                          message_ids=message_ids)
    logging.info('Writing attachment table data to %s' % (output_file,))
    write_data_to_file(output_file=output_file, data=attachment_data)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def create_soft_link(source: Path, in_progress: bool=True) -> None:
    # Maintain well-known symlinks that point at the export directory:
    # "export-in-progress" while an export runs, swapped for
    # "export-most-recent" once it completes.
    if settings.DEVELOPMENT:
        links_dir = os.path.join(settings.DEPLOY_ROOT, 'var')
        in_progress_link = os.path.join(links_dir, 'export-in-progress')
        done_link = os.path.join(links_dir, 'export-most-recent')
    else:
        in_progress_link = '/home/zulip/export-in-progress'
        done_link = '/home/zulip/export-most-recent'

    if in_progress:
        new_target = in_progress_link
    else:
        # Export finished: clean up the in-progress link, if any.
        try:
            os.remove(in_progress_link)
        except FileNotFoundError:
            pass
        new_target = done_link

    overwrite_symlink(source, new_target)
    if not in_progress:
        logging.info('See %s for output files' % (new_target,))
|
|
|
|
|
2019-05-10 14:28:38 +02:00
|
|
|
def launch_user_message_subprocesses(threads: int, output_dir: Path,
                                     consent_message_id: Optional[int]=None) -> None:
    # Fan the UserMessage export out across `threads` worker processes,
    # each running the export_usermessage_batch management command on
    # its own shard, then reap them as they exit.
    logging.info('Launching %d PARALLEL subprocesses to export UserMessage rows' % (threads,))
    pid_to_shard = {}

    for shard_id in range(threads):
        cmd = [
            os.path.join(settings.DEPLOY_ROOT, "manage.py"),
            'export_usermessage_batch',
            '--path', str(output_dir),
            '--thread', str(shard_id)
        ]
        if consent_message_id is not None:
            cmd += ['--consent-message-id', str(consent_message_id)]

        worker = subprocess.Popen(cmd)
        pid_to_shard[worker.pid] = shard_id

    # os.wait() hands back whichever child exits next, so shards may
    # finish in any order.
    while pid_to_shard:
        pid, status = os.wait()
        finished_shard = pid_to_shard.pop(pid)
        print('Shard %s finished, status %s' % (finished_shard, status))
|
2016-08-10 02:32:02 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def do_export_user(user_profile: UserProfile, output_dir: Path) -> None:
    # Export one user's account data (user.json) plus their full
    # message history (messages-*.json) into output_dir.
    user_data = {}  # type: TableData
    export_single_user(user_profile, user_data)
    write_data_to_file(output_file=os.path.join(output_dir, "user.json"),
                       data=user_data)

    logging.info("Exporting messages")
    export_messages_single_user(user_profile, output_dir)
|
2016-04-05 00:27:37 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def export_single_user(user_profile: UserProfile, response: TableData) -> None:
    # Fill `response` with the tables described by the single-user
    # export configuration, seeded from this user's profile row.
    export_from_config(
        response=response,
        config=get_single_user_config(),
        seed_object=user_profile,
    )
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_single_user_config() -> Config:
    """Return the Config tree for a single-user data export.

    This defines the limited set of tables we export when exporting
    all data that one Zulip user has access to in an organization.
    """
    # zerver_userprofile: the seed row, with secrets stripped.
    user_config = Config(
        table='zerver_userprofile',
        is_seeded=True,
        exclude=['password', 'api_key'],
    )

    # zerver_subscription: every subscription belonging to the user.
    subscription_config = Config(
        table='zerver_subscription',
        model=Subscription,
        normal_parent=user_config,
        parent_key='user_profile__in',
    )

    # zerver_recipient: the recipients those subscriptions refer to.
    recipient_config = Config(
        table='zerver_recipient',
        model=Recipient,
        virtual_parent=subscription_config,
        id_source=('zerver_subscription', 'recipient'),
    )

    # zerver_stream: the streams behind the stream-type recipients.
    #
    # TODO: We currently export the existence of private streams, but
    # not their message history, in the "export with partial member
    # consent" code path.  This is consistent with our documented policy,
    # since that data is available to the organization administrator
    # who initiated the export, but unnecessary and potentially
    # confusing; it'd be better to just skip those streams from the
    # export (which would require more complex export logic for the
    # subscription/recipient/stream tables to exclude private streams
    # with no consenting subscribers).
    Config(
        table='zerver_stream',
        model=Stream,
        virtual_parent=recipient_config,
        id_source=('zerver_recipient', 'type_id'),
        source_filter=lambda r: r['type'] == Recipient.STREAM,
        exclude=['email_token'],
    )

    return user_config
|
2016-04-05 00:27:37 +02:00
|
|
|
|
2018-06-18 16:24:01 +02:00
|
|
|
def export_messages_single_user(user_profile: UserProfile, output_dir: Path,
                                chunk_size: int=MESSAGE_BATCH_CHUNK_SIZE) -> None:
    """Export every message the given user received, in batches.

    Writes a series of messages-NNNNNN.json files to output_dir, each
    holding up to chunk_size messages annotated with the user's flags
    and a few human-readable details.
    """
    user_message_query = UserMessage.objects.filter(user_profile=user_profile).order_by("id")
    min_id = -1  # id cursor: each batch fetches rows with id > min_id
    dump_file_id = 1
    while True:
        actual_query = user_message_query.select_related(
            "message", "message__sending_client").filter(id__gt=min_id)[0:chunk_size]
        user_message_chunk = list(actual_query)
        if not user_message_chunk:
            break

        message_chunk = []
        for user_message in user_message_chunk:
            item = model_to_dict(user_message.message)
            item['flags'] = user_message.flags_list()
            item['flags_mask'] = user_message.flags.mask
            # Add a few nice, human-readable details
            item['sending_client_name'] = user_message.message.sending_client.name
            item['display_recipient'] = get_display_recipient(user_message.message.recipient)
            message_chunk.append(item)

        message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
        logging.info("Fetched Messages for %s" % (message_filename,))

        output = {'zerver_message': message_chunk}
        floatify_datetime_fields(output, 'zerver_message')
        message_output = dict(output)  # type: MessageOutput

        write_message_export(message_filename, message_output)
        # Advance the cursor past everything we just wrote.
        min_id = max(um.id for um in user_message_chunk)
        dump_file_id += 1
|
2019-01-30 08:54:29 +01:00
|
|
|
|
|
|
|
def export_analytics_tables(realm: Realm, output_dir: Path) -> None:
    """Export the realm's analytics count tables to analytics.json."""
    response = {}  # type: TableData

    export_file = os.path.join(output_dir, "analytics.json")
    # Note: pass export_file as a lazy logging arg (the original
    # `(export_file)` parens were redundant, not a tuple).
    logging.info("Writing analytics table data to %s", export_file)
    export_from_config(
        response=response,
        config=get_analytics_config(),
        seed_object=realm,
    )
    write_data_to_file(output_file=export_file, data=response)
|
|
|
|
|
|
|
|
def get_analytics_config() -> Config:
    # The Config function defines what data to export for the
    # analytics.json file in a full-realm export.
    analytics_config = Config(
        table='zerver_analytics',
        is_seeded=True,
    )

    # The three analytics count tables are exported identically: every
    # row belonging to the exported realm.  Registering each Config
    # with normal_parent=analytics_config wires it into the tree.
    for table_name, count_model in [
        ('analytics_realmcount', RealmCount),
        ('analytics_usercount', UserCount),
        ('analytics_streamcount', StreamCount),
    ]:
        Config(
            table=table_name,
            model=count_model,
            normal_parent=analytics_config,
            parent_key='realm_id__in',
        )

    return analytics_config
|
2019-03-25 22:18:28 +01:00
|
|
|
|
2019-05-10 14:28:38 +02:00
|
|
|
def get_consented_user_ids(consent_message_id: int) -> Set[int]:
    # Users signal consent to being included in the export by reacting
    # to the designated consent message with the outbox emoji (1f4e4).
    consenting_reactions = Reaction.objects.filter(
        message__id=consent_message_id,
        reaction_type="unicode_emoji",
        emoji_code="1f4e4",
    )
    return set(consenting_reactions.values_list("user_profile", flat=True))
|
2019-03-26 00:36:37 +01:00
|
|
|
|
|
|
|
def export_realm_wrapper(realm: Realm, output_dir: str,
                         threads: int, upload: bool,
                         public_only: bool,
                         delete_after_upload: bool,
                         consent_message_id: Optional[int]=None) -> Optional[str]:
    """Run a full realm export and optionally upload the tarball.

    Returns the public URL of the uploaded tarball, or None when
    upload is False.  Prints progress to stdout (this is run from
    management commands / the deferred-work queue, where stdout is the
    operator-facing log).
    """
    tarball_path = do_export_realm(realm=realm, output_dir=output_dir,
                                   threads=threads, public_only=public_only,
                                   consent_message_id=consent_message_id)
    print("Finished exporting to %s" % (output_dir,))
    print("Tarball written to %s" % (tarball_path,))

    if not upload:
        return None

    # We upload to the `avatars` bucket because that's world-readable
    # without additional configuration. We'll likely want to change
    # that in the future.
    print("Uploading export tarball...")
    public_url = zerver.lib.upload.upload_backend.upload_export_tarball(realm, tarball_path)
    print()
    print("Uploaded to %s" % (public_url,))

    if delete_after_upload:
        # The local copy is redundant once the upload has succeeded.
        os.remove(tarball_path)
        print("Successfully deleted the tarball at %s" % (tarball_path,))
    return public_url
|
2019-06-23 22:57:14 +02:00
|
|
|
|
2019-06-24 02:51:13 +02:00
|
|
|
def get_realm_exports_serialized(user: UserProfile) -> List[Dict[str, Any]]:
    """Return JSON-serializable summaries of the realm's data exports.

    One dict per REALM_EXPORTED audit-log row, sorted by row id.
    """
    all_exports = RealmAuditLog.objects.filter(realm=user.realm,
                                               event_type=RealmAuditLog.REALM_EXPORTED)
    # export.id is a primary key and therefore unique, so we can build
    # the result list directly rather than deduplicating via a dict.
    exports = []
    for export in all_exports:
        export_data = ujson.loads(export.extra_data)
        export_url = zerver.lib.upload.upload_backend.get_export_tarball_url(
            user.realm, export_data['export_path'])
        exports.append(dict(
            id=export.id,
            export_time=export.event_time.timestamp(),
            # NOTE(review): assumes acting_user is never None for these
            # audit-log rows — confirm against how they are created.
            acting_user_id=export.acting_user.id,
            export_url=export_url,
            deleted_timestamp=export_data.get('deleted_timestamp'),
        ))
    return sorted(exports, key=lambda export_dict: export_dict['id'])
|