import os
import sys
import tempfile
from argparse import ArgumentParser
from typing import Any

from django.conf import settings
from django.core.management.base import CommandError

from zerver.lib.export import export_realm_wrapper
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Message, Reaction, UserProfile


class Command(ZulipBaseCommand):
    help = """Exports all data from a Zulip realm

    This command exports all significant data from a Zulip realm. The
    result can be imported using the `./manage.py import` command.

    Things that are exported:
    * All user-accessible data in the Zulip database (Messages,
      Streams, UserMessages, RealmEmoji, etc.)
    * Copies of all uploaded files and avatar images along with
      metadata needed to restore them even in the absence of the
      original server.

    Things that are not exported:
    * Confirmation and PreregistrationUser (transient tables)
    * Sessions (everyone will need to log in again post-export)
    * Users' passwords and API keys (users will need to use SSO or reset password)
    * Mobile tokens for APNS/GCM (users will need to reconnect their mobile devices)
    * ScheduledEmail (not relevant on a new server)
    * RemoteZulipServer (unlikely to be migrated)
    * third_party_api_results cache (this means re-rendering all old
      messages could be expensive)

    Things that will break as a result of the export:
    * Passwords will not be transferred. They will all need to go
      through the password reset flow to obtain a new password (unless
      they intend to only use e.g. Google Auth).
    * Users will need to log out and log back in to the Zulip desktop and
      mobile apps. The apps now all have an option on the login page
      where you can specify which Zulip server to use; your users
      should enter <domain name>.
    * All bots will stop working, since they will be pointing to the
      wrong server URL, and all users' API keys have been rotated as
      part of the migration. So to re-enable your integrations, you
      will need to direct your integrations at the new server.
      Usually this means updating the URL and the bots' API keys. You
      can see a list of all the bots that have been configured for
      your realm on the `/#organization` page, and use that list to
      make sure you migrate them all.

    The proper procedure for using this to export a realm is as follows
    (an illustrative example invocation appears after the list):

    * Use `./manage.py deactivate_realm` to deactivate the realm, so
      nothing happens in the realm being exported during the export
      process.

    * Use `./manage.py export` to export the realm, producing a data
      tarball.

    * Transfer the tarball to the new server and unpack it.

    * Use `./manage.py import` to import the realm.

    * Use `./manage.py reactivate_realm` to reactivate the realm, so
      users can log in again.

    * Inform the users about the things broken above.
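
    For example, if the realm's string_id is `zulip`, the export step might
    look like the following (`-r` selects the realm; the output path is just
    an example):

        ./manage.py export -r zulip --output /tmp/zulip-export --threads 4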

    We recommend testing by exporting without having deactivated the
    realm first, to make sure you have the procedure right and
    minimize downtime.

    Performance: In one test, the tool exported a realm with hundreds
    of users and ~1M messages of history with --threads=1 in about 3
    hours of serial runtime (goes down to ~50m with --threads=6 on a
    machine with 8 CPUs). Importing that same data set took about 30
    minutes. But this will vary a lot depending on the average number
    of recipients of messages in the realm, hardware, etc."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('--output',
                            dest='output_dir',
                            action="store",
                            default=None,
                            help='Directory to write exported data to.')
        parser.add_argument('--threads',
                            dest='threads',
                            action="store",
                            default=settings.DEFAULT_DATA_EXPORT_IMPORT_PARALLELISM,
                            help='Threads to use in exporting UserMessage objects in parallel')
        parser.add_argument('--public-only',
                            action="store_true",
                            help='Export only public stream messages and associated attachments')
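        # --consent-message-id enables a consent-based export: users signal
        # consent by reacting to the referenced message, and handle() below
        # verifies that message and its reactions before exporting.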
        parser.add_argument('--consent-message-id',
                            dest="consent_message_id",
                            action="store",
                            default=None,
                            type=int,
                            help='ID of the message asking users to react (with the outbox emoji) to consent to the export')
        parser.add_argument('--upload',
                            action="store_true",
                            help="Whether to upload the resulting tarball to S3 or LOCAL_UPLOADS_DIR")
        parser.add_argument('--delete-after-upload',
                            action="store_true",
                            help='Automatically delete the local tarball after a successful export')
        self.add_realm_args(parser, True)

    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        assert realm is not None  # Should be ensured by parser

        output_dir = options["output_dir"]
        public_only = options["public_only"]
        consent_message_id = options["consent_message_id"]
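
        # \033[94m ... \033[0m wrap the text that follows in ANSI bright blue
        # (and then reset), to highlight status output in the terminal.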
        print(f"\033[94mExporting realm\033[0m: {realm.string_id}")

        num_threads = int(options['threads'])
        if num_threads < 1:
            raise CommandError('You must have at least one thread.')

        if public_only and consent_message_id is not None:
            raise CommandError('Please pass either --public-only or --consent-message-id')

        if consent_message_id is not None:
            try:
                message = Message.objects.get(id=consent_message_id)
            except Message.DoesNotExist:
                raise CommandError("Message with given ID does not exist. Aborting...")
            if message.last_edit_time is not None:
                raise CommandError("Message was edited. Aborting...")

            # Since the message might have been sent by
            # Notification Bot, we can't trivially check the realm of
            # the message through message.sender.realm. So instead we
            # check the realm of the people who reacted to the message
            # (who must all be in the message's realm).
            reactions = Reaction.objects.filter(message=message,
                                                # outbox = 1f4e4
                                                emoji_code="1f4e4",
                                                reaction_type="unicode_emoji")
            for reaction in reactions:
                if reaction.user_profile.realm != realm:
                    raise CommandError("Users from a different realm reacted to message. Aborting...")

            print(f"\n\033[94mMessage content:\033[0m\n{message.content}\n")

            user_count = UserProfile.objects.filter(realm_id=realm.id).count()
            print(f"\033[94mNumber of users that reacted with the outbox emoji:\033[0m {len(reactions)} / {user_count} total users\n")

            proceed = input("Continue? [y/N] ")
            if proceed.lower() not in ('y', 'yes'):
                raise CommandError("Aborting!")
        if output_dir is None:
            output_dir = tempfile.mkdtemp(prefix="zulip-export-")
        else:
            output_dir = os.path.realpath(os.path.expanduser(output_dir))
            if os.path.exists(output_dir):
                if os.listdir(output_dir):
                    raise CommandError(
                        f"Refusing to overwrite nonempty directory: {output_dir}. Aborting...",
                    )
            else:
                os.makedirs(output_dir)
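
        # Reserve the tarball path up front: os.open with O_CREAT | O_EXCL
        # fails if the file already exists, so we never clobber an old export.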
        tarball_path = output_dir.rstrip("/") + ".tar.gz"
        try:
            os.close(os.open(tarball_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o666))
        except FileExistsError:
            raise CommandError(f"Refusing to overwrite existing tarball: {tarball_path}. Aborting...")
        def percent_callback(bytes_transferred: Any) -> None:
            sys.stdout.write('.')
            sys.stdout.flush()

        # Allows us to trigger exports separately from command line argument parsing
        export_realm_wrapper(realm=realm, output_dir=output_dir,
                             threads=num_threads, upload=options['upload'],
                             public_only=public_only,
                             delete_after_upload=options["delete_after_upload"],
                             percent_callback=percent_callback,
                             consent_message_id=consent_message_id)