import argparse
import os
import subprocess
from typing import Any

from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError, CommandParser

from zerver.forms import check_subdomain_available
from zerver.lib.import_realm import do_import_realm


class Command(BaseCommand):
    help = """Import extracted Zulip database dump directories into a fresh Zulip instance.

This command should be used only on a newly created, empty Zulip instance to
import a database dump from one or more JSON files."""
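
    # Illustrative usage (a sketch, not taken from this file): assuming this
    # module is installed as the `import` management command, an invocation
    # looks like:
    #
    #     ./manage.py import <subdomain> /path/to/export_dir [/path/to/another_export_dir ...]
    #     ./manage.py import --destroy-rebuild-database <subdomain> /path/to/export_dir
    #
    # The flags and positional arguments correspond to those registered in
    # add_arguments() below; the subdomain and paths are placeholders.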
    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument(
            "--destroy-rebuild-database",
            action="store_true",
            help="Destroy and rebuild the database prior to import.",
        )

        parser.add_argument(
            "--import-into-nonempty",
            action="store_true",
            help="Import into an existing nonempty database.",
        )

        parser.add_argument(
            "--allow-reserved-subdomain",
            action="store_true",
            help="Allow use of reserved subdomains.",
        )

        parser.add_argument(
            "subdomain", metavar="<subdomain>", help="Subdomain to import the realm into."
        )

        parser.add_argument(
            "export_paths",
            nargs="+",
            metavar="<export path>",
            help="List of export directories to import.",
        )
        parser.add_argument(
            "--processes",
            default=settings.DEFAULT_DATA_EXPORT_IMPORT_PARALLELISM,
            help="Number of processes to use for uploading avatars to S3 in parallel.",
        )
        parser.formatter_class = argparse.RawTextHelpFormatter
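
    # Wipes all data from the database (via Django's `flush` management
    # command) and clears memcached, so the import starts from a clean slate.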
    def do_destroy_and_rebuild_database(self, db_name: str) -> None:
        call_command("flush", verbosity=0, interactive=False)
        subprocess.check_call([os.path.join(settings.DEPLOY_ROOT, "scripts/setup/flush-memcached")])
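
    # Flow: validate --processes, optionally rebuild the database, check that
    # the target subdomain is available, verify that each export path is an
    # existing directory, and finally import each dump into the subdomain.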
    def handle(self, *args: Any, **options: Any) -> None:
        num_processes = int(options["processes"])
        if num_processes < 1:
            raise CommandError("You must have at least one process.")

        subdomain = options["subdomain"]

        if options["destroy_rebuild_database"]:
            print("Rebuilding the database!")
            db_name = settings.DATABASES["default"]["NAME"]
            self.do_destroy_and_rebuild_database(db_name)
        elif options["import_into_nonempty"]:
            print("NOTE: The argument 'import_into_nonempty' is now the default behavior.")

        # The target subdomain must be available (and not reserved, unless
        # explicitly overridden with --allow-reserved-subdomain).
        allow_reserved_subdomain = options["allow_reserved_subdomain"]
        try:
            check_subdomain_available(subdomain, allow_reserved_subdomain)
        except ValidationError as e:
            raise CommandError(
                e.messages[0]
                + "\nPass --allow-reserved-subdomain to override subdomain restrictions."
            )

        # Resolve and validate every export path before importing anything.
        paths = []
        for path in options["export_paths"]:
            path = os.path.realpath(os.path.expanduser(path))
            if not os.path.exists(path):
                raise CommandError(f"Directory not found: '{path}'")
            if not os.path.isdir(path):
                raise CommandError(
                    "Export path must be a directory; if it's a tarball, please unpack it first."
                )
            paths.append(path)

        for path in paths:
            print(f"Processing dump: {path} ...")
            do_import_realm(path, subdomain, num_processes)