2016-09-13 22:40:13 +02:00
|
|
|
# -*- coding: utf-8 -*-
|
2018-05-21 18:09:55 +02:00
|
|
|
import json
|
2016-09-13 22:40:13 +02:00
|
|
|
import os
|
|
|
|
import re
|
2016-10-19 11:37:32 +02:00
|
|
|
import hashlib
|
2017-10-18 04:23:06 +02:00
|
|
|
import sys
|
2018-05-10 19:13:36 +02:00
|
|
|
from typing import Any, List, Optional
|
2016-09-13 22:40:13 +02:00
|
|
|
from importlib import import_module
|
2017-11-06 02:56:09 +01:00
|
|
|
from io import StringIO
|
2016-09-13 22:40:13 +02:00
|
|
|
|
|
|
|
from django.db import connections, DEFAULT_DB_ALIAS
|
2016-11-11 14:11:11 +01:00
|
|
|
from django.db.utils import OperationalError
|
2016-09-13 22:40:13 +02:00
|
|
|
from django.apps import apps
|
2017-10-18 04:23:06 +02:00
|
|
|
from django.conf import settings
|
2016-09-13 22:40:13 +02:00
|
|
|
from django.core.management import call_command
|
|
|
|
from django.utils.module_loading import module_has_submodule
|
|
|
|
|
2017-10-18 04:23:06 +02:00
|
|
|
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
|
|
|
|
from scripts.lib.zulip_tools import get_dev_uuid_var_path
|
|
|
|
|
|
|
|
# Per-checkout directory (from the development-environment UUID var path)
# under which this module stores its test-database status files
# ('test_db_status/', 'migration_status_test' — see
# is_template_database_current below).
UUID_VAR_DIR = get_dev_uuid_var_path()
|
2016-10-21 12:48:15 +02:00
|
|
|
# Matches any single character not allowed in a hash-status filename
# (non-word chars, '-', '_'); _get_hash_file_path splits on it to
# normalize source filenames.  Raw string: the original non-raw literal
# contained the invalid escape sequence '\W'.
FILENAME_SPLITTER = re.compile(r'[\W\-_]')
|
|
|
|
|
2018-05-10 19:13:36 +02:00
|
|
|
def database_exists(database_name: str, **options: Any) -> bool:
    """Return True if a postgres database named `database_name` exists
    on the connection selected by options['database'] (default alias).

    Returns False if the database server is unreachable
    (OperationalError).  Side effect: closes all Django connections.
    """
    db = options.get('database', DEFAULT_DB_ALIAS)
    try:
        connection = connections[db]

        with connection.cursor() as cursor:
            # Parameterized query instead of the original str.format
            # interpolation, which would break on (or allow injection
            # via) quotes in database_name.
            cursor.execute("SELECT 1 from pg_database WHERE datname=%s;",
                           [database_name])
            return_value = bool(cursor.fetchone())
        connections.close_all()
        return return_value
    except OperationalError:
        # Server not running / unreachable: treat as "does not exist".
        return False
|
2016-09-13 22:40:13 +02:00
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def get_migration_status(**options: Any) -> str:
    """Return the output of Django's `showmigrations --list` with ANSI
    escape codes stripped.

    Recognized options: verbosity, app_label, database, no_color,
    settings, traceback.  Side effect: closes all Django connections.
    """
    verbosity = options.get('verbosity', 1)

    # Import each installed app's `management` submodule (if any) so its
    # command machinery is registered before we invoke the command.
    for app_config in apps.get_app_configs():
        if module_has_submodule(app_config.module, "management"):
            import_module('.management', app_config.name)

    app_label = options['app_label'] if options.get('app_label') else None
    db = options.get('database', DEFAULT_DB_ALIAS)
    out = StringIO()
    call_command(
        'showmigrations',
        '--list',
        app_label=app_label,
        database=db,
        no_color=options.get('no_color', False),
        settings=options.get('settings', os.environ['DJANGO_SETTINGS_MODULE']),
        stdout=out,
        traceback=options.get('traceback', True),
        verbosity=verbosity,
    )
    connections.close_all()
    out.seek(0)
    output = out.read()
    # Strip ESC[1m / ESC[0m sequences from the command output.  Raw
    # string: the original pattern's '\[' was an invalid escape sequence
    # in a non-raw str literal (`re` itself interprets the \x1b escape).
    return re.sub(r'\x1b\[(1|0)m', '', output)
|
|
|
|
|
2018-05-10 19:13:36 +02:00
|
|
|
def are_migrations_the_same(migration_file: str, **options: Any) -> bool:
    """Return True if migration_file exists and its contents exactly
    match the current `showmigrations` output (get_migration_status).
    """
    if not os.path.exists(migration_file):
        return False

    with open(migration_file) as f:
        recorded = f.read()
    return recorded == get_migration_status(**options)
|
|
|
|
|
2017-11-05 11:15:10 +01:00
|
|
|
def _get_hash_file_path(source_file_path: str, status_dir: str) -> str:
    """Map a source file path to the path of its hash file in status_dir.

    The hash filename is the source file's basename, lowercased, with
    each non-word character (and '-'/'_') replaced by an underscore.
    """
    pieces = FILENAME_SPLITTER.split(os.path.basename(source_file_path))
    return os.path.join(status_dir, '_'.join(pieces).lower())
|
2016-10-21 12:48:15 +02:00
|
|
|
|
2018-05-21 18:09:55 +02:00
|
|
|
def _check_hash(source_hash_file: str, target_content: str) -> bool:
|
2016-10-19 11:37:32 +02:00
|
|
|
"""
|
|
|
|
This function has a side effect of creating a new hash file or
|
|
|
|
updating the old hash file.
|
|
|
|
"""
|
2018-05-21 18:09:55 +02:00
|
|
|
target_hash_content = hashlib.sha1(target_content.encode('utf8')).hexdigest()
|
2016-10-19 11:37:32 +02:00
|
|
|
|
2017-02-11 05:26:24 +01:00
|
|
|
if not os.path.exists(source_hash_file):
|
|
|
|
source_hash_content = None
|
|
|
|
else:
|
2016-10-21 07:21:39 +02:00
|
|
|
with open(source_hash_file) as f:
|
|
|
|
source_hash_content = f.read().strip()
|
2016-10-19 11:37:32 +02:00
|
|
|
|
2016-10-21 13:37:42 +02:00
|
|
|
with open(source_hash_file, 'w') as f:
|
2016-10-19 11:37:32 +02:00
|
|
|
f.write(target_hash_content)
|
2016-10-21 13:37:42 +02:00
|
|
|
|
|
|
|
return source_hash_content == target_hash_content
|
2016-10-19 11:37:32 +02:00
|
|
|
|
2018-05-21 18:09:55 +02:00
|
|
|
def check_file_hash(target_file_path: str, status_dir: str) -> bool:
    """Return True if target_file_path's contents hash to the value
    recorded for it in status_dir; the recorded hash is refreshed
    either way (via _check_hash)."""
    hash_file = _get_hash_file_path(target_file_path, status_dir)
    with open(target_file_path) as f:
        content = f.read()
    return _check_hash(hash_file, content)
|
|
|
|
|
|
|
|
def check_setting_hash(setting_name: str, status_dir: str) -> bool:
    """Return True if the JSON serialization of the named Django setting
    hashes to the value recorded in status_dir; the recorded hash is
    refreshed either way (via _check_hash)."""
    hash_file = os.path.join(status_dir, 'settings_' + setting_name)
    serialized = json.dumps(getattr(settings, setting_name), sort_keys=True)
    return _check_hash(hash_file, serialized)
|
|
|
|
|
2016-09-13 22:40:13 +02:00
|
|
|
def is_template_database_current(
        database_name: str='zulip_test_template',
        migration_status: Optional[str]=None,
        settings: str='zproject.test_settings',
        status_dir: Optional[str]=None,
        check_files: Optional[List[str]]=None,
        check_settings: Optional[List[str]]=None) -> bool:
    """Return True when the test template database exists and is in sync
    with the current migrations, the fixture-generating source files,
    and the relevant settings (all tracked via hash files under
    status_dir)."""
    # Using str type for check_files because re.split doesn't accept unicode
    if check_files is None:
        check_files = [
            'zilencer/management/commands/populate_db.py',
            'zerver/lib/bulk_create.py',
            'zerver/lib/generate_test_data.py',
            'tools/setup/postgres-init-test-db',
            'tools/setup/postgres-init-dev-db',
        ]
    if check_settings is None:
        check_settings = ['REALM_INTERNAL_BOTS']
    if status_dir is None:
        status_dir = os.path.join(UUID_VAR_DIR, 'test_db_status')
    if migration_status is None:
        migration_status = os.path.join(UUID_VAR_DIR, 'migration_status_test')

    if not os.path.exists(status_dir):
        os.mkdir(status_dir)

    # No template database at all: nothing else to check.
    if not database_exists(database_name):
        return False

    # Evaluate every hash check eagerly, in materialized lists, so each
    # one performs its side effect of refreshing the stored hash file
    # even when an earlier check has already failed.
    file_results = [check_file_hash(fn, status_dir) for fn in check_files]
    setting_results = [check_setting_hash(name, status_dir)
                       for name in check_settings]
    hash_status = all(file_results) and all(setting_results)

    return are_migrations_the_same(migration_status, settings=settings) and hash_status
|