mirror of https://github.com/zulip/zulip.git
python: Replace list literal concatenation with * unpacking.
Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent a5dbab8fb0
commit 1ded51aa9d
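The rewrite applied throughout this diff: wherever a list was built by
concatenating literals and variables (["a", "b"] + rest), it becomes a
single display with * unpacking (["a", "b", *rest]); set and dict
literals get the analogous * and ** forms. A minimal sketch of the
pattern, with illustrative names that do not come from this commit:

    extra_flags = ["--verbose"]  # hypothetical value

    # Before: each + allocates an intermediate list.
    cmd = ["ls", "-l"] + extra_flags

    # After: one list display, built in a single pass.
    cmd = ["ls", "-l", *extra_flags]

    # The same idea with ** for dicts, as in the test changes below:
    defaults = {"anchor": 0, "num_before": 0}
    post_params = {**defaults, "narrow": "[]"}  # values are illustrative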
@@ -302,7 +302,7 @@ class TestGetChartData(ZulipTestCase):
         data = result.json()
         end_times = [ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(-1, 4)]
         self.assertEqual(data['end_times'], [datetime_to_timestamp(dt) for dt in end_times])
-        self.assertEqual(data['everyone'], {'_1day': [0]+self.data(100), '_15day': [0]+self.data(100), 'all_time': [0]+self.data(100)})
+        self.assertEqual(data['everyone'], {'_1day': [0, *self.data(100)], '_15day': [0, *self.data(100)], 'all_time': [0, *self.data(100)]})

     def test_non_existent_chart(self) -> None:
         result = self.client_get('/json/analytics/chart_data',
@@ -20,9 +20,9 @@ vendor = distro_info['ID']
 os_version = distro_info['VERSION_ID']
 VENV_DEPENDENCIES = get_venv_dependencies(vendor, os_version)
 if "debian" in os_families():
-    run(["apt-get", "-y", "install"] + VENV_DEPENDENCIES)
+    run(["apt-get", "-y", "install", *VENV_DEPENDENCIES])
 elif "fedora" in os_families():
-    run(["yum", "-y", "install"] + VENV_DEPENDENCIES)
+    run(["yum", "-y", "install", *VENV_DEPENDENCIES])
 else:
     print("Unsupported platform: {}".format(distro_info['ID']))
     sys.exit(1)
@@ -21,9 +21,9 @@ args = parser.parse_args()
 # install dependencies for setting up the virtualenv
 distro_info = parse_os_release()
 if "debian" in os_families():
-    run(["apt-get", "-y", "install"] + THUMBOR_VENV_DEPENDENCIES)
+    run(["apt-get", "-y", "install", *THUMBOR_VENV_DEPENDENCIES])
 elif "fedora" in os_families():
-    run(["yum", "-y", "install"] + YUM_THUMBOR_VENV_DEPENDENCIES)
+    run(["yum", "-y", "install", *YUM_THUMBOR_VENV_DEPENDENCIES])
 else:
     print("Unsupported platform: {}".format(distro_info['ID']))
     sys.exit(1)
@@ -83,7 +83,7 @@ def do_yarn_install(
         shutil.copytree("node_modules/", cached_node_modules, symlinks=True)
     if os.environ.get('CUSTOM_CA_CERTIFICATES'):
         run([YARN_BIN, "config", "set", "cafile", os.environ['CUSTOM_CA_CERTIFICATES']])
-    run([YARN_BIN, "install", "--non-interactive", "--frozen-lockfile"] + yarn_args,
+    run([YARN_BIN, "install", "--non-interactive", "--frozen-lockfile", *yarn_args],
         cwd=target_path)
     with open(success_stamp, 'w'):
         pass
@@ -59,12 +59,14 @@ COMMON_YUM_VENV_DEPENDENCIES = [
     "jq",
 ]

-REDHAT_VENV_DEPENDENCIES = COMMON_YUM_VENV_DEPENDENCIES + [
+REDHAT_VENV_DEPENDENCIES = [
+    *COMMON_YUM_VENV_DEPENDENCIES,
     "python36-devel",
     "python-virtualenv",
 ]

-FEDORA_VENV_DEPENDENCIES = COMMON_YUM_VENV_DEPENDENCIES + [
+FEDORA_VENV_DEPENDENCIES = [
+    *COMMON_YUM_VENV_DEPENDENCIES,
     "python3-pip",
     "virtualenv",  # see https://unix.stackexchange.com/questions/27877/install-virtualenv-on-fedora-16
 ]
@@ -65,7 +65,7 @@ try:
     # version is much better for fixing bugs in the upgrade process).
     deploy_path = deploy_path.strip()
     os.chdir(deploy_path)
-    subprocess.check_call([os.path.abspath("./scripts/lib/upgrade-zulip-stage-2"), deploy_path]
-                          + deploy_options)
+    subprocess.check_call([os.path.abspath("./scripts/lib/upgrade-zulip-stage-2"), deploy_path,
+                           *deploy_options])
 finally:
     release_deployment_lock()
@@ -78,6 +78,6 @@ try:
     overwrite_symlink(deploy_path, os.path.join(DEPLOYMENTS_DIR, "next"))

     subprocess.check_call([os.path.join(deploy_path, "scripts", "lib", "upgrade-zulip-stage-2"),
-                           deploy_path, "--from-git"] + deploy_options)
+                           deploy_path, "--from-git", *deploy_options])
 finally:
     release_deployment_lock()
@@ -104,7 +104,7 @@ def shutdown_server() -> None:
         core_server_services.append("zulip-thumbor")

     logging.info("Stopping Zulip...")
-    subprocess.check_call(["supervisorctl", "stop"] + core_server_services + worker_services,
+    subprocess.check_call(["supervisorctl", "stop", *core_server_services, *worker_services],
                           preexec_fn=su_to_zulip)
     IS_SERVER_UP = False

@@ -460,7 +460,7 @@ def is_root() -> bool:
 def run_as_root(args: List[str], **kwargs: Any) -> None:
     sudo_args = kwargs.pop('sudo_args', [])
     if not is_root():
-        args = ['sudo'] + sudo_args + ['--'] + args
+        args = ['sudo', *sudo_args, '--', *args]
     run(args, **kwargs)

 def assert_not_running_as_root() -> None:
@@ -62,7 +62,7 @@ def main() -> None:
     print("Cleaning orphaned/unused caches...")

     # Call 'clean-unused-caches' script to clean any orphaned/unused caches.
-    subprocess.check_call([os.path.join(ZULIP_PATH, "scripts/lib/clean-unused-caches")] + sys.argv[1:])
+    subprocess.check_call([os.path.join(ZULIP_PATH, "scripts/lib/clean-unused-caches"), *sys.argv[1:]])
     print("Done!")

 if __name__ == "__main__":
@@ -75,10 +75,10 @@ else:
     logging.info("Stopping workers")
     subprocess.check_call(["supervisorctl", "stop", "zulip-workers:*"])
     logging.info("Stopping server core")
-    subprocess.check_call(["supervisorctl", "stop"] + core_server_services)
+    subprocess.check_call(["supervisorctl", "stop", *core_server_services])

     logging.info("Starting server core")
-    subprocess.check_call(["supervisorctl", "start"] + core_server_services)
+    subprocess.check_call(["supervisorctl", "start", *core_server_services])
     logging.info("Starting workers")
     subprocess.check_call(["supervisorctl", "start", "zulip-workers:*"])

@@ -75,7 +75,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:

         os.mkdir(os.path.join(tmp, "zulip-backup"))
         tarball_file.seek(0, 0)
-        run(["tar", "-C", tmp] + transform_args + ["-xPz"], stdin=tarball_file)
+        run(["tar", "-C", tmp, *transform_args, "-xPz"], stdin=tarball_file)

         # Now, extract the the database backup, destroy the old
         # database, and create a new, empty database.
@@ -50,7 +50,7 @@ if (distro_info['ID'], distro_info['VERSION_ID']) in [('ubuntu', '20.04')]:
     puppet_env["RUBYOPT"] = "-W0"

 if not args.noop and not args.force:
-    subprocess.check_call(puppet_cmd + ['--noop', '--show_diff'], env=puppet_env)
+    subprocess.check_call([*puppet_cmd, '--noop', '--show_diff'], env=puppet_env)

     do_apply = None
     while do_apply != 'y':
@@ -60,7 +60,7 @@ if not args.noop and not args.force:
         if do_apply == '' or do_apply == 'n':
             sys.exit(0)

-ret = subprocess.call(puppet_cmd + ['--detailed-exitcodes'], env=puppet_env)
+ret = subprocess.call([*puppet_cmd, '--detailed-exitcodes'], env=puppet_env)
 # ret = 0 => no changes, no errors
 # ret = 2 => changes, no errors
 # ret = 4 => no changes, yes errors
@@ -66,10 +66,11 @@ class TagInfo:
         self.classes = classes
         self.ids = ids
         self.token = token
-        self.words = \
-            [self.tag] + \
-            ['.' + s for s in classes] + \
-            ['#' + s for s in ids]
+        self.words = [
+            self.tag,
+            *('.' + s for s in classes),
+            *('#' + s for s in ids),
+        ]

     def text(self) -> str:
         s = self.tag
@@ -145,64 +145,68 @@ COMMON_DEPENDENCIES = [
     # Puppeteer dependencies end here.
 ]

-UBUNTU_COMMON_APT_DEPENDENCIES = COMMON_DEPENDENCIES + [
+UBUNTU_COMMON_APT_DEPENDENCIES = [
+    *COMMON_DEPENDENCIES,
     "redis-server",
     "hunspell-en-us",
     "puppet-lint",
     "netcat",  # Used for flushing memcached
     "default-jre-headless",  # Required by vnu-jar
-] + THUMBOR_VENV_DEPENDENCIES
+    *THUMBOR_VENV_DEPENDENCIES,
+]

-COMMON_YUM_DEPENDENCIES = COMMON_DEPENDENCIES + [
+COMMON_YUM_DEPENDENCIES = [
+    *COMMON_DEPENDENCIES,
     "redis",
     "hunspell-en-US",
     "rubygem-puppet-lint",
     "nmap-ncat",
-] + YUM_THUMBOR_VENV_DEPENDENCIES
+    *YUM_THUMBOR_VENV_DEPENDENCIES,
+]

 BUILD_PGROONGA_FROM_SOURCE = False
 if vendor == 'debian' and os_version in [] or vendor == 'ubuntu' and os_version in []:
     # For platforms without a pgroonga release, we need to build it
     # from source.
     BUILD_PGROONGA_FROM_SOURCE = True
-    SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
-        pkg.format(POSTGRES_VERSION) for pkg in [
-            "postgresql-{0}",
+    SYSTEM_DEPENDENCIES = [
+        *UBUNTU_COMMON_APT_DEPENDENCIES,
+        "postgresql-{0}".format(POSTGRES_VERSION),
         # Dependency for building pgroonga from source
-            "postgresql-server-dev-{0}",
+        "postgresql-server-dev-{0}".format(POSTGRES_VERSION),
         "libgroonga-dev",
         "libmsgpack-dev",
         "clang-9",
         "llvm-9-dev",
-        ]
-    ] + VENV_DEPENDENCIES
+        *VENV_DEPENDENCIES,
+    ]
 elif "debian" in os_families():
-    SYSTEM_DEPENDENCIES = UBUNTU_COMMON_APT_DEPENDENCIES + [
-        pkg.format(POSTGRES_VERSION) for pkg in [
-            "postgresql-{0}",
-            "postgresql-{0}-pgroonga",
-        ]
-    ] + VENV_DEPENDENCIES
+    SYSTEM_DEPENDENCIES = [
+        *UBUNTU_COMMON_APT_DEPENDENCIES,
+        "postgresql-{0}".format(POSTGRES_VERSION),
+        "postgresql-{0}-pgroonga".format(POSTGRES_VERSION),
+        *VENV_DEPENDENCIES,
+    ]
 elif "rhel" in os_families():
-    SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
-        pkg.format(POSTGRES_VERSION) for pkg in [
-            "postgresql{0}-server",
-            "postgresql{0}",
-            "postgresql{0}-devel",
-            "postgresql{0}-pgroonga",
-        ]
-    ] + VENV_DEPENDENCIES
+    SYSTEM_DEPENDENCIES = [
+        *COMMON_YUM_DEPENDENCIES,
+        "postgresql{0}-server".format(POSTGRES_VERSION),
+        "postgresql{0}".format(POSTGRES_VERSION),
+        "postgresql{0}-devel".format(POSTGRES_VERSION),
+        "postgresql{0}-pgroonga".format(POSTGRES_VERSION),
+        *VENV_DEPENDENCIES,
+    ]
 elif "fedora" in os_families():
-    SYSTEM_DEPENDENCIES = COMMON_YUM_DEPENDENCIES + [
-        pkg.format(POSTGRES_VERSION) for pkg in [
-            "postgresql{0}-server",
-            "postgresql{0}",
-            "postgresql{0}-devel",
+    SYSTEM_DEPENDENCIES = [
+        *COMMON_YUM_DEPENDENCIES,
+        "postgresql{0}-server".format(POSTGRES_VERSION),
+        "postgresql{0}".format(POSTGRES_VERSION),
+        "postgresql{0}-devel".format(POSTGRES_VERSION),
         # Needed to build pgroonga from source
         "groonga-devel",
         "msgpack-devel",
-        ]
-    ] + VENV_DEPENDENCIES
+        *VENV_DEPENDENCIES,
+    ]
     BUILD_PGROONGA_FROM_SOURCE = True

 if "fedora" in os_families():
@@ -247,9 +251,8 @@ def install_apt_deps(deps_to_install: List[str]) -> None:
     run_as_root(
         [
             "env", "DEBIAN_FRONTEND=noninteractive",
-            "apt-get", "-y", "install", "--no-install-recommends",
+            "apt-get", "-y", "install", "--no-install-recommends", *deps_to_install,
         ]
-        + deps_to_install,
     )

 def install_yum_deps(deps_to_install: List[str]) -> None:
@@ -274,7 +277,7 @@ def install_yum_deps(deps_to_install: List[str]) -> None:
     else:
         print("Unrecognized output. `subscription-manager` might not be available")

-    run_as_root(["yum", "install", "-y"] + yum_extra_flags + deps_to_install)
+    run_as_root(["yum", "install", "-y", *yum_extra_flags, *deps_to_install])
     if "rhel" in os_families():
         # This is how a pip3 is installed to /usr/bin in CentOS/RHEL
         # for python35 and later.
@@ -363,7 +366,7 @@ def main(options: argparse.Namespace) -> "NoReturn":
         "https_proxy=" + os.environ.get("https_proxy", ""),
         "no_proxy=" + os.environ.get("no_proxy", ""),
     ]
-    run_as_root(proxy_env + ["scripts/lib/install-node"], sudo_args = ['-H'])
+    run_as_root([*proxy_env, "scripts/lib/install-node"], sudo_args = ['-H'])

     if not os.access(NODE_MODULES_CACHE_PATH, os.W_OK):
         run_as_root(["mkdir", "-p", NODE_MODULES_CACHE_PATH])
@@ -61,7 +61,7 @@ def run() -> None:
                                   description="Runs the puppet parser validator, "
                                   "checking for syntax errors.")
     linter_config.external_linter('puppet-lint',
-                                  ['puppet-lint', '--fail-on-warnings'] + PUPPET_CHECK_RULES_TO_EXCLUDE,
+                                  ['puppet-lint', '--fail-on-warnings', *PUPPET_CHECK_RULES_TO_EXCLUDE],
                                   ['pp'],
                                   fix_arg='--fix',
                                   description="Standard puppet linter"
@@ -75,7 +75,8 @@ comma_whitespace_rule: List["Rule"] = [
      'good_lines': ['foo(1, 2, 3)', 'foo = bar  # some inline comment'],
      'bad_lines': ['foo(1, 2, 3)', 'foo(1, 2, 3)']},
 ]
-markdown_whitespace_rules = list([rule for rule in whitespace_rules if rule['pattern'] != r'\s+$']) + [
+markdown_whitespace_rules: List["Rule"] = [
+    *(rule for rule in whitespace_rules if rule['pattern'] != r'\s+$'),
     # Two spaces trailing a line with other content is okay--it's a Markdown line break.
     # This rule finds one space trailing a non-space, three or more trailing spaces, and
     # spaces on an empty line.
@@ -246,7 +247,7 @@ python_rules = RuleList(
      'exclude': {'scripts/lib/setup_venv.py'},
      'exclude_line': {
          ('scripts/lib/zulip_tools.py', 'sudo_args = kwargs.pop(\'sudo_args\', [])'),
-         ('scripts/lib/zulip_tools.py', 'args = [\'sudo\'] + sudo_args + [\'--\'] + args'),
+         ('scripts/lib/zulip_tools.py', 'args = [\'sudo\', *sudo_args, \'--\', *args]'),
      },
      'description': 'Most scripts are intended to run on systems without sudo.',
      'good_lines': ['subprocess.check_call(["ls"])'],
@@ -442,7 +443,9 @@ prose_style_rules: List["Rule"] = [
      'description': "Use Botserver instead of botserver or bot server."},
     *comma_whitespace_rule,
 ]
-html_rules: List["Rule"] = whitespace_rules + prose_style_rules + [
+html_rules: List["Rule"] = [
+    *whitespace_rules,
+    *prose_style_rules,
     {'pattern': 'subject|SUBJECT',
      'exclude': {'templates/zerver/email.html'},
      'exclude_pattern': 'email subject',
@@ -578,7 +581,8 @@ html_rules: List["Rule"] = whitespace_rules + prose_style_rules + [

 handlebars_rules = RuleList(
     langs=['hbs'],
-    rules=html_rules + [
+    rules=[
+        *html_rules,
         {'pattern': "[<]script",
          'description': "Do not use inline <script> tags here; put JavaScript in static/js instead."},
         {'pattern': '{{ t ("|\')',
@@ -600,7 +604,8 @@ handlebars_rules = RuleList(

 jinja2_rules = RuleList(
     langs=['html'],
-    rules=html_rules + [
+    rules=[
+        *html_rules,
         {'pattern': r"{% endtrans %}[\.\?!]",
          'description': "Period should be part of the translatable string."},
         {'pattern': r"{{ _(.+) }}[\.\?!]",
@@ -653,7 +658,9 @@ markdown_docs_length_exclude = {

 markdown_rules = RuleList(
     langs=['md'],
-    rules=markdown_whitespace_rules + prose_style_rules + [
+    rules=[
+        *markdown_whitespace_rules,
+        *prose_style_rules,
         {'pattern': r'\[(?P<url>[^\]]+)\]\((?P=url)\)',
          'description': 'Linkified Markdown URLs should use cleaner <http://example.com> syntax.'},
         {'pattern': 'https://zulip.readthedocs.io/en/latest/[a-zA-Z0-9]',
@@ -135,10 +135,10 @@ with open(pid_file_path, 'w+') as f:

 def server_processes() -> List[List[str]]:
     main_cmds = [
-        ['./manage.py', 'runserver'] +
-        manage_args + runserver_args + [f'127.0.0.1:{django_port}'],
-        ['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado'] +
-        manage_args + [f'127.0.0.1:{tornado_port}'],
+        ['./manage.py', 'runserver',
+         *manage_args, *runserver_args, f'127.0.0.1:{django_port}'],
+        ['env', 'PYTHONUNBUFFERED=1', './manage.py', 'runtornado',
+         *manage_args, f'127.0.0.1:{tornado_port}'],
     ]

     if options.streamlined:
@@ -147,7 +147,7 @@ def server_processes() -> List[List[str]]:
         return main_cmds

     other_cmds = [
-        ['./manage.py', 'process_queue', '--all'] + manage_args,
+        ['./manage.py', 'process_queue', '--all', *manage_args],
         ['env', 'PGHOST=127.0.0.1',  # Force password authentication using .pgpass
          './puppet/zulip/files/postgresql/process_fts_updates', '--quiet'],
         ['./manage.py', 'deliver_scheduled_messages'],
@@ -70,8 +70,8 @@ if not python_files and not pyi_files:
 mypy_args: List[str] = []
 if args.quiet:
     mypy_args += ["--no-error-summary"]
-mypy_args += ["--"] + python_files + pyi_files
-rc = subprocess.call([mypy_command] + mypy_args)
+mypy_args += ["--", *python_files, *pyi_files]
+rc = subprocess.call([mypy_command, *mypy_args])

 if rc != 0:
     print("")
@@ -56,7 +56,7 @@ def check_valid_emoji_name(emoji_name: str) -> None:
 def check_emoji_names(canonical_name: str, aliases: List[str]) -> None:
     if canonical_name == 'X':
         return
-    names_to_check = [canonical_name] + aliases
+    names_to_check = [canonical_name, *aliases]
     for name in names_to_check:
         check_valid_emoji_name(name)
         check_uniqueness(name)
@@ -55,14 +55,14 @@ def vnu_servlet() -> Iterator[None]:
 with vnu_servlet(), \
         test_server_running(options.force, external_host, log_file=LOG_FILE,
                             dots=True, use_db=True):
-    ret_help_doc = subprocess.call(['scrapy', 'crawl_with_status'] + extra_args +
-                                   ['help_documentation_crawler'],
+    ret_help_doc = subprocess.call(['scrapy', 'crawl_with_status', *extra_args,
+                                    'help_documentation_crawler'],
                                    cwd='tools/documentation_crawler')
-    ret_api_doc = subprocess.call(['scrapy', 'crawl_with_status'] + extra_args +
-                                  ['api_documentation_crawler'],
+    ret_api_doc = subprocess.call(['scrapy', 'crawl_with_status', *extra_args,
+                                   'api_documentation_crawler'],
                                   cwd='tools/documentation_crawler')
-    ret_portico_doc = subprocess.call(['scrapy', 'crawl_with_status'] + extra_args +
-                                      ['portico_documentation_crawler'],
+    ret_portico_doc = subprocess.call(['scrapy', 'crawl_with_status', *extra_args,
+                                       'portico_documentation_crawler'],
                                       cwd='tools/documentation_crawler')

 if ret_help_doc != 0 or ret_api_doc != 0 or ret_portico_doc != 0:
@@ -64,7 +64,7 @@ def run_tests(files: Iterable[str], external_host: str) -> None:
     current_test_num = test_number
     for test_file in test_files[test_number:]:
         test_name = os.path.basename(test_file)
-        cmd = ["node"] + [test_file]
+        cmd = ["node", test_file]
         print("\n\n===================== {}\nRunning {}\n\n".format(test_name, " ".join(map(shlex.quote, cmd))), flush=True)
         ret = subprocess.call(cmd)
         if ret != 0:
@@ -97,7 +97,7 @@ def main() -> None:
     # Cache the hash so that we need not to run the `update_locked_requirements`
     # tool again for checking this set of requirements.
     valid_hash = get_requirements_hash(tmp_dir, use_test_lock_files=True)
-    update_cache([h for h in hash_list if h != valid_hash] + [valid_hash])
+    update_cache([*(h for h in hash_list if h != valid_hash), valid_hash])
     if not requirements_are_consistent:
         for test_locked_file in glob.glob(os.path.join(tmp_dir, "*.txt")):
             fn = os.path.basename(test_locked_file)
@@ -80,7 +80,7 @@ def check_events_dict(
     assert "type" in rkeys
     assert "id" not in keys
     return check_dict_only(
-        required_keys=list(required_keys) + [("id", check_int)],
+        required_keys=[*required_keys, ("id", check_int)],
         optional_keys=optional_keys,
     )

@@ -1466,7 +1466,7 @@ def do_write_stats_file_for_realm_export(output_dir: Path) -> None:
     attachment_file = os.path.join(output_dir, 'attachment.json')
     analytics_file = os.path.join(output_dir, 'analytics.json')
     message_files = glob.glob(os.path.join(output_dir, 'messages-*.json'))
-    fns = sorted([analytics_file] + [attachment_file] + message_files + [realm_file])
+    fns = sorted([analytics_file, attachment_file, *message_files, realm_file])

     logging.info('Writing stats file: %s\n', stats_file)
     with open(stats_file, 'w') as f:
@@ -80,7 +80,7 @@ class APIArgumentsTablePreprocessor(Preprocessor):
                     line_split = REGEXP.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -52,7 +52,7 @@ class APIReturnValuesTablePreprocessor(Preprocessor):
                     line_split = REGEXP.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -95,7 +95,7 @@ class RelativeLinks(Preprocessor):
                     line_split = REGEXP.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -91,7 +91,7 @@ class Setting(Preprocessor):
                     line_split = REGEXP.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -103,7 +103,7 @@ class TabbedSectionsPreprocessor(Preprocessor):

             start = tab_section['start_tabs_index']
             end = tab_section['end_tabs_index'] + 1
-            lines = lines[:start] + [rendered_tabs] + lines[end:]
+            lines = [*lines[:start], rendered_tabs, *lines[end:]]
             tab_section = self.parse_tabs(lines)
         return lines

@@ -100,12 +100,12 @@ class Database:
             'ZULIP_DB_NAME=' + self.database_name,
         ]

-        run(env_prelude + [
-            './manage.py', 'migrate', '--no-input',
+        run([
+            *env_prelude, './manage.py', 'migrate', '--no-input',
         ])

-        run(env_prelude + [
-            './manage.py', 'get_migration_status', '--output='+self.migration_status_file,
+        run([
+            *env_prelude, './manage.py', 'get_migration_status', '--output='+self.migration_status_file,
         ])

     def what_to_do_with_migrations(self) -> str:
@@ -416,7 +416,7 @@ def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
         assert len(pattern_cnt) > 100
         untested_patterns = {p.replace("\\", "") for p in pattern_cnt if pattern_cnt[p] == 0}

-        exempt_patterns = set([
+        exempt_patterns = {
             # We exempt some patterns that are called via Tornado.
             'api/v1/events',
             'api/v1/events/internal',
@@ -429,7 +429,8 @@ def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> None:
             'docs/(?P<path>.+)',
             'casper/(?P<path>.+)',
             'static/(?P<path>.*)',
-        ] + [webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks])
+            *(webhook.url for webhook in WEBHOOK_INTEGRATIONS if not include_webhooks),
+        }

         untested_patterns -= exempt_patterns

@@ -103,7 +103,7 @@ def exclude_topic_mutes(conditions: List[Selectable],
         return and_(stream_cond, topic_cond)

     condition = not_(or_(*list(map(mute_cond, rows))))
-    return conditions + [condition]
+    return [*conditions, condition]

 def build_topic_mute_checker(user_profile: UserProfile) -> Callable[[int, str], bool]:
     rows = MutedTopic.objects.filter(
@@ -394,7 +394,7 @@ def user_profile_to_user_row(user_profile: UserProfile) -> Dict[str, Any]:
     # changing realm_user_dict_fields to name the bot owner with
     # the less readable `bot_owner` (instead of `bot_owner_id`).
     user_row = model_to_dict(user_profile,
-                             fields=realm_user_dict_fields + ['bot_owner'])
+                             fields=[*realm_user_dict_fields, 'bot_owner'])
     user_row['bot_owner_id'] = user_row['bot_owner']
     del user_row['bot_owner']
     return user_row
@@ -117,10 +117,10 @@ class Command(ZulipBaseCommand):
             tarball_path = options["output"]

             run(
-                ["tar", "-C", tmp, "-cPzf", tarball_path]
-                + transform_args
-                + ["--"]
-                + members,
+                ["tar", "-C", tmp, "-cPzf", tarball_path,
+                 *transform_args,
+                 "--",
+                 *members]
             )
             print(f"Backup tarball written to {tarball_path}")
         except BaseException:
@@ -249,8 +249,8 @@ def generate_curl_example(endpoint: str, method: str,
         format_dict[param["name"]] = example_value
     example_endpoint = endpoint.format_map(format_dict)

-    curl_first_line_parts = ["curl"] + curl_method_arguments(example_endpoint, method,
-                                                             api_url)
+    curl_first_line_parts = ["curl", *curl_method_arguments(example_endpoint, method,
+                                                            api_url)]
     lines.append(" ".join(curl_first_line_parts))

     insecure_operations = ['/dev_fetch_api_key:post']
@@ -386,7 +386,7 @@ class APICodeExamplesPreprocessor(Preprocessor):
                     line_split = MACRO_REGEXP.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -429,7 +429,7 @@ class APIDescriptionPreprocessor(Preprocessor):
                     line_split = MACRO_REGEXP_DESC.split(line, maxsplit=0)
                     preceding = line_split[0]
                     following = line_split[-1]
-                    text = [preceding] + text + [following]
+                    text = [preceding, *text, following]
                     lines = lines[:loc] + text + lines[loc+1:]
                     break
             else:
@@ -124,11 +124,11 @@ def render_markdown_path(markdown_file_path: str,
         #
         # TODO: Convert this to something more efficient involving
         # passing the API URL as a direct parameter.
-        extensions = extensions + [zerver.openapi.markdown_extension.makeExtension(
+        extensions = [*extensions, zerver.openapi.markdown_extension.makeExtension(
             api_url=context["api_url"],
         )]
     if not any(doc in markdown_file_path for doc in docs_without_macros):
-        extensions = [md_macro_extension] + extensions
+        extensions = [md_macro_extension, *extensions]

     md_engine = markdown.Markdown(extensions=extensions)
     md_engine.reset()
@@ -4141,21 +4141,21 @@ class TestLDAP(ZulipLDAPTestCase):
         common_attrs = ['cn', 'userPassword', 'phoneNumber', 'birthDate']
         for key, value in ldap_dir.items():
             self.assertTrue(regex.match(key))
-            self.assertCountEqual(list(value.keys()), common_attrs + ['uid', 'thumbnailPhoto', 'userAccountControl'])
+            self.assertCountEqual(list(value.keys()), [*common_attrs, 'uid', 'thumbnailPhoto', 'userAccountControl'])

         ldap_dir = generate_dev_ldap_dir('b', 9)
         self.assertEqual(len(ldap_dir), 9)
         regex = re.compile(r'(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)')
         for key, value in ldap_dir.items():
             self.assertTrue(regex.match(key))
-            self.assertCountEqual(list(value.keys()), common_attrs + ['uid', 'jpegPhoto'])
+            self.assertCountEqual(list(value.keys()), [*common_attrs, 'uid', 'jpegPhoto'])

         ldap_dir = generate_dev_ldap_dir('c', 8)
         self.assertEqual(len(ldap_dir), 8)
         regex = re.compile(r'(uid\=)+[a-zA-Z0-9_.+-]+(\,ou\=users\,dc\=zulip\,dc\=com)')
         for key, value in ldap_dir.items():
             self.assertTrue(regex.match(key))
-            self.assertCountEqual(list(value.keys()), common_attrs + ['uid', 'email'])
+            self.assertCountEqual(list(value.keys()), [*common_attrs, 'uid', 'email'])

     @override_settings(AUTHENTICATION_BACKENDS=('zproject.backends.ZulipLDAPAuthBackend',))
     def test_dev_ldap_fail_login(self) -> None:
@@ -719,7 +719,7 @@ class NormalActionsTest(BaseAction):
                 timezone_now(),
                 UserPresence.ACTIVE),
             slim_presence=False)
-        schema_checker = check_events_dict(fields + [email_field])
+        schema_checker = check_events_dict([*fields, email_field])
         schema_checker('events[0]', events[0])

         events = self.verify_action(
@@ -1391,11 +1391,11 @@ class GetOldMessagesTest(ZulipTestCase):

         def dr_emails(dr: DisplayRecipientT) -> str:
             assert isinstance(dr, list)
-            return ','.join(sorted(set([r['email'] for r in dr] + [me.email])))
+            return ','.join(sorted({*(r['email'] for r in dr), me.email}))

         def dr_ids(dr: DisplayRecipientT) -> List[int]:
             assert isinstance(dr, list)
-            return list(sorted(set([r['id'] for r in dr] + [self.example_user('hamlet').id])))
+            return sorted({*(r['id'] for r in dr), self.example_user('hamlet').id})

         self.send_personal_message(me, self.example_user("iago"))

@@ -2449,7 +2449,7 @@ class GetOldMessagesTest(ZulipTestCase):
         """
         self.login('hamlet')

-        other_params = [("narrow", {}), ("anchor", 0)]
+        other_params = {"narrow": {}, "anchor": 0}
         int_params = ["num_before", "num_after"]

         bad_types = (False, "", "-1", -1)
@@ -2457,10 +2457,12 @@ class GetOldMessagesTest(ZulipTestCase):
             for type in bad_types:
                 # Rotate through every bad type for every integer
                 # parameter, one at a time.
-                post_params = dict(other_params + [(param, type)] +
-                                   [(other_param, 0) for other_param in
-                                    int_params[:idx] + int_params[idx + 1:]],
-                                   )
+                post_params = {
+                    **other_params,
+                    param: type,
+                    **{other_param: 0 for other_param in
+                       int_params[:idx] + int_params[idx + 1:]},
+                }
                 result = self.client_get("/json/messages", post_params)
                 self.assert_json_error(result,
                                        f"Bad value for '{param}': {type}")
@@ -2471,14 +2473,14 @@ class GetOldMessagesTest(ZulipTestCase):
         """
         self.login('hamlet')

-        other_params: List[Tuple[str, Union[int, str, bool]]] = [("anchor", 0), ("num_before", 0), ("num_after", 0)]
+        other_params = {"anchor": 0, "num_before": 0, "num_after": 0}

         bad_types: Tuple[Union[int, str, bool], ...] = (
             False, 0, '', '{malformed json,',
             '{foo: 3}', '[1,2]', '[["x","y","z"]]',
         )
         for type in bad_types:
-            post_params = dict(other_params + [("narrow", type)])
+            post_params = {**other_params, "narrow": type}
             result = self.client_get("/json/messages", post_params)
             self.assert_json_error(result,
                                    f"Bad value for 'narrow': {type}")
@@ -2538,10 +2540,9 @@ class GetOldMessagesTest(ZulipTestCase):
     def exercise_bad_narrow_operand(self, operator: str,
                                     operands: Sequence[Any],
                                     error_msg: str) -> None:
-        other_params: List[Tuple[str, Any]] = [("anchor", 0), ("num_before", 0), ("num_after", 0)]
+        other_params = {"anchor": "0", "num_before": "0", "num_after": "0"}
         for operand in operands:
-            post_params = dict(other_params + [
-                ("narrow", orjson.dumps([[operator, operand]]).decode())])
+            post_params = {**other_params, "narrow": orjson.dumps([[operator, operand]]).decode()}
             result = self.client_get("/json/messages", post_params)
             self.assert_json_error_contains(result, error_msg)

@@ -320,7 +320,7 @@ class TestArchiveMessagesGeneral(ArchiveMessagesTestingBase):
             timezone_now() - timedelta(ZULIP_REALM_DAYS+1),
         )

-        expired_msg_ids = expired_mit_msg_ids + expired_zulip_msg_ids + [expired_crossrealm_msg_id]
+        expired_msg_ids = [*expired_mit_msg_ids, *expired_zulip_msg_ids, expired_crossrealm_msg_id]
         expired_usermsg_ids = self._get_usermessage_ids(expired_msg_ids)

         archive_messages(chunk_size=2)  # Specify low chunk_size to test batching.
@@ -147,7 +147,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
             self.example_user('iago'),
            self.example_user('cordelia'),
         ]
-        all_users = users + [hamlet]
+        all_users = [*users, hamlet]
         for user in all_users:
             self.subscribe(user, stream)

@@ -182,7 +182,7 @@ class UserSoftDeactivationTests(ZulipTestCase):
         sender = self.example_user('hamlet')
         realm = get_realm('zulip')
         stream_name = 'announce'
-        for user in users + [sender]:
+        for user in [*users, sender]:
             self.subscribe(user, stream_name)

         client, _ = Client.objects.get_or_create(name='website')
@@ -2707,9 +2707,9 @@ class SubscriptionAPITest(ZulipTestCase):
         """
         Subscribing to a stream name with non-ASCII characters succeeds.
         """
-        self.helper_check_subs_before_and_after_add(self.streams + ["hümbüǵ"], {},
+        self.helper_check_subs_before_and_after_add([*self.streams, "hümbüǵ"], {},
                                                     ["hümbüǵ"], self.streams, self.test_email,
-                                                    self.streams + ["hümbüǵ"], self.test_realm)
+                                                    [*self.streams, "hümbüǵ"], self.test_realm)

     def test_subscriptions_add_too_long(self) -> None:
         """
@@ -1367,7 +1367,7 @@ class SocialAuthMixin(ZulipAuthMixin, ExternalAuthMethod, BaseAuth):
     # it should be False.
     full_name_validated = False

-    standard_relay_params = settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION + ['next']
+    standard_relay_params = [*settings.SOCIAL_AUTH_FIELDS_STORED_IN_SESSION, 'next']

     def auth_complete(self, *args: Any, **kwargs: Any) -> Optional[HttpResponse]:
         """This is a small wrapper around the core `auth_complete` method of
@@ -695,10 +695,10 @@ else:
 # This is disabled in a few tests.
 LOGGING_ENABLED = True

-DEFAULT_ZULIP_HANDLERS = (
-    (['zulip_admins'] if ERROR_REPORTING else []) +
-    ['console', 'file', 'errors_file']
-)
+DEFAULT_ZULIP_HANDLERS = [
+    *(['zulip_admins'] if ERROR_REPORTING else []),
+    'console', 'file', 'errors_file',
+]

 LOGGING: Dict[str, Any] = {
     'version': 1,
@@ -892,7 +892,7 @@ LOGGING: Dict[str, Any] = {
     },
     'zulip.auth': {
         'level': 'DEBUG',
-        'handlers': DEFAULT_ZULIP_HANDLERS + ['auth_file'],
+        'handlers': [*DEFAULT_ZULIP_HANDLERS, 'auth_file'],
         'propagate': False,
     },
     'zulip.ldap': {