python: Convert function type annotations to Python 3 style.

Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:

-    def __init__(self, token: Token, parent: Optional[Node]) -> None:
+    def __init__(self, token: Token, parent: "Optional[Node]") -> None:

-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":

-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":

-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:

-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:

-    method_kwarg_pairs: List[FuncKwargPair],
+    method_kwarg_pairs: "List[FuncKwargPair]",
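
The quoted annotations above work around runtime evaluation: without
`from __future__ import annotations`, Python evaluates each annotation when
its `def` statement runs, so a name that is not yet bound (or not importable
outside of type checking) at that moment has to be written as a string, i.e.
a forward reference. A minimal standalone sketch of the Node case follows;
it is illustrative only, with the Token parameter simplified to object:

    from typing import Optional

    class Node:
        def __init__(self, token: object, parent: "Optional[Node]") -> None:
            # Unquoted, Optional[Node] would be evaluated while the class
            # body is still executing, before the name Node exists, and
            # would raise NameError.  The string defers evaluation.
            self.token = token
            self.parent = parent

    Node(token="tok", parent=None)  # fine; the annotation is never evaluated

The other fixes are presumably analogous: subprocess.Popen is not
subscriptable at runtime on the Python versions supported here, and names
such as NoReturn and FuncKwargPair may only be imported for type checking.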

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
Anders Kaseorg authored on 2020-04-18 18:48:37 -07:00, committed by Tim Abbott
parent 43ac901ad9
commit 5901e7ba7e
68 changed files with 389 additions and 691 deletions


@ -85,8 +85,7 @@ states = {
"UNKNOWN": 3
}
def report(state, timestamp=None, msg=None):
# type: (str, Any, Optional[str]) -> None
def report(state: str, timestamp: Any = None, msg: Optional[str] = None) -> None:
now = int(time.time())
if msg is None:
msg = "send time was %s" % (timestamp,)
@ -97,14 +96,12 @@ def report(state, timestamp=None, msg=None):
print("%s: %s" % (state, msg))
exit(states[state])
def send_zulip(sender, message):
# type: (zulip.Client, Dict[str, Any]) -> None
def send_zulip(sender: zulip.Client, message: Dict[str, Any]) -> None:
result = sender.send_message(message)
if result["result"] != "success" and options.nagios:
report("CRITICAL", msg="Error sending Zulip, args were: %s, %s" % (message, result))
def get_zulips():
# type: () -> List[Dict[str, Any]]
def get_zulips() -> List[Dict[str, Any]]:
global queue_id, last_event_id
res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id)
if 'error' in res.get('result', {}):


@ -20,13 +20,11 @@ states = {
"UNKNOWN": 3
}
def report(state, msg):
# type: (str, str) -> NoReturn
def report(state: str, msg: str) -> NoReturn:
print("%s: %s" % (state, msg))
exit(states[state])
def get_loc_over_ssh(host, func):
# type: (str, str) -> str
def get_loc_over_ssh(host: str, func: str) -> str:
try:
return subprocess.check_output(['ssh', host,
'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT %s()"' % (func,)],
@ -35,8 +33,7 @@ def get_loc_over_ssh(host, func):
except subprocess.CalledProcessError as e:
report('CRITICAL', 'ssh failed: %s: %s' % (str(e), e.output))
def loc_to_abs_offset(loc_str):
# type: (str) -> int
def loc_to_abs_offset(loc_str: str) -> int:
m = re.match(r'^\s*([0-9a-fA-F]+)/([0-9a-fA-F]+)\s*$', loc_str)
if not m:
raise ValueError("Unknown xlog location format: " + loc_str)


@ -21,8 +21,7 @@ states = {
"UNKNOWN": 3
}
def report(state, num):
# type: (str, str) -> None
def report(state: str, num: str) -> None:
print("%s: %s rows in fts_update_log table" % (state, num))
exit(states[state])


@ -13,8 +13,7 @@ states = {
"UNKNOWN": 3
}
def report(state, msg):
# type: (str, str) -> None
def report(state: str, msg: str) -> None:
print("%s: %s" % (state, msg))
exit(states[state])


@ -16,8 +16,7 @@ logging.Formatter.converter = time.gmtime
logging.basicConfig(format="%(asctime)s %(levelname)s: %(message)s")
logger = logging.getLogger(__name__)
def run(args, dry_run=False):
# type: (List[str], bool) -> str
def run(args: List[str], dry_run: bool = False) -> str:
if dry_run:
print("Would have run: " + " ".join(map(shlex.quote, args)))
return ""


@ -33,8 +33,7 @@ import os
BATCH_SIZE = 1000
def update_fts_columns(cursor):
# type: (psycopg2.extensions.cursor) -> int
def update_fts_columns(cursor: psycopg2.extensions.cursor) -> int:
cursor.execute("SELECT id, message_id FROM fts_update_log LIMIT %s;" % (
BATCH_SIZE,))
ids = []
@ -52,8 +51,7 @@ def update_fts_columns(cursor):
cursor.execute("DELETE FROM fts_update_log WHERE id = ANY(%s)", (ids,))
return len(ids)
def am_master(cursor):
# type: (psycopg2.extensions.cursor) -> bool
def am_master(cursor: psycopg2.extensions.cursor) -> bool:
cursor.execute("SELECT pg_is_in_recovery()")
return not cursor.fetchall()[0][0]


@ -21,8 +21,7 @@ states = {
"UNKNOWN": 3
} # type: Dict[str, int]
def report(state, output):
# type: (str, str) -> None
def report(state: str, output: str) -> None:
print("%s\n%s" % (state, output))
exit(states[state])


@ -35,8 +35,7 @@ states = {
"UNKNOWN": 3
} # type: Dict[str, int]
def report(state, short_msg, too_old=None):
# type: (str, str, Optional[Set[Any]]) -> None
def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> None:
too_old_data = ""
if too_old:
too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join(


@ -22,8 +22,7 @@ states = {
"UNKNOWN": 3
} # type: Dict[str, int]
def report(state, data, last_check):
# type: (str, str, float) -> None
def report(state: str, data: str, last_check: float) -> None:
print("%s: Last test run completed at %s\n%s" % (
state, time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check)),
data))


@ -7,8 +7,7 @@ sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import parse_cache_script_args
from scripts.lib import clean_venv_cache, clean_node_cache, clean_emoji_cache
def main():
# type: () -> None
def main() -> None:
args = parse_cache_script_args("This script cleans unused zulip caches.")
os.chdir(ZULIP_PATH)
clean_venv_cache.main(args)


@ -16,8 +16,7 @@ EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
if ENV == "travis":
EMOJI_CACHE_PATH = os.path.join(os.environ["HOME"], "zulip-emoji-cache")
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
def get_caches_in_use(threshold_days: int) -> Set[str]:
setups_to_check = {ZULIP_PATH}
caches_in_use = set()


@ -23,8 +23,7 @@ if ENV == "travis":
'hence yarn is not installed. Exiting without cleaning npm cache.')
sys.exit(0)
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
def get_caches_in_use(threshold_days: int) -> Set[str]:
setups_to_check = {ZULIP_PATH}
caches_in_use = set()


@ -18,8 +18,7 @@ VENV_CACHE_DIR = '/srv/zulip-venv-cache'
if ENV == "travis":
VENV_CACHE_DIR = os.path.join(os.environ["HOME"], "zulip-venv-cache")
def get_caches_in_use(threshold_days):
# type: (int) -> Set[str]
def get_caches_in_use(threshold_days: int) -> Set[str]:
setups_to_check = {ZULIP_PATH}
caches_in_use = set()


@ -82,8 +82,7 @@ options = parser.parse_args()
MAX_ALLOWED_PAYLOAD = 25 * 1024 * 1024
def process_response_error(e):
# type: (HTTPError) -> None
def process_response_error(e: HTTPError) -> None:
if e.code == 400:
response_content = e.read()
response_data = json.loads(response_content.decode('utf8'))
@ -94,8 +93,9 @@ def process_response_error(e):
exit(1)
def send_email_mirror(rcpt_to, shared_secret, host, url, test, verify_ssl):
# type: (str, str, str, str, bool, bool) -> None
def send_email_mirror(
rcpt_to: str, shared_secret: str, host: str, url: str, test: bool, verify_ssl: bool
) -> None:
if not rcpt_to:
print("5.1.1 Bad destination mailbox address: No missed message email address.")
exit(posix.EX_NOUSER)


@ -5,8 +5,7 @@ import argparse
import hashlib
from typing import Iterable, List, MutableSet
def expand_reqs_helper(fpath, visited):
# type: (str, MutableSet[str]) -> List[str]
def expand_reqs_helper(fpath: str, visited: MutableSet[str]) -> List[str]:
if fpath in visited:
return []
else:
@ -27,8 +26,7 @@ def expand_reqs_helper(fpath, visited):
result.append(dep)
return result
def expand_reqs(fpath):
# type: (str) -> List[str]
def expand_reqs(fpath: str) -> List[str]:
"""
Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
Removes comments from the output and recursively visits files specified inside `fpath`.
@ -38,13 +36,11 @@ def expand_reqs(fpath):
output = expand_reqs_helper(absfpath, set())
return sorted(set(output))
def hash_deps(deps):
# type: (Iterable[str]) -> str
def hash_deps(deps: Iterable[str]) -> str:
deps_str = "\n".join(deps) + "\n"
return hashlib.sha1(deps_str.encode('utf-8')).hexdigest()
def main():
# type: () -> int
def main() -> int:
description = ("Finds the SHA1 hash of list of dependencies in a requirements file"
" after recursively visiting all files specified in it.")
parser = argparse.ArgumentParser(description=description)


@ -20,16 +20,16 @@ YARN_PACKAGE_JSON = os.path.join(ZULIP_SRV_PATH, 'zulip-yarn/package.json')
DEFAULT_PRODUCTION = False
def get_yarn_args(production):
# type: (bool) -> List[str]
def get_yarn_args(production: bool) -> List[str]:
if production:
yarn_args = ["--prod"]
else:
yarn_args = []
return yarn_args
def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION):
# type: (Optional[str], bool) -> str
def generate_sha1sum_node_modules(
setup_dir: Optional[str] = None, production: bool = DEFAULT_PRODUCTION
) -> str:
if setup_dir is None:
setup_dir = os.path.realpath(os.getcwd())
PACKAGE_JSON_FILE_PATH = os.path.join(setup_dir, 'package.json')
@ -47,9 +47,12 @@ def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION)
sha1sum.update(''.join(sorted(yarn_args)).encode('utf8'))
return sha1sum.hexdigest()
def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None,
prefer_offline=False):
# type: (bool, Optional[IO[Any]], Optional[IO[Any]], bool) -> None
def setup_node_modules(
production: bool = DEFAULT_PRODUCTION,
stdout: Optional[IO[Any]] = None,
stderr: Optional[IO[Any]] = None,
prefer_offline: bool = False,
) -> None:
yarn_args = get_yarn_args(production=production)
if prefer_offline:
yarn_args.append("--prefer-offline")
@ -72,8 +75,13 @@ def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None,
shutil.rmtree('node_modules')
os.symlink(cached_node_modules, 'node_modules')
def do_yarn_install(target_path, yarn_args, success_stamp, stdout=None, stderr=None):
# type: (str, List[str], str, Optional[IO[Any]], Optional[IO[Any]]) -> None
def do_yarn_install(
target_path: str,
yarn_args: List[str],
success_stamp: str,
stdout: Optional[IO[Any]] = None,
stderr: Optional[IO[Any]] = None,
) -> None:
os.makedirs(target_path, exist_ok=True)
shutil.copy('package.json', target_path)
shutil.copy("yarn.lock", target_path)


@ -102,8 +102,7 @@ YUM_THUMBOR_VENV_DEPENDENCIES = [
"gifsicle",
]
def get_venv_dependencies(vendor, os_version):
# type: (str, str) -> List[str]
def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
if vendor == 'ubuntu' and os_version == '20.04':
return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format("2"), ]
elif "debian" in os_families():
@ -115,18 +114,15 @@ def get_venv_dependencies(vendor, os_version):
else:
raise AssertionError("Invalid vendor")
def install_venv_deps(pip, requirements_file, python2):
# type: (str, str, bool) -> None
def install_venv_deps(pip: str, requirements_file: str, python2: bool) -> None:
pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip2.txt" if python2 else "pip.txt")
run([pip, "install", "--force-reinstall", "--require-hashes", "--requirement", pip_requirements])
run([pip, "install", "--no-deps", "--require-hashes", "--requirement", requirements_file])
def get_index_filename(venv_path):
# type: (str) -> str
def get_index_filename(venv_path: str) -> str:
return os.path.join(venv_path, 'package_index')
def get_package_names(requirements_file):
# type: (str) -> List[str]
def get_package_names(requirements_file: str) -> List[str]:
packages = expand_reqs(requirements_file)
cleaned = []
operators = ['~=', '==', '!=', '<', '>']
@ -148,8 +144,7 @@ def get_package_names(requirements_file):
return sorted(cleaned)
def create_requirements_index_file(venv_path, requirements_file):
# type: (str, str) -> str
def create_requirements_index_file(venv_path: str, requirements_file: str) -> str:
"""
Creates a file, called package_index, in the virtual environment
directory that contains all the PIP packages installed in the
@ -164,8 +159,7 @@ def create_requirements_index_file(venv_path, requirements_file):
return index_filename
def get_venv_packages(venv_path):
# type: (str) -> Set[str]
def get_venv_packages(venv_path: str) -> Set[str]:
"""
Returns the packages installed in the virtual environment using the
package index file.
@ -173,8 +167,7 @@ def get_venv_packages(venv_path):
with open(get_index_filename(venv_path)) as reader:
return {p.strip() for p in reader.read().split('\n') if p.strip()}
def try_to_copy_venv(venv_path, new_packages):
# type: (str, Set[str]) -> bool
def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
"""
Tries to copy packages from an old virtual environment in the cache
to the new virtual environment. The algorithm works as follows:
@ -247,12 +240,12 @@ def try_to_copy_venv(venv_path, new_packages):
return False
def get_logfile_name(venv_path):
# type: (str) -> str
def get_logfile_name(venv_path: str) -> str:
return "{}/setup-venv.log".format(venv_path)
def create_log_entry(target_log, parent, copied_packages, new_packages):
# type: (str, str, Set[str], Set[str]) -> None
def create_log_entry(
target_log: str, parent: str, copied_packages: Set[str], new_packages: Set[str]
) -> None:
venv_path = os.path.dirname(target_log)
with open(target_log, 'a') as writer:
@ -267,13 +260,11 @@ def create_log_entry(target_log, parent, copied_packages, new_packages):
writer.write("\n".join('- {}'.format(p) for p in sorted(new_packages)))
writer.write("\n\n")
def copy_parent_log(source_log, target_log):
# type: (str, str) -> None
def copy_parent_log(source_log: str, target_log: str) -> None:
if os.path.exists(source_log):
shutil.copyfile(source_log, target_log)
def do_patch_activate_script(venv_path):
# type: (str) -> None
def do_patch_activate_script(venv_path: str) -> None:
"""
Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
is set to venv_path during the script's execution whenever it is sourced.
@ -290,8 +281,12 @@ def do_patch_activate_script(venv_path):
with open(script_path, 'w') as f:
f.write("".join(lines))
def setup_virtualenv(target_venv_path, requirements_file, python2=False, patch_activate_script=False):
# type: (Optional[str], str, bool, bool) -> str
def setup_virtualenv(
target_venv_path: Optional[str],
requirements_file: str,
python2: bool = False,
patch_activate_script: bool = False,
) -> str:
# Check if a cached version already exists
path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
@ -314,15 +309,13 @@ def setup_virtualenv(target_venv_path, requirements_file, python2=False, patch_a
do_patch_activate_script(target_venv_path)
return cached_venv_path
def add_cert_to_pipconf():
# type: () -> None
def add_cert_to_pipconf() -> None:
conffile = os.path.expanduser("~/.pip/pip.conf")
confdir = os.path.expanduser("~/.pip/")
os.makedirs(confdir, exist_ok=True)
run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])
def do_setup_virtualenv(venv_path, requirements_file, python2):
# type: (str, str, bool) -> None
def do_setup_virtualenv(venv_path: str, requirements_file: str, python2: bool) -> None:
# Setup Python virtualenv
new_packages = set(get_package_names(requirements_file))


@ -39,8 +39,7 @@ BLUE = '\x1b[34m'
MAGENTA = '\x1b[35m'
CYAN = '\x1b[36m'
def overwrite_symlink(src, dst):
# type: (str, str) -> None
def overwrite_symlink(src: str, dst: str) -> None:
while True:
tmp = tempfile.mktemp(
prefix='.' + os.path.basename(dst) + '.',
@ -56,8 +55,7 @@ def overwrite_symlink(src, dst):
os.remove(tmp)
raise
def parse_cache_script_args(description):
# type: (str) -> argparse.Namespace
def parse_cache_script_args(description: str) -> argparse.Namespace:
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
@ -88,8 +86,7 @@ def get_deploy_root() -> str:
os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))
)
def get_deployment_version(extract_path):
# type: (str) -> str
def get_deployment_version(extract_path: str) -> str:
version = '0.0.0'
for item in os.listdir(extract_path):
item_path = os.path.join(extract_path, item)
@ -101,14 +98,12 @@ def get_deployment_version(extract_path):
break
return version
def is_invalid_upgrade(current_version, new_version):
# type: (str, str) -> bool
def is_invalid_upgrade(current_version: str, new_version: str) -> bool:
if new_version > '1.4.3' and current_version <= '1.3.10':
return True
return False
def subprocess_text_output(args):
# type: (Sequence[str]) -> str
def subprocess_text_output(args: Sequence[str]) -> str:
return subprocess.check_output(args, universal_newlines=True).strip()
def get_zulip_pwent() -> pwd.struct_passwd:
@ -121,8 +116,7 @@ def get_zulip_pwent() -> pwd.struct_passwd:
# `zulip` user as that's the correct value in production.
return pwd.getpwnam("zulip")
def su_to_zulip(save_suid=False):
# type: (bool) -> None
def su_to_zulip(save_suid: bool = False) -> None:
"""Warning: su_to_zulip assumes that the zulip checkout is owned by
the zulip user (or whatever normal user is running the Zulip
installation). It should never be run from the installer or other
@ -136,14 +130,12 @@ def su_to_zulip(save_suid=False):
os.setuid(pwent.pw_uid)
os.environ['HOME'] = pwent.pw_dir
def make_deploy_path():
# type: () -> str
def make_deploy_path() -> str:
timestamp = datetime.datetime.now().strftime(TIMESTAMP_FORMAT)
return os.path.join(DEPLOYMENTS_DIR, timestamp)
TEMPLATE_DATABASE_DIR = "test-backend/databases"
def get_dev_uuid_var_path(create_if_missing=False):
# type: (bool) -> str
def get_dev_uuid_var_path(create_if_missing: bool = False) -> str:
zulip_path = get_deploy_root()
uuid_path = os.path.join(os.path.realpath(os.path.dirname(zulip_path)), ".zulip-dev-uuid")
if os.path.exists(uuid_path):
@ -163,8 +155,7 @@ def get_dev_uuid_var_path(create_if_missing=False):
os.makedirs(result_path, exist_ok=True)
return result_path
def get_deployment_lock(error_rerun_script):
# type: (str) -> None
def get_deployment_lock(error_rerun_script: str) -> None:
start_time = time.time()
got_lock = False
while time.time() - start_time < 300:
@ -187,12 +178,10 @@ def get_deployment_lock(error_rerun_script):
ENDC)
sys.exit(1)
def release_deployment_lock():
# type: () -> None
def release_deployment_lock() -> None:
shutil.rmtree(LOCK_DIR)
def run(args, **kwargs):
# type: (Sequence[str], **Any) -> None
def run(args: Sequence[str], **kwargs: Any) -> None:
# Output what we're doing in the `set -x` style
print("+ %s" % (" ".join(map(shlex.quote, args)),))
@ -208,8 +197,7 @@ def run(args, **kwargs):
print()
raise
def log_management_command(cmd, log_path):
# type: (str, str) -> None
def log_management_command(cmd: str, log_path: str) -> None:
log_dir = os.path.dirname(log_path)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
@ -223,16 +211,14 @@ def log_management_command(cmd, log_path):
logger.info("Ran '%s'" % (cmd,))
def get_environment():
# type: () -> str
def get_environment() -> str:
if os.path.exists(DEPLOYMENTS_DIR):
return "prod"
if os.environ.get("TRAVIS"):
return "travis"
return "dev"
def get_recent_deployments(threshold_days):
# type: (int) -> Set[str]
def get_recent_deployments(threshold_days: int) -> Set[str]:
# Returns a list of deployments not older than threshold days
# including `/root/zulip` directory if it exists.
recent = set()
@ -259,16 +245,14 @@ def get_recent_deployments(threshold_days):
recent.add("/root/zulip")
return recent
def get_threshold_timestamp(threshold_days):
# type: (int) -> int
def get_threshold_timestamp(threshold_days: int) -> int:
# Given number of days, this function returns timestamp corresponding
# to the time prior to given number of days.
threshold = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
threshold_timestamp = int(time.mktime(threshold.utctimetuple()))
return threshold_timestamp
def get_caches_to_be_purged(caches_dir, caches_in_use, threshold_days):
# type: (str, Set[str], int) -> Set[str]
def get_caches_to_be_purged(caches_dir: str, caches_in_use: Set[str], threshold_days: int) -> Set[str]:
# Given a directory containing caches, a list of caches in use
# and threshold days, this function return a list of caches
# which can be purged. Remove the cache only if it is:
@ -287,8 +271,9 @@ def get_caches_to_be_purged(caches_dir, caches_in_use, threshold_days):
caches_to_purge.add(cache_dir)
return caches_to_purge
def purge_unused_caches(caches_dir, caches_in_use, cache_type, args):
# type: (str, Set[str], str, argparse.Namespace) -> None
def purge_unused_caches(
caches_dir: str, caches_in_use: Set[str], cache_type: str, args: argparse.Namespace
) -> None:
all_caches = {os.path.join(caches_dir, cache) for cache in os.listdir(caches_dir)}
caches_to_purge = get_caches_to_be_purged(caches_dir, caches_in_use, args.threshold_days)
caches_to_keep = all_caches - caches_to_purge
@ -298,8 +283,7 @@ def purge_unused_caches(caches_dir, caches_in_use, cache_type, args):
if args.verbose:
print("Done!")
def generate_sha1sum_emoji(zulip_path):
# type: (str) -> str
def generate_sha1sum_emoji(zulip_path: str) -> str:
ZULIP_EMOJI_DIR = os.path.join(zulip_path, 'tools', 'setup', 'emoji')
sha = hashlib.sha1()
@ -332,8 +316,14 @@ def generate_sha1sum_emoji(zulip_path):
return sha.hexdigest()
def may_be_perform_purging(dirs_to_purge, dirs_to_keep, dir_type, dry_run, verbose, no_headings):
# type: (Set[str], Set[str], str, bool, bool, bool) -> None
def may_be_perform_purging(
dirs_to_purge: Set[str],
dirs_to_keep: Set[str],
dir_type: str,
dry_run: bool,
verbose: bool,
no_headings: bool,
) -> None:
if dry_run:
print("Performing a dry run...")
if not no_headings:
@ -350,8 +340,7 @@ def may_be_perform_purging(dirs_to_purge, dirs_to_keep, dir_type, dry_run, verbo
print("Keeping used %s: %s" % (dir_type, directory))
@functools.lru_cache(None)
def parse_os_release():
# type: () -> Dict[str, str]
def parse_os_release() -> Dict[str, str]:
"""
Example of the useful subset of the data:
{
@ -423,8 +412,7 @@ def is_root() -> bool:
return True
return False
def run_as_root(args, **kwargs):
# type: (List[str], **Any) -> None
def run_as_root(args: List[str], **kwargs: Any) -> None:
sudo_args = kwargs.pop('sudo_args', [])
if not is_root():
args = ['sudo'] + sudo_args + ['--'] + args
@ -454,8 +442,12 @@ def assert_running_as_root(strip_lib_from_paths: bool=False) -> None:
print("{} must be run as root.".format(script_name))
sys.exit(1)
def get_config(config_file, section, key, default_value=""):
# type: (configparser.RawConfigParser, str, str, str) -> str
def get_config(
config_file: configparser.RawConfigParser,
section: str,
key: str,
default_value: str = "",
) -> str:
if config_file.has_option(section, key):
return config_file.get(section, key)
return default_value
@ -465,8 +457,7 @@ def get_config_file() -> configparser.RawConfigParser:
config_file.read("/etc/zulip/zulip.conf")
return config_file
def get_deploy_options(config_file):
# type: (configparser.RawConfigParser) -> List[str]
def get_deploy_options(config_file: configparser.RawConfigParser) -> List[str]:
return get_config(config_file, 'deployment', 'deploy_options', "").strip().split()
def get_or_create_dev_uuid_var_path(path: str) -> str:


@ -37,8 +37,7 @@ options = parser.parse_args()
config_file = configparser.RawConfigParser()
config_file.read("/etc/zulip/zulip.conf")
def get_config(section, key, default_value):
# type: (str, str, str) -> str
def get_config(section: str, key: str, default_value: str) -> str:
if config_file.has_option(section, key):
return config_file.get(section, key)
return default_value


@ -2,8 +2,7 @@ import time
from typing import Tuple
def nagios_from_file(results_file):
# type: (str) -> Tuple[int, str]
def nagios_from_file(results_file: str) -> Tuple[int, str]:
"""Returns a nagios-appropriate string and return code obtained by
parsing the desired file on disk. The file on disk should be of format


@ -11,8 +11,7 @@ sys.path.append(ZULIP_PATH)
from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, get_recent_deployments, \
may_be_perform_purging
def parse_args():
# type: () -> argparse.Namespace
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="This script can be used for cleaning old unused deployments.",
epilog="Orphaned/unused caches older than threshold days will be automatically "
@ -34,8 +33,7 @@ def parse_args():
args.verbose |= args.dry_run # Always print a detailed report in case of dry run.
return args
def get_deployments_to_be_purged(recent_deployments):
# type: (Set[str]) -> Set[str]
def get_deployments_to_be_purged(recent_deployments: Set[str]) -> Set[str]:
all_deployments = {os.path.join(DEPLOYMENTS_DIR, deployment)
for deployment in os.listdir(DEPLOYMENTS_DIR)}
deployments_to_purge = set()
@ -52,8 +50,7 @@ def get_deployments_to_be_purged(recent_deployments):
deployments_to_purge.add(deployment)
return deployments_to_purge
def main():
# type: () -> None
def main() -> None:
args = parse_args()
deployments_to_keep = get_recent_deployments(args.threshold_days)
deployments_to_purge = get_deployments_to_be_purged(deployments_to_keep)


@ -31,14 +31,12 @@ AUTOGENERATED_SETTINGS = [
'thumbor_key',
]
def generate_django_secretkey():
# type: () -> str
def generate_django_secretkey() -> str:
"""Secret key generation taken from Django's startproject.py"""
chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
return get_random_string(50, chars)
def get_old_conf(output_filename):
# type: (str) -> Dict[str, str]
def get_old_conf(output_filename: str) -> Dict[str, str]:
if not os.path.exists(output_filename) or os.path.getsize(output_filename) == 0:
return {}
@ -47,8 +45,7 @@ def get_old_conf(output_filename):
return dict(secrets_file.items("secrets"))
def generate_secrets(development=False):
# type: (bool) -> None
def generate_secrets(development: bool = False) -> None:
if development:
OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
else:
@ -59,12 +56,10 @@ def generate_secrets(development=False):
if len(current_conf) == 0:
lines = ['[secrets]\n']
def need_secret(name):
# type: (str) -> bool
def need_secret(name: str) -> bool:
return name not in current_conf
def add_secret(name, value):
# type: (str, str) -> None
def add_secret(name: str, value: str) -> None:
lines.append("%s = %s\n" % (name, value))
current_conf[name] = value


@ -19,8 +19,7 @@ parser = argparse.ArgumentParser()
parser.add_argument("tarball", help="Filename of input tarball")
def restore_backup(tarball_file):
# type: (IO[bytes]) -> None
def restore_backup(tarball_file: IO[bytes]) -> None:
su_to_zulip(save_suid=True)


@ -14,8 +14,7 @@ import subprocess
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from scripts.lib.zulip_tools import WARNING, FAIL, ENDC
def find_handlebars(translatable_strings):
# type: (List[str]) -> List[str]
def find_handlebars(translatable_strings: List[str]) -> List[str]:
errored = []
for string in translatable_strings:
if '{{' in string:

View File

@ -21,34 +21,29 @@ from typing import Any, Dict, Optional
# usage: python check-issue-labels
# Pass --force as an argument to run without a token.
def get_config():
# type: () -> ConfigParser
def get_config() -> ConfigParser:
config = ConfigParser()
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf.ini'))
return config
def area_labeled(issue):
# type: (Dict[str, Any]) -> bool
def area_labeled(issue: Dict[str, Any]) -> bool:
for label in issue["labels"]:
label_name = str(label["name"])
if "area:" in label_name:
return True
return False
def is_issue(item):
# type: (Dict[str, Any]) -> bool
def is_issue(item: Dict[str, Any]) -> bool:
return "issues" in item["html_url"]
def get_next_page_url(link_header):
# type: (str) -> Optional[str]
def get_next_page_url(link_header: str) -> Optional[str]:
matches = re.findall(r'\<(\S+)\>; rel=\"next\"', link_header)
try:
return matches[0]
except IndexError:
return None
def check_issue_labels():
# type: () -> None
def check_issue_labels() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--force', action="store_true", dest="force", default=False)
args = parser.parse_args()


@ -12,8 +12,7 @@ from tools.lib.test_script import (
assert_provisioning_status_ok,
)
def run():
# type: () -> None
def run() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--force', default=False,
action="store_true",


@ -22,8 +22,7 @@ EXCLUDED_FILES = [
'static/assets/icons/template.hbs',
]
def check_our_files(modified_only, all_dups, fix, targets):
# type: (bool, bool, bool, List[str]) -> None
def check_our_files(modified_only: bool, all_dups: bool, fix: bool, targets: List[str]) -> None:
by_lang = lister.list_files(
targets=targets,
modified_only=args.modified,
@ -34,8 +33,7 @@ def check_our_files(modified_only, all_dups, fix, targets):
check_handlebar_templates(by_lang['hbs'], fix)
check_html_templates(by_lang['html'], all_dups, fix)
def check_html_templates(templates, all_dups, fix):
# type: (Iterable[str], bool, bool) -> None
def check_html_templates(templates: Iterable[str], all_dups: bool, fix: bool) -> None:
# Our files with .html extensions are usually for Django, but we also
# have a few static .html files.
#
@ -143,8 +141,7 @@ def check_html_templates(templates, all_dups, fix):
if not validate_indent_html(fn, fix):
sys.exit(1)
def check_handlebar_templates(templates, fix):
# type: (Iterable[str], bool) -> None
def check_handlebar_templates(templates: Iterable[str], fix: bool) -> None:
# Check all our handlebars templates.
templates = [fn for fn in templates if fn.endswith('.hbs')]


@ -14,8 +14,7 @@ import ujson
Call = Dict[str, Any]
def clean_up_pattern(s):
# type: (str) -> str
def clean_up_pattern(s: str) -> str:
paren_level = 0
in_braces = False
result = ''
@ -35,8 +34,7 @@ def clean_up_pattern(s):
prior_char = c
return result
def encode_info(info):
# type: (Any) -> str
def encode_info(info: Any) -> str:
try:
result = ''
try:
@ -54,12 +52,10 @@ def encode_info(info):
pass
return 'NOT ENCODABLE'
def fix_test_name(s):
# type: (str) -> str
def fix_test_name(s: str) -> str:
return s.replace('zerver.tests.', '')
def create_single_page(pattern, out_dir, href, calls):
# type: (str, str, str, List[Call]) -> None
def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call]) -> None:
fn = out_dir + '/' + href
with open(fn, 'w') as f:
f.write('''
@ -85,8 +81,7 @@ def create_single_page(pattern, out_dir, href, calls):
f.write('<br>')
f.write('</div>')
def create_user_docs():
# type: () -> None
def create_user_docs() -> None:
fn = 'var/url_coverage.txt' # TODO: make path more robust, maybe use json suffix
out_dir = 'var/api_docs'


@ -14,8 +14,7 @@ sys.path.insert(0, ROOT_DIR)
from scripts.lib.zulip_tools import get_dev_uuid_var_path
UUID_VAR_PATH = get_dev_uuid_var_path()
def run(check_func):
# type: (Callable[[], bool]) -> None
def run(check_func: Callable[[], bool]) -> None:
'''
This decorator simply runs functions. It makes it more
convenient to add new checks without a big main() function.
@ -24,32 +23,27 @@ def run(check_func):
if not rc:
sys.exit(1)
def run_command(args):
# type: (List[str]) -> None
def run_command(args: List[str]) -> None:
print(' '.join(map(shlex.quote, args)))
subprocess.check_call(args)
@run
def check_python_version():
# type: () -> bool
def check_python_version() -> bool:
subprocess.check_call(['/usr/bin/env', 'python', '-V'])
return True
@run
def pwd():
# type: () -> bool
def pwd() -> bool:
print(os.getcwd())
return True
@run
def host_info():
# type: () -> bool
def host_info() -> bool:
print(platform.platform())
return True
@run
def check_django():
# type: () -> bool
def check_django() -> bool:
try:
import django
print('Django version:', django.get_version())
@ -70,8 +64,7 @@ def check_django():
return False
@run
def provision_version():
# type: () -> bool
def provision_version() -> bool:
fn = os.path.join(UUID_VAR_PATH, 'provision_version')
with open(fn) as f:
version = f.read().strip()
@ -84,15 +77,13 @@ def provision_version():
return True
@run
def node_stuff():
# type: () -> bool
def node_stuff() -> bool:
print('node version:')
subprocess.check_call(['node', '--version'])
return True
@run
def test_models():
# type: () -> bool
def test_models() -> bool:
settings_module = "zproject.settings"
os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
import django
@ -103,8 +94,7 @@ def test_models():
return True
@run
def check_venv():
# type: () -> bool
def check_venv() -> bool:
path = os.path.join(ROOT_DIR, 'scripts', 'lib', 'hash_reqs.py')
cache_dir = '/srv/zulip-venv-cache/'
for fn in ['dev.txt']:
@ -118,8 +108,7 @@ def check_venv():
return True
@run
def check_migrations():
# type: () -> bool
def check_migrations() -> bool:
print()
rc = subprocess.check_call('./tools/test-migrations')
return (rc == 0)


@ -33,14 +33,12 @@ parser.add_argument("username", help="Github username for whom you want to creat
parser.add_argument('--tags', nargs='+', default=[])
parser.add_argument('-f', '--recreate', dest='recreate', action="store_true", default=False)
def get_config():
# type: () -> configparser.ConfigParser
def get_config() -> configparser.ConfigParser:
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf.ini'))
return config
def user_exists(username):
# type: (str) -> bool
def user_exists(username: str) -> bool:
print("Checking to see if GitHub user {} exists...".format(username))
user_api_url = "https://api.github.com/users/{}".format(username)
try:
@ -53,8 +51,7 @@ def user_exists(username):
print("Does the github user {} exist?".format(username))
sys.exit(1)
def get_keys(username):
# type: (str) -> List[Dict[str, Any]]
def get_keys(username: str) -> List[Dict[str, Any]]:
print("Checking to see that GitHub user has available public keys...")
apiurl_keys = "https://api.github.com/users/{}/keys".format(username)
try:
@ -70,8 +67,7 @@ def get_keys(username):
print("Has user {} added ssh keys to their github account?".format(username))
sys.exit(1)
def fork_exists(username):
# type: (str) -> bool
def fork_exists(username: str) -> bool:
print("Checking to see GitHub user has forked zulip/zulip...")
apiurl_fork = "https://api.github.com/repos/{}/zulip".format(username)
try:
@ -100,8 +96,7 @@ def exit_if_droplet_exists(my_token: str, username: str, recreate: bool) -> None
return
print("...No droplet found...proceeding.")
def set_user_data(username, userkeys):
# type: (str, List[Dict[str, Any]]) -> str
def set_user_data(username: str, userkeys: List[Dict[str, Any]]) -> str:
print("Setting cloud-config data, populated with GitHub user's public keys...")
ssh_authorized_keys = ""
@ -137,8 +132,7 @@ cd /home/zulipdev/{1} && git remote add origin https://github.com/{0}/{1}.git &&
print("...returning cloud-config data.")
return cloudconf
def create_droplet(my_token, template_id, username, tags, user_data):
# type: (str, str, str, List[str], str) -> str
def create_droplet(my_token: str, template_id: str, username: str, tags: List[str], user_data: str) -> str:
droplet = digitalocean.Droplet(
token=my_token,
name='{}.zulipdev.org'.format(username),
@ -177,8 +171,7 @@ def delete_existing_records(records: List[digitalocean.Record], record_name: str
if count:
print("Deleted {} existing A records for {}.zulipdev.org.".format(count, record_name))
def create_dns_record(my_token, username, ip_address):
# type: (str, str, str) -> None
def create_dns_record(my_token: str, username: str, ip_address: str) -> None:
domain = digitalocean.Domain(token=my_token, name='zulipdev.org')
domain.load()
records = domain.get_records()
@ -192,8 +185,7 @@ def create_dns_record(my_token, username, ip_address):
print("Creating new A record for *.{}.zulipdev.org that points to {}.".format(username, ip_address))
domain.create_new_domain_record(type='A', name=wildcard_name, data=ip_address)
def print_completion(username):
# type: (str) -> None
def print_completion(username: str) -> None:
print("""
COMPLETE! Droplet for GitHub user {0} is available at {0}.zulipdev.org.


@ -9,12 +9,10 @@ sanity_check.check_venv(__file__)
from typing import Any, Dict, List
def debug(obj):
# type: (Any) -> None
def debug(obj: Any) -> None:
print(json.dumps(obj, indent=4))
def parse_file(fn):
# type: (str) -> Dict[str, Any]
def parse_file(fn: str) -> Dict[str, Any]:
with open(fn) as f:
text = f.read()
tags = re.findall(r'{+\s*(.*?)\s*}+', text)
@ -22,8 +20,7 @@ def parse_file(fn):
context = root
stack = [] # type: List[Dict[str, Any]]
def set_var(var, val):
# type: (str, Any) -> None
def set_var(var: str, val: Any) -> None:
num_levels_up = len(re.findall(r'\.\.', var))
if num_levels_up:
var = var.split('/')[-1]
@ -95,8 +92,7 @@ def parse_file(fn):
set_var(tag, '')
def clean_this(obj):
# type: (Any) -> Any
def clean_this(obj: Any) -> Any:
if isinstance(obj, list):
return [clean_this(item) for item in obj]
if isinstance(obj, dict):


@ -46,8 +46,7 @@ USAGE = '''
TODO: allow specific files to be searched.'''
def check_our_files():
# type: () -> None
def check_our_files() -> None:
parser = argparse.ArgumentParser(description=USAGE,
formatter_class=argparse.RawTextHelpFormatter)


@ -31,8 +31,7 @@ JS_FILES_DIR = os.path.join(ROOT_DIR, 'static/js')
OUTPUT_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.dot'))
PNG_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.png'))
def get_js_edges():
# type: () -> Tuple[EdgeSet, MethodDict]
def get_js_edges() -> Tuple[EdgeSet, MethodDict]:
names = set()
modules = [] # type: List[Dict[str, Any]]
for js_file in os.listdir(JS_FILES_DIR):
@ -75,8 +74,7 @@ def get_js_edges():
methods[tup].append(method)
return edges, methods
def find_edges_to_remove(graph, methods):
# type: (Graph, MethodDict) -> Tuple[Graph, List[Edge]]
def find_edges_to_remove(graph: Graph, methods: MethodDict) -> Tuple[Graph, List[Edge]]:
EXEMPT_EDGES = [
# These are sensible dependencies, so don't cut them.
('rows', 'message_store'),
@ -144,8 +142,7 @@ def find_edges_to_remove(graph, methods):
('message_edit', 'resize'),
] # type: List[Edge]
def is_exempt(edge):
# type: (Tuple[str, str]) -> bool
def is_exempt(edge: Tuple[str, str]) -> bool:
parent, child = edge
if edge == ('server_events', 'reload'):
return False
@ -223,8 +220,7 @@ def find_edges_to_remove(graph, methods):
('emoji_picker', 'reactions'),
]
def cut_is_legal(edge):
# type: (Edge) -> bool
def cut_is_legal(edge: Edge) -> bool:
parent, child = edge
if child in ['reload', 'popovers', 'overlays', 'notifications',
'server_events', 'compose_actions']:
@ -255,8 +251,7 @@ def find_edges_to_remove(graph, methods):
return graph, removed_edges
def report_roadmap(edges, methods):
# type: (List[Edge], MethodDict) -> None
def report_roadmap(edges: List[Edge], methods: MethodDict) -> None:
child_modules = {child for parent, child in edges}
module_methods = defaultdict(set) # type: DefaultDict[str, Set[str]]
callers = defaultdict(set) # type: DefaultDict[Tuple[str, str], Set[str]]
@ -277,8 +272,7 @@ def report_roadmap(edges, methods):
print()
print()
def produce_partial_output(graph):
# type: (Graph) -> None
def produce_partial_output(graph: Graph) -> None:
print(graph.num_edges())
buffer = make_dot_file(graph)
@ -290,8 +284,7 @@ def produce_partial_output(graph):
print('See dot file here: {}'.format(OUTPUT_FILE_PATH))
print('See output png file: {}'.format(PNG_FILE_PATH))
def run():
# type: () -> None
def run() -> None:
edges, methods = get_js_edges()
graph = Graph(edges)
graph, removed_edges = find_edges_to_remove(graph, methods)


@ -180,8 +180,7 @@ BANNED_WORDS = {
'Use organization instead.'),
}
def get_safe_phrase(phrase):
# type: (str) -> str
def get_safe_phrase(phrase: str) -> str:
"""
Safe phrase is in lower case and doesn't contain characters which can
conflict with split boundaries. All conflicting characters are replaced
@ -190,8 +189,7 @@ def get_safe_phrase(phrase):
phrase = SPLIT_BOUNDARY_REGEX.sub('_', phrase)
return phrase.lower()
def replace_with_safe_phrase(matchobj):
# type: (Match[str]) -> str
def replace_with_safe_phrase(matchobj: Match[str]) -> str:
"""
The idea is to convert IGNORED_PHRASES into safe phrases, see
`get_safe_phrase()` function. The only exception is when the
@ -215,8 +213,7 @@ def replace_with_safe_phrase(matchobj):
return safe_string
def get_safe_text(text):
# type: (str) -> str
def get_safe_text(text: str) -> str:
"""
This returns text which is rendered by BeautifulSoup and is in the
form that can be split easily and has all IGNORED_PHRASES processed.
@ -228,8 +225,7 @@ def get_safe_text(text):
return text
def is_capitalized(safe_text):
# type: (str) -> bool
def is_capitalized(safe_text: str) -> bool:
sentences = SPLIT_BOUNDARY_REGEX.split(safe_text)
sentences = [sentence.strip()
for sentence in sentences if sentence.strip()]
@ -259,8 +255,7 @@ def check_banned_words(text: str) -> List[str]:
return errors
def check_capitalization(strings):
# type: (List[str]) -> Tuple[List[str], List[str], List[str]]
def check_capitalization(strings: List[str]) -> Tuple[List[str], List[str], List[str]]:
errors = []
ignored = []
banned_word_errors = []


@ -84,8 +84,7 @@ imperative_forms = sorted([
])
def head_binary_search(key, words):
# type: (Text, List[str]) -> str
def head_binary_search(key: Text, words: List[str]) -> str:
""" Find the imperative mood version of `word` by looking at the first
3 characters. """
@ -124,8 +123,7 @@ class ImperativeMood(LineRule):
error_msg = ('The first word in commit title should be in imperative mood '
'("{word}" -> "{imperative}"): "{title}"')
def validate(self, line, commit):
# type: (Text, gitlint.commit) -> List[RuleViolation]
def validate(self, line: Text, commit: gitlint.commit) -> List[RuleViolation]:
violations = []
# Ignore the section tag (ie `<section tag>: <message body>.`)
@ -153,8 +151,7 @@ class TitleMatchRegexAllowException(LineRule):
target = CommitMessageTitle
options_spec = [StrOption('regex', ".*", "Regex the title should match")]
def validate(self, title, commit):
# type: (Text, gitlint.commit) -> List[RuleViolation]
def validate(self, title: Text, commit: gitlint.commit) -> List[RuleViolation]:
regex = self.options['regex'].value
pattern = re.compile(regex, re.UNICODE)


@ -6,8 +6,7 @@ Edge = Tuple[str, str]
EdgeSet = Set[Edge]
class Graph:
def __init__(self, tuples):
# type: (EdgeSet) -> None
def __init__(self, tuples: EdgeSet) -> None:
self.children = defaultdict(list) # type: DefaultDict[str, List[str]]
self.parents = defaultdict(list) # type: DefaultDict[str, List[str]]
self.nodes = set() # type: Set[str]
@ -18,30 +17,25 @@ class Graph:
self.nodes.add(parent)
self.nodes.add(child)
def copy(self):
# type: () -> 'Graph'
def copy(self) -> 'Graph':
return Graph(self.edges())
def num_edges(self):
# type: () -> int
def num_edges(self) -> int:
return len(self.edges())
def minus_edge(self, edge):
# type: (Edge) -> 'Graph'
def minus_edge(self, edge: Edge) -> 'Graph':
edges = self.edges().copy()
edges.remove(edge)
return Graph(edges)
def edges(self):
# type: () -> EdgeSet
def edges(self) -> EdgeSet:
s = set()
for parent in self.nodes:
for child in self.children[parent]:
s.add((parent, child))
return s
def remove_exterior_nodes(self):
# type: () -> None
def remove_exterior_nodes(self) -> None:
still_work_to_do = True
while still_work_to_do:
still_work_to_do = False # for now
@ -51,8 +45,7 @@ class Graph:
still_work_to_do = True
break
def is_exterior_node(self, node):
# type: (str) -> bool
def is_exterior_node(self, node: str) -> bool:
parents = self.parents[node]
children = self.children[node]
if not parents:
@ -66,16 +59,14 @@ class Graph:
# effectively be collapsed into the parent, so don't add clutter.
return parents[0] == children[0]
def remove(self, node):
# type: (str) -> None
def remove(self, node: str) -> None:
for parent in self.parents[node]:
self.children[parent].remove(node)
for child in self.children[node]:
self.parents[child].remove(node)
self.nodes.remove(node)
def report(self):
# type: () -> None
def report(self) -> None:
print('parents/children/module')
tups = sorted([
(len(self.parents[node]), len(self.children[node]), node)
@ -83,14 +74,12 @@ class Graph:
for tup in tups:
print(tup)
def best_edge_to_remove(orig_graph, is_exempt):
# type: (Graph, Callable[[Edge], bool]) -> Optional[Edge]
def best_edge_to_remove(orig_graph: Graph, is_exempt: Callable[[Edge], bool]) -> Optional[Edge]:
# expects an already reduced graph as input
orig_edges = orig_graph.edges()
def get_choices():
# type: () -> Iterator[Tuple[int, Edge]]
def get_choices() -> Iterator[Tuple[int, Edge]]:
for edge in orig_edges:
if is_exempt(edge):
continue
@ -107,8 +96,7 @@ def best_edge_to_remove(orig_graph, is_exempt):
raise Exception('no edges work here')
return best_edge
def make_dot_file(graph):
# type: (Graph) -> str
def make_dot_file(graph: Graph) -> str:
buffer = 'digraph G {\n'
for node in graph.nodes:
buffer += node + ';\n'
@ -117,8 +105,7 @@ def make_dot_file(graph):
buffer += '}'
return buffer
def test():
# type: () -> None
def test() -> None:
graph = Graph({
('x', 'a'),
('a', 'b'),


@ -21,8 +21,7 @@ class HtmlTreeBranch:
conceptually be something like "p div(#yo) span(.bar)".
"""
def __init__(self, tags, fn):
# type: (List['TagInfo'], Optional[str]) -> None
def __init__(self, tags: List['TagInfo'], fn: Optional[str]) -> None:
self.tags = tags
self.fn = fn
self.line = tags[-1].token.line
@ -32,8 +31,7 @@ class HtmlTreeBranch:
for word in tag.words:
self.words.add(word)
def staircase_text(self):
# type: () -> str
def staircase_text(self) -> str:
"""
produces representation of a node in staircase-like format:
@ -49,8 +47,7 @@ class HtmlTreeBranch:
indent += ' ' * 4
return res
def text(self):
# type: () -> str
def text(self) -> str:
"""
produces one-line representation of branch:
@ -60,16 +57,15 @@ class HtmlTreeBranch:
class Node:
def __init__(self, token, parent): # FIXME parent parameter is not used!
# type: (Token, Optional[Node]) -> None
def __init__(self, token: Token, parent: "Optional[Node]") -> None:
# FIXME parent parameter is not used!
self.token = token
self.children = [] # type: List[Node]
self.parent = None # type: Optional[Node]
class TagInfo:
def __init__(self, tag, classes, ids, token):
# type: (str, List[str], List[str], Token) -> None
def __init__(self, tag: str, classes: List[str], ids: List[str], token: Token) -> None:
self.tag = tag
self.classes = classes
self.ids = ids
@ -79,8 +75,7 @@ class TagInfo:
['.' + s for s in classes] + \
['#' + s for s in ids]
def text(self):
# type: () -> str
def text(self) -> str:
s = self.tag
if self.classes:
s += '.' + '.'.join(self.classes)
@ -89,8 +84,7 @@ class TagInfo:
return s
def get_tag_info(token):
# type: (Token) -> TagInfo
def get_tag_info(token: Token) -> TagInfo:
s = token.s
tag = token.tag
classes = [] # type: List[str]
@ -112,8 +106,7 @@ def get_tag_info(token):
return TagInfo(tag=tag, classes=classes, ids=ids, token=token)
def split_for_id_and_class(element):
# type: (str) -> List[str]
def split_for_id_and_class(element: str) -> List[str]:
# Here we split a given string which is expected to contain id or class
# attributes from HTML tags. This also takes care of template variables
# in string during splitting process. For eg. 'red black {{ a|b|c }}'
@ -139,13 +132,11 @@ def split_for_id_and_class(element):
return lst
def html_branches(text, fn=None):
# type: (str, Optional[str]) -> List[HtmlTreeBranch]
def html_branches(text: str, fn: Optional[str] = None) -> List[HtmlTreeBranch]:
tree = html_tag_tree(text)
branches = [] # type: List[HtmlTreeBranch]
def walk(node, tag_info_list=None):
# type: (Node, Optional[List[TagInfo]]) -> None
def walk(node: Node, tag_info_list: Optional[List[TagInfo]] = None) -> None:
info = get_tag_info(node.token)
if tag_info_list is None:
tag_info_list = [info]
@ -165,8 +156,7 @@ def html_branches(text, fn=None):
return branches
def html_tag_tree(text):
# type: (str) -> Node
def html_tag_tree(text: str) -> Node:
tokens = tokenize(text)
top_level = Node(token=None, parent=None)
stack = [top_level]
@ -188,8 +178,7 @@ def html_tag_tree(text):
return top_level
def build_id_dict(templates):
# type: (List[str]) -> (Dict[str, List[str]])
def build_id_dict(templates: List[str]) -> (Dict[str, List[str]]):
template_id_dict = defaultdict(list) # type: (Dict[str, List[str]])
for fn in templates:


@ -3,8 +3,7 @@ from typing import Dict, List, Set
from .html_branches import html_branches, HtmlTreeBranch
def show_all_branches(fns):
# type: (List[str]) -> None
def show_all_branches(fns: List[str]) -> None:
for fn in fns:
print(fn)
with open(fn) as f:
@ -21,8 +20,7 @@ class Grepper:
HtmlTreeBranch objects.
'''
def __init__(self, fns):
# type: (List[str]) -> None
def __init__(self, fns: List[str]) -> None:
all_branches = [] # type: List[HtmlTreeBranch]
for fn in fns:
@ -38,8 +36,7 @@ class Grepper:
self.all_branches = set(all_branches)
def grep(self, word_set):
# type: (Set[str]) -> None
def grep(self, word_set: Set[str]) -> None:
words = list(word_set) # type: List[str]
@ -57,7 +54,6 @@ class Grepper:
print(branch.staircase_text())
print('')
def grep(fns, words):
# type: (List[str], Set[str]) -> None
def grep(fns: List[str], words: Set[str]) -> None:
grepper = Grepper(fns)
grepper.grep(words)


@ -9,8 +9,7 @@ from zulint.printer import GREEN, ENDC
import subprocess
def pretty_print_html(html, num_spaces=4):
# type: (str, int) -> str
def pretty_print_html(html: str, num_spaces: int = 4) -> str:
# We use 1-based indexing for both rows and columns.
tokens = tokenize(html)
lines = html.split('\n')
@ -191,8 +190,7 @@ def pretty_print_html(html, num_spaces=4):
return '\n'.join(formatted_lines)
def validate_indent_html(fn, fix):
# type: (str, bool) -> int
def validate_indent_html(fn: str, fix: bool) -> int:
with open(fn) as f:
html = f.read()
phtml = pretty_print_html(html)


@ -217,8 +217,7 @@ REPO_STOPWORDS_PATH = os.path.join(
"zulip_english.stop",
)
def install_system_deps():
# type: () -> None
def install_system_deps() -> None:
# By doing list -> set -> list conversion, we remove duplicates.
deps_to_install = sorted(set(SYSTEM_DEPENDENCIES))
@ -235,8 +234,7 @@ def install_system_deps():
if BUILD_PGROONGA_FROM_SOURCE:
run_as_root(["./scripts/lib/build-pgroonga"])
def install_apt_deps(deps_to_install):
# type: (List[str]) -> None
def install_apt_deps(deps_to_install: List[str]) -> None:
# setup-apt-repo does an `apt-get update` if the sources.list files changed.
run_as_root(["./scripts/lib/setup-apt-repo"])
@ -253,8 +251,7 @@ def install_apt_deps(deps_to_install):
+ deps_to_install
)
def install_yum_deps(deps_to_install):
# type: (List[str]) -> None
def install_yum_deps(deps_to_install: List[str]) -> None:
print(WARNING + "RedHat support is still experimental.")
run_as_root(["./scripts/lib/setup-yum-repo"])
@ -314,8 +311,7 @@ def install_yum_deps(deps_to_install):
overwrite_symlink("/usr/share/myspell/en_US.aff", "/usr/pgsql-%s/share/tsearch_data/en_us.affix"
% (POSTGRES_VERSION,))
def main(options):
# type: (argparse.Namespace) -> NoReturn
def main(options: argparse.Namespace) -> "NoReturn":
# yarn and management commands expect to be run from the root of the
# project.


@ -33,12 +33,10 @@ def create_var_directories() -> None:
path = os.path.join(var_dir, sub_dir)
os.makedirs(path, exist_ok=True)
def setup_shell_profile(shell_profile):
# type: (str) -> None
def setup_shell_profile(shell_profile: str) -> None:
shell_profile_path = os.path.expanduser(shell_profile)
def write_command(command):
# type: (str) -> None
def write_command(command: str) -> None:
if os.path.exists(shell_profile_path):
with open(shell_profile_path) as shell_profile_file:
lines = [line.strip() for line in shell_profile_file.readlines()]


@ -2,8 +2,7 @@ import os
import pwd
import sys
def check_venv(filename):
# type: (str) -> None
def check_venv(filename: str) -> None:
try:
import django
import ujson


@ -1,30 +1,25 @@
from typing import Callable, List, Optional, Text
class TemplateParserException(Exception):
def __init__(self, message):
# type: (str) -> None
def __init__(self, message: str) -> None:
self.message = message
def __str__(self):
# type: () -> str
def __str__(self) -> str:
return self.message
class TokenizationException(Exception):
def __init__(self, message, line_content=None):
# type: (str, Optional[str]) -> None
def __init__(self, message: str, line_content: Optional[str] = None) -> None:
self.message = message
self.line_content = line_content
class TokenizerState:
def __init__(self):
# type: () -> None
def __init__(self) -> None:
self.i = 0
self.line = 1
self.col = 1
class Token:
def __init__(self, kind, s, tag, line, col, line_span):
# type: (str, str, str, int, int, int) -> None
def __init__(self, kind: str, s: str, tag: str, line: int, col: int, line_span: int) -> None:
self.kind = kind
self.s = s
self.tag = tag
@ -32,10 +27,8 @@ class Token:
self.col = col
self.line_span = line_span
def tokenize(text):
# type: (str) -> List[Token]
def advance(n):
# type: (int) -> None
def tokenize(text: str) -> List[Token]:
def advance(n: int) -> None:
for _ in range(n):
state.i += 1
if state.i >= 0 and text[state.i - 1] == '\n':
@ -44,55 +37,43 @@ def tokenize(text):
else:
state.col += 1
def looking_at(s):
# type: (str) -> bool
def looking_at(s: str) -> bool:
return text[state.i:state.i+len(s)] == s
def looking_at_htmlcomment():
# type: () -> bool
def looking_at_htmlcomment() -> bool:
return looking_at("<!--")
def looking_at_handlebarcomment():
# type: () -> bool
def looking_at_handlebarcomment() -> bool:
return looking_at("{{!")
def looking_at_djangocomment():
# type: () -> bool
def looking_at_djangocomment() -> bool:
return looking_at("{#")
def looking_at_handlebarpartial() -> bool:
return looking_at("{{>")
def looking_at_html_start():
# type: () -> bool
def looking_at_html_start() -> bool:
return looking_at("<") and not looking_at("</")
def looking_at_html_end():
# type: () -> bool
def looking_at_html_end() -> bool:
return looking_at("</")
def looking_at_handlebars_start():
# type: () -> bool
def looking_at_handlebars_start() -> bool:
return looking_at("{{#") or looking_at("{{^")
def looking_at_handlebars_end():
# type: () -> bool
def looking_at_handlebars_end() -> bool:
return looking_at("{{/")
def looking_at_django_start():
# type: () -> bool
def looking_at_django_start() -> bool:
return looking_at("{% ") and not looking_at("{% end")
def looking_at_django_end():
# type: () -> bool
def looking_at_django_end() -> bool:
return looking_at("{% end")
def looking_at_jinja2_end_whitespace_stripped():
# type: () -> bool
def looking_at_jinja2_end_whitespace_stripped() -> bool:
return looking_at("{%- end")
def looking_at_jinja2_start_whitespace_stripped_type2():
# type: () -> bool
def looking_at_jinja2_start_whitespace_stripped_type2() -> bool:
# This function detects tag like {%- if foo -%}...{% endif %}
return looking_at("{%-") and not looking_at("{%- end")
@ -206,8 +187,7 @@ def tokenize(text):
return tokens
def validate(fn=None, text=None, check_indent=True):
# type: (Optional[str], Optional[str], bool) -> None
def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent: bool = True) -> None:
assert fn or text
if fn is None:
@ -220,13 +200,11 @@ def validate(fn=None, text=None, check_indent=True):
tokens = tokenize(text)
class State:
def __init__(self, func):
# type: (Callable[[Token], None]) -> None
def __init__(self, func: Callable[[Token], None]) -> None:
self.depth = 0
self.matcher = func
def no_start_tag(token):
# type: (Token) -> None
def no_start_tag(token: Token) -> None:
raise TemplateParserException('''
No start tag
fn: %s
@ -237,8 +215,7 @@ def validate(fn=None, text=None, check_indent=True):
state = State(no_start_tag)
def start_tag_matcher(start_token):
# type: (Token) -> None
def start_tag_matcher(start_token: Token) -> None:
state.depth += 1
start_tag = start_token.tag.strip('~')
start_line = start_token.line
@ -246,8 +223,7 @@ def validate(fn=None, text=None, check_indent=True):
old_matcher = state.matcher
def f(end_token):
# type: (Token) -> None
def f(end_token: Token) -> None:
end_tag = end_token.tag.strip('~')
end_line = end_token.line
@ -305,8 +281,7 @@ def validate(fn=None, text=None, check_indent=True):
if state.depth != 0:
raise TemplateParserException('Missing end tag')
def is_special_html_tag(s, tag):
# type: (str, str) -> bool
def is_special_html_tag(s: str, tag: str) -> bool:
return tag in ['link', 'meta', '!DOCTYPE']
def is_self_closing_html_tag(s: Text, tag: Text) -> bool:
@ -327,8 +302,7 @@ def is_self_closing_html_tag(s: Text, tag: Text) -> bool:
singleton_tag = s.endswith('/>')
return self_closing_tag or singleton_tag
def is_django_block_tag(tag):
# type: (str) -> bool
def is_django_block_tag(tag: str) -> bool:
return tag in [
'autoescape',
'block',
@ -344,8 +318,7 @@ def is_django_block_tag(tag):
'with',
]
def get_handlebars_tag(text, i):
# type: (str, int) -> str
def get_handlebars_tag(text: str, i: int) -> str:
end = i + 2
while end < len(text) - 1 and text[end] != '}':
end += 1
@ -354,8 +327,7 @@ def get_handlebars_tag(text, i):
s = text[i:end+2]
return s
def get_django_tag(text, i, stripped=False):
# type: (str, int, bool) -> str
def get_django_tag(text: str, i: int, stripped: bool = False) -> str:
end = i + 2
if stripped:
end += 1
@ -366,8 +338,7 @@ def get_django_tag(text, i, stripped=False):
s = text[i:end+2]
return s
def get_html_tag(text, i):
# type: (str, int) -> str
def get_html_tag(text: str, i: int) -> str:
quote_count = 0
end = i + 1
unclosed_end = 0
@ -387,8 +358,7 @@ def get_html_tag(text, i):
s = text[i:end+1]
return s
def get_html_comment(text, i):
# type: (str, int) -> str
def get_html_comment(text: str, i: int) -> str:
end = i + 7
unclosed_end = 0
while end <= len(text):
@ -399,8 +369,7 @@ def get_html_comment(text, i):
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_comment(text, i):
# type: (str, int) -> str
def get_handlebar_comment(text: str, i: int) -> str:
end = i + 5
unclosed_end = 0
while end <= len(text):
@ -411,8 +380,7 @@ def get_handlebar_comment(text, i):
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_django_comment(text, i):
# type: (str, int) -> str
def get_django_comment(text: str, i: int) -> str:
end = i + 4
unclosed_end = 0
while end <= len(text):
@ -423,8 +391,7 @@ def get_django_comment(text, i):
end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_partial(text, i):
# type: (str, int) -> str
def get_handlebar_partial(text: str, i: int) -> str:
end = i + 10
unclosed_end = 0
while end <= len(text):
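
The tokenize() hunks above all apply the same conversion: the type comment under each nested helper is folded into the def line as inline annotations. Below is a minimal standalone sketch of that converted style, with hypothetical helper bodies rather than the real template_parser internals:

from typing import List

def tokenize(text: str) -> List[str]:
    # State lives in the enclosing scope; each helper carries inline
    # annotations instead of a separate "# type:" comment.
    pos = 0

    def looking_at(s: str) -> bool:
        # True if text continues with the literal string s at pos.
        return text[pos:pos + len(s)] == s

    def advance(n: int) -> None:
        # Move the cursor forward by n characters.
        nonlocal pos
        pos += n

    tokens = []  # type: List[str]
    while pos < len(text):
        if looking_at("{{"):
            tokens.append("handlebars-start")
            advance(2)
        else:
            tokens.append(text[pos])
            advance(1)
    return tokens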

View File

@ -13,8 +13,7 @@ ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__f
def get_major_version(v: str) -> int:
return int(v.split('.')[0])
def get_version_file():
# type: () -> str
def get_version_file() -> str:
uuid_var_path = get_dev_uuid_var_path()
return os.path.join(uuid_var_path, 'provision_version')

View File

@ -23,8 +23,7 @@ if TOOLS_DIR not in sys.path:
from zerver.lib.test_fixtures import update_test_databases_if_required
from scripts.lib.zulip_tools import get_or_create_dev_uuid_var_path
def set_up_django(external_host):
# type: (str) -> None
def set_up_django(external_host: str) -> None:
os.environ['EXTERNAL_HOST'] = external_host
os.environ["TORNADO_SERVER"] = "http://127.0.0.1:9983"
os.environ["LOCAL_UPLOADS_DIR"] = get_or_create_dev_uuid_var_path(
@ -33,8 +32,7 @@ def set_up_django(external_host):
django.setup()
os.environ['PYTHONUNBUFFERED'] = 'y'
def assert_server_running(server, log_file):
# type: (subprocess.Popen[bytes], Optional[str]) -> None
def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
"""Get the exit code of the server, or None if it is still running."""
if server.poll() is not None:
message = 'Server died unexpectedly!'
@ -42,8 +40,7 @@ def assert_server_running(server, log_file):
message += '\nSee %s\n' % (log_file,)
raise RuntimeError(message)
def server_is_up(server, log_file):
# type: (subprocess.Popen[bytes], Optional[str]) -> bool
def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
assert_server_running(server, log_file)
try:
# We could get a 501 error if the reverse proxy is up but the Django app isn't.
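
The assert_server_running and server_is_up hunks keep their Popen annotations quoted because subprocess.Popen is not subscriptable at runtime before Python 3.9; written as a string, the annotation is only evaluated by the type checker. A small illustration with a hypothetical wait_for helper:

import subprocess

# Unquoted, subprocess.Popen[bytes] would raise TypeError at import time
# on Python < 3.9; the string form is a forward reference that only the
# type checker evaluates.
def wait_for(server: "subprocess.Popen[bytes]") -> int:
    return server.wait()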

View File

@ -11,8 +11,7 @@ from linter_lib.custom_check import python_rules, non_py_rules
from zulint.command import add_default_linter_arguments, LinterConfig
import random
def run():
# type: () -> None
def run() -> None:
parser = argparse.ArgumentParser()
parser.add_argument('--force', default=False,
action="store_true",
@ -96,15 +95,13 @@ def run():
"(config: ./tools/sgrep.yml)")
@linter_config.lint
def custom_py():
# type: () -> int
def custom_py() -> int:
"""Runs custom checks for python files (config: tools/linter_lib/custom_check.py)"""
failed = python_rules.check(by_lang, verbose=args.verbose)
return 1 if failed else 0
@linter_config.lint
def custom_nonpy():
# type: () -> int
def custom_nonpy() -> int:
"""Runs custom checks for non-python files (config: tools/linter_lib/custom_check.py)"""
failed = False
for rule in non_py_rules:
@ -112,8 +109,7 @@ def run():
return 1 if failed else 0
@linter_config.lint
def pyflakes():
# type: () -> int
def pyflakes() -> int:
"""Standard Python bug and code smell linter (config: tools/linter_lib/pyflakes.py)"""
failed = check_pyflakes(by_lang['py'], args)
return 1 if failed else 0
@ -122,15 +118,13 @@ def run():
python_part2 = {y for y in by_lang['py'] if y not in python_part1}
@linter_config.lint
def pep8_1of2():
# type: () -> int
def pep8_1of2() -> int:
"""Standard Python style linter on 50% of files (config: tools/linter_lib/pep8.py)"""
failed = check_pep8(list(python_part1))
return 1 if failed else 0
@linter_config.lint
def pep8_2of2():
# type: () -> int
def pep8_2of2() -> int:
"""Standard Python style linter on other 50% of files (config: tools/linter_lib/pep8.py)"""
failed = check_pep8(list(python_part2))
return 1 if failed else 0
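
The lint hunks above give each zero-argument check registered via the linter_config.lint decorator an explicit -> int return annotation. A generic sketch of that registered-check pattern, using a hypothetical register/run_all pair rather than zulint's actual API:

from typing import Callable, List

checks = []  # type: List[Callable[[], int]]

def register(func: Callable[[], int]) -> Callable[[], int]:
    # Collect zero-argument checks that return 0 (pass) or 1 (fail).
    checks.append(func)
    return func

@register
def style_check() -> int:
    """Hypothetical check; the real ones call pycodestyle, pyflakes, etc."""
    return 0

def run_all() -> int:
    # Overall exit status is non-zero if any registered check failed.
    return max((check() for check in checks), default=0)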

View File

@ -2,8 +2,7 @@ from zulint.linters import run_pycodestyle
from typing import List
def check_pep8(files):
# type: (List[str]) -> bool
def check_pep8(files: List[str]) -> bool:
ignored_rules = [
# Each of these rules are ignored for the explained reason.

View File

@ -5,8 +5,7 @@ from typing import List
from zulint.linters import run_pyflakes
def check_pyflakes(files, options):
# type: (List[str], argparse.Namespace) -> bool
def check_pyflakes(files: List[str], options: argparse.Namespace) -> bool:
suppress_patterns = [
("scripts/lib/pythonrc.py", "imported but unused"),
# Intentionally imported by zerver/lib/webhooks/common.py

View File

@ -3,8 +3,7 @@ from typing import List
from lib.pretty_print import pretty_print_html
import sys
def clean_html(filenames):
# type: (List[str]) -> None
def clean_html(filenames: List[str]) -> None:
for fn in filenames:
print('Prettifying: %s' % (fn,))
with open(fn) as f:

View File

@ -8,8 +8,7 @@ import re
from typing import List
def validate_order(order, length):
# type: (List[int], int) -> None
def validate_order(order: List[int], length: int) -> None:
if len(order) != length:
print("Please enter the sequence of all the conflicting files at once")
sys.exit(1)
@ -19,8 +18,7 @@ def validate_order(order, length):
print("Incorrect input")
sys.exit(1)
def renumber_migration(conflicts, order, last_correct_migration):
# type: (List[str], List[int], str) -> None
def renumber_migration(conflicts: List[str], order: List[int], last_correct_migration: str) -> None:
stack = [] # type: List[str]
for i in order:
if conflicts[i-1][0:4] not in stack:
@ -38,8 +36,7 @@ def renumber_migration(conflicts, order, last_correct_migration):
last_correct_migration = new_name.replace('.py', '')
def resolve_conflicts(conflicts, files_list):
# type: (List[str], List[str]) -> None
def resolve_conflicts(conflicts: List[str], files_list: List[str]) -> None:
print("Conflicting migrations:")
for i in range(0, len(conflicts)):
print(str(i+1) + '. ' + conflicts[i])

View File

@ -6,8 +6,7 @@ from typing import List, Dict, Union
from zulint.lister import list_files
def do_replace(listing, old_string, new_string):
# type: (Union[Dict[str, List[str]], List[str]], str, str) -> None
def do_replace(listing: Union[Dict[str, List[str]], List[str]], old_string: str, new_string: str) -> None:
for filename in listing:
regex = 's/{}/{}/g'.format(old_string, new_string)
check_call(['sed', '-i', regex, filename])

View File

@ -5,36 +5,30 @@ import subprocess
import sys
from typing import List
def exit(message):
# type: (str) -> None
def exit(message: str) -> None:
print('PROBLEM!')
print(message)
sys.exit(1)
def run(command):
# type: (List[str]) -> None
def run(command: List[str]) -> None:
print('\n>>> ' + ' '.join(map(shlex.quote, command)))
subprocess.check_call(command)
def check_output(command):
# type: (List[str]) -> str
def check_output(command: List[str]) -> str:
return subprocess.check_output(command).decode('ascii')
def get_git_branch():
# type: () -> str
def get_git_branch() -> str:
command = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
output = check_output(command)
return output.strip()
def check_git_pristine():
# type: () -> None
def check_git_pristine() -> None:
command = ['git', 'status', '--porcelain']
output = check_output(command)
if output.strip():
exit('Git is not pristine:\n' + output)
def ensure_on_clean_master():
# type: () -> None
def ensure_on_clean_master() -> None:
branch = get_git_branch()
if branch != 'master':
exit('You are still on a feature branch: %s' % (branch,))
@ -42,8 +36,7 @@ def ensure_on_clean_master():
run(['git', 'fetch', 'upstream', 'master'])
run(['git', 'rebase', 'upstream/master'])
def create_pull_branch(pull_id):
# type: (int) -> None
def create_pull_branch(pull_id: int) -> None:
run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)])
run(['git', 'checkout', '-B', 'review-%s' % (pull_id,), 'FETCH_HEAD'])
run(['git', 'rebase', 'upstream/master'])
@ -55,8 +48,7 @@ def create_pull_branch(pull_id):
print(subprocess.check_output(['git', 'log', 'HEAD~..',
'--pretty=format:Author: %an']))
def review_pr():
# type: () -> None
def review_pr() -> None:
try:
pull_id = int(sys.argv[1])
except Exception:

View File

@ -173,8 +173,7 @@ for cmd in cmds:
subprocess.Popen(cmd)
def transform_url(protocol, path, query, target_port, target_host):
# type: (str, str, str, int, str) -> str
def transform_url(protocol: str, path: str, query: str, target_port: int, target_host: str) -> str:
# generate url with target host
host = ":".join((target_host, str(target_port)))
# Here we are going to rewrite the path a bit so that it is in parity with
@ -186,8 +185,7 @@ def transform_url(protocol, path, query, target_port, target_host):
@gen.engine
def fetch_request(url, callback, **kwargs):
# type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
# use large timeouts to handle polling requests
req = httpclient.HTTPRequest(
url,
@ -208,8 +206,9 @@ class BaseHandler(web.RequestHandler):
# target server port
target_port = None # type: int
def _add_request_headers(self, exclude_lower_headers_list=None):
# type: (Optional[List[str]]) -> httputil.HTTPHeaders
def _add_request_headers(
self, exclude_lower_headers_list: Optional[List[str]] = None
) -> httputil.HTTPHeaders:
exclude_lower_headers_list = exclude_lower_headers_list or []
headers = httputil.HTTPHeaders()
for header, v in self.request.headers.get_all():
@ -217,36 +216,28 @@ class BaseHandler(web.RequestHandler):
headers.add(header, v)
return headers
def get(self):
# type: () -> None
def get(self) -> None:
pass
def head(self):
# type: () -> None
def head(self) -> None:
pass
def post(self):
# type: () -> None
def post(self) -> None:
pass
def put(self):
# type: () -> None
def put(self) -> None:
pass
def patch(self):
# type: () -> None
def patch(self) -> None:
pass
def options(self):
# type: () -> None
def options(self) -> None:
pass
def delete(self):
# type: () -> None
def delete(self) -> None:
pass
def handle_response(self, response):
# type: (Any) -> None
def handle_response(self, response: Any) -> None:
if response.error and not isinstance(response.error, httpclient.HTTPError):
self.set_status(500)
self.write('Internal server error:\n' + str(response.error))
@ -262,8 +253,7 @@ class BaseHandler(web.RequestHandler):
self.finish()
@web.asynchronous
def prepare(self):
# type: () -> None
def prepare(self) -> None:
if 'X-REAL-IP' not in self.request.headers:
self.request.headers['X-REAL-IP'] = self.request.remote_ip
if 'X-FORWARDED_PORT' not in self.request.headers:
@ -311,8 +301,7 @@ class ThumborHandler(BaseHandler):
class Application(web.Application):
def __init__(self, enable_logging=False):
# type: (bool) -> None
def __init__(self, enable_logging: bool = False) -> None:
handlers = [
(r"/json/events.*", TornadoHandler),
(r"/api/v1/events.*", TornadoHandler),
@ -322,19 +311,16 @@ class Application(web.Application):
]
super().__init__(handlers, enable_logging=enable_logging)
def log_request(self, handler):
# type: (BaseHandler) -> None
def log_request(self, handler: BaseHandler) -> None:
if self.settings['enable_logging']:
super().log_request(handler)
def on_shutdown():
# type: () -> None
def on_shutdown() -> None:
IOLoop.instance().stop()
def shutdown_handler(*args, **kwargs):
# type: (*Any, **Any) -> None
def shutdown_handler(*args: Any, **kwargs: Any) -> None:
io_loop = IOLoop.instance()
if io_loop._callbacks:
io_loop.call_later(1, shutdown_handler)
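
transform_url above rebuilds the incoming request URL against the proxied backend's host and port. A hedged sketch of that kind of rewrite using urllib.parse, omitting the path-prefix adjustments the real proxy performs:

from urllib.parse import urlunparse

def transform_url(protocol: str, path: str, query: str,
                  target_port: int, target_host: str) -> str:
    # Point the original path and query at the backend host:port.
    host = ":".join((target_host, str(target_port)))
    return urlunparse((protocol, host, path, "", query, ""))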

View File

@ -25,8 +25,7 @@ def generate_zulip_bots_static_files() -> None:
os.makedirs(bots_dir, exist_ok=True)
def copyfiles(paths):
# type: (List[str]) -> None
def copyfiles(paths: List[str]) -> None:
for src_path in paths:
bot_name = os.path.basename(os.path.dirname(src_path))

View File

@ -36,8 +36,7 @@ def generate_files(source_file: str, tmp_dir: str) -> None:
'--input-file', input_file_path, '--output-file', output_file_path],
stdout=subprocess.DEVNULL)
def print_diff(path_file1, path_file2):
# type: (str, str) -> None
def print_diff(path_file1: str, path_file2: str) -> None:
with open(path_file1) as file1:
with open(path_file2) as file2:
diff = difflib.unified_diff(

View File

@ -245,8 +245,7 @@ def run_tests_via_node_js() -> int:
sys.exit(1)
return ret
def check_line_coverage(fn, line_coverage, line_mapping, log=True):
# type: (str, Dict[Any, Any], Dict[Any, Any], bool) -> bool
def check_line_coverage(fn: str, line_coverage: Dict[Any, Any], line_mapping: Dict[Any, Any], log: bool = True) -> bool:
missing_lines = []
for line in line_coverage:
if line_coverage[line] == 0:
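
check_line_coverage above walks a line-to-hit-count mapping and flags lines with zero hits. A minimal sketch of that check, assuming a plain dict of counts:

from typing import Any, Dict, List

def uncovered_lines(line_coverage: Dict[Any, int]) -> List[Any]:
    # Lines whose hit count is zero are the ones missing test coverage;
    # the real check also maps them back to source lines via line_mapping.
    return [line for line, hits in line_coverage.items() if hits == 0]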

View File

@ -20,8 +20,7 @@ if 'TRAVIS' in os.environ:
CACHE_DIR = os.path.join(os.environ['HOME'], 'misc')
CACHE_FILE = os.path.join(CACHE_DIR, 'requirements_hashes')
def print_diff(path_file1, path_file2):
# type: (str, str) -> None
def print_diff(path_file1: str, path_file2: str) -> None:
with open(path_file1) as file1:
with open(path_file2) as file2:
diff = difflib.unified_diff(
@ -32,8 +31,7 @@ def print_diff(path_file1, path_file2):
)
sys.stdout.writelines(diff)
def test_locked_requirements(tmp_dir):
# type: (str) -> bool
def test_locked_requirements(tmp_dir: str) -> bool:
# `pip-compile` tries to avoid unnecessarily updating recursive dependencies
# if lock files are present already. If we don't copy these files to the tmp
# dir then recursive dependencies will get updated to their latest version
@ -53,8 +51,7 @@ def test_locked_requirements(tmp_dir):
return same
def get_requirements_hash(tmp_dir, use_test_lock_files=False):
# type: (str, Optional[bool]) -> str
def get_requirements_hash(tmp_dir: str, use_test_lock_files: Optional[bool] = False) -> str:
sha1 = hashlib.sha1()
reqs_files = sorted(glob.glob(os.path.join(REQS_DIR, "*.in")))
lock_files_path = REQS_DIR
@ -66,21 +63,18 @@ def get_requirements_hash(tmp_dir, use_test_lock_files=False):
sha1.update(fp.read().encode("utf-8"))
return sha1.hexdigest()
def may_be_setup_cache():
# type: () -> None
def may_be_setup_cache() -> None:
os.makedirs(CACHE_DIR, exist_ok=True)
if not os.path.exists(CACHE_FILE):
with open(CACHE_FILE, 'w') as fp:
ujson.dump([], fp)
def load_cache():
# type: () -> List[str]
def load_cache() -> List[str]:
with open(CACHE_FILE) as fp:
hash_list = ujson.load(fp)
return hash_list
def update_cache(hash_list):
# type: (List[str]) -> None
def update_cache(hash_list: List[str]) -> None:
# We store the last 100 hash entries. Aggressive caching is
# not a problem as it is cheap to do.
if len(hash_list) > 100:
@ -88,8 +82,7 @@ def update_cache(hash_list):
with open(CACHE_FILE, 'w') as fp:
ujson.dump(hash_list, fp)
def main():
# type: () -> None
def main() -> None:
may_be_setup_cache()
hash_list = load_cache()
tmp = tempfile.TemporaryDirectory()
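
get_requirements_hash above fingerprints the requirements inputs so the expensive pip-compile check can be skipped when nothing has changed. A hedged sketch of that idea, hashing sorted files with hypothetical glob patterns:

import glob
import hashlib
import os
from typing import List

def hash_requirements(reqs_dir: str) -> str:
    # Feed every *.in and *.txt file, in a stable order, into one SHA-1
    # digest so any edit to the inputs changes the resulting hash.
    sha1 = hashlib.sha1()
    paths = sorted(
        glob.glob(os.path.join(reqs_dir, "*.in")) +
        glob.glob(os.path.join(reqs_dir, "*.txt"))
    )  # type: List[str]
    for path in paths:
        with open(path) as fp:
            sha1.update(fp.read().encode("utf-8"))
    return sha1.hexdigest()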

View File

@ -17,8 +17,7 @@ if __name__ == '__main__':
default=False, help='compute test coverage')
args = parser.parse_args()
def dir_join(dir1, dir2):
# type: (str, str) -> str
def dir_join(dir1: str, dir2: str) -> str:
return os.path.abspath(os.path.join(dir1, dir2))
tools_dir = os.path.dirname(os.path.abspath(__file__))

View File

@ -10,8 +10,7 @@ os.chdir(os.path.join(os.path.dirname(__file__), '..'))
STATIC_PATH = 'static/'
def build_for_prod_or_casper(quiet):
# type: (bool) -> NoReturn
def build_for_prod_or_casper(quiet: bool) -> NoReturn:
"""Builds for production, writing the output to disk"""
webpack_args = ['node', 'node_modules/.bin/webpack-cli',
@ -22,8 +21,7 @@ def build_for_prod_or_casper(quiet):
print('Starting webpack compilation')
os.execvp(webpack_args[0], webpack_args)
def build_for_dev_server(host, port, minify, disable_host_check):
# type: (str, str, bool, bool) -> None
def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check: bool) -> None:
"""watches and rebuilds on changes, serving files from memory via webpack-dev-server"""
# This is our most dynamic configuration, which we use for our
@ -68,8 +66,7 @@ def build_for_dev_server(host, port, minify, disable_host_check):
webpack_process = subprocess.Popen(webpack_args)
class WebpackConfigFileChangeHandler(pyinotify.ProcessEvent):
def process_default(self, event):
# type: (pyinotify.Event) -> None
def process_default(self, event: pyinotify.Event) -> None:
nonlocal webpack_process
print('Restarting webpack-dev-server due to config changes...')
webpack_process.terminate()
@ -86,8 +83,7 @@ def build_for_dev_server(host, port, minify, disable_host_check):
webpack_process.terminate()
webpack_process.wait()
def build_for_most_tests():
# type: () -> None
def build_for_most_tests() -> None:
"""Generates a stub asset stat file for django so backend test can render a page"""
# Tests like test-backend, test-api, and test-home-documentation use

View File

@ -46,8 +46,7 @@ def ensure_users(ids_list: List[int], user_names: List[str]) -> None:
assert ids_list == user_ids
@openapi_test_function("/users/me/subscriptions:post")
def add_subscriptions(client):
# type: (Client) -> None
def add_subscriptions(client: Client) -> None:
# {code_example|start}
# Subscribe to the stream "new stream"
@ -77,8 +76,7 @@ def add_subscriptions(client):
assert result['result'] == 'success'
assert 'newbie@zulip.com' in result['subscribed']
def test_add_subscriptions_already_subscribed(client):
# type: (Client) -> None
def test_add_subscriptions_already_subscribed(client: Client) -> None:
result = client.add_subscriptions(
streams=[
{'name': 'new stream', 'description': 'New stream for testing'}
@ -89,8 +87,7 @@ def test_add_subscriptions_already_subscribed(client):
validate_against_openapi_schema(result, '/users/me/subscriptions', 'post',
'200_1')
def test_authorization_errors_fatal(client, nonadmin_client):
# type: (Client, Client) -> None
def test_authorization_errors_fatal(client: Client, nonadmin_client: Client) -> None:
client.add_subscriptions(
streams=[
{'name': 'private_stream'}
@ -125,8 +122,7 @@ def test_authorization_errors_fatal(client, nonadmin_client):
'400_1')
@openapi_test_function("/users/{email}/presence:get")
def get_user_presence(client):
# type: (Client) -> None
def get_user_presence(client: Client) -> None:
# {code_example|start}
# Get presence information for "iago@zulip.com"
@ -136,8 +132,7 @@ def get_user_presence(client):
validate_against_openapi_schema(result, '/users/{email}/presence', 'get', '200')
@openapi_test_function("/users/me/presence:post")
def update_presence(client):
# type: (Client) -> None
def update_presence(client: Client) -> None:
request = {
'status': 'active',
'ping_only': False,
@ -149,8 +144,7 @@ def update_presence(client):
assert result['result'] == 'success'
@openapi_test_function("/users:post")
def create_user(client):
# type: (Client) -> None
def create_user(client: Client) -> None:
# {code_example|start}
# Create a user
@ -171,8 +165,7 @@ def create_user(client):
validate_against_openapi_schema(result, '/users', 'post', '400')
@openapi_test_function("/users:get")
def get_members(client):
# type: (Client) -> None
def get_members(client: Client) -> None:
# {code_example|start}
# Get all users in the realm
@ -208,8 +201,7 @@ def get_members(client):
assert member.get('profile_data', None) is not None
@openapi_test_function("/users/{user_id}:get")
def get_single_user(client):
# type: (Client) -> None
def get_single_user(client: Client) -> None:
# {code_example|start}
# Fetch details on a user given a user ID
@ -225,8 +217,7 @@ def get_single_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'get', '200')
@openapi_test_function("/users/{user_id}:delete")
def deactivate_user(client):
# type: (Client) -> None
def deactivate_user(client: Client) -> None:
# {code_example|start}
# Deactivate a user
@ -240,8 +231,7 @@ def deactivate_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'delete', '200')
@openapi_test_function("/users/{user_id}:patch")
def update_user(client):
# type: (Client) -> None
def update_user(client: Client) -> None:
# {code_example|start}
# Change a user's full name.
@ -270,8 +260,7 @@ def update_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'patch', '400')
@openapi_test_function("/realm/filters:get")
def get_realm_filters(client):
# type: (Client) -> None
def get_realm_filters(client: Client) -> None:
# {code_example|start}
# Fetch all the filters in this organization
@ -281,8 +270,7 @@ def get_realm_filters(client):
validate_against_openapi_schema(result, '/realm/filters', 'get', '200')
@openapi_test_function("/realm/filters:post")
def add_realm_filter(client):
# type: (Client) -> None
def add_realm_filter(client: Client) -> None:
# {code_example|start}
# Add a filter to automatically linkify #<number> to the corresponding
@ -294,8 +282,7 @@ def add_realm_filter(client):
validate_against_openapi_schema(result, '/realm/filters', 'post', '200')
@openapi_test_function("/realm/filters/{filter_id}:delete")
def remove_realm_filter(client):
# type: (Client) -> None
def remove_realm_filter(client: Client) -> None:
# {code_example|start}
# Remove the organization filter with ID 42
@ -305,8 +292,7 @@ def remove_realm_filter(client):
validate_against_openapi_schema(result, '/realm/filters/{filter_id}', 'delete', '200')
@openapi_test_function("/users/me:get")
def get_profile(client):
# type: (Client) -> None
def get_profile(client: Client) -> None:
# {code_example|start}
# Get the profile of the user/bot that requests this endpoint,
@ -317,8 +303,7 @@ def get_profile(client):
validate_against_openapi_schema(result, '/users/me', 'get', '200')
@openapi_test_function("/get_stream_id:get")
def get_stream_id(client):
# type: (Client) -> int
def get_stream_id(client: Client) -> int:
# {code_example|start}
# Get the ID of a given stream
@ -331,8 +316,7 @@ def get_stream_id(client):
return result['stream_id']
@openapi_test_function("/streams/{stream_id}:delete")
def delete_stream(client, stream_id):
# type: (Client, int) -> None
def delete_stream(client: Client, stream_id: int) -> None:
result = client.add_subscriptions(
streams=[
{
@ -352,8 +336,7 @@ def delete_stream(client, stream_id):
assert result['result'] == 'success'
@openapi_test_function("/streams:get")
def get_streams(client):
# type: (Client) -> None
def get_streams(client: Client) -> None:
# {code_example|start}
# Get all streams that the user has access to
@ -374,8 +357,7 @@ def get_streams(client):
assert len(result['streams']) == 4
@openapi_test_function("/streams/{stream_id}:patch")
def update_stream(client, stream_id):
# type: (Client, int) -> None
def update_stream(client: Client, stream_id: int) -> None:
# {code_example|start}
# Update the stream by a given ID
@ -392,8 +374,7 @@ def update_stream(client, stream_id):
assert result['result'] == 'success'
@openapi_test_function("/user_groups:get")
def get_user_groups(client):
# type: (Client) -> int
def get_user_groups(client: Client) -> int:
# {code_example|start}
# Get all user groups of the realm
@ -409,27 +390,23 @@ def get_user_groups(client):
if u['name'] == "marketing"][0]
return marketing_user_group['id']
def test_user_not_authorized_error(nonadmin_client):
# type: (Client) -> None
def test_user_not_authorized_error(nonadmin_client: Client) -> None:
result = nonadmin_client.get_streams(include_all_active=True)
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_2')
def get_subscribers(client):
# type: (Client) -> None
def get_subscribers(client: Client) -> None:
result = client.get_subscribers(stream='new stream')
assert result['subscribers'] == ['iago@zulip.com', 'newbie@zulip.com']
def get_user_agent(client):
# type: (Client) -> None
def get_user_agent(client: Client) -> None:
result = client.get_user_agent()
assert result.startswith('ZulipPython/')
@openapi_test_function("/users/me/subscriptions:get")
def list_subscriptions(client):
# type: (Client) -> None
def list_subscriptions(client: Client) -> None:
# {code_example|start}
# Get all streams that the user is subscribed to
result = client.list_subscriptions()
@ -442,8 +419,7 @@ def list_subscriptions(client):
assert streams[0]['description'] == 'New stream for testing'
@openapi_test_function("/users/me/subscriptions:delete")
def remove_subscriptions(client):
# type: (Client) -> None
def remove_subscriptions(client: Client) -> None:
# {code_example|start}
# Unsubscribe from the stream "new stream"
@ -473,8 +449,7 @@ def remove_subscriptions(client):
'delete', '200')
@openapi_test_function("/users/me/subscriptions/muted_topics:patch")
def toggle_mute_topic(client):
# type: (Client) -> None
def toggle_mute_topic(client: Client) -> None:
# Send a test message
message = {
@ -518,8 +493,7 @@ def toggle_mute_topic(client):
'patch', '200')
@openapi_test_function("/mark_all_as_read:post")
def mark_all_as_read(client):
# type: (Client) -> None
def mark_all_as_read(client: Client) -> None:
# {code_example|start}
# Mark all of the user's unread messages as read
@ -529,8 +503,7 @@ def mark_all_as_read(client):
validate_against_openapi_schema(result, '/mark_all_as_read', 'post', '200')
@openapi_test_function("/mark_stream_as_read:post")
def mark_stream_as_read(client):
# type: (Client) -> None
def mark_stream_as_read(client: Client) -> None:
# {code_example|start}
# Mark the unread messages in stream with ID "1" as read
@ -540,8 +513,7 @@ def mark_stream_as_read(client):
validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200')
@openapi_test_function("/mark_topic_as_read:post")
def mark_topic_as_read(client):
# type: (Client) -> None
def mark_topic_as_read(client: Client) -> None:
# Grab an existing topic name
topic_name = client.get_stream_topics(1)['topics'][0]['name']
@ -554,8 +526,7 @@ def mark_topic_as_read(client):
validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200')
@openapi_test_function("/users/me/subscriptions/properties:post")
def update_subscription_settings(client):
# type: (Client) -> None
def update_subscription_settings(client: Client) -> None:
# {code_example|start}
# Update the user's subscription in stream #1 to pin it to the top of the
@ -577,8 +548,7 @@ def update_subscription_settings(client):
'POST', '200')
@openapi_test_function("/messages/render:post")
def render_message(client):
# type: (Client) -> None
def render_message(client: Client) -> None:
# {code_example|start}
# Render a message
@ -591,8 +561,7 @@ def render_message(client):
validate_against_openapi_schema(result, '/messages/render', 'post', '200')
@openapi_test_function("/messages:get")
def get_messages(client):
# type: (Client) -> None
def get_messages(client: Client) -> None:
# {code_example|start}
# Get the 100 last messages sent by "iago@zulip.com" to the stream "Verona"
@ -610,8 +579,7 @@ def get_messages(client):
assert len(result['messages']) <= request['num_before']
@openapi_test_function("/messages/{message_id}:get")
def get_raw_message(client, message_id):
# type: (Client, int) -> None
def get_raw_message(client: Client, message_id: int) -> None:
assert int(message_id)
@ -624,8 +592,7 @@ def get_raw_message(client, message_id):
'200')
@openapi_test_function("/messages:post")
def send_message(client):
# type: (Client) -> int
def send_message(client: Client) -> int:
request = {} # type: Dict[str, Any]
@ -680,8 +647,7 @@ def send_message(client):
return message_id
@openapi_test_function("/messages/{message_id}/reactions:post")
def add_reaction(client, message_id):
# type: (Client, int) -> None
def add_reaction(client: Client, message_id: int) -> None:
# {code_example|start}
# Add an emoji reaction
request = {
@ -694,8 +660,7 @@ def add_reaction(client, message_id):
validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'post', '200')
@openapi_test_function("/messages/{message_id}/reactions:delete")
def remove_reaction(client, message_id):
# type: (Client, int) -> None
def remove_reaction(client: Client, message_id: int) -> None:
# {code_example|start}
# Remove an emoji reaction
request = {
@ -707,8 +672,7 @@ def remove_reaction(client, message_id):
# {code_example|end}
validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'delete', '200')
def test_nonexistent_stream_error(client):
# type: (Client) -> None
def test_nonexistent_stream_error(client: Client) -> None:
request = {
"type": "stream",
"to": "nonexistent_stream",
@ -720,8 +684,7 @@ def test_nonexistent_stream_error(client):
validate_against_openapi_schema(result, '/messages', 'post',
'400_0')
def test_private_message_invalid_recipient(client):
# type: (Client) -> None
def test_private_message_invalid_recipient(client: Client) -> None:
request = {
"type": "private",
"to": "eeshan@zulip.com",
@ -733,8 +696,7 @@ def test_private_message_invalid_recipient(client):
'400_1')
@openapi_test_function("/messages/{message_id}:patch")
def update_message(client, message_id):
# type: (Client, int) -> None
def update_message(client: Client, message_id: int) -> None:
assert int(message_id)
@ -761,8 +723,7 @@ def update_message(client, message_id):
assert result['result'] == 'success'
assert result['raw_content'] == request['content']
def test_update_message_edit_permission_error(client, nonadmin_client):
# type: (Client, Client) -> None
def test_update_message_edit_permission_error(client: Client, nonadmin_client: Client) -> None:
request = {
"type": "stream",
"to": "Denmark",
@ -780,8 +741,7 @@ def test_update_message_edit_permission_error(client, nonadmin_client):
validate_against_openapi_schema(result, '/messages/{message_id}', 'patch', '400')
@openapi_test_function("/messages/{message_id}:delete")
def delete_message(client, message_id):
# type: (Client, int) -> None
def delete_message(client: Client, message_id: int) -> None:
# {code_example|start}
# Delete the message with ID "message_id"
@ -791,8 +751,7 @@ def delete_message(client, message_id):
validate_against_openapi_schema(result, '/messages/{message_id}', 'delete',
'200')
def test_delete_message_edit_permission_error(client, nonadmin_client):
# type: (Client, Client) -> None
def test_delete_message_edit_permission_error(client: Client, nonadmin_client: Client) -> None:
request = {
"type": "stream",
"to": "Denmark",
@ -807,8 +766,7 @@ def test_delete_message_edit_permission_error(client, nonadmin_client):
'400_1')
@openapi_test_function("/messages/{message_id}/history:get")
def get_message_history(client, message_id):
# type: (Client, int) -> None
def get_message_history(client: Client, message_id: int) -> None:
# {code_example|start}
# Get the edit history for message with ID "message_id"
@ -819,8 +777,7 @@ def get_message_history(client, message_id):
'get', '200')
@openapi_test_function("/realm/emoji:get")
def get_realm_emoji(client):
# type: (Client) -> None
def get_realm_emoji(client: Client) -> None:
# {code_example|start}
result = client.get_realm_emoji()
@ -829,8 +786,7 @@ def get_realm_emoji(client):
validate_against_openapi_schema(result, '/realm/emoji', 'GET', '200')
@openapi_test_function("/messages/flags:post")
def update_message_flags(client):
# type: (Client) -> None
def update_message_flags(client: Client) -> None:
# Send a few test messages
request = {
@ -870,8 +826,7 @@ def update_message_flags(client):
'200')
@openapi_test_function("/register:post")
def register_queue(client):
# type: (Client) -> str
def register_queue(client: Client) -> str:
# {code_example|start}
# Register the queue
@ -884,8 +839,7 @@ def register_queue(client):
return result['queue_id']
@openapi_test_function("/events:delete")
def deregister_queue(client, queue_id):
# type: (Client, str) -> None
def deregister_queue(client: Client, queue_id: str) -> None:
# {code_example|start}
# Delete a queue (queue_id is the ID of the queue
@ -900,8 +854,7 @@ def deregister_queue(client, queue_id):
validate_against_openapi_schema(result, '/events', 'delete', '400')
@openapi_test_function("/server_settings:get")
def get_server_settings(client):
# type: (Client) -> None
def get_server_settings(client: Client) -> None:
# {code_example|start}
# Fetch the settings for this server
@ -911,8 +864,7 @@ def get_server_settings(client):
validate_against_openapi_schema(result, '/server_settings', 'get', '200')
@openapi_test_function("/settings/notifications:patch")
def update_notification_settings(client):
# type: (Client) -> None
def update_notification_settings(client: Client) -> None:
# {code_example|start}
# Enable push notifications even when online
@ -926,8 +878,7 @@ def update_notification_settings(client):
validate_against_openapi_schema(result, '/settings/notifications', 'patch', '200')
@openapi_test_function("/user_uploads:post")
def upload_file(client):
# type: (Client) -> None
def upload_file(client: Client) -> None:
path_to_file = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg')
# {code_example|start}
@ -950,8 +901,7 @@ def upload_file(client):
validate_against_openapi_schema(result, '/user_uploads', 'post', '200')
@openapi_test_function("/users/me/{stream_id}/topics:get")
def get_stream_topics(client, stream_id):
# type: (Client, int) -> None
def get_stream_topics(client: Client, stream_id: int) -> None:
# {code_example|start}
result = client.get_stream_topics(stream_id)
@ -961,8 +911,7 @@ def get_stream_topics(client, stream_id):
'get', '200')
@openapi_test_function("/typing:post")
def set_typing_status(client):
# type: (Client) -> None
def set_typing_status(client: Client) -> None:
ensure_users([9, 10], ['hamlet', 'iago'])
# {code_example|start}
@ -994,8 +943,7 @@ def set_typing_status(client):
validate_against_openapi_schema(result, '/typing', 'post', '200')
@openapi_test_function("/realm/emoji/{emoji_name}:post")
def upload_custom_emoji(client):
# type: (Client) -> None
def upload_custom_emoji(client: Client) -> None:
emoji_path = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg')
# {code_example|start}
@ -1014,15 +962,13 @@ def upload_custom_emoji(client):
'post', '200')
@openapi_test_function("/users/me/alert_words:get")
def get_alert_words(client):
# type: (Client) -> None
def get_alert_words(client: Client) -> None:
result = client.get_alert_words()
assert result['result'] == 'success'
@openapi_test_function("/users/me/alert_words:post")
def add_alert_words(client):
# type: (Client) -> None
def add_alert_words(client: Client) -> None:
word = ['foo', 'bar']
result = client.add_alert_words(word)
@ -1030,8 +976,7 @@ def add_alert_words(client):
assert result['result'] == 'success'
@openapi_test_function("/users/me/alert_words:delete")
def remove_alert_words(client):
# type: (Client) -> None
def remove_alert_words(client: Client) -> None:
word = ['foo']
result = client.remove_alert_words(word)
@ -1039,8 +984,7 @@ def remove_alert_words(client):
assert result['result'] == 'success'
@openapi_test_function("/user_groups/create:post")
def create_user_group(client):
# type: (Client) -> None
def create_user_group(client: Client) -> None:
ensure_users([6, 7, 8, 9], ['aaron', 'zoe', 'cordelia', 'hamlet'])
# {code_example|start}
@ -1057,8 +1001,7 @@ def create_user_group(client):
assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}:patch")
def update_user_group(client, group_id):
# type: (Client, int) -> None
def update_user_group(client: Client, group_id: int) -> None:
# {code_example|start}
request = {
'group_id': group_id,
@ -1071,8 +1014,7 @@ def update_user_group(client, group_id):
assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}:delete")
def remove_user_group(client, group_id):
# type: (Client, int) -> None
def remove_user_group(client: Client, group_id: int) -> None:
# {code_example|start}
result = client.remove_user_group(group_id)
# {code_example|end}
@ -1081,8 +1023,7 @@ def remove_user_group(client, group_id):
assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}/members:post")
def update_user_group_members(client, group_id):
# type: (Client, int) -> None
def update_user_group_members(client: Client, group_id: int) -> None:
ensure_users([8, 9, 10], ['cordelia', 'hamlet', 'iago'])
request = {
@ -1095,28 +1036,24 @@ def update_user_group_members(client, group_id):
assert result['result'] == 'success'
def test_invalid_api_key(client_with_invalid_key):
# type: (Client) -> None
def test_invalid_api_key(client_with_invalid_key: Client) -> None:
result = client_with_invalid_key.list_subscriptions()
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_0')
def test_missing_request_argument(client):
# type: (Client) -> None
def test_missing_request_argument(client: Client) -> None:
result = client.render_message({})
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_1')
def test_invalid_stream_error(client):
# type: (Client) -> None
def test_invalid_stream_error(client: Client) -> None:
result = client.get_stream_id('nonexistent')
validate_against_openapi_schema(result, '/get_stream_id', 'get', '400')
# SETUP METHODS FOLLOW
def test_against_fixture(result, fixture, check_if_equal=[], check_if_exists=[]):
# type: (Dict[str, Any], Dict[str, Any], Optional[Iterable[str]], Optional[Iterable[str]]) -> None
def test_against_fixture(result: Dict[str, Any], fixture: Dict[str, Any], check_if_equal: Optional[Iterable[str]] = [], check_if_exists: Optional[Iterable[str]] = []) -> None:
assertLength(result, fixture)
if not check_if_equal and not check_if_exists:
@ -1131,8 +1068,7 @@ def test_against_fixture(result, fixture, check_if_equal=[], check_if_exists=[])
for key in check_if_exists:
assertIn(key, result)
def assertEqual(key, result, fixture):
# type: (str, Dict[str, Any], Dict[str, Any]) -> None
def assertEqual(key: str, result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
if result[key] != fixture[key]:
first = "{key} = {value}".format(key=key, value=result[key])
second = "{key} = {value}".format(key=key, value=fixture[key])
@ -1141,8 +1077,7 @@ def assertEqual(key, result, fixture):
else:
assert result[key] == fixture[key]
def assertLength(result, fixture):
# type: (Dict[str, Any], Dict[str, Any]) -> None
def assertLength(result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
if len(result) != len(fixture):
result_string = json.dumps(result, indent=4, sort_keys=True)
fixture_string = json.dumps(fixture, indent=4, sort_keys=True)
@ -1151,8 +1086,7 @@ def assertLength(result, fixture):
else:
assert len(result) == len(fixture)
def assertIn(key, result):
# type: (str, Dict[str, Any]) -> None
def assertIn(key: str, result: Dict[str, Any]) -> None:
if key not in result.keys():
raise AssertionError(
"The actual output does not contain the the key `{key}`.".format(key=key)
@ -1160,8 +1094,7 @@ def assertIn(key, result):
else:
assert key in result
def test_messages(client, nonadmin_client):
# type: (Client, Client) -> None
def test_messages(client: Client, nonadmin_client: Client) -> None:
render_message(client)
message_id = send_message(client)
@ -1182,8 +1115,7 @@ def test_messages(client, nonadmin_client):
test_update_message_edit_permission_error(client, nonadmin_client)
test_delete_message_edit_permission_error(client, nonadmin_client)
def test_users(client):
# type: (Client) -> None
def test_users(client: Client) -> None:
create_user(client)
get_members(client)
@ -1205,8 +1137,7 @@ def test_users(client):
add_alert_words(client)
remove_alert_words(client)
def test_streams(client, nonadmin_client):
# type: (Client, Client) -> None
def test_streams(client: Client, nonadmin_client: Client) -> None:
add_subscriptions(client)
test_add_subscriptions_already_subscribed(client)
@ -1226,8 +1157,7 @@ def test_streams(client, nonadmin_client):
test_authorization_errors_fatal(client, nonadmin_client)
def test_queues(client):
# type: (Client) -> None
def test_queues(client: Client) -> None:
# Note that the example for api/get-events-from-queue is not tested.
# Since methods such as client.get_events() or client.call_on_each_message
# are blocking calls and since the event queue backend is already
@ -1236,8 +1166,7 @@ def test_queues(client):
queue_id = register_queue(client)
deregister_queue(client, queue_id)
def test_server_organizations(client):
# type: (Client) -> None
def test_server_organizations(client: Client) -> None:
get_realm_filters(client)
add_realm_filter(client)
@ -1246,13 +1175,11 @@ def test_server_organizations(client):
get_realm_emoji(client)
upload_custom_emoji(client)
def test_errors(client):
# type: (Client) -> None
def test_errors(client: Client) -> None:
test_missing_request_argument(client)
test_invalid_stream_error(client)
def test_the_api(client, nonadmin_client):
# type: (Client, Client) -> None
def test_the_api(client: Client, nonadmin_client: Client) -> None:
get_user_agent(client)
test_users(client)

View File

@ -2610,8 +2610,7 @@ class TestTwoFactor(ZulipTestCase):
self.assertIn('otp_device_id', self.client.session.keys())
@mock.patch('two_factor.models.totp')
def test_two_factor_login_with_ldap(self, mock_totp):
# type: (mock.MagicMock) -> None
def test_two_factor_login_with_ldap(self, mock_totp: mock.MagicMock) -> None:
token = 123456
email = self.example_email('hamlet')
password = self.ldap_password('hamlet')
@ -2621,8 +2620,7 @@ class TestTwoFactor(ZulipTestCase):
user_profile.save()
self.create_default_device(user_profile)
def totp(*args, **kwargs):
# type: (*Any, **Any) -> int
def totp(*args: Any, **kwargs: Any) -> int:
return token
mock_totp.side_effect = totp

View File

@ -1400,8 +1400,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
config_data = ujson.loads(result.content)['config_data']
self.assertEqual(config_data, ujson.loads(bot_info['config_data']))
def test_outgoing_webhook_invalid_interface(self):
# type: () -> None
def test_outgoing_webhook_invalid_interface(self) -> None:
self.login('hamlet')
bot_info = {
'full_name': 'Outgoing Webhook test bot',

View File

@ -2312,8 +2312,7 @@ class EventsRegisterTest(ZulipTestCase):
error = change_bot_owner_checker_user('events[1]', events[1])
self.assert_on_error(error)
def test_do_update_outgoing_webhook_service(self):
# type: () -> None
def test_do_update_outgoing_webhook_service(self) -> None:
update_outgoing_webhook_service_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('update')),

View File

@ -3133,10 +3133,9 @@ class EditMessageTest(ZulipTestCase):
do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
def test_allow_community_topic_editing(self) -> None:
def set_message_editing_params(allow_message_editing,
message_content_edit_limit_seconds,
allow_community_topic_editing):
# type: (bool, int, bool) -> None
def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds: int,
allow_community_topic_editing: bool) -> None:
result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
@ -3144,16 +3143,14 @@ class EditMessageTest(ZulipTestCase):
})
self.assert_json_success(result)
def do_edit_message_assert_success(id_, unique_str):
# type: (int, str) -> None
def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result)
self.check_topic(id_, topic_name=new_topic)
def do_edit_message_assert_error(id_, unique_str, error):
# type: (int, str, str) -> None
def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
message = Message.objects.get(id=id_)
old_topic = message.topic_name()
old_content = message.content

View File

@ -1220,8 +1220,7 @@ class TestGetAPNsPayload(PushNotificationTest):
self.assertDictEqual(payload, expected)
mock_push_notifications.assert_called()
def test_get_message_payload_apns_stream_message(self):
# type: () -> None
def test_get_message_payload_apns_stream_message(self) -> None:
stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id)
message.trigger = 'push_stream_notify'
@ -1252,8 +1251,7 @@ class TestGetAPNsPayload(PushNotificationTest):
}
self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_mention(self):
# type: () -> None
def test_get_message_payload_apns_stream_mention(self) -> None:
user_profile = self.example_user("othello")
stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id)
@ -1285,8 +1283,7 @@ class TestGetAPNsPayload(PushNotificationTest):
}
self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_wildcard_mention(self):
# type: () -> None
def test_get_message_payload_apns_stream_wildcard_mention(self) -> None:
user_profile = self.example_user("othello")
stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id)

View File

@ -252,8 +252,7 @@ class TestServiceBotStateHandler(ZulipTestCase):
self.assertTrue(storage.contains('another key'))
self.assertRaises(StateError, lambda: storage.remove('some key'))
def test_internal_endpoint(self):
# type: () -> None
def test_internal_endpoint(self) -> None:
self.login_user(self.user_profile)
# Store some data.

View File

@ -3803,8 +3803,7 @@ class NoReplyEmailTest(ZulipTestCase):
class TwoFactorAuthTest(ZulipTestCase):
@patch('two_factor.models.totp')
def test_two_factor_login(self, mock_totp):
# type: (MagicMock) -> None
def test_two_factor_login(self, mock_totp: MagicMock) -> None:
token = 123456
email = self.example_email('hamlet')
password = self.ldap_password('hamlet')
@ -3814,8 +3813,7 @@ class TwoFactorAuthTest(ZulipTestCase):
user_profile.save()
self.create_default_device(user_profile)
def totp(*args, **kwargs):
# type: (*Any, **Any) -> int
def totp(*args: Any, **kwargs: Any) -> int:
return token
mock_totp.side_effect = totp

View File

@ -221,8 +221,11 @@ def update_subscriptions_backend(
] # type: List[FuncKwargPair]
return compose_views(request, user_profile, method_kwarg_pairs)
def compose_views(request, user_profile, method_kwarg_pairs):
# type: (HttpRequest, UserProfile, List[FuncKwargPair]) -> HttpResponse
def compose_views(
request: HttpRequest,
user_profile: UserProfile,
method_kwarg_pairs: "List[FuncKwargPair]",
) -> HttpResponse:
'''
This takes a series of view methods from method_kwarg_pairs and calls
them in sequence, and it smushes all the json results into a single
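
compose_views chains a list of (view function, kwargs) pairs and folds their JSON payloads together. A minimal sketch of that merging pattern, with plain dicts standing in for HttpRequest/HttpResponse and an assumed FuncKwargPair alias:

from typing import Any, Callable, Dict, List, Tuple

FuncKwargPair = Tuple[Callable[..., Dict[str, Any]], Dict[str, Any]]

def compose(method_kwarg_pairs: List[FuncKwargPair]) -> Dict[str, Any]:
    # Call each method with its kwargs and merge every result key into
    # one combined dict; error handling across the sequence is elided.
    result = {}  # type: Dict[str, Any]
    for method, kwargs in method_kwarg_pairs:
        result.update(method(**kwargs))
    return result

# For example:
#   compose([(lambda x: {"a": x}, {"x": 2}), (lambda: {"b": 3}, {})])
#   returns {"a": 2, "b": 3}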