python: Convert function type annotations to Python 3 style.

Generated by com2ann (slightly patched to avoid also converting
assignment type annotations, which require Python 3.6), followed by
some manual whitespace adjustment, and six fixes for runtime issues:

-    def __init__(self, token: Token, parent: Optional[Node]) -> None:
+    def __init__(self, token: Token, parent: "Optional[Node]") -> None:

-def main(options: argparse.Namespace) -> NoReturn:
+def main(options: argparse.Namespace) -> "NoReturn":

-def fetch_request(url: str, callback: Any, **kwargs: Any) -> Generator[Callable[..., Any], Any, None]:
+def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":

-def assert_server_running(server: subprocess.Popen[bytes], log_file: Optional[str]) -> None:
+def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:

-def server_is_up(server: subprocess.Popen[bytes], log_file: Optional[str]) -> bool:
+def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:

-    method_kwarg_pairs: List[FuncKwargPair],
+    method_kwarg_pairs: "List[FuncKwargPair]",
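
The quoting in the six fixes above is needed because Python evaluates annotations when the
"def" statement executes: an expression such as subprocess.Popen[bytes] is not subscriptable
at runtime before Python 3.9, and names like Node or FuncKwargPair may not be defined yet (or
only under TYPE_CHECKING), so the unquoted forms crash at import time. A string annotation is
skipped at runtime but still checked by mypy. A minimal illustrative sketch (not code from
this patch):

import subprocess

# Evaluated as soon as the "def" executes; on Python < 3.9 this raises
# "TypeError: 'type' object is not subscriptable":
#
#     def server_is_up(server: subprocess.Popen[bytes]) -> bool: ...

# Quoting turns the annotation into a plain string, which is never evaluated
# at runtime but is still understood by mypy:
def server_is_up(server: "subprocess.Popen[bytes]") -> bool:
    ...

# (On Python 3.7+, "from __future__ import annotations" would defer evaluation
# of all annotations and make the quotes unnecessary.)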

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
Anders Kaseorg authored 2020-04-18 18:48:37 -07:00; committed by Tim Abbott
parent 43ac901ad9
commit 5901e7ba7e
68 changed files with 389 additions and 691 deletions

View File

@@ -85,8 +85,7 @@ states = {
     "UNKNOWN": 3
 }

-def report(state, timestamp=None, msg=None):
-    # type: (str, Any, Optional[str]) -> None
+def report(state: str, timestamp: Any = None, msg: Optional[str] = None) -> None:
     now = int(time.time())
     if msg is None:
         msg = "send time was %s" % (timestamp,)
@@ -97,14 +96,12 @@ def report(state, timestamp=None, msg=None):
     print("%s: %s" % (state, msg))
     exit(states[state])

-def send_zulip(sender, message):
-    # type: (zulip.Client, Dict[str, Any]) -> None
+def send_zulip(sender: zulip.Client, message: Dict[str, Any]) -> None:
     result = sender.send_message(message)
     if result["result"] != "success" and options.nagios:
         report("CRITICAL", msg="Error sending Zulip, args were: %s, %s" % (message, result))

-def get_zulips():
-    # type: () -> List[Dict[str, Any]]
+def get_zulips() -> List[Dict[str, Any]]:
     global queue_id, last_event_id
     res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id)
     if 'error' in res.get('result', {}):

View File

@@ -20,13 +20,11 @@ states = {
     "UNKNOWN": 3
 }

-def report(state, msg):
-    # type: (str, str) -> NoReturn
+def report(state: str, msg: str) -> NoReturn:
     print("%s: %s" % (state, msg))
     exit(states[state])

-def get_loc_over_ssh(host, func):
-    # type: (str, str) -> str
+def get_loc_over_ssh(host: str, func: str) -> str:
     try:
         return subprocess.check_output(['ssh', host,
                                         'psql -v ON_ERROR_STOP=1 zulip -t -c "SELECT %s()"' % (func,)],
@@ -35,8 +33,7 @@ def get_loc_over_ssh(host, func):
     except subprocess.CalledProcessError as e:
         report('CRITICAL', 'ssh failed: %s: %s' % (str(e), e.output))

-def loc_to_abs_offset(loc_str):
-    # type: (str) -> int
+def loc_to_abs_offset(loc_str: str) -> int:
     m = re.match(r'^\s*([0-9a-fA-F]+)/([0-9a-fA-F]+)\s*$', loc_str)
     if not m:
         raise ValueError("Unknown xlog location format: " + loc_str)

View File

@@ -21,8 +21,7 @@ states = {
     "UNKNOWN": 3
 }

-def report(state, num):
-    # type: (str, str) -> None
+def report(state: str, num: str) -> None:
     print("%s: %s rows in fts_update_log table" % (state, num))
     exit(states[state])

View File

@@ -13,8 +13,7 @@ states = {
     "UNKNOWN": 3
 }

-def report(state, msg):
-    # type: (str, str) -> None
+def report(state: str, msg: str) -> None:
     print("%s: %s" % (state, msg))
     exit(states[state])

View File

@@ -16,8 +16,7 @@ logging.Formatter.converter = time.gmtime
 logging.basicConfig(format="%(asctime)s %(levelname)s: %(message)s")
 logger = logging.getLogger(__name__)

-def run(args, dry_run=False):
-    # type: (List[str], bool) -> str
+def run(args: List[str], dry_run: bool = False) -> str:
     if dry_run:
         print("Would have run: " + " ".join(map(shlex.quote, args)))
         return ""

View File

@@ -33,8 +33,7 @@ import os
 BATCH_SIZE = 1000

-def update_fts_columns(cursor):
-    # type: (psycopg2.extensions.cursor) -> int
+def update_fts_columns(cursor: psycopg2.extensions.cursor) -> int:
     cursor.execute("SELECT id, message_id FROM fts_update_log LIMIT %s;" % (
         BATCH_SIZE,))
     ids = []
@@ -52,8 +51,7 @@ def update_fts_columns(cursor):
     cursor.execute("DELETE FROM fts_update_log WHERE id = ANY(%s)", (ids,))
     return len(ids)

-def am_master(cursor):
-    # type: (psycopg2.extensions.cursor) -> bool
+def am_master(cursor: psycopg2.extensions.cursor) -> bool:
     cursor.execute("SELECT pg_is_in_recovery()")
     return not cursor.fetchall()[0][0]

View File

@@ -21,8 +21,7 @@ states = {
     "UNKNOWN": 3
 }  # type: Dict[str, int]

-def report(state, output):
-    # type: (str, str) -> None
+def report(state: str, output: str) -> None:
     print("%s\n%s" % (state, output))
     exit(states[state])

View File

@@ -35,8 +35,7 @@ states = {
     "UNKNOWN": 3
 }  # type: Dict[str, int]

-def report(state, short_msg, too_old=None):
-    # type: (str, str, Optional[Set[Any]]) -> None
+def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> None:
     too_old_data = ""
     if too_old:
         too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join(

View File

@@ -22,8 +22,7 @@ states = {
     "UNKNOWN": 3
 }  # type: Dict[str, int]

-def report(state, data, last_check):
-    # type: (str, str, float) -> None
+def report(state: str, data: str, last_check: float) -> None:
     print("%s: Last test run completed at %s\n%s" % (
         state, time.strftime("%Y-%m-%d %H:%M %Z", time.gmtime(last_check)),
         data))

View File

@@ -7,8 +7,7 @@ sys.path.append(ZULIP_PATH)
 from scripts.lib.zulip_tools import parse_cache_script_args
 from scripts.lib import clean_venv_cache, clean_node_cache, clean_emoji_cache

-def main():
-    # type: () -> None
+def main() -> None:
     args = parse_cache_script_args("This script cleans unused zulip caches.")
     os.chdir(ZULIP_PATH)
     clean_venv_cache.main(args)

View File

@@ -16,8 +16,7 @@ EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"
 if ENV == "travis":
     EMOJI_CACHE_PATH = os.path.join(os.environ["HOME"], "zulip-emoji-cache")

-def get_caches_in_use(threshold_days):
-    # type: (int) -> Set[str]
+def get_caches_in_use(threshold_days: int) -> Set[str]:
     setups_to_check = {ZULIP_PATH}
     caches_in_use = set()

View File

@@ -23,8 +23,7 @@ if ENV == "travis":
           'hence yarn is not installed. Exiting without cleaning npm cache.')
     sys.exit(0)

-def get_caches_in_use(threshold_days):
-    # type: (int) -> Set[str]
+def get_caches_in_use(threshold_days: int) -> Set[str]:
     setups_to_check = {ZULIP_PATH}
     caches_in_use = set()

View File

@@ -18,8 +18,7 @@ VENV_CACHE_DIR = '/srv/zulip-venv-cache'
 if ENV == "travis":
     VENV_CACHE_DIR = os.path.join(os.environ["HOME"], "zulip-venv-cache")

-def get_caches_in_use(threshold_days):
-    # type: (int) -> Set[str]
+def get_caches_in_use(threshold_days: int) -> Set[str]:
     setups_to_check = {ZULIP_PATH}
     caches_in_use = set()

View File

@@ -82,8 +82,7 @@ options = parser.parse_args()
 MAX_ALLOWED_PAYLOAD = 25 * 1024 * 1024

-def process_response_error(e):
-    # type: (HTTPError) -> None
+def process_response_error(e: HTTPError) -> None:
     if e.code == 400:
         response_content = e.read()
         response_data = json.loads(response_content.decode('utf8'))
@@ -94,8 +93,9 @@ def process_response_error(e):
     exit(1)

-def send_email_mirror(rcpt_to, shared_secret, host, url, test, verify_ssl):
-    # type: (str, str, str, str, bool, bool) -> None
+def send_email_mirror(
+    rcpt_to: str, shared_secret: str, host: str, url: str, test: bool, verify_ssl: bool
+) -> None:
     if not rcpt_to:
         print("5.1.1 Bad destination mailbox address: No missed message email address.")
         exit(posix.EX_NOUSER)

View File

@@ -5,8 +5,7 @@ import argparse
 import hashlib
 from typing import Iterable, List, MutableSet

-def expand_reqs_helper(fpath, visited):
-    # type: (str, MutableSet[str]) -> List[str]
+def expand_reqs_helper(fpath: str, visited: MutableSet[str]) -> List[str]:
     if fpath in visited:
         return []
     else:
@@ -27,8 +26,7 @@ def expand_reqs_helper(fpath, visited):
                 result.append(dep)
     return result

-def expand_reqs(fpath):
-    # type: (str) -> List[str]
+def expand_reqs(fpath: str) -> List[str]:
     """
     Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
     Removes comments from the output and recursively visits files specified inside `fpath`.
@@ -38,13 +36,11 @@ def expand_reqs(fpath):
     output = expand_reqs_helper(absfpath, set())
     return sorted(set(output))

-def hash_deps(deps):
-    # type: (Iterable[str]) -> str
+def hash_deps(deps: Iterable[str]) -> str:
     deps_str = "\n".join(deps) + "\n"
     return hashlib.sha1(deps_str.encode('utf-8')).hexdigest()

-def main():
-    # type: () -> int
+def main() -> int:
     description = ("Finds the SHA1 hash of list of dependencies in a requirements file"
                    " after recursively visiting all files specified in it.")
     parser = argparse.ArgumentParser(description=description)

View File

@@ -20,16 +20,16 @@ YARN_PACKAGE_JSON = os.path.join(ZULIP_SRV_PATH, 'zulip-yarn/package.json')
 DEFAULT_PRODUCTION = False

-def get_yarn_args(production):
-    # type: (bool) -> List[str]
+def get_yarn_args(production: bool) -> List[str]:
     if production:
         yarn_args = ["--prod"]
     else:
         yarn_args = []
     return yarn_args

-def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION):
-    # type: (Optional[str], bool) -> str
+def generate_sha1sum_node_modules(
+    setup_dir: Optional[str] = None, production: bool = DEFAULT_PRODUCTION
+) -> str:
     if setup_dir is None:
         setup_dir = os.path.realpath(os.getcwd())
     PACKAGE_JSON_FILE_PATH = os.path.join(setup_dir, 'package.json')
@@ -47,9 +47,12 @@ def generate_sha1sum_node_modules(setup_dir=None, production=DEFAULT_PRODUCTION)
     sha1sum.update(''.join(sorted(yarn_args)).encode('utf8'))
     return sha1sum.hexdigest()

-def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None,
-                       prefer_offline=False):
-    # type: (bool, Optional[IO[Any]], Optional[IO[Any]], bool) -> None
+def setup_node_modules(
+    production: bool = DEFAULT_PRODUCTION,
+    stdout: Optional[IO[Any]] = None,
+    stderr: Optional[IO[Any]] = None,
+    prefer_offline: bool = False,
+) -> None:
     yarn_args = get_yarn_args(production=production)
     if prefer_offline:
         yarn_args.append("--prefer-offline")
@@ -72,8 +75,13 @@ def setup_node_modules(production=DEFAULT_PRODUCTION, stdout=None, stderr=None,
             shutil.rmtree('node_modules')
         os.symlink(cached_node_modules, 'node_modules')

-def do_yarn_install(target_path, yarn_args, success_stamp, stdout=None, stderr=None):
-    # type: (str, List[str], str, Optional[IO[Any]], Optional[IO[Any]]) -> None
+def do_yarn_install(
+    target_path: str,
+    yarn_args: List[str],
+    success_stamp: str,
+    stdout: Optional[IO[Any]] = None,
+    stderr: Optional[IO[Any]] = None,
+) -> None:
     os.makedirs(target_path, exist_ok=True)
     shutil.copy('package.json', target_path)
     shutil.copy("yarn.lock", target_path)

View File

@@ -102,8 +102,7 @@ YUM_THUMBOR_VENV_DEPENDENCIES = [
     "gifsicle",
 ]

-def get_venv_dependencies(vendor, os_version):
-    # type: (str, str) -> List[str]
+def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
     if vendor == 'ubuntu' and os_version == '20.04':
         return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format("2"), ]
     elif "debian" in os_families():
@@ -115,18 +114,15 @@ def get_venv_dependencies(vendor, os_version):
     else:
         raise AssertionError("Invalid vendor")

-def install_venv_deps(pip, requirements_file, python2):
-    # type: (str, str, bool) -> None
+def install_venv_deps(pip: str, requirements_file: str, python2: bool) -> None:
     pip_requirements = os.path.join(ZULIP_PATH, "requirements", "pip2.txt" if python2 else "pip.txt")
     run([pip, "install", "--force-reinstall", "--require-hashes", "--requirement", pip_requirements])
     run([pip, "install", "--no-deps", "--require-hashes", "--requirement", requirements_file])

-def get_index_filename(venv_path):
-    # type: (str) -> str
+def get_index_filename(venv_path: str) -> str:
     return os.path.join(venv_path, 'package_index')

-def get_package_names(requirements_file):
-    # type: (str) -> List[str]
+def get_package_names(requirements_file: str) -> List[str]:
     packages = expand_reqs(requirements_file)
     cleaned = []
     operators = ['~=', '==', '!=', '<', '>']
@@ -148,8 +144,7 @@ def get_package_names(requirements_file):
     return sorted(cleaned)

-def create_requirements_index_file(venv_path, requirements_file):
-    # type: (str, str) -> str
+def create_requirements_index_file(venv_path: str, requirements_file: str) -> str:
     """
     Creates a file, called package_index, in the virtual environment
     directory that contains all the PIP packages installed in the
@@ -164,8 +159,7 @@ def create_requirements_index_file(venv_path, requirements_file):
     return index_filename

-def get_venv_packages(venv_path):
-    # type: (str) -> Set[str]
+def get_venv_packages(venv_path: str) -> Set[str]:
     """
     Returns the packages installed in the virtual environment using the
     package index file.
@@ -173,8 +167,7 @@ def get_venv_packages(venv_path):
     with open(get_index_filename(venv_path)) as reader:
         return {p.strip() for p in reader.read().split('\n') if p.strip()}

-def try_to_copy_venv(venv_path, new_packages):
-    # type: (str, Set[str]) -> bool
+def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
     """
     Tries to copy packages from an old virtual environment in the cache
     to the new virtual environment. The algorithm works as follows:
@@ -247,12 +240,12 @@ def try_to_copy_venv(venv_path, new_packages):
     return False

-def get_logfile_name(venv_path):
-    # type: (str) -> str
+def get_logfile_name(venv_path: str) -> str:
     return "{}/setup-venv.log".format(venv_path)

-def create_log_entry(target_log, parent, copied_packages, new_packages):
-    # type: (str, str, Set[str], Set[str]) -> None
+def create_log_entry(
+    target_log: str, parent: str, copied_packages: Set[str], new_packages: Set[str]
+) -> None:
     venv_path = os.path.dirname(target_log)
     with open(target_log, 'a') as writer:
@@ -267,13 +260,11 @@ def create_log_entry(target_log, parent, copied_packages, new_packages):
         writer.write("\n".join('- {}'.format(p) for p in sorted(new_packages)))
         writer.write("\n\n")

-def copy_parent_log(source_log, target_log):
-    # type: (str, str) -> None
+def copy_parent_log(source_log: str, target_log: str) -> None:
     if os.path.exists(source_log):
         shutil.copyfile(source_log, target_log)

-def do_patch_activate_script(venv_path):
-    # type: (str) -> None
+def do_patch_activate_script(venv_path: str) -> None:
     """
     Patches the bin/activate script so that the value of the environment variable VIRTUAL_ENV
     is set to venv_path during the script's execution whenever it is sourced.
@@ -290,8 +281,12 @@ def do_patch_activate_script(venv_path):
     with open(script_path, 'w') as f:
         f.write("".join(lines))

-def setup_virtualenv(target_venv_path, requirements_file, python2=False, patch_activate_script=False):
-    # type: (Optional[str], str, bool, bool) -> str
+def setup_virtualenv(
+    target_venv_path: Optional[str],
+    requirements_file: str,
+    python2: bool = False,
+    patch_activate_script: bool = False,
+) -> str:
     # Check if a cached version already exists
     path = os.path.join(ZULIP_PATH, 'scripts', 'lib', 'hash_reqs.py')
@@ -314,15 +309,13 @@ def setup_virtualenv(target_venv_path, requirements_file, python2=False, patch_a
         do_patch_activate_script(target_venv_path)
     return cached_venv_path

-def add_cert_to_pipconf():
-    # type: () -> None
+def add_cert_to_pipconf() -> None:
     conffile = os.path.expanduser("~/.pip/pip.conf")
     confdir = os.path.expanduser("~/.pip/")
     os.makedirs(confdir, exist_ok=True)
     run(["crudini", "--set", conffile, "global", "cert", os.environ["CUSTOM_CA_CERTIFICATES"]])

-def do_setup_virtualenv(venv_path, requirements_file, python2):
-    # type: (str, str, bool) -> None
+def do_setup_virtualenv(venv_path: str, requirements_file: str, python2: bool) -> None:
     # Setup Python virtualenv
     new_packages = set(get_package_names(requirements_file))

View File

@@ -39,8 +39,7 @@ BLUE = '\x1b[34m'
 MAGENTA = '\x1b[35m'
 CYAN = '\x1b[36m'

-def overwrite_symlink(src, dst):
-    # type: (str, str) -> None
+def overwrite_symlink(src: str, dst: str) -> None:
     while True:
         tmp = tempfile.mktemp(
             prefix='.' + os.path.basename(dst) + '.',
@@ -56,8 +55,7 @@ def overwrite_symlink(src, dst):
         os.remove(tmp)
         raise

-def parse_cache_script_args(description):
-    # type: (str) -> argparse.Namespace
+def parse_cache_script_args(description: str) -> argparse.Namespace:
     parser = argparse.ArgumentParser(description=description)
     parser.add_argument(
@@ -88,8 +86,7 @@ def get_deploy_root() -> str:
         os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))
     )

-def get_deployment_version(extract_path):
-    # type: (str) -> str
+def get_deployment_version(extract_path: str) -> str:
     version = '0.0.0'
     for item in os.listdir(extract_path):
         item_path = os.path.join(extract_path, item)
@@ -101,14 +98,12 @@ def get_deployment_version(extract_path):
             break
     return version

-def is_invalid_upgrade(current_version, new_version):
-    # type: (str, str) -> bool
+def is_invalid_upgrade(current_version: str, new_version: str) -> bool:
     if new_version > '1.4.3' and current_version <= '1.3.10':
         return True
     return False

-def subprocess_text_output(args):
-    # type: (Sequence[str]) -> str
+def subprocess_text_output(args: Sequence[str]) -> str:
     return subprocess.check_output(args, universal_newlines=True).strip()

 def get_zulip_pwent() -> pwd.struct_passwd:
@@ -121,8 +116,7 @@ def get_zulip_pwent() -> pwd.struct_passwd:
     # `zulip` user as that's the correct value in production.
     return pwd.getpwnam("zulip")

-def su_to_zulip(save_suid=False):
-    # type: (bool) -> None
+def su_to_zulip(save_suid: bool = False) -> None:
     """Warning: su_to_zulip assumes that the zulip checkout is owned by
     the zulip user (or whatever normal user is running the Zulip
     installation). It should never be run from the installer or other
@@ -136,14 +130,12 @@ def su_to_zulip(save_suid=False):
     os.setuid(pwent.pw_uid)
     os.environ['HOME'] = pwent.pw_dir

-def make_deploy_path():
-    # type: () -> str
+def make_deploy_path() -> str:
     timestamp = datetime.datetime.now().strftime(TIMESTAMP_FORMAT)
     return os.path.join(DEPLOYMENTS_DIR, timestamp)

 TEMPLATE_DATABASE_DIR = "test-backend/databases"

-def get_dev_uuid_var_path(create_if_missing=False):
-    # type: (bool) -> str
+def get_dev_uuid_var_path(create_if_missing: bool = False) -> str:
     zulip_path = get_deploy_root()
     uuid_path = os.path.join(os.path.realpath(os.path.dirname(zulip_path)), ".zulip-dev-uuid")
     if os.path.exists(uuid_path):
@@ -163,8 +155,7 @@ def get_dev_uuid_var_path(create_if_missing=False):
     os.makedirs(result_path, exist_ok=True)
     return result_path

-def get_deployment_lock(error_rerun_script):
-    # type: (str) -> None
+def get_deployment_lock(error_rerun_script: str) -> None:
     start_time = time.time()
     got_lock = False
     while time.time() - start_time < 300:
@@ -187,12 +178,10 @@ def get_deployment_lock(error_rerun_script):
               ENDC)
         sys.exit(1)

-def release_deployment_lock():
-    # type: () -> None
+def release_deployment_lock() -> None:
     shutil.rmtree(LOCK_DIR)

-def run(args, **kwargs):
-    # type: (Sequence[str], **Any) -> None
+def run(args: Sequence[str], **kwargs: Any) -> None:
     # Output what we're doing in the `set -x` style
     print("+ %s" % (" ".join(map(shlex.quote, args)),))
@@ -208,8 +197,7 @@ def run(args, **kwargs):
         print()
         raise

-def log_management_command(cmd, log_path):
-    # type: (str, str) -> None
+def log_management_command(cmd: str, log_path: str) -> None:
     log_dir = os.path.dirname(log_path)
     if not os.path.exists(log_dir):
         os.makedirs(log_dir)
@@ -223,16 +211,14 @@ def log_management_command(cmd, log_path):
     logger.info("Ran '%s'" % (cmd,))

-def get_environment():
-    # type: () -> str
+def get_environment() -> str:
     if os.path.exists(DEPLOYMENTS_DIR):
         return "prod"
     if os.environ.get("TRAVIS"):
         return "travis"
     return "dev"

-def get_recent_deployments(threshold_days):
-    # type: (int) -> Set[str]
+def get_recent_deployments(threshold_days: int) -> Set[str]:
     # Returns a list of deployments not older than threshold days
     # including `/root/zulip` directory if it exists.
     recent = set()
@@ -259,16 +245,14 @@ def get_recent_deployments(threshold_days):
         recent.add("/root/zulip")
     return recent

-def get_threshold_timestamp(threshold_days):
-    # type: (int) -> int
+def get_threshold_timestamp(threshold_days: int) -> int:
     # Given number of days, this function returns timestamp corresponding
     # to the time prior to given number of days.
     threshold = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
     threshold_timestamp = int(time.mktime(threshold.utctimetuple()))
     return threshold_timestamp

-def get_caches_to_be_purged(caches_dir, caches_in_use, threshold_days):
-    # type: (str, Set[str], int) -> Set[str]
+def get_caches_to_be_purged(caches_dir: str, caches_in_use: Set[str], threshold_days: int) -> Set[str]:
     # Given a directory containing caches, a list of caches in use
     # and threshold days, this function return a list of caches
     # which can be purged. Remove the cache only if it is:
@@ -287,8 +271,9 @@ def get_caches_to_be_purged(caches_dir, caches_in_use, threshold_days):
             caches_to_purge.add(cache_dir)
     return caches_to_purge

-def purge_unused_caches(caches_dir, caches_in_use, cache_type, args):
-    # type: (str, Set[str], str, argparse.Namespace) -> None
+def purge_unused_caches(
+    caches_dir: str, caches_in_use: Set[str], cache_type: str, args: argparse.Namespace
+) -> None:
     all_caches = {os.path.join(caches_dir, cache) for cache in os.listdir(caches_dir)}
     caches_to_purge = get_caches_to_be_purged(caches_dir, caches_in_use, args.threshold_days)
     caches_to_keep = all_caches - caches_to_purge
@@ -298,8 +283,7 @@ def purge_unused_caches(caches_dir, caches_in_use, cache_type, args):
     if args.verbose:
         print("Done!")

-def generate_sha1sum_emoji(zulip_path):
-    # type: (str) -> str
+def generate_sha1sum_emoji(zulip_path: str) -> str:
     ZULIP_EMOJI_DIR = os.path.join(zulip_path, 'tools', 'setup', 'emoji')
     sha = hashlib.sha1()
@@ -332,8 +316,14 @@ def generate_sha1sum_emoji(zulip_path):
     return sha.hexdigest()

-def may_be_perform_purging(dirs_to_purge, dirs_to_keep, dir_type, dry_run, verbose, no_headings):
-    # type: (Set[str], Set[str], str, bool, bool, bool) -> None
+def may_be_perform_purging(
+    dirs_to_purge: Set[str],
+    dirs_to_keep: Set[str],
+    dir_type: str,
+    dry_run: bool,
+    verbose: bool,
+    no_headings: bool,
+) -> None:
     if dry_run:
         print("Performing a dry run...")
     if not no_headings:
@@ -350,8 +340,7 @@ def may_be_perform_purging(dirs_to_purge, dirs_to_keep, dir_type, dry_run, verbo
             print("Keeping used %s: %s" % (dir_type, directory))

 @functools.lru_cache(None)
-def parse_os_release():
-    # type: () -> Dict[str, str]
+def parse_os_release() -> Dict[str, str]:
     """
     Example of the useful subset of the data:
     {
@@ -423,8 +412,7 @@ def is_root() -> bool:
         return True
     return False

-def run_as_root(args, **kwargs):
-    # type: (List[str], **Any) -> None
+def run_as_root(args: List[str], **kwargs: Any) -> None:
     sudo_args = kwargs.pop('sudo_args', [])
     if not is_root():
         args = ['sudo'] + sudo_args + ['--'] + args
@@ -454,8 +442,12 @@ def assert_running_as_root(strip_lib_from_paths: bool=False) -> None:
         print("{} must be run as root.".format(script_name))
         sys.exit(1)

-def get_config(config_file, section, key, default_value=""):
-    # type: (configparser.RawConfigParser, str, str, str) -> str
+def get_config(
+    config_file: configparser.RawConfigParser,
+    section: str,
+    key: str,
+    default_value: str = "",
+) -> str:
     if config_file.has_option(section, key):
         return config_file.get(section, key)
     return default_value
@@ -465,8 +457,7 @@ def get_config_file() -> configparser.RawConfigParser:
     config_file.read("/etc/zulip/zulip.conf")
     return config_file

-def get_deploy_options(config_file):
-    # type: (configparser.RawConfigParser) -> List[str]
+def get_deploy_options(config_file: configparser.RawConfigParser) -> List[str]:
     return get_config(config_file, 'deployment', 'deploy_options', "").strip().split()

 def get_or_create_dev_uuid_var_path(path: str) -> str:

View File

@@ -37,8 +37,7 @@ options = parser.parse_args()
 config_file = configparser.RawConfigParser()
 config_file.read("/etc/zulip/zulip.conf")

-def get_config(section, key, default_value):
-    # type: (str, str, str) -> str
+def get_config(section: str, key: str, default_value: str) -> str:
     if config_file.has_option(section, key):
         return config_file.get(section, key)
     return default_value

View File

@@ -2,8 +2,7 @@ import time
 from typing import Tuple

-def nagios_from_file(results_file):
-    # type: (str) -> Tuple[int, str]
+def nagios_from_file(results_file: str) -> Tuple[int, str]:
     """Returns a nagios-appropriate string and return code obtained by
     parsing the desired file on disk. The file on disk should be of format

View File

@@ -11,8 +11,7 @@ sys.path.append(ZULIP_PATH)
 from scripts.lib.zulip_tools import DEPLOYMENTS_DIR, get_recent_deployments, \
     may_be_perform_purging

-def parse_args():
-    # type: () -> argparse.Namespace
+def parse_args() -> argparse.Namespace:
     parser = argparse.ArgumentParser(
         description="This script can be used for cleaning old unused deployments.",
         epilog="Orphaned/unused caches older than threshold days will be automatically "
@@ -34,8 +33,7 @@ def parse_args():
     args.verbose |= args.dry_run  # Always print a detailed report in case of dry run.
     return args

-def get_deployments_to_be_purged(recent_deployments):
-    # type: (Set[str]) -> Set[str]
+def get_deployments_to_be_purged(recent_deployments: Set[str]) -> Set[str]:
     all_deployments = {os.path.join(DEPLOYMENTS_DIR, deployment)
                        for deployment in os.listdir(DEPLOYMENTS_DIR)}
     deployments_to_purge = set()
@@ -52,8 +50,7 @@ def get_deployments_to_be_purged(recent_deployments):
             deployments_to_purge.add(deployment)
     return deployments_to_purge

-def main():
-    # type: () -> None
+def main() -> None:
     args = parse_args()
     deployments_to_keep = get_recent_deployments(args.threshold_days)
     deployments_to_purge = get_deployments_to_be_purged(deployments_to_keep)

View File

@@ -31,14 +31,12 @@ AUTOGENERATED_SETTINGS = [
     'thumbor_key',
 ]

-def generate_django_secretkey():
-    # type: () -> str
+def generate_django_secretkey() -> str:
     """Secret key generation taken from Django's startproject.py"""
     chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
     return get_random_string(50, chars)

-def get_old_conf(output_filename):
-    # type: (str) -> Dict[str, str]
+def get_old_conf(output_filename: str) -> Dict[str, str]:
     if not os.path.exists(output_filename) or os.path.getsize(output_filename) == 0:
         return {}
@@ -47,8 +45,7 @@ def get_old_conf(output_filename):
     return dict(secrets_file.items("secrets"))

-def generate_secrets(development=False):
-    # type: (bool) -> None
+def generate_secrets(development: bool = False) -> None:
     if development:
         OUTPUT_SETTINGS_FILENAME = "zproject/dev-secrets.conf"
     else:
@@ -59,12 +56,10 @@ def generate_secrets(development=False):
     if len(current_conf) == 0:
         lines = ['[secrets]\n']

-    def need_secret(name):
-        # type: (str) -> bool
+    def need_secret(name: str) -> bool:
         return name not in current_conf

-    def add_secret(name, value):
-        # type: (str, str) -> None
+    def add_secret(name: str, value: str) -> None:
         lines.append("%s = %s\n" % (name, value))
         current_conf[name] = value

View File

@@ -19,8 +19,7 @@ parser = argparse.ArgumentParser()
 parser.add_argument("tarball", help="Filename of input tarball")

-def restore_backup(tarball_file):
-    # type: (IO[bytes]) -> None
+def restore_backup(tarball_file: IO[bytes]) -> None:
     su_to_zulip(save_suid=True)

View File

@@ -14,8 +14,7 @@ import subprocess
 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
 from scripts.lib.zulip_tools import WARNING, FAIL, ENDC

-def find_handlebars(translatable_strings):
-    # type: (List[str]) -> List[str]
+def find_handlebars(translatable_strings: List[str]) -> List[str]:
     errored = []
     for string in translatable_strings:
         if '{{' in string:

View File

@@ -21,34 +21,29 @@ from typing import Any, Dict, Optional
 # usage: python check-issue-labels
 # Pass --force as an argument to run without a token.

-def get_config():
-    # type: () -> ConfigParser
+def get_config() -> ConfigParser:
     config = ConfigParser()
     config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf.ini'))
     return config

-def area_labeled(issue):
-    # type: (Dict[str, Any]) -> bool
+def area_labeled(issue: Dict[str, Any]) -> bool:
     for label in issue["labels"]:
         label_name = str(label["name"])
         if "area:" in label_name:
             return True
     return False

-def is_issue(item):
-    # type: (Dict[str, Any]) -> bool
+def is_issue(item: Dict[str, Any]) -> bool:
     return "issues" in item["html_url"]

-def get_next_page_url(link_header):
-    # type: (str) -> Optional[str]
+def get_next_page_url(link_header: str) -> Optional[str]:
     matches = re.findall(r'\<(\S+)\>; rel=\"next\"', link_header)
     try:
         return matches[0]
     except IndexError:
         return None

-def check_issue_labels():
-    # type: () -> None
+def check_issue_labels() -> None:
     parser = argparse.ArgumentParser()
     parser.add_argument('--force', action="store_true", dest="force", default=False)
     args = parser.parse_args()

View File

@@ -12,8 +12,7 @@ from tools.lib.test_script import (
     assert_provisioning_status_ok,
 )

-def run():
-    # type: () -> None
+def run() -> None:
     parser = argparse.ArgumentParser()
     parser.add_argument('--force', default=False,
                         action="store_true",

View File

@@ -22,8 +22,7 @@ EXCLUDED_FILES = [
     'static/assets/icons/template.hbs',
 ]

-def check_our_files(modified_only, all_dups, fix, targets):
-    # type: (bool, bool, bool, List[str]) -> None
+def check_our_files(modified_only: bool, all_dups: bool, fix: bool, targets: List[str]) -> None:
     by_lang = lister.list_files(
         targets=targets,
         modified_only=args.modified,
@@ -34,8 +33,7 @@ def check_our_files(modified_only, all_dups, fix, targets):
     check_handlebar_templates(by_lang['hbs'], fix)
     check_html_templates(by_lang['html'], all_dups, fix)

-def check_html_templates(templates, all_dups, fix):
-    # type: (Iterable[str], bool, bool) -> None
+def check_html_templates(templates: Iterable[str], all_dups: bool, fix: bool) -> None:
     # Our files with .html extensions are usually for Django, but we also
     # have a few static .html files.
     #
@@ -143,8 +141,7 @@ def check_html_templates(templates, all_dups, fix):
         if not validate_indent_html(fn, fix):
             sys.exit(1)

-def check_handlebar_templates(templates, fix):
-    # type: (Iterable[str], bool) -> None
+def check_handlebar_templates(templates: Iterable[str], fix: bool) -> None:
     # Check all our handlebars templates.
     templates = [fn for fn in templates if fn.endswith('.hbs')]

View File

@@ -14,8 +14,7 @@ import ujson
 Call = Dict[str, Any]

-def clean_up_pattern(s):
-    # type: (str) -> str
+def clean_up_pattern(s: str) -> str:
     paren_level = 0
     in_braces = False
     result = ''
@@ -35,8 +34,7 @@ def clean_up_pattern(s):
         prior_char = c
     return result

-def encode_info(info):
-    # type: (Any) -> str
+def encode_info(info: Any) -> str:
     try:
         result = ''
         try:
@@ -54,12 +52,10 @@ def encode_info(info):
         pass
     return 'NOT ENCODABLE'

-def fix_test_name(s):
-    # type: (str) -> str
+def fix_test_name(s: str) -> str:
     return s.replace('zerver.tests.', '')

-def create_single_page(pattern, out_dir, href, calls):
-    # type: (str, str, str, List[Call]) -> None
+def create_single_page(pattern: str, out_dir: str, href: str, calls: List[Call]) -> None:
     fn = out_dir + '/' + href
     with open(fn, 'w') as f:
         f.write('''
@@ -85,8 +81,7 @@ def create_single_page(pattern, out_dir, href, calls):
             f.write('<br>')
         f.write('</div>')

-def create_user_docs():
-    # type: () -> None
+def create_user_docs() -> None:
     fn = 'var/url_coverage.txt'  # TODO: make path more robust, maybe use json suffix
     out_dir = 'var/api_docs'

View File

@@ -14,8 +14,7 @@ sys.path.insert(0, ROOT_DIR)
 from scripts.lib.zulip_tools import get_dev_uuid_var_path
 UUID_VAR_PATH = get_dev_uuid_var_path()

-def run(check_func):
-    # type: (Callable[[], bool]) -> None
+def run(check_func: Callable[[], bool]) -> None:
     '''
     This decorator simply runs functions. It makes it more
     convenient to add new checks without a big main() function.
@@ -24,32 +23,27 @@ def run(check_func):
     if not rc:
         sys.exit(1)

-def run_command(args):
-    # type: (List[str]) -> None
+def run_command(args: List[str]) -> None:
     print(' '.join(map(shlex.quote, args)))
     subprocess.check_call(args)

 @run
-def check_python_version():
-    # type: () -> bool
+def check_python_version() -> bool:
     subprocess.check_call(['/usr/bin/env', 'python', '-V'])
     return True

 @run
-def pwd():
-    # type: () -> bool
+def pwd() -> bool:
     print(os.getcwd())
     return True

 @run
-def host_info():
-    # type: () -> bool
+def host_info() -> bool:
     print(platform.platform())
     return True

 @run
-def check_django():
-    # type: () -> bool
+def check_django() -> bool:
     try:
         import django
         print('Django version:', django.get_version())
@@ -70,8 +64,7 @@ def check_django():
     return False

 @run
-def provision_version():
-    # type: () -> bool
+def provision_version() -> bool:
     fn = os.path.join(UUID_VAR_PATH, 'provision_version')
     with open(fn) as f:
         version = f.read().strip()
@@ -84,15 +77,13 @@ def provision_version():
     return True

 @run
-def node_stuff():
-    # type: () -> bool
+def node_stuff() -> bool:
     print('node version:')
     subprocess.check_call(['node', '--version'])
     return True

 @run
-def test_models():
-    # type: () -> bool
+def test_models() -> bool:
     settings_module = "zproject.settings"
     os.environ['DJANGO_SETTINGS_MODULE'] = settings_module
     import django
@@ -103,8 +94,7 @@ def test_models():
     return True

 @run
-def check_venv():
-    # type: () -> bool
+def check_venv() -> bool:
     path = os.path.join(ROOT_DIR, 'scripts', 'lib', 'hash_reqs.py')
     cache_dir = '/srv/zulip-venv-cache/'
     for fn in ['dev.txt']:
@@ -118,8 +108,7 @@ def check_venv():
     return True

 @run
-def check_migrations():
-    # type: () -> bool
+def check_migrations() -> bool:
     print()
     rc = subprocess.check_call('./tools/test-migrations')
     return (rc == 0)

View File

@@ -33,14 +33,12 @@ parser.add_argument("username", help="Github username for whom you want to creat
 parser.add_argument('--tags', nargs='+', default=[])
 parser.add_argument('-f', '--recreate', dest='recreate', action="store_true", default=False)

-def get_config():
-    # type: () -> configparser.ConfigParser
+def get_config() -> configparser.ConfigParser:
     config = configparser.ConfigParser()
     config.read(os.path.join(os.path.dirname(os.path.abspath(__file__)), 'conf.ini'))
     return config

-def user_exists(username):
-    # type: (str) -> bool
+def user_exists(username: str) -> bool:
     print("Checking to see if GitHub user {} exists...".format(username))
     user_api_url = "https://api.github.com/users/{}".format(username)
     try:
@@ -53,8 +51,7 @@ def user_exists(username):
         print("Does the github user {} exist?".format(username))
         sys.exit(1)

-def get_keys(username):
-    # type: (str) -> List[Dict[str, Any]]
+def get_keys(username: str) -> List[Dict[str, Any]]:
     print("Checking to see that GitHub user has available public keys...")
     apiurl_keys = "https://api.github.com/users/{}/keys".format(username)
     try:
@@ -70,8 +67,7 @@ def get_keys(username):
         print("Has user {} added ssh keys to their github account?".format(username))
         sys.exit(1)

-def fork_exists(username):
-    # type: (str) -> bool
+def fork_exists(username: str) -> bool:
     print("Checking to see GitHub user has forked zulip/zulip...")
     apiurl_fork = "https://api.github.com/repos/{}/zulip".format(username)
     try:
@@ -100,8 +96,7 @@ def exit_if_droplet_exists(my_token: str, username: str, recreate: bool) -> None
         return
     print("...No droplet found...proceeding.")

-def set_user_data(username, userkeys):
-    # type: (str, List[Dict[str, Any]]) -> str
+def set_user_data(username: str, userkeys: List[Dict[str, Any]]) -> str:
     print("Setting cloud-config data, populated with GitHub user's public keys...")
     ssh_authorized_keys = ""
@@ -137,8 +132,7 @@ cd /home/zulipdev/{1} && git remote add origin https://github.com/{0}/{1}.git &&
     print("...returning cloud-config data.")
     return cloudconf

-def create_droplet(my_token, template_id, username, tags, user_data):
-    # type: (str, str, str, List[str], str) -> str
+def create_droplet(my_token: str, template_id: str, username: str, tags: List[str], user_data: str) -> str:
     droplet = digitalocean.Droplet(
         token=my_token,
         name='{}.zulipdev.org'.format(username),
@@ -177,8 +171,7 @@ def delete_existing_records(records: List[digitalocean.Record], record_name: str
     if count:
         print("Deleted {} existing A records for {}.zulipdev.org.".format(count, record_name))

-def create_dns_record(my_token, username, ip_address):
-    # type: (str, str, str) -> None
+def create_dns_record(my_token: str, username: str, ip_address: str) -> None:
     domain = digitalocean.Domain(token=my_token, name='zulipdev.org')
     domain.load()
     records = domain.get_records()
@@ -192,8 +185,7 @@ def create_dns_record(my_token, username, ip_address):
     print("Creating new A record for *.{}.zulipdev.org that points to {}.".format(username, ip_address))
     domain.create_new_domain_record(type='A', name=wildcard_name, data=ip_address)

-def print_completion(username):
-    # type: (str) -> None
+def print_completion(username: str) -> None:
     print("""
 COMPLETE! Droplet for GitHub user {0} is available at {0}.zulipdev.org.

View File

@@ -9,12 +9,10 @@ sanity_check.check_venv(__file__)
 from typing import Any, Dict, List

-def debug(obj):
-    # type: (Any) -> None
+def debug(obj: Any) -> None:
     print(json.dumps(obj, indent=4))

-def parse_file(fn):
-    # type: (str) -> Dict[str, Any]
+def parse_file(fn: str) -> Dict[str, Any]:
     with open(fn) as f:
         text = f.read()
     tags = re.findall(r'{+\s*(.*?)\s*}+', text)
@@ -22,8 +20,7 @@ def parse_file(fn):
     context = root
     stack = []  # type: List[Dict[str, Any]]

-    def set_var(var, val):
-        # type: (str, Any) -> None
+    def set_var(var: str, val: Any) -> None:
         num_levels_up = len(re.findall(r'\.\.', var))
         if num_levels_up:
             var = var.split('/')[-1]
@@ -95,8 +92,7 @@ def parse_file(fn):
             set_var(tag, '')

-def clean_this(obj):
-    # type: (Any) -> Any
+def clean_this(obj: Any) -> Any:
     if isinstance(obj, list):
         return [clean_this(item) for item in obj]
     if isinstance(obj, dict):

View File

@@ -46,8 +46,7 @@ USAGE = '''
 TODO: allow specific files to be searched.'''

-def check_our_files():
-    # type: () -> None
+def check_our_files() -> None:
     parser = argparse.ArgumentParser(description=USAGE,
                                      formatter_class=argparse.RawTextHelpFormatter)

View File

@@ -31,8 +31,7 @@ JS_FILES_DIR = os.path.join(ROOT_DIR, 'static/js')
 OUTPUT_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.dot'))
 PNG_FILE_PATH = os.path.relpath(os.path.join(ROOT_DIR, 'var/zulip-deps.png'))

-def get_js_edges():
-    # type: () -> Tuple[EdgeSet, MethodDict]
+def get_js_edges() -> Tuple[EdgeSet, MethodDict]:
     names = set()
     modules = []  # type: List[Dict[str, Any]]
     for js_file in os.listdir(JS_FILES_DIR):
@@ -75,8 +74,7 @@ def get_js_edges():
             methods[tup].append(method)
     return edges, methods

-def find_edges_to_remove(graph, methods):
-    # type: (Graph, MethodDict) -> Tuple[Graph, List[Edge]]
+def find_edges_to_remove(graph: Graph, methods: MethodDict) -> Tuple[Graph, List[Edge]]:
     EXEMPT_EDGES = [
         # These are sensible dependencies, so don't cut them.
         ('rows', 'message_store'),
@@ -144,8 +142,7 @@ def find_edges_to_remove(graph, methods):
         ('message_edit', 'resize'),
     ]  # type: List[Edge]

-    def is_exempt(edge):
-        # type: (Tuple[str, str]) -> bool
+    def is_exempt(edge: Tuple[str, str]) -> bool:
         parent, child = edge
         if edge == ('server_events', 'reload'):
             return False
@@ -223,8 +220,7 @@ def find_edges_to_remove(graph, methods):
         ('emoji_picker', 'reactions'),
     ]

-    def cut_is_legal(edge):
-        # type: (Edge) -> bool
+    def cut_is_legal(edge: Edge) -> bool:
         parent, child = edge
         if child in ['reload', 'popovers', 'overlays', 'notifications',
                      'server_events', 'compose_actions']:
@@ -255,8 +251,7 @@ def find_edges_to_remove(graph, methods):
     return graph, removed_edges

-def report_roadmap(edges, methods):
-    # type: (List[Edge], MethodDict) -> None
+def report_roadmap(edges: List[Edge], methods: MethodDict) -> None:
     child_modules = {child for parent, child in edges}
     module_methods = defaultdict(set)  # type: DefaultDict[str, Set[str]]
     callers = defaultdict(set)  # type: DefaultDict[Tuple[str, str], Set[str]]
@@ -277,8 +272,7 @@ def report_roadmap(edges, methods):
         print()
         print()

-def produce_partial_output(graph):
-    # type: (Graph) -> None
+def produce_partial_output(graph: Graph) -> None:
     print(graph.num_edges())
     buffer = make_dot_file(graph)
@@ -290,8 +284,7 @@ def produce_partial_output(graph):
     print('See dot file here: {}'.format(OUTPUT_FILE_PATH))
     print('See output png file: {}'.format(PNG_FILE_PATH))

-def run():
-    # type: () -> None
+def run() -> None:
     edges, methods = get_js_edges()
     graph = Graph(edges)
     graph, removed_edges = find_edges_to_remove(graph, methods)

@ -180,8 +180,7 @@ BANNED_WORDS = {
'Use organization instead.'), 'Use organization instead.'),
} }
def get_safe_phrase(phrase): def get_safe_phrase(phrase: str) -> str:
# type: (str) -> str
""" """
Safe phrase is in lower case and doesn't contain characters which can Safe phrase is in lower case and doesn't contain characters which can
conflict with split boundaries. All conflicting characters are replaced conflict with split boundaries. All conflicting characters are replaced
@ -190,8 +189,7 @@ def get_safe_phrase(phrase):
phrase = SPLIT_BOUNDARY_REGEX.sub('_', phrase) phrase = SPLIT_BOUNDARY_REGEX.sub('_', phrase)
return phrase.lower() return phrase.lower()
def replace_with_safe_phrase(matchobj): def replace_with_safe_phrase(matchobj: Match[str]) -> str:
# type: (Match[str]) -> str
""" """
The idea is to convert IGNORED_PHRASES into safe phrases, see The idea is to convert IGNORED_PHRASES into safe phrases, see
`get_safe_phrase()` function. The only exception is when the `get_safe_phrase()` function. The only exception is when the
@ -215,8 +213,7 @@ def replace_with_safe_phrase(matchobj):
return safe_string return safe_string
def get_safe_text(text): def get_safe_text(text: str) -> str:
# type: (str) -> str
""" """
This returns text which is rendered by BeautifulSoup and is in the This returns text which is rendered by BeautifulSoup and is in the
form that can be split easily and has all IGNORED_PHRASES processed. form that can be split easily and has all IGNORED_PHRASES processed.
@ -228,8 +225,7 @@ def get_safe_text(text):
return text return text
def is_capitalized(safe_text): def is_capitalized(safe_text: str) -> bool:
# type: (str) -> bool
sentences = SPLIT_BOUNDARY_REGEX.split(safe_text) sentences = SPLIT_BOUNDARY_REGEX.split(safe_text)
sentences = [sentence.strip() sentences = [sentence.strip()
for sentence in sentences if sentence.strip()] for sentence in sentences if sentence.strip()]
@ -259,8 +255,7 @@ def check_banned_words(text: str) -> List[str]:
return errors return errors
def check_capitalization(strings): def check_capitalization(strings: List[str]) -> Tuple[List[str], List[str], List[str]]:
# type: (List[str]) -> Tuple[List[str], List[str], List[str]]
errors = [] errors = []
ignored = [] ignored = []
banned_word_errors = [] banned_word_errors = []

@ -84,8 +84,7 @@ imperative_forms = sorted([
]) ])
def head_binary_search(key, words): def head_binary_search(key: Text, words: List[str]) -> str:
# type: (Text, List[str]) -> str
""" Find the imperative mood version of `word` by looking at the first """ Find the imperative mood version of `word` by looking at the first
3 characters. """ 3 characters. """
@ -124,8 +123,7 @@ class ImperativeMood(LineRule):
error_msg = ('The first word in commit title should be in imperative mood ' error_msg = ('The first word in commit title should be in imperative mood '
'("{word}" -> "{imperative}"): "{title}"') '("{word}" -> "{imperative}"): "{title}"')
def validate(self, line, commit): def validate(self, line: Text, commit: gitlint.commit) -> List[RuleViolation]:
# type: (Text, gitlint.commit) -> List[RuleViolation]
violations = [] violations = []
# Ignore the section tag (ie `<section tag>: <message body>.`) # Ignore the section tag (ie `<section tag>: <message body>.`)
@ -153,8 +151,7 @@ class TitleMatchRegexAllowException(LineRule):
target = CommitMessageTitle target = CommitMessageTitle
options_spec = [StrOption('regex', ".*", "Regex the title should match")] options_spec = [StrOption('regex', ".*", "Regex the title should match")]
def validate(self, title, commit): def validate(self, title: Text, commit: gitlint.commit) -> List[RuleViolation]:
# type: (Text, gitlint.commit) -> List[RuleViolation]
regex = self.options['regex'].value regex = self.options['regex'].value
pattern = re.compile(regex, re.UNICODE) pattern = re.compile(regex, re.UNICODE)

@ -6,8 +6,7 @@ Edge = Tuple[str, str]
EdgeSet = Set[Edge] EdgeSet = Set[Edge]
class Graph: class Graph:
def __init__(self, tuples): def __init__(self, tuples: EdgeSet) -> None:
# type: (EdgeSet) -> None
self.children = defaultdict(list) # type: DefaultDict[str, List[str]] self.children = defaultdict(list) # type: DefaultDict[str, List[str]]
self.parents = defaultdict(list) # type: DefaultDict[str, List[str]] self.parents = defaultdict(list) # type: DefaultDict[str, List[str]]
self.nodes = set() # type: Set[str] self.nodes = set() # type: Set[str]
@ -18,30 +17,25 @@ class Graph:
self.nodes.add(parent) self.nodes.add(parent)
self.nodes.add(child) self.nodes.add(child)
def copy(self): def copy(self) -> 'Graph':
# type: () -> 'Graph'
return Graph(self.edges()) return Graph(self.edges())
def num_edges(self): def num_edges(self) -> int:
# type: () -> int
return len(self.edges()) return len(self.edges())
def minus_edge(self, edge): def minus_edge(self, edge: Edge) -> 'Graph':
# type: (Edge) -> 'Graph'
edges = self.edges().copy() edges = self.edges().copy()
edges.remove(edge) edges.remove(edge)
return Graph(edges) return Graph(edges)
def edges(self): def edges(self) -> EdgeSet:
# type: () -> EdgeSet
s = set() s = set()
for parent in self.nodes: for parent in self.nodes:
for child in self.children[parent]: for child in self.children[parent]:
s.add((parent, child)) s.add((parent, child))
return s return s
def remove_exterior_nodes(self): def remove_exterior_nodes(self) -> None:
# type: () -> None
still_work_to_do = True still_work_to_do = True
while still_work_to_do: while still_work_to_do:
still_work_to_do = False # for now still_work_to_do = False # for now
@ -51,8 +45,7 @@ class Graph:
still_work_to_do = True still_work_to_do = True
break break
def is_exterior_node(self, node): def is_exterior_node(self, node: str) -> bool:
# type: (str) -> bool
parents = self.parents[node] parents = self.parents[node]
children = self.children[node] children = self.children[node]
if not parents: if not parents:
@ -66,16 +59,14 @@ class Graph:
# effectively be collapsed into the parent, so don't add clutter. # effectively be collapsed into the parent, so don't add clutter.
return parents[0] == children[0] return parents[0] == children[0]
def remove(self, node): def remove(self, node: str) -> None:
# type: (str) -> None
for parent in self.parents[node]: for parent in self.parents[node]:
self.children[parent].remove(node) self.children[parent].remove(node)
for child in self.children[node]: for child in self.children[node]:
self.parents[child].remove(node) self.parents[child].remove(node)
self.nodes.remove(node) self.nodes.remove(node)
def report(self): def report(self) -> None:
# type: () -> None
print('parents/children/module') print('parents/children/module')
tups = sorted([ tups = sorted([
(len(self.parents[node]), len(self.children[node]), node) (len(self.parents[node]), len(self.children[node]), node)
@ -83,14 +74,12 @@ class Graph:
for tup in tups: for tup in tups:
print(tup) print(tup)
def best_edge_to_remove(orig_graph, is_exempt): def best_edge_to_remove(orig_graph: Graph, is_exempt: Callable[[Edge], bool]) -> Optional[Edge]:
# type: (Graph, Callable[[Edge], bool]) -> Optional[Edge]
# expects an already reduced graph as input # expects an already reduced graph as input
orig_edges = orig_graph.edges() orig_edges = orig_graph.edges()
def get_choices(): def get_choices() -> Iterator[Tuple[int, Edge]]:
# type: () -> Iterator[Tuple[int, Edge]]
for edge in orig_edges: for edge in orig_edges:
if is_exempt(edge): if is_exempt(edge):
continue continue
@ -107,8 +96,7 @@ def best_edge_to_remove(orig_graph, is_exempt):
raise Exception('no edges work here') raise Exception('no edges work here')
return best_edge return best_edge
def make_dot_file(graph): def make_dot_file(graph: Graph) -> str:
# type: (Graph) -> str
buffer = 'digraph G {\n' buffer = 'digraph G {\n'
for node in graph.nodes: for node in graph.nodes:
buffer += node + ';\n' buffer += node + ';\n'
@ -117,8 +105,7 @@ def make_dot_file(graph):
buffer += '}' buffer += '}'
return buffer return buffer
def test(): def test() -> None:
# type: () -> None
graph = Graph({ graph = Graph({
('x', 'a'), ('x', 'a'),
('a', 'b'), ('a', 'b'),

@ -21,8 +21,7 @@ class HtmlTreeBranch:
conceptually be something like "p div(#yo) span(.bar)". conceptually be something like "p div(#yo) span(.bar)".
""" """
def __init__(self, tags, fn): def __init__(self, tags: List['TagInfo'], fn: Optional[str]) -> None:
# type: (List['TagInfo'], Optional[str]) -> None
self.tags = tags self.tags = tags
self.fn = fn self.fn = fn
self.line = tags[-1].token.line self.line = tags[-1].token.line
@ -32,8 +31,7 @@ class HtmlTreeBranch:
for word in tag.words: for word in tag.words:
self.words.add(word) self.words.add(word)
def staircase_text(self): def staircase_text(self) -> str:
# type: () -> str
""" """
produces representation of a node in staircase-like format: produces representation of a node in staircase-like format:
@ -49,8 +47,7 @@ class HtmlTreeBranch:
indent += ' ' * 4 indent += ' ' * 4
return res return res
def text(self): def text(self) -> str:
# type: () -> str
""" """
produces one-line representation of branch: produces one-line representation of branch:
@ -60,16 +57,15 @@ class HtmlTreeBranch:
class Node: class Node:
def __init__(self, token, parent): # FIXME parent parameter is not used! def __init__(self, token: Token, parent: "Optional[Node]") -> None:
# type: (Token, Optional[Node]) -> None # FIXME parent parameter is not used!
self.token = token self.token = token
self.children = [] # type: List[Node] self.children = [] # type: List[Node]
self.parent = None # type: Optional[Node] self.parent = None # type: Optional[Node]
class TagInfo: class TagInfo:
def __init__(self, tag, classes, ids, token): def __init__(self, tag: str, classes: List[str], ids: List[str], token: Token) -> None:
# type: (str, List[str], List[str], Token) -> None
self.tag = tag self.tag = tag
self.classes = classes self.classes = classes
self.ids = ids self.ids = ids
@ -79,8 +75,7 @@ class TagInfo:
['.' + s for s in classes] + \ ['.' + s for s in classes] + \
['#' + s for s in ids] ['#' + s for s in ids]
def text(self): def text(self) -> str:
# type: () -> str
s = self.tag s = self.tag
if self.classes: if self.classes:
s += '.' + '.'.join(self.classes) s += '.' + '.'.join(self.classes)
@ -89,8 +84,7 @@ class TagInfo:
return s return s
def get_tag_info(token): def get_tag_info(token: Token) -> TagInfo:
# type: (Token) -> TagInfo
s = token.s s = token.s
tag = token.tag tag = token.tag
classes = [] # type: List[str] classes = [] # type: List[str]
@ -112,8 +106,7 @@ def get_tag_info(token):
return TagInfo(tag=tag, classes=classes, ids=ids, token=token) return TagInfo(tag=tag, classes=classes, ids=ids, token=token)
def split_for_id_and_class(element): def split_for_id_and_class(element: str) -> List[str]:
# type: (str) -> List[str]
# Here we split a given string which is expected to contain id or class # Here we split a given string which is expected to contain id or class
# attributes from HTML tags. This also takes care of template variables # attributes from HTML tags. This also takes care of template variables
# in string during splitting process. For eg. 'red black {{ a|b|c }}' # in string during splitting process. For eg. 'red black {{ a|b|c }}'
@ -139,13 +132,11 @@ def split_for_id_and_class(element):
return lst return lst
def html_branches(text, fn=None): def html_branches(text: str, fn: Optional[str] = None) -> List[HtmlTreeBranch]:
# type: (str, Optional[str]) -> List[HtmlTreeBranch]
tree = html_tag_tree(text) tree = html_tag_tree(text)
branches = [] # type: List[HtmlTreeBranch] branches = [] # type: List[HtmlTreeBranch]
def walk(node, tag_info_list=None): def walk(node: Node, tag_info_list: Optional[List[TagInfo]] = None) -> None:
# type: (Node, Optional[List[TagInfo]]) -> None
info = get_tag_info(node.token) info = get_tag_info(node.token)
if tag_info_list is None: if tag_info_list is None:
tag_info_list = [info] tag_info_list = [info]
@ -165,8 +156,7 @@ def html_branches(text, fn=None):
return branches return branches
def html_tag_tree(text): def html_tag_tree(text: str) -> Node:
# type: (str) -> Node
tokens = tokenize(text) tokens = tokenize(text)
top_level = Node(token=None, parent=None) top_level = Node(token=None, parent=None)
stack = [top_level] stack = [top_level]
@ -188,8 +178,7 @@ def html_tag_tree(text):
return top_level return top_level
def build_id_dict(templates): def build_id_dict(templates: List[str]) -> (Dict[str, List[str]]):
# type: (List[str]) -> (Dict[str, List[str]])
template_id_dict = defaultdict(list) # type: (Dict[str, List[str]]) template_id_dict = defaultdict(list) # type: (Dict[str, List[str]])
for fn in templates: for fn in templates:

@ -3,8 +3,7 @@ from typing import Dict, List, Set
from .html_branches import html_branches, HtmlTreeBranch from .html_branches import html_branches, HtmlTreeBranch
def show_all_branches(fns): def show_all_branches(fns: List[str]) -> None:
# type: (List[str]) -> None
for fn in fns: for fn in fns:
print(fn) print(fn)
with open(fn) as f: with open(fn) as f:
@ -21,8 +20,7 @@ class Grepper:
HtmlTreeBranch objects. HtmlTreeBranch objects.
''' '''
def __init__(self, fns): def __init__(self, fns: List[str]) -> None:
# type: (List[str]) -> None
all_branches = [] # type: List[HtmlTreeBranch] all_branches = [] # type: List[HtmlTreeBranch]
for fn in fns: for fn in fns:
@ -38,8 +36,7 @@ class Grepper:
self.all_branches = set(all_branches) self.all_branches = set(all_branches)
def grep(self, word_set): def grep(self, word_set: Set[str]) -> None:
# type: (Set[str]) -> None
words = list(word_set) # type: List[str] words = list(word_set) # type: List[str]
@ -57,7 +54,6 @@ class Grepper:
print(branch.staircase_text()) print(branch.staircase_text())
print('') print('')
def grep(fns, words): def grep(fns: List[str], words: Set[str]) -> None:
# type: (List[str], Set[str]) -> None
grepper = Grepper(fns) grepper = Grepper(fns)
grepper.grep(words) grepper.grep(words)

@ -9,8 +9,7 @@ from zulint.printer import GREEN, ENDC
import subprocess import subprocess
def pretty_print_html(html, num_spaces=4): def pretty_print_html(html: str, num_spaces: int = 4) -> str:
# type: (str, int) -> str
# We use 1-based indexing for both rows and columns. # We use 1-based indexing for both rows and columns.
tokens = tokenize(html) tokens = tokenize(html)
lines = html.split('\n') lines = html.split('\n')
@ -191,8 +190,7 @@ def pretty_print_html(html, num_spaces=4):
return '\n'.join(formatted_lines) return '\n'.join(formatted_lines)
def validate_indent_html(fn, fix): def validate_indent_html(fn: str, fix: bool) -> int:
# type: (str, bool) -> int
with open(fn) as f: with open(fn) as f:
html = f.read() html = f.read()
phtml = pretty_print_html(html) phtml = pretty_print_html(html)

@ -217,8 +217,7 @@ REPO_STOPWORDS_PATH = os.path.join(
"zulip_english.stop", "zulip_english.stop",
) )
def install_system_deps(): def install_system_deps() -> None:
# type: () -> None
# By doing list -> set -> list conversion, we remove duplicates. # By doing list -> set -> list conversion, we remove duplicates.
deps_to_install = sorted(set(SYSTEM_DEPENDENCIES)) deps_to_install = sorted(set(SYSTEM_DEPENDENCIES))
@ -235,8 +234,7 @@ def install_system_deps():
if BUILD_PGROONGA_FROM_SOURCE: if BUILD_PGROONGA_FROM_SOURCE:
run_as_root(["./scripts/lib/build-pgroonga"]) run_as_root(["./scripts/lib/build-pgroonga"])
def install_apt_deps(deps_to_install): def install_apt_deps(deps_to_install: List[str]) -> None:
# type: (List[str]) -> None
# setup-apt-repo does an `apt-get update` if the sources.list files changed. # setup-apt-repo does an `apt-get update` if the sources.list files changed.
run_as_root(["./scripts/lib/setup-apt-repo"]) run_as_root(["./scripts/lib/setup-apt-repo"])
@ -253,8 +251,7 @@ def install_apt_deps(deps_to_install):
+ deps_to_install + deps_to_install
) )
def install_yum_deps(deps_to_install): def install_yum_deps(deps_to_install: List[str]) -> None:
# type: (List[str]) -> None
print(WARNING + "RedHat support is still experimental.") print(WARNING + "RedHat support is still experimental.")
run_as_root(["./scripts/lib/setup-yum-repo"]) run_as_root(["./scripts/lib/setup-yum-repo"])
@ -314,8 +311,7 @@ def install_yum_deps(deps_to_install):
overwrite_symlink("/usr/share/myspell/en_US.aff", "/usr/pgsql-%s/share/tsearch_data/en_us.affix" overwrite_symlink("/usr/share/myspell/en_US.aff", "/usr/pgsql-%s/share/tsearch_data/en_us.affix"
% (POSTGRES_VERSION,)) % (POSTGRES_VERSION,))
def main(options): def main(options: argparse.Namespace) -> "NoReturn":
# type: (argparse.Namespace) -> NoReturn
# yarn and management commands expect to be run from the root of the # yarn and management commands expect to be run from the root of the
# project. # project.

@ -33,12 +33,10 @@ def create_var_directories() -> None:
path = os.path.join(var_dir, sub_dir) path = os.path.join(var_dir, sub_dir)
os.makedirs(path, exist_ok=True) os.makedirs(path, exist_ok=True)
def setup_shell_profile(shell_profile): def setup_shell_profile(shell_profile: str) -> None:
# type: (str) -> None
shell_profile_path = os.path.expanduser(shell_profile) shell_profile_path = os.path.expanduser(shell_profile)
def write_command(command): def write_command(command: str) -> None:
# type: (str) -> None
if os.path.exists(shell_profile_path): if os.path.exists(shell_profile_path):
with open(shell_profile_path) as shell_profile_file: with open(shell_profile_path) as shell_profile_file:
lines = [line.strip() for line in shell_profile_file.readlines()] lines = [line.strip() for line in shell_profile_file.readlines()]

@ -2,8 +2,7 @@ import os
import pwd import pwd
import sys import sys
def check_venv(filename): def check_venv(filename: str) -> None:
# type: (str) -> None
try: try:
import django import django
import ujson import ujson

@ -1,30 +1,25 @@
from typing import Callable, List, Optional, Text from typing import Callable, List, Optional, Text
class TemplateParserException(Exception): class TemplateParserException(Exception):
def __init__(self, message): def __init__(self, message: str) -> None:
# type: (str) -> None
self.message = message self.message = message
def __str__(self): def __str__(self) -> str:
# type: () -> str
return self.message return self.message
class TokenizationException(Exception): class TokenizationException(Exception):
def __init__(self, message, line_content=None): def __init__(self, message: str, line_content: Optional[str] = None) -> None:
# type: (str, Optional[str]) -> None
self.message = message self.message = message
self.line_content = line_content self.line_content = line_content
class TokenizerState: class TokenizerState:
def __init__(self): def __init__(self) -> None:
# type: () -> None
self.i = 0 self.i = 0
self.line = 1 self.line = 1
self.col = 1 self.col = 1
class Token: class Token:
def __init__(self, kind, s, tag, line, col, line_span): def __init__(self, kind: str, s: str, tag: str, line: int, col: int, line_span: int) -> None:
# type: (str, str, str, int, int, int) -> None
self.kind = kind self.kind = kind
self.s = s self.s = s
self.tag = tag self.tag = tag
@ -32,10 +27,8 @@ class Token:
self.col = col self.col = col
self.line_span = line_span self.line_span = line_span
def tokenize(text): def tokenize(text: str) -> List[Token]:
# type: (str) -> List[Token] def advance(n: int) -> None:
def advance(n):
# type: (int) -> None
for _ in range(n): for _ in range(n):
state.i += 1 state.i += 1
if state.i >= 0 and text[state.i - 1] == '\n': if state.i >= 0 and text[state.i - 1] == '\n':
@ -44,55 +37,43 @@ def tokenize(text):
else: else:
state.col += 1 state.col += 1
def looking_at(s): def looking_at(s: str) -> bool:
# type: (str) -> bool
return text[state.i:state.i+len(s)] == s return text[state.i:state.i+len(s)] == s
def looking_at_htmlcomment(): def looking_at_htmlcomment() -> bool:
# type: () -> bool
return looking_at("<!--") return looking_at("<!--")
def looking_at_handlebarcomment(): def looking_at_handlebarcomment() -> bool:
# type: () -> bool
return looking_at("{{!") return looking_at("{{!")
def looking_at_djangocomment(): def looking_at_djangocomment() -> bool:
# type: () -> bool
return looking_at("{#") return looking_at("{#")
def looking_at_handlebarpartial() -> bool: def looking_at_handlebarpartial() -> bool:
return looking_at("{{>") return looking_at("{{>")
def looking_at_html_start(): def looking_at_html_start() -> bool:
# type: () -> bool
return looking_at("<") and not looking_at("</") return looking_at("<") and not looking_at("</")
def looking_at_html_end(): def looking_at_html_end() -> bool:
# type: () -> bool
return looking_at("</") return looking_at("</")
def looking_at_handlebars_start(): def looking_at_handlebars_start() -> bool:
# type: () -> bool
return looking_at("{{#") or looking_at("{{^") return looking_at("{{#") or looking_at("{{^")
def looking_at_handlebars_end(): def looking_at_handlebars_end() -> bool:
# type: () -> bool
return looking_at("{{/") return looking_at("{{/")
def looking_at_django_start(): def looking_at_django_start() -> bool:
# type: () -> bool
return looking_at("{% ") and not looking_at("{% end") return looking_at("{% ") and not looking_at("{% end")
def looking_at_django_end(): def looking_at_django_end() -> bool:
# type: () -> bool
return looking_at("{% end") return looking_at("{% end")
def looking_at_jinja2_end_whitespace_stripped(): def looking_at_jinja2_end_whitespace_stripped() -> bool:
# type: () -> bool
return looking_at("{%- end") return looking_at("{%- end")
def looking_at_jinja2_start_whitespace_stripped_type2(): def looking_at_jinja2_start_whitespace_stripped_type2() -> bool:
# type: () -> bool
# This function detects tag like {%- if foo -%}...{% endif %} # This function detects tag like {%- if foo -%}...{% endif %}
return looking_at("{%-") and not looking_at("{%- end") return looking_at("{%-") and not looking_at("{%- end")
@ -206,8 +187,7 @@ def tokenize(text):
return tokens return tokens
def validate(fn=None, text=None, check_indent=True): def validate(fn: Optional[str] = None, text: Optional[str] = None, check_indent: bool = True) -> None:
# type: (Optional[str], Optional[str], bool) -> None
assert fn or text assert fn or text
if fn is None: if fn is None:
@ -220,13 +200,11 @@ def validate(fn=None, text=None, check_indent=True):
tokens = tokenize(text) tokens = tokenize(text)
class State: class State:
def __init__(self, func): def __init__(self, func: Callable[[Token], None]) -> None:
# type: (Callable[[Token], None]) -> None
self.depth = 0 self.depth = 0
self.matcher = func self.matcher = func
def no_start_tag(token): def no_start_tag(token: Token) -> None:
# type: (Token) -> None
raise TemplateParserException(''' raise TemplateParserException('''
No start tag No start tag
fn: %s fn: %s
@ -237,8 +215,7 @@ def validate(fn=None, text=None, check_indent=True):
state = State(no_start_tag) state = State(no_start_tag)
def start_tag_matcher(start_token): def start_tag_matcher(start_token: Token) -> None:
# type: (Token) -> None
state.depth += 1 state.depth += 1
start_tag = start_token.tag.strip('~') start_tag = start_token.tag.strip('~')
start_line = start_token.line start_line = start_token.line
@ -246,8 +223,7 @@ def validate(fn=None, text=None, check_indent=True):
old_matcher = state.matcher old_matcher = state.matcher
def f(end_token): def f(end_token: Token) -> None:
# type: (Token) -> None
end_tag = end_token.tag.strip('~') end_tag = end_token.tag.strip('~')
end_line = end_token.line end_line = end_token.line
@ -305,8 +281,7 @@ def validate(fn=None, text=None, check_indent=True):
if state.depth != 0: if state.depth != 0:
raise TemplateParserException('Missing end tag') raise TemplateParserException('Missing end tag')
def is_special_html_tag(s, tag): def is_special_html_tag(s: str, tag: str) -> bool:
# type: (str, str) -> bool
return tag in ['link', 'meta', '!DOCTYPE'] return tag in ['link', 'meta', '!DOCTYPE']
def is_self_closing_html_tag(s: Text, tag: Text) -> bool: def is_self_closing_html_tag(s: Text, tag: Text) -> bool:
@ -327,8 +302,7 @@ def is_self_closing_html_tag(s: Text, tag: Text) -> bool:
singleton_tag = s.endswith('/>') singleton_tag = s.endswith('/>')
return self_closing_tag or singleton_tag return self_closing_tag or singleton_tag
def is_django_block_tag(tag): def is_django_block_tag(tag: str) -> bool:
# type: (str) -> bool
return tag in [ return tag in [
'autoescape', 'autoescape',
'block', 'block',
@ -344,8 +318,7 @@ def is_django_block_tag(tag):
'with', 'with',
] ]
def get_handlebars_tag(text, i): def get_handlebars_tag(text: str, i: int) -> str:
# type: (str, int) -> str
end = i + 2 end = i + 2
while end < len(text) - 1 and text[end] != '}': while end < len(text) - 1 and text[end] != '}':
end += 1 end += 1
@ -354,8 +327,7 @@ def get_handlebars_tag(text, i):
s = text[i:end+2] s = text[i:end+2]
return s return s
def get_django_tag(text, i, stripped=False): def get_django_tag(text: str, i: int, stripped: bool = False) -> str:
# type: (str, int, bool) -> str
end = i + 2 end = i + 2
if stripped: if stripped:
end += 1 end += 1
@ -366,8 +338,7 @@ def get_django_tag(text, i, stripped=False):
s = text[i:end+2] s = text[i:end+2]
return s return s
def get_html_tag(text, i): def get_html_tag(text: str, i: int) -> str:
# type: (str, int) -> str
quote_count = 0 quote_count = 0
end = i + 1 end = i + 1
unclosed_end = 0 unclosed_end = 0
@ -387,8 +358,7 @@ def get_html_tag(text, i):
s = text[i:end+1] s = text[i:end+1]
return s return s
def get_html_comment(text, i): def get_html_comment(text: str, i: int) -> str:
# type: (str, int) -> str
end = i + 7 end = i + 7
unclosed_end = 0 unclosed_end = 0
while end <= len(text): while end <= len(text):
@ -399,8 +369,7 @@ def get_html_comment(text, i):
end += 1 end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end]) raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_comment(text, i): def get_handlebar_comment(text: str, i: int) -> str:
# type: (str, int) -> str
end = i + 5 end = i + 5
unclosed_end = 0 unclosed_end = 0
while end <= len(text): while end <= len(text):
@ -411,8 +380,7 @@ def get_handlebar_comment(text, i):
end += 1 end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end]) raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_django_comment(text, i): def get_django_comment(text: str, i: int) -> str:
# type: (str, int) -> str
end = i + 4 end = i + 4
unclosed_end = 0 unclosed_end = 0
while end <= len(text): while end <= len(text):
@ -423,8 +391,7 @@ def get_django_comment(text, i):
end += 1 end += 1
raise TokenizationException('Unclosed comment', text[i:unclosed_end]) raise TokenizationException('Unclosed comment', text[i:unclosed_end])
def get_handlebar_partial(text, i): def get_handlebar_partial(text: str, i: int) -> str:
# type: (str, int) -> str
end = i + 10 end = i + 10
unclosed_end = 0 unclosed_end = 0
while end <= len(text): while end <= len(text):

@ -13,8 +13,7 @@ ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__f
def get_major_version(v: str) -> int: def get_major_version(v: str) -> int:
return int(v.split('.')[0]) return int(v.split('.')[0])
def get_version_file(): def get_version_file() -> str:
# type: () -> str
uuid_var_path = get_dev_uuid_var_path() uuid_var_path = get_dev_uuid_var_path()
return os.path.join(uuid_var_path, 'provision_version') return os.path.join(uuid_var_path, 'provision_version')

@ -23,8 +23,7 @@ if TOOLS_DIR not in sys.path:
from zerver.lib.test_fixtures import update_test_databases_if_required from zerver.lib.test_fixtures import update_test_databases_if_required
from scripts.lib.zulip_tools import get_or_create_dev_uuid_var_path from scripts.lib.zulip_tools import get_or_create_dev_uuid_var_path
def set_up_django(external_host): def set_up_django(external_host: str) -> None:
# type: (str) -> None
os.environ['EXTERNAL_HOST'] = external_host os.environ['EXTERNAL_HOST'] = external_host
os.environ["TORNADO_SERVER"] = "http://127.0.0.1:9983" os.environ["TORNADO_SERVER"] = "http://127.0.0.1:9983"
os.environ["LOCAL_UPLOADS_DIR"] = get_or_create_dev_uuid_var_path( os.environ["LOCAL_UPLOADS_DIR"] = get_or_create_dev_uuid_var_path(
@ -33,8 +32,7 @@ def set_up_django(external_host):
django.setup() django.setup()
os.environ['PYTHONUNBUFFERED'] = 'y' os.environ['PYTHONUNBUFFERED'] = 'y'
def assert_server_running(server, log_file): def assert_server_running(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> None:
# type: (subprocess.Popen[bytes], Optional[str]) -> None
"""Get the exit code of the server, or None if it is still running.""" """Get the exit code of the server, or None if it is still running."""
if server.poll() is not None: if server.poll() is not None:
message = 'Server died unexpectedly!' message = 'Server died unexpectedly!'
@ -42,8 +40,7 @@ def assert_server_running(server, log_file):
message += '\nSee %s\n' % (log_file,) message += '\nSee %s\n' % (log_file,)
raise RuntimeError(message) raise RuntimeError(message)
def server_is_up(server, log_file): def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) -> bool:
# type: (subprocess.Popen[bytes], Optional[str]) -> bool
assert_server_running(server, log_file) assert_server_running(server, log_file)
try: try:
# We could get a 501 error if the reverse proxy is up but the Django app isn't. # We could get a 501 error if the reverse proxy is up but the Django app isn't.

@ -11,8 +11,7 @@ from linter_lib.custom_check import python_rules, non_py_rules
from zulint.command import add_default_linter_arguments, LinterConfig from zulint.command import add_default_linter_arguments, LinterConfig
import random import random
def run(): def run() -> None:
# type: () -> None
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument('--force', default=False, parser.add_argument('--force', default=False,
action="store_true", action="store_true",
@ -96,15 +95,13 @@ def run():
"(config: ./tools/sgrep.yml)") "(config: ./tools/sgrep.yml)")
@linter_config.lint @linter_config.lint
def custom_py(): def custom_py() -> int:
# type: () -> int
"""Runs custom checks for python files (config: tools/linter_lib/custom_check.py)""" """Runs custom checks for python files (config: tools/linter_lib/custom_check.py)"""
failed = python_rules.check(by_lang, verbose=args.verbose) failed = python_rules.check(by_lang, verbose=args.verbose)
return 1 if failed else 0 return 1 if failed else 0
@linter_config.lint @linter_config.lint
def custom_nonpy(): def custom_nonpy() -> int:
# type: () -> int
"""Runs custom checks for non-python files (config: tools/linter_lib/custom_check.py)""" """Runs custom checks for non-python files (config: tools/linter_lib/custom_check.py)"""
failed = False failed = False
for rule in non_py_rules: for rule in non_py_rules:
@ -112,8 +109,7 @@ def run():
return 1 if failed else 0 return 1 if failed else 0
@linter_config.lint @linter_config.lint
def pyflakes(): def pyflakes() -> int:
# type: () -> int
"""Standard Python bug and code smell linter (config: tools/linter_lib/pyflakes.py)""" """Standard Python bug and code smell linter (config: tools/linter_lib/pyflakes.py)"""
failed = check_pyflakes(by_lang['py'], args) failed = check_pyflakes(by_lang['py'], args)
return 1 if failed else 0 return 1 if failed else 0
@ -122,15 +118,13 @@ def run():
python_part2 = {y for y in by_lang['py'] if y not in python_part1} python_part2 = {y for y in by_lang['py'] if y not in python_part1}
@linter_config.lint @linter_config.lint
def pep8_1of2(): def pep8_1of2() -> int:
# type: () -> int
"""Standard Python style linter on 50% of files (config: tools/linter_lib/pep8.py)""" """Standard Python style linter on 50% of files (config: tools/linter_lib/pep8.py)"""
failed = check_pep8(list(python_part1)) failed = check_pep8(list(python_part1))
return 1 if failed else 0 return 1 if failed else 0
@linter_config.lint @linter_config.lint
def pep8_2of2(): def pep8_2of2() -> int:
# type: () -> int
"""Standard Python style linter on other 50% of files (config: tools/linter_lib/pep8.py)""" """Standard Python style linter on other 50% of files (config: tools/linter_lib/pep8.py)"""
failed = check_pep8(list(python_part2)) failed = check_pep8(list(python_part2))
return 1 if failed else 0 return 1 if failed else 0

@ -2,8 +2,7 @@ from zulint.linters import run_pycodestyle
from typing import List from typing import List
def check_pep8(files): def check_pep8(files: List[str]) -> bool:
# type: (List[str]) -> bool
ignored_rules = [ ignored_rules = [
# Each of these rules are ignored for the explained reason. # Each of these rules are ignored for the explained reason.

@ -5,8 +5,7 @@ from typing import List
from zulint.linters import run_pyflakes from zulint.linters import run_pyflakes
def check_pyflakes(files, options): def check_pyflakes(files: List[str], options: argparse.Namespace) -> bool:
# type: (List[str], argparse.Namespace) -> bool
suppress_patterns = [ suppress_patterns = [
("scripts/lib/pythonrc.py", "imported but unused"), ("scripts/lib/pythonrc.py", "imported but unused"),
# Intentionally imported by zerver/lib/webhooks/common.py # Intentionally imported by zerver/lib/webhooks/common.py

@ -3,8 +3,7 @@ from typing import List
from lib.pretty_print import pretty_print_html from lib.pretty_print import pretty_print_html
import sys import sys
def clean_html(filenames): def clean_html(filenames: List[str]) -> None:
# type: (List[str]) -> None
for fn in filenames: for fn in filenames:
print('Prettifying: %s' % (fn,)) print('Prettifying: %s' % (fn,))
with open(fn) as f: with open(fn) as f:

@ -8,8 +8,7 @@ import re
from typing import List from typing import List
def validate_order(order, length): def validate_order(order: List[int], length: int) -> None:
# type: (List[int], int) -> None
if len(order) != length: if len(order) != length:
print("Please enter the sequence of all the conflicting files at once") print("Please enter the sequence of all the conflicting files at once")
sys.exit(1) sys.exit(1)
@ -19,8 +18,7 @@ def validate_order(order, length):
print("Incorrect input") print("Incorrect input")
sys.exit(1) sys.exit(1)
def renumber_migration(conflicts, order, last_correct_migration): def renumber_migration(conflicts: List[str], order: List[int], last_correct_migration: str) -> None:
# type: (List[str], List[int], str) -> None
stack = [] # type: List[str] stack = [] # type: List[str]
for i in order: for i in order:
if conflicts[i-1][0:4] not in stack: if conflicts[i-1][0:4] not in stack:
@ -38,8 +36,7 @@ def renumber_migration(conflicts, order, last_correct_migration):
last_correct_migration = new_name.replace('.py', '') last_correct_migration = new_name.replace('.py', '')
def resolve_conflicts(conflicts, files_list): def resolve_conflicts(conflicts: List[str], files_list: List[str]) -> None:
# type: (List[str], List[str]) -> None
print("Conflicting migrations:") print("Conflicting migrations:")
for i in range(0, len(conflicts)): for i in range(0, len(conflicts)):
print(str(i+1) + '. ' + conflicts[i]) print(str(i+1) + '. ' + conflicts[i])

@ -6,8 +6,7 @@ from typing import List, Dict, Union
from zulint.lister import list_files from zulint.lister import list_files
def do_replace(listing, old_string, new_string): def do_replace(listing: Union[Dict[str, List[str]], List[str]], old_string: str, new_string: str) -> None:
# type: (Union[Dict[str, List[str]], List[str]], str, str) -> None
for filename in listing: for filename in listing:
regex = 's/{}/{}/g'.format(old_string, new_string) regex = 's/{}/{}/g'.format(old_string, new_string)
check_call(['sed', '-i', regex, filename]) check_call(['sed', '-i', regex, filename])

@ -5,36 +5,30 @@ import subprocess
import sys import sys
from typing import List from typing import List
def exit(message): def exit(message: str) -> None:
# type: (str) -> None
print('PROBLEM!') print('PROBLEM!')
print(message) print(message)
sys.exit(1) sys.exit(1)
def run(command): def run(command: List[str]) -> None:
# type: (List[str]) -> None
print('\n>>> ' + ' '.join(map(shlex.quote, command))) print('\n>>> ' + ' '.join(map(shlex.quote, command)))
subprocess.check_call(command) subprocess.check_call(command)
def check_output(command): def check_output(command: List[str]) -> str:
# type: (List[str]) -> str
return subprocess.check_output(command).decode('ascii') return subprocess.check_output(command).decode('ascii')
def get_git_branch(): def get_git_branch() -> str:
# type: () -> str
command = ['git', 'rev-parse', '--abbrev-ref', 'HEAD'] command = ['git', 'rev-parse', '--abbrev-ref', 'HEAD']
output = check_output(command) output = check_output(command)
return output.strip() return output.strip()
def check_git_pristine(): def check_git_pristine() -> None:
# type: () -> None
command = ['git', 'status', '--porcelain'] command = ['git', 'status', '--porcelain']
output = check_output(command) output = check_output(command)
if output.strip(): if output.strip():
exit('Git is not pristine:\n' + output) exit('Git is not pristine:\n' + output)
def ensure_on_clean_master(): def ensure_on_clean_master() -> None:
# type: () -> None
branch = get_git_branch() branch = get_git_branch()
if branch != 'master': if branch != 'master':
exit('You are still on a feature branch: %s' % (branch,)) exit('You are still on a feature branch: %s' % (branch,))
@ -42,8 +36,7 @@ def ensure_on_clean_master():
run(['git', 'fetch', 'upstream', 'master']) run(['git', 'fetch', 'upstream', 'master'])
run(['git', 'rebase', 'upstream/master']) run(['git', 'rebase', 'upstream/master'])
def create_pull_branch(pull_id): def create_pull_branch(pull_id: int) -> None:
# type: (int) -> None
run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)]) run(['git', 'fetch', 'upstream', 'pull/%d/head' % (pull_id,)])
run(['git', 'checkout', '-B', 'review-%s' % (pull_id,), 'FETCH_HEAD']) run(['git', 'checkout', '-B', 'review-%s' % (pull_id,), 'FETCH_HEAD'])
run(['git', 'rebase', 'upstream/master']) run(['git', 'rebase', 'upstream/master'])
@ -55,8 +48,7 @@ def create_pull_branch(pull_id):
print(subprocess.check_output(['git', 'log', 'HEAD~..', print(subprocess.check_output(['git', 'log', 'HEAD~..',
'--pretty=format:Author: %an'])) '--pretty=format:Author: %an']))
def review_pr(): def review_pr() -> None:
# type: () -> None
try: try:
pull_id = int(sys.argv[1]) pull_id = int(sys.argv[1])
except Exception: except Exception:

@ -173,8 +173,7 @@ for cmd in cmds:
subprocess.Popen(cmd) subprocess.Popen(cmd)
def transform_url(protocol, path, query, target_port, target_host): def transform_url(protocol: str, path: str, query: str, target_port: int, target_host: str) -> str:
# type: (str, str, str, int, str) -> str
# generate url with target host # generate url with target host
host = ":".join((target_host, str(target_port))) host = ":".join((target_host, str(target_port)))
# Here we are going to rewrite the path a bit so that it is in parity with # Here we are going to rewrite the path a bit so that it is in parity with
@ -186,8 +185,7 @@ def transform_url(protocol, path, query, target_port, target_host):
@gen.engine @gen.engine
def fetch_request(url, callback, **kwargs): def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable[..., Any], Any, None]":
# type: (str, Any, **Any) -> Generator[Callable[..., Any], Any, None]
# use large timeouts to handle polling requests # use large timeouts to handle polling requests
req = httpclient.HTTPRequest( req = httpclient.HTTPRequest(
url, url,
@ -208,8 +206,9 @@ class BaseHandler(web.RequestHandler):
# target server port # target server port
target_port = None # type: int target_port = None # type: int
def _add_request_headers(self, exclude_lower_headers_list=None): def _add_request_headers(
# type: (Optional[List[str]]) -> httputil.HTTPHeaders self, exclude_lower_headers_list: Optional[List[str]] = None
) -> httputil.HTTPHeaders:
exclude_lower_headers_list = exclude_lower_headers_list or [] exclude_lower_headers_list = exclude_lower_headers_list or []
headers = httputil.HTTPHeaders() headers = httputil.HTTPHeaders()
for header, v in self.request.headers.get_all(): for header, v in self.request.headers.get_all():
@ -217,36 +216,28 @@ class BaseHandler(web.RequestHandler):
headers.add(header, v) headers.add(header, v)
return headers return headers
def get(self): def get(self) -> None:
# type: () -> None
pass pass
def head(self): def head(self) -> None:
# type: () -> None
pass pass
def post(self): def post(self) -> None:
# type: () -> None
pass pass
def put(self): def put(self) -> None:
# type: () -> None
pass pass
def patch(self): def patch(self) -> None:
# type: () -> None
pass pass
def options(self): def options(self) -> None:
# type: () -> None
pass pass
def delete(self): def delete(self) -> None:
# type: () -> None
pass pass
def handle_response(self, response): def handle_response(self, response: Any) -> None:
# type: (Any) -> None
if response.error and not isinstance(response.error, httpclient.HTTPError): if response.error and not isinstance(response.error, httpclient.HTTPError):
self.set_status(500) self.set_status(500)
self.write('Internal server error:\n' + str(response.error)) self.write('Internal server error:\n' + str(response.error))
@ -262,8 +253,7 @@ class BaseHandler(web.RequestHandler):
self.finish() self.finish()
@web.asynchronous @web.asynchronous
def prepare(self): def prepare(self) -> None:
# type: () -> None
if 'X-REAL-IP' not in self.request.headers: if 'X-REAL-IP' not in self.request.headers:
self.request.headers['X-REAL-IP'] = self.request.remote_ip self.request.headers['X-REAL-IP'] = self.request.remote_ip
if 'X-FORWARDED_PORT' not in self.request.headers: if 'X-FORWARDED_PORT' not in self.request.headers:
@ -311,8 +301,7 @@ class ThumborHandler(BaseHandler):
class Application(web.Application): class Application(web.Application):
def __init__(self, enable_logging=False): def __init__(self, enable_logging: bool = False) -> None:
# type: (bool) -> None
handlers = [ handlers = [
(r"/json/events.*", TornadoHandler), (r"/json/events.*", TornadoHandler),
(r"/api/v1/events.*", TornadoHandler), (r"/api/v1/events.*", TornadoHandler),
@ -322,19 +311,16 @@ class Application(web.Application):
] ]
super().__init__(handlers, enable_logging=enable_logging) super().__init__(handlers, enable_logging=enable_logging)
def log_request(self, handler): def log_request(self, handler: BaseHandler) -> None:
# type: (BaseHandler) -> None
if self.settings['enable_logging']: if self.settings['enable_logging']:
super().log_request(handler) super().log_request(handler)
def on_shutdown(): def on_shutdown() -> None:
# type: () -> None
IOLoop.instance().stop() IOLoop.instance().stop()
def shutdown_handler(*args, **kwargs): def shutdown_handler(*args: Any, **kwargs: Any) -> None:
# type: (*Any, **Any) -> None
io_loop = IOLoop.instance() io_loop = IOLoop.instance()
if io_loop._callbacks: if io_loop._callbacks:
io_loop.call_later(1, shutdown_handler) io_loop.call_later(1, shutdown_handler)

@ -25,8 +25,7 @@ def generate_zulip_bots_static_files() -> None:
os.makedirs(bots_dir, exist_ok=True) os.makedirs(bots_dir, exist_ok=True)
def copyfiles(paths): def copyfiles(paths: List[str]) -> None:
# type: (List[str]) -> None
for src_path in paths: for src_path in paths:
bot_name = os.path.basename(os.path.dirname(src_path)) bot_name = os.path.basename(os.path.dirname(src_path))

@ -36,8 +36,7 @@ def generate_files(source_file: str, tmp_dir: str) -> None:
'--input-file', input_file_path, '--output-file', output_file_path], '--input-file', input_file_path, '--output-file', output_file_path],
stdout=subprocess.DEVNULL) stdout=subprocess.DEVNULL)
def print_diff(path_file1, path_file2): def print_diff(path_file1: str, path_file2: str) -> None:
# type: (str, str) -> None
with open(path_file1) as file1: with open(path_file1) as file1:
with open(path_file2) as file2: with open(path_file2) as file2:
diff = difflib.unified_diff( diff = difflib.unified_diff(

@ -245,8 +245,7 @@ def run_tests_via_node_js() -> int:
sys.exit(1) sys.exit(1)
return ret return ret
def check_line_coverage(fn, line_coverage, line_mapping, log=True): def check_line_coverage(fn: str, line_coverage: Dict[Any, Any], line_mapping: Dict[Any, Any], log: bool = True) -> bool:
# type: (str, Dict[Any, Any], Dict[Any, Any], bool) -> bool
missing_lines = [] missing_lines = []
for line in line_coverage: for line in line_coverage:
if line_coverage[line] == 0: if line_coverage[line] == 0:

@ -20,8 +20,7 @@ if 'TRAVIS' in os.environ:
CACHE_DIR = os.path.join(os.environ['HOME'], 'misc') CACHE_DIR = os.path.join(os.environ['HOME'], 'misc')
CACHE_FILE = os.path.join(CACHE_DIR, 'requirements_hashes') CACHE_FILE = os.path.join(CACHE_DIR, 'requirements_hashes')
def print_diff(path_file1, path_file2): def print_diff(path_file1: str, path_file2: str) -> None:
# type: (str, str) -> None
with open(path_file1) as file1: with open(path_file1) as file1:
with open(path_file2) as file2: with open(path_file2) as file2:
diff = difflib.unified_diff( diff = difflib.unified_diff(
@ -32,8 +31,7 @@ def print_diff(path_file1, path_file2):
) )
sys.stdout.writelines(diff) sys.stdout.writelines(diff)
def test_locked_requirements(tmp_dir): def test_locked_requirements(tmp_dir: str) -> bool:
# type: (str) -> bool
# `pip-compile` tries to avoid unnecessarily updating recursive dependencies # `pip-compile` tries to avoid unnecessarily updating recursive dependencies
# if lock files are present already. If we don't copy these files to the tmp # if lock files are present already. If we don't copy these files to the tmp
# dir then recursive dependencies will get updated to their latest version # dir then recursive dependencies will get updated to their latest version
@ -53,8 +51,7 @@ def test_locked_requirements(tmp_dir):
return same return same
def get_requirements_hash(tmp_dir, use_test_lock_files=False): def get_requirements_hash(tmp_dir: str, use_test_lock_files: Optional[bool] = False) -> str:
# type: (str, Optional[bool]) -> str
sha1 = hashlib.sha1() sha1 = hashlib.sha1()
reqs_files = sorted(glob.glob(os.path.join(REQS_DIR, "*.in"))) reqs_files = sorted(glob.glob(os.path.join(REQS_DIR, "*.in")))
lock_files_path = REQS_DIR lock_files_path = REQS_DIR
@ -66,21 +63,18 @@ def get_requirements_hash(tmp_dir, use_test_lock_files=False):
sha1.update(fp.read().encode("utf-8")) sha1.update(fp.read().encode("utf-8"))
return sha1.hexdigest() return sha1.hexdigest()
def may_be_setup_cache(): def may_be_setup_cache() -> None:
# type: () -> None
os.makedirs(CACHE_DIR, exist_ok=True) os.makedirs(CACHE_DIR, exist_ok=True)
if not os.path.exists(CACHE_FILE): if not os.path.exists(CACHE_FILE):
with open(CACHE_FILE, 'w') as fp: with open(CACHE_FILE, 'w') as fp:
ujson.dump([], fp) ujson.dump([], fp)
def load_cache(): def load_cache() -> List[str]:
# type: () -> List[str]
with open(CACHE_FILE) as fp: with open(CACHE_FILE) as fp:
hash_list = ujson.load(fp) hash_list = ujson.load(fp)
return hash_list return hash_list
def update_cache(hash_list): def update_cache(hash_list: List[str]) -> None:
# type: (List[str]) -> None
# We store last 100 hash entries. Aggressive caching is # We store last 100 hash entries. Aggressive caching is
# not a problem as it is cheap to do. # not a problem as it is cheap to do.
if len(hash_list) > 100: if len(hash_list) > 100:
@ -88,8 +82,7 @@ def update_cache(hash_list):
with open(CACHE_FILE, 'w') as fp: with open(CACHE_FILE, 'w') as fp:
ujson.dump(hash_list, fp) ujson.dump(hash_list, fp)
def main(): def main() -> None:
# type: () -> None
may_be_setup_cache() may_be_setup_cache()
hash_list = load_cache() hash_list = load_cache()
tmp = tempfile.TemporaryDirectory() tmp = tempfile.TemporaryDirectory()

@ -17,8 +17,7 @@ if __name__ == '__main__':
default=False, help='compute test coverage') default=False, help='compute test coverage')
args = parser.parse_args() args = parser.parse_args()
def dir_join(dir1, dir2): def dir_join(dir1: str, dir2: str) -> str:
# type: (str, str) -> str
return os.path.abspath(os.path.join(dir1, dir2)) return os.path.abspath(os.path.join(dir1, dir2))
tools_dir = os.path.dirname(os.path.abspath(__file__)) tools_dir = os.path.dirname(os.path.abspath(__file__))

@ -10,8 +10,7 @@ os.chdir(os.path.join(os.path.dirname(__file__), '..'))
STATIC_PATH = 'static/' STATIC_PATH = 'static/'
def build_for_prod_or_casper(quiet): def build_for_prod_or_casper(quiet: bool) -> NoReturn:
# type: (bool) -> NoReturn
"""Builds for production, writing the output to disk""" """Builds for production, writing the output to disk"""
webpack_args = ['node', 'node_modules/.bin/webpack-cli', webpack_args = ['node', 'node_modules/.bin/webpack-cli',
@ -22,8 +21,7 @@ def build_for_prod_or_casper(quiet):
print('Starting webpack compilation') print('Starting webpack compilation')
os.execvp(webpack_args[0], webpack_args) os.execvp(webpack_args[0], webpack_args)
def build_for_dev_server(host, port, minify, disable_host_check): def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check: bool) -> None:
# type: (str, str, bool, bool) -> None
"""watches and rebuilds on changes, serving files from memory via webpack-dev-server""" """watches and rebuilds on changes, serving files from memory via webpack-dev-server"""
# This is our most dynamic configuration, which we use for our # This is our most dynamic configuration, which we use for our
@ -68,8 +66,7 @@ def build_for_dev_server(host, port, minify, disable_host_check):
webpack_process = subprocess.Popen(webpack_args) webpack_process = subprocess.Popen(webpack_args)
class WebpackConfigFileChangeHandler(pyinotify.ProcessEvent): class WebpackConfigFileChangeHandler(pyinotify.ProcessEvent):
def process_default(self, event): def process_default(self, event: pyinotify.Event) -> None:
# type: (pyinotify.Event) -> None
nonlocal webpack_process nonlocal webpack_process
print('Restarting webpack-dev-server due to config changes...') print('Restarting webpack-dev-server due to config changes...')
webpack_process.terminate() webpack_process.terminate()
@ -86,8 +83,7 @@ def build_for_dev_server(host, port, minify, disable_host_check):
webpack_process.terminate() webpack_process.terminate()
webpack_process.wait() webpack_process.wait()
def build_for_most_tests(): def build_for_most_tests() -> None:
# type: () -> None
"""Generates a stub asset stat file for django so backend test can render a page""" """Generates a stub asset stat file for django so backend test can render a page"""
# Tests like test-backend, test-api, and test-home-documentation use # Tests like test-backend, test-api, and test-home-documentation use

@ -46,8 +46,7 @@ def ensure_users(ids_list: List[int], user_names: List[str]) -> None:
assert ids_list == user_ids assert ids_list == user_ids
@openapi_test_function("/users/me/subscriptions:post") @openapi_test_function("/users/me/subscriptions:post")
def add_subscriptions(client): def add_subscriptions(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Subscribe to the stream "new stream" # Subscribe to the stream "new stream"
@ -77,8 +76,7 @@ def add_subscriptions(client):
assert result['result'] == 'success' assert result['result'] == 'success'
assert 'newbie@zulip.com' in result['subscribed'] assert 'newbie@zulip.com' in result['subscribed']
def test_add_subscriptions_already_subscribed(client): def test_add_subscriptions_already_subscribed(client: Client) -> None:
# type: (Client) -> None
result = client.add_subscriptions( result = client.add_subscriptions(
streams=[ streams=[
{'name': 'new stream', 'description': 'New stream for testing'} {'name': 'new stream', 'description': 'New stream for testing'}
@ -89,8 +87,7 @@ def test_add_subscriptions_already_subscribed(client):
validate_against_openapi_schema(result, '/users/me/subscriptions', 'post', validate_against_openapi_schema(result, '/users/me/subscriptions', 'post',
'200_1') '200_1')
def test_authorization_errors_fatal(client, nonadmin_client): def test_authorization_errors_fatal(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
client.add_subscriptions( client.add_subscriptions(
streams=[ streams=[
{'name': 'private_stream'} {'name': 'private_stream'}
@ -125,8 +122,7 @@ def test_authorization_errors_fatal(client, nonadmin_client):
'400_1') '400_1')
@openapi_test_function("/users/{email}/presence:get") @openapi_test_function("/users/{email}/presence:get")
def get_user_presence(client): def get_user_presence(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get presence information for "iago@zulip.com" # Get presence information for "iago@zulip.com"
@ -136,8 +132,7 @@ def get_user_presence(client):
validate_against_openapi_schema(result, '/users/{email}/presence', 'get', '200') validate_against_openapi_schema(result, '/users/{email}/presence', 'get', '200')
@openapi_test_function("/users/me/presence:post") @openapi_test_function("/users/me/presence:post")
def update_presence(client): def update_presence(client: Client) -> None:
# type: (Client) -> None
request = { request = {
'status': 'active', 'status': 'active',
'ping_only': False, 'ping_only': False,
@ -149,8 +144,7 @@ def update_presence(client):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/users:post") @openapi_test_function("/users:post")
def create_user(client): def create_user(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Create a user # Create a user
@ -171,8 +165,7 @@ def create_user(client):
validate_against_openapi_schema(result, '/users', 'post', '400') validate_against_openapi_schema(result, '/users', 'post', '400')
@openapi_test_function("/users:get") @openapi_test_function("/users:get")
def get_members(client): def get_members(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get all users in the realm # Get all users in the realm
@ -208,8 +201,7 @@ def get_members(client):
assert member.get('profile_data', None) is not None assert member.get('profile_data', None) is not None
@openapi_test_function("/users/{user_id}:get") @openapi_test_function("/users/{user_id}:get")
def get_single_user(client): def get_single_user(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Fetch details on a user given a user ID # Fetch details on a user given a user ID
@ -225,8 +217,7 @@ def get_single_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'get', '200') validate_against_openapi_schema(result, '/users/{user_id}', 'get', '200')
@openapi_test_function("/users/{user_id}:delete") @openapi_test_function("/users/{user_id}:delete")
def deactivate_user(client): def deactivate_user(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Deactivate a user # Deactivate a user
@ -240,8 +231,7 @@ def deactivate_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'delete', '200') validate_against_openapi_schema(result, '/users/{user_id}', 'delete', '200')
@openapi_test_function("/users/{user_id}:patch") @openapi_test_function("/users/{user_id}:patch")
def update_user(client): def update_user(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Change a user's full name. # Change a user's full name.
@ -270,8 +260,7 @@ def update_user(client):
validate_against_openapi_schema(result, '/users/{user_id}', 'patch', '400') validate_against_openapi_schema(result, '/users/{user_id}', 'patch', '400')
@openapi_test_function("/realm/filters:get") @openapi_test_function("/realm/filters:get")
def get_realm_filters(client): def get_realm_filters(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Fetch all the filters in this organization # Fetch all the filters in this organization
@ -281,8 +270,7 @@ def get_realm_filters(client):
validate_against_openapi_schema(result, '/realm/filters', 'get', '200') validate_against_openapi_schema(result, '/realm/filters', 'get', '200')
@openapi_test_function("/realm/filters:post") @openapi_test_function("/realm/filters:post")
def add_realm_filter(client): def add_realm_filter(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Add a filter to automatically linkify #<number> to the corresponding # Add a filter to automatically linkify #<number> to the corresponding
@ -294,8 +282,7 @@ def add_realm_filter(client):
validate_against_openapi_schema(result, '/realm/filters', 'post', '200') validate_against_openapi_schema(result, '/realm/filters', 'post', '200')
@openapi_test_function("/realm/filters/{filter_id}:delete") @openapi_test_function("/realm/filters/{filter_id}:delete")
def remove_realm_filter(client): def remove_realm_filter(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Remove the organization filter with ID 42 # Remove the organization filter with ID 42
@ -305,8 +292,7 @@ def remove_realm_filter(client):
validate_against_openapi_schema(result, '/realm/filters/{filter_id}', 'delete', '200') validate_against_openapi_schema(result, '/realm/filters/{filter_id}', 'delete', '200')
@openapi_test_function("/users/me:get") @openapi_test_function("/users/me:get")
def get_profile(client): def get_profile(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get the profile of the user/bot that requests this endpoint, # Get the profile of the user/bot that requests this endpoint,
@ -317,8 +303,7 @@ def get_profile(client):
validate_against_openapi_schema(result, '/users/me', 'get', '200') validate_against_openapi_schema(result, '/users/me', 'get', '200')
@openapi_test_function("/get_stream_id:get") @openapi_test_function("/get_stream_id:get")
def get_stream_id(client): def get_stream_id(client: Client) -> int:
# type: (Client) -> int
# {code_example|start} # {code_example|start}
# Get the ID of a given stream # Get the ID of a given stream
@ -331,8 +316,7 @@ def get_stream_id(client):
return result['stream_id'] return result['stream_id']
@openapi_test_function("/streams/{stream_id}:delete") @openapi_test_function("/streams/{stream_id}:delete")
def delete_stream(client, stream_id): def delete_stream(client: Client, stream_id: int) -> None:
# type: (Client, int) -> None
result = client.add_subscriptions( result = client.add_subscriptions(
streams=[ streams=[
{ {
@ -352,8 +336,7 @@ def delete_stream(client, stream_id):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/streams:get") @openapi_test_function("/streams:get")
def get_streams(client): def get_streams(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get all streams that the user has access to # Get all streams that the user has access to
@ -374,8 +357,7 @@ def get_streams(client):
assert len(result['streams']) == 4 assert len(result['streams']) == 4
@openapi_test_function("/streams/{stream_id}:patch") @openapi_test_function("/streams/{stream_id}:patch")
def update_stream(client, stream_id): def update_stream(client: Client, stream_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
# Update the stream by a given ID # Update the stream by a given ID
@ -392,8 +374,7 @@ def update_stream(client, stream_id):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/user_groups:get") @openapi_test_function("/user_groups:get")
def get_user_groups(client): def get_user_groups(client: Client) -> int:
# type: (Client) -> int
# {code_example|start} # {code_example|start}
# Get all user groups of the realm # Get all user groups of the realm
@ -409,27 +390,23 @@ def get_user_groups(client):
if u['name'] == "marketing"][0] if u['name'] == "marketing"][0]
return marketing_user_group['id'] return marketing_user_group['id']
def test_user_not_authorized_error(nonadmin_client): def test_user_not_authorized_error(nonadmin_client: Client) -> None:
# type: (Client) -> None
result = nonadmin_client.get_streams(include_all_active=True) result = nonadmin_client.get_streams(include_all_active=True)
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_2') validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_2')
def get_subscribers(client): def get_subscribers(client: Client) -> None:
# type: (Client) -> None
result = client.get_subscribers(stream='new stream') result = client.get_subscribers(stream='new stream')
assert result['subscribers'] == ['iago@zulip.com', 'newbie@zulip.com'] assert result['subscribers'] == ['iago@zulip.com', 'newbie@zulip.com']
def get_user_agent(client): def get_user_agent(client: Client) -> None:
# type: (Client) -> None
result = client.get_user_agent() result = client.get_user_agent()
assert result.startswith('ZulipPython/') assert result.startswith('ZulipPython/')
@openapi_test_function("/users/me/subscriptions:get") @openapi_test_function("/users/me/subscriptions:get")
def list_subscriptions(client): def list_subscriptions(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get all streams that the user is subscribed to # Get all streams that the user is subscribed to
result = client.list_subscriptions() result = client.list_subscriptions()
@ -442,8 +419,7 @@ def list_subscriptions(client):
assert streams[0]['description'] == 'New stream for testing' assert streams[0]['description'] == 'New stream for testing'
@openapi_test_function("/users/me/subscriptions:delete") @openapi_test_function("/users/me/subscriptions:delete")
def remove_subscriptions(client): def remove_subscriptions(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Unsubscribe from the stream "new stream" # Unsubscribe from the stream "new stream"
@ -473,8 +449,7 @@ def remove_subscriptions(client):
'delete', '200') 'delete', '200')
@openapi_test_function("/users/me/subscriptions/muted_topics:patch") @openapi_test_function("/users/me/subscriptions/muted_topics:patch")
def toggle_mute_topic(client): def toggle_mute_topic(client: Client) -> None:
# type: (Client) -> None
# Send a test message # Send a test message
message = { message = {
@ -518,8 +493,7 @@ def toggle_mute_topic(client):
'patch', '200') 'patch', '200')
@openapi_test_function("/mark_all_as_read:post") @openapi_test_function("/mark_all_as_read:post")
def mark_all_as_read(client): def mark_all_as_read(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Mark all of the user's unread messages as read # Mark all of the user's unread messages as read
@ -529,8 +503,7 @@ def mark_all_as_read(client):
validate_against_openapi_schema(result, '/mark_all_as_read', 'post', '200') validate_against_openapi_schema(result, '/mark_all_as_read', 'post', '200')
@openapi_test_function("/mark_stream_as_read:post") @openapi_test_function("/mark_stream_as_read:post")
def mark_stream_as_read(client): def mark_stream_as_read(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Mark the unread messages in stream with ID "1" as read # Mark the unread messages in stream with ID "1" as read
@ -540,8 +513,7 @@ def mark_stream_as_read(client):
validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200') validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200')
@openapi_test_function("/mark_topic_as_read:post") @openapi_test_function("/mark_topic_as_read:post")
def mark_topic_as_read(client): def mark_topic_as_read(client: Client) -> None:
# type: (Client) -> None
# Grab an existing topic name # Grab an existing topic name
topic_name = client.get_stream_topics(1)['topics'][0]['name'] topic_name = client.get_stream_topics(1)['topics'][0]['name']
@ -554,8 +526,7 @@ def mark_topic_as_read(client):
validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200') validate_against_openapi_schema(result, '/mark_stream_as_read', 'post', '200')
@openapi_test_function("/users/me/subscriptions/properties:post") @openapi_test_function("/users/me/subscriptions/properties:post")
def update_subscription_settings(client): def update_subscription_settings(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Update the user's subscription in stream #1 to pin it to the top of the # Update the user's subscription in stream #1 to pin it to the top of the
@ -577,8 +548,7 @@ def update_subscription_settings(client):
'POST', '200') 'POST', '200')
@openapi_test_function("/messages/render:post") @openapi_test_function("/messages/render:post")
def render_message(client): def render_message(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Render a message # Render a message
@ -591,8 +561,7 @@ def render_message(client):
validate_against_openapi_schema(result, '/messages/render', 'post', '200') validate_against_openapi_schema(result, '/messages/render', 'post', '200')
@openapi_test_function("/messages:get") @openapi_test_function("/messages:get")
def get_messages(client): def get_messages(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Get the 100 last messages sent by "iago@zulip.com" to the stream "Verona" # Get the 100 last messages sent by "iago@zulip.com" to the stream "Verona"
@ -610,8 +579,7 @@ def get_messages(client):
assert len(result['messages']) <= request['num_before'] assert len(result['messages']) <= request['num_before']
@openapi_test_function("/messages/{message_id}:get") @openapi_test_function("/messages/{message_id}:get")
def get_raw_message(client, message_id): def get_raw_message(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
assert int(message_id) assert int(message_id)
@ -624,8 +592,7 @@ def get_raw_message(client, message_id):
'200') '200')
@openapi_test_function("/messages:post") @openapi_test_function("/messages:post")
def send_message(client): def send_message(client: Client) -> int:
# type: (Client) -> int
request = {} # type: Dict[str, Any] request = {} # type: Dict[str, Any]
@ -680,8 +647,7 @@ def send_message(client):
return message_id return message_id
@openapi_test_function("/messages/{message_id}/reactions:post") @openapi_test_function("/messages/{message_id}/reactions:post")
def add_reaction(client, message_id): def add_reaction(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
# Add an emoji reaction # Add an emoji reaction
request = { request = {
@ -694,8 +660,7 @@ def add_reaction(client, message_id):
validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'post', '200') validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'post', '200')
@openapi_test_function("/messages/{message_id}/reactions:delete") @openapi_test_function("/messages/{message_id}/reactions:delete")
def remove_reaction(client, message_id): def remove_reaction(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
# Remove an emoji reaction # Remove an emoji reaction
request = { request = {
@ -707,8 +672,7 @@ def remove_reaction(client, message_id):
# {code_example|end} # {code_example|end}
validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'delete', '200') validate_against_openapi_schema(result, '/messages/{message_id}/reactions', 'delete', '200')
def test_nonexistent_stream_error(client): def test_nonexistent_stream_error(client: Client) -> None:
# type: (Client) -> None
request = { request = {
"type": "stream", "type": "stream",
"to": "nonexistent_stream", "to": "nonexistent_stream",
@ -720,8 +684,7 @@ def test_nonexistent_stream_error(client):
validate_against_openapi_schema(result, '/messages', 'post', validate_against_openapi_schema(result, '/messages', 'post',
'400_0') '400_0')
def test_private_message_invalid_recipient(client): def test_private_message_invalid_recipient(client: Client) -> None:
# type: (Client) -> None
request = { request = {
"type": "private", "type": "private",
"to": "eeshan@zulip.com", "to": "eeshan@zulip.com",
@ -733,8 +696,7 @@ def test_private_message_invalid_recipient(client):
'400_1') '400_1')
@openapi_test_function("/messages/{message_id}:patch") @openapi_test_function("/messages/{message_id}:patch")
def update_message(client, message_id): def update_message(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
assert int(message_id) assert int(message_id)
@ -761,8 +723,7 @@ def update_message(client, message_id):
assert result['result'] == 'success' assert result['result'] == 'success'
assert result['raw_content'] == request['content'] assert result['raw_content'] == request['content']
def test_update_message_edit_permission_error(client, nonadmin_client): def test_update_message_edit_permission_error(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
request = { request = {
"type": "stream", "type": "stream",
"to": "Denmark", "to": "Denmark",
@ -780,8 +741,7 @@ def test_update_message_edit_permission_error(client, nonadmin_client):
validate_against_openapi_schema(result, '/messages/{message_id}', 'patch', '400') validate_against_openapi_schema(result, '/messages/{message_id}', 'patch', '400')
@openapi_test_function("/messages/{message_id}:delete") @openapi_test_function("/messages/{message_id}:delete")
def delete_message(client, message_id): def delete_message(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
# Delete the message with ID "message_id" # Delete the message with ID "message_id"
@ -791,8 +751,7 @@ def delete_message(client, message_id):
validate_against_openapi_schema(result, '/messages/{message_id}', 'delete', validate_against_openapi_schema(result, '/messages/{message_id}', 'delete',
'200') '200')
def test_delete_message_edit_permission_error(client, nonadmin_client): def test_delete_message_edit_permission_error(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
request = { request = {
"type": "stream", "type": "stream",
"to": "Denmark", "to": "Denmark",
@ -807,8 +766,7 @@ def test_delete_message_edit_permission_error(client, nonadmin_client):
'400_1') '400_1')
@openapi_test_function("/messages/{message_id}/history:get") @openapi_test_function("/messages/{message_id}/history:get")
def get_message_history(client, message_id): def get_message_history(client: Client, message_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
# Get the edit history for message with ID "message_id" # Get the edit history for message with ID "message_id"
@ -819,8 +777,7 @@ def get_message_history(client, message_id):
'get', '200') 'get', '200')
@openapi_test_function("/realm/emoji:get") @openapi_test_function("/realm/emoji:get")
def get_realm_emoji(client): def get_realm_emoji(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
result = client.get_realm_emoji() result = client.get_realm_emoji()
@ -829,8 +786,7 @@ def get_realm_emoji(client):
validate_against_openapi_schema(result, '/realm/emoji', 'GET', '200') validate_against_openapi_schema(result, '/realm/emoji', 'GET', '200')
@openapi_test_function("/messages/flags:post") @openapi_test_function("/messages/flags:post")
def update_message_flags(client): def update_message_flags(client: Client) -> None:
# type: (Client) -> None
# Send a few test messages # Send a few test messages
request = { request = {
@ -870,8 +826,7 @@ def update_message_flags(client):
'200') '200')
@openapi_test_function("/register:post") @openapi_test_function("/register:post")
def register_queue(client): def register_queue(client: Client) -> str:
# type: (Client) -> str
# {code_example|start} # {code_example|start}
# Register the queue # Register the queue
@ -884,8 +839,7 @@ def register_queue(client):
return result['queue_id'] return result['queue_id']
@openapi_test_function("/events:delete") @openapi_test_function("/events:delete")
def deregister_queue(client, queue_id): def deregister_queue(client: Client, queue_id: str) -> None:
# type: (Client, str) -> None
# {code_example|start} # {code_example|start}
# Delete a queue (queue_id is the ID of the queue # Delete a queue (queue_id is the ID of the queue
@ -900,8 +854,7 @@ def deregister_queue(client, queue_id):
validate_against_openapi_schema(result, '/events', 'delete', '400') validate_against_openapi_schema(result, '/events', 'delete', '400')
@openapi_test_function("/server_settings:get") @openapi_test_function("/server_settings:get")
def get_server_settings(client): def get_server_settings(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Fetch the settings for this server # Fetch the settings for this server
@ -911,8 +864,7 @@ def get_server_settings(client):
validate_against_openapi_schema(result, '/server_settings', 'get', '200') validate_against_openapi_schema(result, '/server_settings', 'get', '200')
@openapi_test_function("/settings/notifications:patch") @openapi_test_function("/settings/notifications:patch")
def update_notification_settings(client): def update_notification_settings(client: Client) -> None:
# type: (Client) -> None
# {code_example|start} # {code_example|start}
# Enable push notifications even when online # Enable push notifications even when online
@ -926,8 +878,7 @@ def update_notification_settings(client):
validate_against_openapi_schema(result, '/settings/notifications', 'patch', '200') validate_against_openapi_schema(result, '/settings/notifications', 'patch', '200')
@openapi_test_function("/user_uploads:post") @openapi_test_function("/user_uploads:post")
def upload_file(client): def upload_file(client: Client) -> None:
# type: (Client) -> None
path_to_file = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg') path_to_file = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg')
# {code_example|start} # {code_example|start}
@ -950,8 +901,7 @@ def upload_file(client):
validate_against_openapi_schema(result, '/user_uploads', 'post', '200') validate_against_openapi_schema(result, '/user_uploads', 'post', '200')
@openapi_test_function("/users/me/{stream_id}/topics:get") @openapi_test_function("/users/me/{stream_id}/topics:get")
def get_stream_topics(client, stream_id): def get_stream_topics(client: Client, stream_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
result = client.get_stream_topics(stream_id) result = client.get_stream_topics(stream_id)
@ -961,8 +911,7 @@ def get_stream_topics(client, stream_id):
'get', '200') 'get', '200')
@openapi_test_function("/typing:post") @openapi_test_function("/typing:post")
def set_typing_status(client): def set_typing_status(client: Client) -> None:
# type: (Client) -> None
ensure_users([9, 10], ['hamlet', 'iago']) ensure_users([9, 10], ['hamlet', 'iago'])
# {code_example|start} # {code_example|start}
@ -994,8 +943,7 @@ def set_typing_status(client):
validate_against_openapi_schema(result, '/typing', 'post', '200') validate_against_openapi_schema(result, '/typing', 'post', '200')
@openapi_test_function("/realm/emoji/{emoji_name}:post") @openapi_test_function("/realm/emoji/{emoji_name}:post")
def upload_custom_emoji(client): def upload_custom_emoji(client: Client) -> None:
# type: (Client) -> None
emoji_path = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg') emoji_path = os.path.join(ZULIP_DIR, 'zerver', 'tests', 'images', 'img.jpg')
# {code_example|start} # {code_example|start}
@ -1014,15 +962,13 @@ def upload_custom_emoji(client):
'post', '200') 'post', '200')
@openapi_test_function("/users/me/alert_words:get") @openapi_test_function("/users/me/alert_words:get")
def get_alert_words(client): def get_alert_words(client: Client) -> None:
# type: (Client) -> None
result = client.get_alert_words() result = client.get_alert_words()
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/users/me/alert_words:post") @openapi_test_function("/users/me/alert_words:post")
def add_alert_words(client): def add_alert_words(client: Client) -> None:
# type: (Client) -> None
word = ['foo', 'bar'] word = ['foo', 'bar']
result = client.add_alert_words(word) result = client.add_alert_words(word)
@ -1030,8 +976,7 @@ def add_alert_words(client):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/users/me/alert_words:delete") @openapi_test_function("/users/me/alert_words:delete")
def remove_alert_words(client): def remove_alert_words(client: Client) -> None:
# type: (Client) -> None
word = ['foo'] word = ['foo']
result = client.remove_alert_words(word) result = client.remove_alert_words(word)
@ -1039,8 +984,7 @@ def remove_alert_words(client):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/user_groups/create:post") @openapi_test_function("/user_groups/create:post")
def create_user_group(client): def create_user_group(client: Client) -> None:
# type: (Client) -> None
ensure_users([6, 7, 8, 9], ['aaron', 'zoe', 'cordelia', 'hamlet']) ensure_users([6, 7, 8, 9], ['aaron', 'zoe', 'cordelia', 'hamlet'])
# {code_example|start} # {code_example|start}
@ -1057,8 +1001,7 @@ def create_user_group(client):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}:patch") @openapi_test_function("/user_groups/{group_id}:patch")
def update_user_group(client, group_id): def update_user_group(client: Client, group_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
request = { request = {
'group_id': group_id, 'group_id': group_id,
@ -1071,8 +1014,7 @@ def update_user_group(client, group_id):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}:delete") @openapi_test_function("/user_groups/{group_id}:delete")
def remove_user_group(client, group_id): def remove_user_group(client: Client, group_id: int) -> None:
# type: (Client, int) -> None
# {code_example|start} # {code_example|start}
result = client.remove_user_group(group_id) result = client.remove_user_group(group_id)
# {code_example|end} # {code_example|end}
@ -1081,8 +1023,7 @@ def remove_user_group(client, group_id):
assert result['result'] == 'success' assert result['result'] == 'success'
@openapi_test_function("/user_groups/{group_id}/members:post") @openapi_test_function("/user_groups/{group_id}/members:post")
def update_user_group_members(client, group_id): def update_user_group_members(client: Client, group_id: int) -> None:
# type: (Client, int) -> None
ensure_users([8, 9, 10], ['cordelia', 'hamlet', 'iago']) ensure_users([8, 9, 10], ['cordelia', 'hamlet', 'iago'])
request = { request = {
@ -1095,28 +1036,24 @@ def update_user_group_members(client, group_id):
assert result['result'] == 'success' assert result['result'] == 'success'
def test_invalid_api_key(client_with_invalid_key): def test_invalid_api_key(client_with_invalid_key: Client) -> None:
# type: (Client) -> None
result = client_with_invalid_key.list_subscriptions() result = client_with_invalid_key.list_subscriptions()
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_0') validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_0')
def test_missing_request_argument(client): def test_missing_request_argument(client: Client) -> None:
# type: (Client) -> None
result = client.render_message({}) result = client.render_message({})
validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_1') validate_against_openapi_schema(result, '/rest-error-handling', 'post', '400_1')
def test_invalid_stream_error(client): def test_invalid_stream_error(client: Client) -> None:
# type: (Client) -> None
result = client.get_stream_id('nonexistent') result = client.get_stream_id('nonexistent')
validate_against_openapi_schema(result, '/get_stream_id', 'get', '400') validate_against_openapi_schema(result, '/get_stream_id', 'get', '400')
# SETUP METHODS FOLLOW # SETUP METHODS FOLLOW
def test_against_fixture(result, fixture, check_if_equal=[], check_if_exists=[]): def test_against_fixture(result: Dict[str, Any], fixture: Dict[str, Any], check_if_equal: Optional[Iterable[str]] = [], check_if_exists: Optional[Iterable[str]] = []) -> None:
# type: (Dict[str, Any], Dict[str, Any], Optional[Iterable[str]], Optional[Iterable[str]]) -> None
assertLength(result, fixture) assertLength(result, fixture)
if not check_if_equal and not check_if_exists: if not check_if_equal and not check_if_exists:
@ -1131,8 +1068,7 @@ def test_against_fixture(result, fixture, check_if_equal=[], check_if_exists=[])
for key in check_if_exists: for key in check_if_exists:
assertIn(key, result) assertIn(key, result)
def assertEqual(key, result, fixture): def assertEqual(key: str, result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
# type: (str, Dict[str, Any], Dict[str, Any]) -> None
if result[key] != fixture[key]: if result[key] != fixture[key]:
first = "{key} = {value}".format(key=key, value=result[key]) first = "{key} = {value}".format(key=key, value=result[key])
second = "{key} = {value}".format(key=key, value=fixture[key]) second = "{key} = {value}".format(key=key, value=fixture[key])
@ -1141,8 +1077,7 @@ def assertEqual(key, result, fixture):
else: else:
assert result[key] == fixture[key] assert result[key] == fixture[key]
def assertLength(result, fixture): def assertLength(result: Dict[str, Any], fixture: Dict[str, Any]) -> None:
# type: (Dict[str, Any], Dict[str, Any]) -> None
if len(result) != len(fixture): if len(result) != len(fixture):
result_string = json.dumps(result, indent=4, sort_keys=True) result_string = json.dumps(result, indent=4, sort_keys=True)
fixture_string = json.dumps(fixture, indent=4, sort_keys=True) fixture_string = json.dumps(fixture, indent=4, sort_keys=True)
@ -1151,8 +1086,7 @@ def assertLength(result, fixture):
else: else:
assert len(result) == len(fixture) assert len(result) == len(fixture)
def assertIn(key, result): def assertIn(key: str, result: Dict[str, Any]) -> None:
# type: (str, Dict[str, Any]) -> None
if key not in result.keys(): if key not in result.keys():
raise AssertionError( raise AssertionError(
"The actual output does not contain the the key `{key}`.".format(key=key) "The actual output does not contain the the key `{key}`.".format(key=key)
@ -1160,8 +1094,7 @@ def assertIn(key, result):
else: else:
assert key in result assert key in result
def test_messages(client, nonadmin_client): def test_messages(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
render_message(client) render_message(client)
message_id = send_message(client) message_id = send_message(client)
@ -1182,8 +1115,7 @@ def test_messages(client, nonadmin_client):
test_update_message_edit_permission_error(client, nonadmin_client) test_update_message_edit_permission_error(client, nonadmin_client)
test_delete_message_edit_permission_error(client, nonadmin_client) test_delete_message_edit_permission_error(client, nonadmin_client)
def test_users(client): def test_users(client: Client) -> None:
# type: (Client) -> None
create_user(client) create_user(client)
get_members(client) get_members(client)
@ -1205,8 +1137,7 @@ def test_users(client):
add_alert_words(client) add_alert_words(client)
remove_alert_words(client) remove_alert_words(client)
def test_streams(client, nonadmin_client): def test_streams(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
add_subscriptions(client) add_subscriptions(client)
test_add_subscriptions_already_subscribed(client) test_add_subscriptions_already_subscribed(client)
@ -1226,8 +1157,7 @@ def test_streams(client, nonadmin_client):
test_authorization_errors_fatal(client, nonadmin_client) test_authorization_errors_fatal(client, nonadmin_client)
def test_queues(client): def test_queues(client: Client) -> None:
# type: (Client) -> None
# Note that the example for api/get-events-from-queue is not tested. # Note that the example for api/get-events-from-queue is not tested.
# Since, methods such as client.get_events() or client.call_on_each_message # Since, methods such as client.get_events() or client.call_on_each_message
# are blocking calls and since the event queue backend is already # are blocking calls and since the event queue backend is already
@ -1236,8 +1166,7 @@ def test_queues(client):
queue_id = register_queue(client) queue_id = register_queue(client)
deregister_queue(client, queue_id) deregister_queue(client, queue_id)
def test_server_organizations(client): def test_server_organizations(client: Client) -> None:
# type: (Client) -> None
get_realm_filters(client) get_realm_filters(client)
add_realm_filter(client) add_realm_filter(client)
@ -1246,13 +1175,11 @@ def test_server_organizations(client):
get_realm_emoji(client) get_realm_emoji(client)
upload_custom_emoji(client) upload_custom_emoji(client)
def test_errors(client): def test_errors(client: Client) -> None:
# type: (Client) -> None
test_missing_request_argument(client) test_missing_request_argument(client)
test_invalid_stream_error(client) test_invalid_stream_error(client)
def test_the_api(client, nonadmin_client): def test_the_api(client: Client, nonadmin_client: Client) -> None:
# type: (Client, Client) -> None
get_user_agent(client) get_user_agent(client)
test_users(client) test_users(client)
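The hunks in this file all apply the same mechanical rewrite, so a minimal sketch of the pattern may help when skimming: a hypothetical helper (fetch_profile, not taken from this diff) shown first with the Python 2-compatible comment annotation and then with the inline form com2ann produces. It assumes only the standard typing module; the real test functions above use zulip.Client rather than Any.

from typing import Any, Dict, Optional

# Comment-style annotation, as these test functions looked before this commit:
def fetch_profile(client, user_id=None):
    # type: (Any, Optional[int]) -> Dict[str, Any]
    return {"client": client, "user_id": user_id}

# Inline Python 3 annotation with the same types; the comment is dropped:
def fetch_profile_py3(client: Any, user_id: Optional[int] = None) -> Dict[str, Any]:
    return {"client": client, "user_id": user_id}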

View File

@ -2610,8 +2610,7 @@ class TestTwoFactor(ZulipTestCase):
self.assertIn('otp_device_id', self.client.session.keys()) self.assertIn('otp_device_id', self.client.session.keys())
@mock.patch('two_factor.models.totp') @mock.patch('two_factor.models.totp')
def test_two_factor_login_with_ldap(self, mock_totp): def test_two_factor_login_with_ldap(self, mock_totp: mock.MagicMock) -> None:
# type: (mock.MagicMock) -> None
token = 123456 token = 123456
email = self.example_email('hamlet') email = self.example_email('hamlet')
password = self.ldap_password('hamlet') password = self.ldap_password('hamlet')
@ -2621,8 +2620,7 @@ class TestTwoFactor(ZulipTestCase):
user_profile.save() user_profile.save()
self.create_default_device(user_profile) self.create_default_device(user_profile)
def totp(*args, **kwargs): def totp(*args: Any, **kwargs: Any) -> int:
# type: (*Any, **Any) -> int
return token return token
mock_totp.side_effect = totp mock_totp.side_effect = totp

View File

@ -1400,8 +1400,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin):
config_data = ujson.loads(result.content)['config_data'] config_data = ujson.loads(result.content)['config_data']
self.assertEqual(config_data, ujson.loads(bot_info['config_data'])) self.assertEqual(config_data, ujson.loads(bot_info['config_data']))
def test_outgoing_webhook_invalid_interface(self): def test_outgoing_webhook_invalid_interface(self) -> None:
# type: () -> None
self.login('hamlet') self.login('hamlet')
bot_info = { bot_info = {
'full_name': 'Outgoing Webhook test bot', 'full_name': 'Outgoing Webhook test bot',

View File

@ -2312,8 +2312,7 @@ class EventsRegisterTest(ZulipTestCase):
error = change_bot_owner_checker_user('events[1]', events[1]) error = change_bot_owner_checker_user('events[1]', events[1])
self.assert_on_error(error) self.assert_on_error(error)
def test_do_update_outgoing_webhook_service(self): def test_do_update_outgoing_webhook_service(self) -> None:
# type: () -> None
update_outgoing_webhook_service_checker = self.check_events_dict([ update_outgoing_webhook_service_checker = self.check_events_dict([
('type', equals('realm_bot')), ('type', equals('realm_bot')),
('op', equals('update')), ('op', equals('update')),

View File

@ -3133,10 +3133,9 @@ class EditMessageTest(ZulipTestCase):
do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True) do_edit_message_assert_error(id_, 'G', "Your organization has turned off message editing", True)
def test_allow_community_topic_editing(self) -> None: def test_allow_community_topic_editing(self) -> None:
def set_message_editing_params(allow_message_editing, def set_message_editing_params(allow_message_editing: bool,
message_content_edit_limit_seconds, message_content_edit_limit_seconds: int,
allow_community_topic_editing): allow_community_topic_editing: bool) -> None:
# type: (bool, int, bool) -> None
result = self.client_patch("/json/realm", { result = self.client_patch("/json/realm", {
'allow_message_editing': ujson.dumps(allow_message_editing), 'allow_message_editing': ujson.dumps(allow_message_editing),
'message_content_edit_limit_seconds': message_content_edit_limit_seconds, 'message_content_edit_limit_seconds': message_content_edit_limit_seconds,
@ -3144,16 +3143,14 @@ class EditMessageTest(ZulipTestCase):
}) })
self.assert_json_success(result) self.assert_json_success(result)
def do_edit_message_assert_success(id_, unique_str): def do_edit_message_assert_success(id_: int, unique_str: str) -> None:
# type: (int, str) -> None
new_topic = 'topic' + unique_str new_topic = 'topic' + unique_str
params_dict = {'message_id': id_, 'topic': new_topic} params_dict = {'message_id': id_, 'topic': new_topic}
result = self.client_patch("/json/messages/" + str(id_), params_dict) result = self.client_patch("/json/messages/" + str(id_), params_dict)
self.assert_json_success(result) self.assert_json_success(result)
self.check_topic(id_, topic_name=new_topic) self.check_topic(id_, topic_name=new_topic)
def do_edit_message_assert_error(id_, unique_str, error): def do_edit_message_assert_error(id_: int, unique_str: str, error: str) -> None:
# type: (int, str, str) -> None
message = Message.objects.get(id=id_) message = Message.objects.get(id=id_)
old_topic = message.topic_name() old_topic = message.topic_name()
old_content = message.content old_content = message.content
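For signatures that already span several lines, such as set_message_editing_params above, the conversion annotates each parameter in place and attaches the return type after the closing parenthesis. A minimal sketch with hypothetical names:

def set_flags(enable_editing: bool,
              edit_limit_seconds: int,
              allow_topic_editing: bool) -> None:
    # Hypothetical helper, not from this diff; each wrapped parameter keeps
    # its own line and the return annotation follows the closing parenthesis.
    print(enable_editing, edit_limit_seconds, allow_topic_editing)

set_flags(True, 600, False)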

View File

@ -1220,8 +1220,7 @@ class TestGetAPNsPayload(PushNotificationTest):
self.assertDictEqual(payload, expected) self.assertDictEqual(payload, expected)
mock_push_notifications.assert_called() mock_push_notifications.assert_called()
def test_get_message_payload_apns_stream_message(self): def test_get_message_payload_apns_stream_message(self) -> None:
# type: () -> None
stream = Stream.objects.filter(name='Verona').get() stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id) message = self.get_message(Recipient.STREAM, stream.id)
message.trigger = 'push_stream_notify' message.trigger = 'push_stream_notify'
@ -1252,8 +1251,7 @@ class TestGetAPNsPayload(PushNotificationTest):
} }
self.assertDictEqual(payload, expected) self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_mention(self): def test_get_message_payload_apns_stream_mention(self) -> None:
# type: () -> None
user_profile = self.example_user("othello") user_profile = self.example_user("othello")
stream = Stream.objects.filter(name='Verona').get() stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id) message = self.get_message(Recipient.STREAM, stream.id)
@ -1285,8 +1283,7 @@ class TestGetAPNsPayload(PushNotificationTest):
} }
self.assertDictEqual(payload, expected) self.assertDictEqual(payload, expected)
def test_get_message_payload_apns_stream_wildcard_mention(self): def test_get_message_payload_apns_stream_wildcard_mention(self) -> None:
# type: () -> None
user_profile = self.example_user("othello") user_profile = self.example_user("othello")
stream = Stream.objects.filter(name='Verona').get() stream = Stream.objects.filter(name='Verona').get()
message = self.get_message(Recipient.STREAM, stream.id) message = self.get_message(Recipient.STREAM, stream.id)

View File

@ -252,8 +252,7 @@ class TestServiceBotStateHandler(ZulipTestCase):
self.assertTrue(storage.contains('another key')) self.assertTrue(storage.contains('another key'))
self.assertRaises(StateError, lambda: storage.remove('some key')) self.assertRaises(StateError, lambda: storage.remove('some key'))
def test_internal_endpoint(self): def test_internal_endpoint(self) -> None:
# type: () -> None
self.login_user(self.user_profile) self.login_user(self.user_profile)
# Store some data. # Store some data.

View File

@ -3803,8 +3803,7 @@ class NoReplyEmailTest(ZulipTestCase):
class TwoFactorAuthTest(ZulipTestCase): class TwoFactorAuthTest(ZulipTestCase):
@patch('two_factor.models.totp') @patch('two_factor.models.totp')
def test_two_factor_login(self, mock_totp): def test_two_factor_login(self, mock_totp: MagicMock) -> None:
# type: (MagicMock) -> None
token = 123456 token = 123456
email = self.example_email('hamlet') email = self.example_email('hamlet')
password = self.ldap_password('hamlet') password = self.ldap_password('hamlet')
@ -3814,8 +3813,7 @@ class TwoFactorAuthTest(ZulipTestCase):
user_profile.save() user_profile.save()
self.create_default_device(user_profile) self.create_default_device(user_profile)
def totp(*args, **kwargs): def totp(*args: Any, **kwargs: Any) -> int:
# type: (*Any, **Any) -> int
return token return token
mock_totp.side_effect = totp mock_totp.side_effect = totp
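The nested totp stub above is the one case in this hunk where the inline syntax differs in shape from the comment: under PEP 484, *args and **kwargs are annotated with the type of each positional value and each keyword value, not with Tuple or Dict. A minimal sketch with a hypothetical callback:

from typing import Any

def fake_totp(*args: Any, **kwargs: Any) -> int:
    # Each positional argument and each keyword value is typed as Any;
    # at runtime args is still a tuple and kwargs is still a dict.
    return 123456

assert fake_totp("device", digits=6) == 123456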

View File

@ -221,8 +221,11 @@ def update_subscriptions_backend(
] # type: List[FuncKwargPair] ] # type: List[FuncKwargPair]
return compose_views(request, user_profile, method_kwarg_pairs) return compose_views(request, user_profile, method_kwarg_pairs)
def compose_views(request, user_profile, method_kwarg_pairs): def compose_views(
# type: (HttpRequest, UserProfile, List[FuncKwargPair]) -> HttpResponse request: HttpRequest,
user_profile: UserProfile,
method_kwarg_pairs: "List[FuncKwargPair]",
) -> HttpResponse:
''' '''
This takes a series of view methods from method_kwarg_pairs and calls This takes a series of view methods from method_kwarg_pairs and calls
them in sequence, and it smushes all the json results into a single them in sequence, and it smushes all the json results into a single