# Zulip's OpenAPI-based API documentation system is documented at
# https://zulip.readthedocs.io/en/latest/documentation/api.html
#
# This file defines the special Markdown extension that is used to
# render the code examples, example responses, etc. that appear in
# Zulip's public API documentation.

import inspect
import json
import re
from typing import Any, Dict, List, Optional, Pattern, Tuple

import markdown
from django.conf import settings
from markdown.extensions import Extension
from markdown.preprocessors import Preprocessor

import zerver.openapi.python_examples
from zerver.openapi.openapi import get_openapi_description, get_openapi_fixture, openapi_spec

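# The API documentation Markdown pages invoke this extension through macros
# matched by the regexes below; illustrative examples of the accepted syntax:
#
#     {generate_code_example(python)|/messages:post|example}
#     {generate_code_example|/messages:post|fixture(200)}
#     {generate_api_description(/messages:post)}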
MACRO_REGEXP = re.compile(
    r'\{generate_code_example(\(\s*(.+?)\s*\))*\|\s*(.+?)\s*\|\s*(.+?)\s*(\(\s*(.+)\s*\))?\}')
PYTHON_EXAMPLE_REGEX = re.compile(r'\# \{code_example\|\s*(.+?)\s*\}')
JS_EXAMPLE_REGEX = re.compile(r'\/\/ \{code_example\|\s*(.+?)\s*\}')
MACRO_REGEXP_DESC = re.compile(r'\{generate_api_description(\(\s*(.+?)\s*\))}')

PYTHON_CLIENT_CONFIG = """
#!/usr/bin/env python3

import zulip

# Pass the path to your zuliprc file here.
client = zulip.Client(config_file="~/zuliprc")

"""

PYTHON_CLIENT_ADMIN_CONFIG = """
#!/usr/bin/env python

import zulip

# The user for this zuliprc file must be an organization administrator
client = zulip.Client(config_file="~/zuliprc-admin")

"""

JS_CLIENT_CONFIG = """
const Zulip = require('zulip-js');

// Pass the path to your zuliprc file here.
const config = { zuliprc: 'zuliprc' };

"""

JS_CLIENT_ADMIN_CONFIG = """
const Zulip = require('zulip-js');

// The user for this zuliprc file must be an organization administrator.
const config = { zuliprc: 'zuliprc-admin' };

"""

DEFAULT_AUTH_EMAIL = "BOT_EMAIL_ADDRESS"
DEFAULT_AUTH_API_KEY = "BOT_API_KEY"
DEFAULT_EXAMPLE = {
    "integer": 1,
    "string": "demo",
    "boolean": False,
}

def parse_language_and_options(input_str: Optional[str]) -> Tuple[str, Dict[str, Any]]:
    if not input_str:
        return ("", {})
    language_and_options = re.match(r"(?P<language>\w+)(,\s*(?P<options>[\"\'\w\d\[\],= ]+))?", input_str)
    assert(language_and_options is not None)
    kwargs_pattern = re.compile(r"(?P<key>\w+)\s*=\s*(?P<value>[\'\"\w\d]+|\[[\'\",\w\d ]+\])")
    language = language_and_options.group("language")
    assert(language is not None)
    if language_and_options.group("options"):
        _options = kwargs_pattern.finditer(language_and_options.group("options"))
        options = {}
        for m in _options:
            options[m.group("key")] = json.loads(m.group("value").replace("'", '"'))
        return (language, options)
    return (language, {})

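# For reference, parse_language_and_options splits the macro's parenthesized
# argument into a (language, kwargs) pair; e.g. (illustrative values):
#
#     parse_language_and_options("curl, exclude=['client_gravatar']")
#     -> ("curl", {"exclude": ["client_gravatar"]})
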
def extract_code_example(source: List[str], snippet: List[Any],
                         example_regex: Pattern[str]) -> List[Any]:
    start = -1
    end = -1
    for line in source:
        match = example_regex.search(line)
        if match:
            if match.group(1) == 'start':
                start = source.index(line)
            elif match.group(1) == 'end':
                end = source.index(line)
                break

    if (start == -1 and end == -1):
        return snippet

    snippet.append(source[start + 1: end])
    source = source[end + 1:]
    return extract_code_example(source, snippet, example_regex)

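# render_python_code_example (below) pulls the example source out of the test
# function registered in zerver.openapi.python_examples.TEST_FUNCTIONS and keeps
# only the regions between paired markers; illustrative shape of such a region:
#
#     # {code_example|start}
#     result = client.get_profile()
#     # {code_example|end}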
def render_python_code_example(function: str, admin_config: bool=False,
                               **kwargs: Any) -> List[str]:
    method = zerver.openapi.python_examples.TEST_FUNCTIONS[function]
    function_source_lines = inspect.getsourcelines(method)[0]

    if admin_config:
        config = PYTHON_CLIENT_ADMIN_CONFIG.splitlines()
    else:
        config = PYTHON_CLIENT_CONFIG.splitlines()

    snippets = extract_code_example(function_source_lines, [], PYTHON_EXAMPLE_REGEX)

    code_example = []
    code_example.append('```python')
    code_example.extend(config)

    for snippet in snippets:
        for line in snippet:
            # Remove one level of indentation and strip newlines
            code_example.append(line[4:].rstrip())

    code_example.append('print(result)')
    code_example.append('\n')
    code_example.append('```')

    return code_example

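# render_javascript_code_example scans zerver/openapi/javascript_examples.js for
# the block that begins with an `add_example(` line mentioning `function` and ends
# at the next line starting with `}`, then extracts the `// {code_example|...}`
# regions from that block.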
def render_javascript_code_example(function: str, admin_config: bool=False,
                                   **kwargs: Any) -> List[str]:
    function_source_lines = []
    with open('zerver/openapi/javascript_examples.js') as f:
        parsing = False
        for line in f:
            if line.startswith("}"):
                parsing = False
            if parsing:
                function_source_lines.append(line.rstrip())
            if line.startswith("add_example(") and function in line:
                parsing = True

    snippets = extract_code_example(function_source_lines, [], JS_EXAMPLE_REGEX)

    if admin_config:
        config = JS_CLIENT_ADMIN_CONFIG.splitlines()
    else:
        config = JS_CLIENT_CONFIG.splitlines()

    code_example = []
    code_example.append('```js')
    code_example.extend(config)
    for snippet in snippets:
        code_example.append("Zulip(config).then(async (client) => {")
        for line in snippet:
            result = re.search('const result.*=(.*);', line)
            if result:
                line = f" return{result.group(1)};"
            # Strip newlines
            code_example.append(line.rstrip())
        code_example.append("}).then(console.log).catch(console.err);")
        code_example.append(" ")

    code_example.append('```')

    return code_example

def curl_method_arguments(endpoint: str, method: str,
                          api_url: str) -> List[str]:
    # We also include the -sS verbosity arguments here.
    method = method.upper()
    url = f"{api_url}/v1{endpoint}"
    valid_methods = ["GET", "POST", "DELETE", "PUT", "PATCH", "OPTIONS"]
    if method == "GET":
        # Then we need to make sure that each -d option translates to becoming
        # a GET parameter (in the URL) and not a POST parameter (in the body).
        # TODO: remove the -X part by updating the linting rule. It's redundant.
        return ["-sSX", "GET", "-G", url]
    elif method in valid_methods:
        return ["-sSX", method, url]
    else:
        msg = f"The request method {method} is not one of {valid_methods}"
        raise ValueError(msg)

def get_openapi_param_example_value_as_string(endpoint: str, method: str, param: Dict[str, Any],
                                              curl_argument: bool=False) -> str:
    jsonify = False
    param_name = param["name"]
    if "content" in param:
        param = param["content"]["application/json"]
        jsonify = True
    if "type" in param["schema"]:
        param_type = param["schema"]["type"]
    else:
        # Hack: Ideally, we'd extract a common function for handling
        # oneOf values in types and do something with the resulting
        # union type. But for this logic's purpose, it's good enough
        # to just check the first parameter.
        param_type = param["schema"]["oneOf"][0]["type"]
    if param_type in ["object", "array"]:
        example_value = param.get("example", None)
        if not example_value:
            msg = f"""All array and object type request parameters must have
concrete examples. The openAPI documentation for {endpoint}/{method} is missing an example
value for the {param_name} parameter. Without this we cannot automatically generate a
cURL example."""
            raise ValueError(msg)
        ordered_ex_val_str = json.dumps(example_value, sort_keys=True)
        # We currently don't have any non-JSON encoded arrays.
        assert(jsonify)
        if curl_argument:
            return f" --data-urlencode {param_name}='{ordered_ex_val_str}'"
        return ordered_ex_val_str  # nocoverage
    else:
        example_value = param.get("example", DEFAULT_EXAMPLE[param_type])
        if isinstance(example_value, bool):
            example_value = str(example_value).lower()
        if jsonify:
            example_value = json.dumps(example_value)
        if curl_argument:
            return f" -d '{param_name}={example_value}'"
        return example_value

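# The argument strings built above feed straight into generate_curl_example
# below: e.g. (illustrative) a string parameter named "topic" with example
# "Castle" renders as " -d 'topic=Castle'", while object/array parameters are
# JSON-encoded and rendered via " --data-urlencode ...".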
def generate_curl_example(endpoint: str, method: str,
                          api_url: str,
                          auth_email: str=DEFAULT_AUTH_EMAIL,
                          auth_api_key: str=DEFAULT_AUTH_API_KEY,
                          exclude: Optional[List[str]]=None,
                          include: Optional[List[str]]=None) -> List[str]:
    if exclude is not None and include is not None:
        raise AssertionError("exclude and include cannot be set at the same time.")

    lines = ["```curl"]
    operation = endpoint + ":" + method.lower()
    operation_entry = openapi_spec.openapi()['paths'][endpoint][method.lower()]
    global_security = openapi_spec.openapi()['security']

    operation_params = operation_entry.get("parameters", [])
    operation_request_body = operation_entry.get("requestBody", None)
    operation_security = operation_entry.get("security", None)

    if settings.RUNNING_OPENAPI_CURL_TEST:  # nocoverage
        from zerver.openapi.curl_param_value_generators import patch_openapi_example_values
        operation_params, operation_request_body = patch_openapi_example_values(operation, operation_params,
                                                                                operation_request_body)

    format_dict = {}
    for param in operation_params:
        if param["in"] != "path":
            continue
        example_value = get_openapi_param_example_value_as_string(endpoint, method, param)
        format_dict[param["name"]] = example_value
    example_endpoint = endpoint.format_map(format_dict)

    curl_first_line_parts = ["curl", *curl_method_arguments(example_endpoint, method,
                                                            api_url)]
    lines.append(" ".join(curl_first_line_parts))

    insecure_operations = ['/dev_fetch_api_key:post']
    if operation_security is None:
        if global_security == [{'basicAuth': []}]:
            authentication_required = True
        else:
            raise AssertionError("Unhandled global securityScheme."
                                 + " Please update the code to handle this scheme.")
    elif operation_security == []:
        if operation in insecure_operations:
            authentication_required = False
        else:
            raise AssertionError("Unknown operation without a securityScheme. "
                                 + "Please update insecure_operations.")
    else:
        raise AssertionError("Unhandled securityScheme. Please update the code to handle this scheme.")

    if authentication_required:
        lines.append(f" -u {auth_email}:{auth_api_key}")

    for param in operation_params:
        if param["in"] == "path":
            continue
        param_name = param["name"]

        if include is not None and param_name not in include:
            continue

        if exclude is not None and param_name in exclude:
            continue

        example_value = get_openapi_param_example_value_as_string(endpoint, method, param,
                                                                  curl_argument=True)
        lines.append(example_value)

    if "requestBody" in operation_entry:
        properties = operation_entry["requestBody"]["content"]["multipart/form-data"]["schema"]["properties"]
        for key, property in properties.items():
            lines.append(' -F "{}=@{}"'.format(key, property["example"]))

    for i in range(1, len(lines)-1):
        lines[i] = lines[i] + " \\"

    lines.append("```")

    return lines

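# For reference, generate_curl_example returns Markdown lines roughly like the
# following (illustrative output; the real URL, parameters, and example values
# come from api_url and the OpenAPI spec):
#
#     ```curl
#     curl -sSX POST https://example.zulipchat.com/api/v1/messages \
#      -u BOT_EMAIL_ADDRESS:BOT_API_KEY \
#      -d 'type=stream'
#     ```
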
def render_curl_example(function: str, api_url: str,
                        exclude: Optional[List[str]]=None,
                        include: Optional[List[str]]=None) -> List[str]:
    """ A simple wrapper around generate_curl_example. """
    parts = function.split(":")
    endpoint = parts[0]
    method = parts[1]
    kwargs: Dict[str, Any] = {}
    if len(parts) > 2:
        kwargs["auth_email"] = parts[2]
    if len(parts) > 3:
        kwargs["auth_api_key"] = parts[3]
    kwargs["api_url"] = api_url
    kwargs["exclude"] = exclude
    kwargs["include"] = include
    return generate_curl_example(endpoint, method, **kwargs)

SUPPORTED_LANGUAGES: Dict[str, Any] = {
    'python': {
        'client_config': PYTHON_CLIENT_CONFIG,
        'admin_config': PYTHON_CLIENT_ADMIN_CONFIG,
        'render': render_python_code_example,
    },
    'curl': {
        'render': render_curl_example,
    },
    'javascript': {
        'client_config': JS_CLIENT_CONFIG,
        'admin_config': JS_CLIENT_ADMIN_CONFIG,
        'render': render_javascript_code_example,
    },
}

class APIMarkdownExtension(Extension):
    def __init__(self, api_url: Optional[str]) -> None:
        self.config = {
            'api_url': [
                api_url,
                'API URL to use when rendering curl examples',
            ],
        }

    def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None:
        md.preprocessors.add(
            'generate_code_example', APICodeExamplesPreprocessor(md, self.getConfigs()), '_begin',
        )
        md.preprocessors.add(
            'generate_api_description', APIDescriptionPreprocessor(md, self.getConfigs()), '_begin',
        )

class APICodeExamplesPreprocessor(Preprocessor):
    def __init__(self, md: markdown.Markdown, config: Dict[str, Any]) -> None:
        super().__init__(md)
        self.api_url = config['api_url']

    def run(self, lines: List[str]) -> List[str]:
        done = False
        while not done:
            for line in lines:
                loc = lines.index(line)
                match = MACRO_REGEXP.search(line)

                if match:
                    language, options = parse_language_and_options(match.group(2))
                    function = match.group(3)
                    key = match.group(4)
                    argument = match.group(6)
                    if self.api_url is None:
                        raise AssertionError("Cannot render curl API examples without API URL set.")
                    options['api_url'] = self.api_url

                    if key == 'fixture':
                        if argument:
                            text = self.render_fixture(function, name=argument)
                    elif key == 'example':
                        if argument == 'admin_config=True':
                            text = SUPPORTED_LANGUAGES[language]['render'](function, admin_config=True)
                        else:
                            text = SUPPORTED_LANGUAGES[language]['render'](function, **options)

                    # The line that contains the directive to include the macro
                    # may be preceded or followed by text or tags, in that case
                    # we need to make sure that any preceding or following text
                    # stays the same.
                    line_split = MACRO_REGEXP.split(line, maxsplit=0)
                    preceding = line_split[0]
                    following = line_split[-1]
                    text = [preceding, *text, following]
                    lines = lines[:loc] + text + lines[loc+1:]
                    break
            else:
                done = True
        return lines

    def render_fixture(self, function: str, name: Optional[str]=None) -> List[str]:
        fixture = []

        path, method = function.rsplit(':', 1)
        fixture_dict = get_openapi_fixture(path, method, name)
        fixture_json = json.dumps(fixture_dict, indent=4, sort_keys=True,
                                  separators=(',', ': '))

        fixture.append('``` json')
        fixture.extend(fixture_json.splitlines())
        fixture.append('```')

        return fixture

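# APIDescriptionPreprocessor performs the analogous substitution for
# {generate_api_description(...)} macros, splicing in the endpoint's description
# from the OpenAPI spec with any {{api_url}} placeholders replaced by the
# configured API URL.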
class APIDescriptionPreprocessor(Preprocessor):
    def __init__(self, md: markdown.Markdown, config: Dict[str, Any]) -> None:
        super().__init__(md)
        self.api_url = config['api_url']

    def run(self, lines: List[str]) -> List[str]:
        done = False
        while not done:
            for line in lines:
                loc = lines.index(line)
                match = MACRO_REGEXP_DESC.search(line)

                if match:
                    function = match.group(2)
                    text = self.render_description(function)
                    # The line that contains the directive to include the macro
                    # may be preceded or followed by text or tags, in that case
                    # we need to make sure that any preceding or following text
                    # stays the same.
                    line_split = MACRO_REGEXP_DESC.split(line, maxsplit=0)
                    preceding = line_split[0]
                    following = line_split[-1]
                    text = [preceding, *text, following]
                    lines = lines[:loc] + text + lines[loc+1:]
                    break
            else:
                done = True
        return lines

    def render_description(self, function: str) -> List[str]:
        description: List[str] = []
        path, method = function.rsplit(':', 1)
        description_dict = get_openapi_description(path, method)
        description_dict = description_dict.replace('{{api_url}}', self.api_url)
        description.extend(description_dict.splitlines())
        return description

def makeExtension(*args: Any, **kwargs: str) -> APIMarkdownExtension:
    return APIMarkdownExtension(*args, **kwargs)

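# Illustrative usage (a minimal sketch, not the actual wiring inside Zulip's
# Markdown pipeline; `api_doc_source` is a hypothetical string holding one of
# the API documentation Markdown pages):
#
#     md = markdown.Markdown(extensions=[makeExtension(api_url="https://chat.example.com/api")])
#     rendered_html = md.convert(api_doc_source)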