# Zulip's OpenAPI-based API documentation system is documented at
#   https://zulip.readthedocs.io/en/latest/documentation/api.html
#
# This file defines the special Markdown extension that is used to
# render the code examples, example responses, etc. that appear in
# Zulip's public API documentation.

import inspect
import json
import re
import shlex
from textwrap import dedent
from typing import Any, Dict, List, Mapping, Optional, Pattern, Tuple

import markdown
from django.conf import settings
from markdown.extensions import Extension
from markdown.preprocessors import Preprocessor

import zerver.openapi.python_examples
from zerver.openapi.openapi import get_openapi_description, get_openapi_fixture, openapi_spec

MACRO_REGEXP = re.compile(
    r"\{generate_code_example(\(\s*(.+?)\s*\))*\|\s*(.+?)\s*\|\s*(.+?)\s*(\(\s*(.+)\s*\))?\}"
)
PYTHON_EXAMPLE_REGEX = re.compile(r"\# \{code_example\|\s*(.+?)\s*\}")
JS_EXAMPLE_REGEX = re.compile(r"\/\/ \{code_example\|\s*(.+?)\s*\}")
MACRO_REGEXP_DESC = re.compile(r"\{generate_api_description(\(\s*(.+?)\s*\))}")
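
# For reference, the documentation macros these regexes match look roughly
# like the following (illustrative examples; the operation names are
# placeholders, not defined in this file):
#
#   {generate_code_example(python)|/messages:post|example}
#   {generate_code_example|/messages:post|fixture(200)}
#   {generate_api_description(/messages:post)}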

PYTHON_CLIENT_CONFIG = """
#!/usr/bin/env python3

import zulip

# Pass the path to your zuliprc file here.
client = zulip.Client(config_file="~/zuliprc")

"""

PYTHON_CLIENT_ADMIN_CONFIG = """
#!/usr/bin/env python

import zulip

# The user for this zuliprc file must be an organization administrator
client = zulip.Client(config_file="~/zuliprc-admin")

"""

JS_CLIENT_CONFIG = """
const zulipInit = require("zulip-js");

// Pass the path to your zuliprc file here.
const config = { zuliprc: "zuliprc" };

"""

JS_CLIENT_ADMIN_CONFIG = """
const zulipInit = require("zulip-js");

// The user for this zuliprc file must be an organization administrator.
const config = { zuliprc: "zuliprc-admin" };

"""

DEFAULT_AUTH_EMAIL = "BOT_EMAIL_ADDRESS"
DEFAULT_AUTH_API_KEY = "BOT_API_KEY"
DEFAULT_EXAMPLE = {
    "integer": 1,
    "string": "demo",
    "boolean": False,
}


def parse_language_and_options(input_str: Optional[str]) -> Tuple[str, Dict[str, Any]]:
    if not input_str:
        return ("", {})
    language_and_options = re.match(
        r"(?P<language>\w+)(,\s*(?P<options>[\"\'\w\d\[\],= ]+))?", input_str
    )
    assert language_and_options is not None
    kwargs_pattern = re.compile(r"(?P<key>\w+)\s*=\s*(?P<value>[\'\"\w\d]+|\[[\'\",\w\d ]+\])")
    language = language_and_options.group("language")
    assert language is not None
    if language_and_options.group("options"):
        _options = kwargs_pattern.finditer(language_and_options.group("options"))
        options = {}
        for m in _options:
            options[m.group("key")] = json.loads(m.group("value").replace("'", '"'))
        return (language, options)
    return (language, {})
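    # Illustrative behavior, tracing the regexes above (the option name is a
    # made-up placeholder):
    #   parse_language_and_options('curl, exclude=["param_a"]')
    #   -> ("curl", {"exclude": ["param_a"]})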


def extract_code_example(
    source: List[str], snippet: List[Any], example_regex: Pattern[str]
) -> List[Any]:
    start = -1
    end = -1
    for line in source:
        match = example_regex.search(line)
        if match:
            if match.group(1) == "start":
                start = source.index(line)
            elif match.group(1) == "end":
                end = source.index(line)
                break

    if start == -1 and end == -1:
        return snippet

    snippet.append(source[start + 1 : end])
    source = source[end + 1 :]
    return extract_code_example(source, snippet, example_regex)
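    # Note: the markers scanned for above are comment lines in the example
    # sources of the form "# {code_example|start}" / "# {code_example|end}"
    # (or the "//"-prefixed JavaScript equivalent), as matched by
    # PYTHON_EXAMPLE_REGEX and JS_EXAMPLE_REGEX; each start/end pair yields
    # one snippet containing the lines strictly between the two markers.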


def render_python_code_example(
    function: str, admin_config: bool = False, **kwargs: Any
) -> List[str]:
    method = zerver.openapi.python_examples.TEST_FUNCTIONS[function]
    function_source_lines = inspect.getsourcelines(method)[0]

    if admin_config:
        config = PYTHON_CLIENT_ADMIN_CONFIG.splitlines()
    else:
        config = PYTHON_CLIENT_CONFIG.splitlines()

    snippets = extract_code_example(function_source_lines, [], PYTHON_EXAMPLE_REGEX)

    code_example = []
    code_example.append("```python")
    code_example.extend(config)

    for snippet in snippets:
        for line in snippet:
            # Remove one level of indentation and strip newlines
            code_example.append(line[4:].rstrip())

    code_example.append("print(result)")
    code_example.append("\n")
    code_example.append("```")

    return code_example


def render_javascript_code_example(
    function: str, admin_config: bool = False, **kwargs: Any
) -> List[str]:
    pattern = fr'^add_example\(\s*"[^"]*",\s*{re.escape(json.dumps(function))},\s*\d+,\s*async \(client, console\) => \{{\n(.*?)^(?:\}}| *\}},\n)\);$'
    with open("zerver/openapi/javascript_examples.js") as f:
        m = re.search(pattern, f.read(), re.M | re.S)
    assert m is not None
    function_source_lines = dedent(m.group(1)).splitlines()

    snippets = extract_code_example(function_source_lines, [], JS_EXAMPLE_REGEX)

    if admin_config:
        config = JS_CLIENT_ADMIN_CONFIG.splitlines()
    else:
        config = JS_CLIENT_CONFIG.splitlines()

    code_example = []
    code_example.append("```js")
    code_example.extend(config)
    code_example.append("(async () => {")
    code_example.append("    const client = await zulipInit(config);")
    for snippet in snippets:
        code_example.append("")
        for line in snippet:
            # Strip newlines
            code_example.append("    " + line.rstrip())
    code_example.append("})();")

    code_example.append("```")

    return code_example
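    # The regex above assumes that zerver/openapi/javascript_examples.js
    # registers examples in roughly this shape (an illustrative sketch; the
    # first argument and body are placeholders):
    #
    #   add_example("example name", "/messages:post", 200, async (client, console) => {
    #       // {code_example|start}
    #       ...
    #       // {code_example|end}
    #   });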


def curl_method_arguments(endpoint: str, method: str, api_url: str) -> List[str]:
    # We also include the -sS verbosity arguments here.
    method = method.upper()
    url = f"{api_url}/v1{endpoint}"
    valid_methods = ["GET", "POST", "DELETE", "PUT", "PATCH", "OPTIONS"]
    if method == "GET":
        # Then we need to make sure that each -d option translates to becoming
        # a GET parameter (in the URL) and not a POST parameter (in the body).
        # TODO: remove the -X part by updating the linting rule. It's redundant.
        return ["-sSX", "GET", "-G", url]
    elif method in valid_methods:
        return ["-sSX", method, url]
    else:
        msg = f"The request method {method} is not one of {valid_methods}"
        raise ValueError(msg)
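    # For example (assuming an api_url that already ends in "/api"; the URL is
    # a placeholder, not a real deployment):
    #   curl_method_arguments("/messages", "get", "https://example.com/api")
    #   -> ["-sSX", "GET", "-G", "https://example.com/api/v1/messages"]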


def get_openapi_param_example_value_as_string(
    endpoint: str, method: str, param: Dict[str, Any], curl_argument: bool = False
) -> str:
    jsonify = False
    param_name = param["name"]
    if "content" in param:
        param = param["content"]["application/json"]
        jsonify = True
    if "type" in param["schema"]:
        param_type = param["schema"]["type"]
    else:
        # Hack: Ideally, we'd extract a common function for handling
        # oneOf values in types and do something with the resulting
        # union type. But for this logic's purpose, it's good enough
        # to just check the first parameter.
        param_type = param["schema"]["oneOf"][0]["type"]
    if param_type in ["object", "array"]:
        example_value = param.get("example", None)
        if not example_value:
            msg = f"""All array and object type request parameters must have
concrete examples. The openAPI documentation for {endpoint}/{method} is missing an example
value for the {param_name} parameter. Without this we cannot automatically generate a
cURL example."""
            raise ValueError(msg)
        ordered_ex_val_str = json.dumps(example_value, sort_keys=True)
        # We currently don't have any non-JSON encoded arrays.
        assert jsonify
        if curl_argument:
            return "    --data-urlencode " + shlex.quote(f"{param_name}={ordered_ex_val_str}")
        return ordered_ex_val_str  # nocoverage
    else:
        example_value = param.get("example", DEFAULT_EXAMPLE[param_type])
        if isinstance(example_value, bool):
            example_value = str(example_value).lower()
        if jsonify:
            example_value = json.dumps(example_value)
        if curl_argument:
            return "    --data-urlencode " + shlex.quote(f"{param_name}={example_value}")
        return example_value
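    # For example, a boolean parameter without an explicit example falls back
    # to DEFAULT_EXAMPLE (the endpoint and parameter name here are placeholders):
    #   get_openapi_param_example_value_as_string(
    #       "/endpoint", "get", {"name": "some_flag", "schema": {"type": "boolean"}},
    #       curl_argument=True,
    #   )
    #   -> '    --data-urlencode some_flag=false'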


def generate_curl_example(
    endpoint: str,
    method: str,
    api_url: str,
    auth_email: str = DEFAULT_AUTH_EMAIL,
    auth_api_key: str = DEFAULT_AUTH_API_KEY,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
) -> List[str]:
    if exclude is not None and include is not None:
        raise AssertionError("exclude and include cannot be set at the same time.")

    lines = ["```curl"]
    operation = endpoint + ":" + method.lower()
    operation_entry = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    global_security = openapi_spec.openapi()["security"]

    operation_params = operation_entry.get("parameters", [])
    operation_request_body = operation_entry.get("requestBody", None)
    operation_security = operation_entry.get("security", None)

    if settings.RUNNING_OPENAPI_CURL_TEST:  # nocoverage
        from zerver.openapi.curl_param_value_generators import patch_openapi_example_values

        operation_params, operation_request_body = patch_openapi_example_values(
            operation, operation_params, operation_request_body
        )

    format_dict = {}
    for param in operation_params:
        if param["in"] != "path":
            continue
        example_value = get_openapi_param_example_value_as_string(endpoint, method, param)
        format_dict[param["name"]] = example_value
    example_endpoint = endpoint.format_map(format_dict)

    curl_first_line_parts = ["curl", *curl_method_arguments(example_endpoint, method, api_url)]
    lines.append(" ".join(map(shlex.quote, curl_first_line_parts)))

    insecure_operations = ["/dev_fetch_api_key:post", "/fetch_api_key:post"]
    if operation_security is None:
        if global_security == [{"basicAuth": []}]:
            authentication_required = True
        else:
            raise AssertionError(
                "Unhandled global securityScheme."
                + " Please update the code to handle this scheme."
            )
    elif operation_security == []:
        if operation in insecure_operations:
            authentication_required = False
        else:
            raise AssertionError(
                "Unknown operation without a securityScheme. "
                + "Please update insecure_operations."
            )
    else:
        raise AssertionError(
            "Unhandled securityScheme. Please update the code to handle this scheme."
        )

    if authentication_required:
        lines.append("    -u " + shlex.quote(f"{auth_email}:{auth_api_key}"))

    for param in operation_params:
        if param["in"] == "path":
            continue
        param_name = param["name"]

        if include is not None and param_name not in include:
            continue

        if exclude is not None and param_name in exclude:
            continue

        example_value = get_openapi_param_example_value_as_string(
            endpoint, method, param, curl_argument=True
        )
        lines.append(example_value)

    if "requestBody" in operation_entry:
        properties = operation_entry["requestBody"]["content"]["multipart/form-data"]["schema"][
            "properties"
        ]
        for key, property in properties.items():
            lines.append("    -F " + shlex.quote("{}=@{}".format(key, property["example"])))

    for i in range(1, len(lines) - 1):
        lines[i] = lines[i] + " \\"

    lines.append("```")

    return lines


def render_curl_example(
    function: str,
    api_url: str,
    exclude: Optional[List[str]] = None,
    include: Optional[List[str]] = None,
) -> List[str]:
    """A simple wrapper around generate_curl_example."""
    parts = function.split(":")
    endpoint = parts[0]
    method = parts[1]
    kwargs: Dict[str, Any] = {}
    if len(parts) > 2:
        kwargs["auth_email"] = parts[2]
    if len(parts) > 3:
        kwargs["auth_api_key"] = parts[3]
    kwargs["api_url"] = api_url
    kwargs["exclude"] = exclude
    kwargs["include"] = include
    return generate_curl_example(endpoint, method, **kwargs)
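    # `function` here is the macro's operation string of the form
    # "<endpoint>:<method>[:<auth_email>[:<auth_api_key>]]", e.g.
    # "/endpoint:get" (a placeholder operation).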


SUPPORTED_LANGUAGES: Dict[str, Any] = {
    "python": {
        "client_config": PYTHON_CLIENT_CONFIG,
        "admin_config": PYTHON_CLIENT_ADMIN_CONFIG,
        "render": render_python_code_example,
    },
    "curl": {
        "render": render_curl_example,
    },
    "javascript": {
        "client_config": JS_CLIENT_CONFIG,
        "admin_config": JS_CLIENT_ADMIN_CONFIG,
        "render": render_javascript_code_example,
    },
}


class APIMarkdownExtension(Extension):
    def __init__(self, api_url: Optional[str]) -> None:
        self.config = {
            "api_url": [
                api_url,
                "API URL to use when rendering curl examples",
            ],
        }

    def extendMarkdown(self, md: markdown.Markdown) -> None:
        md.preprocessors.register(
            APICodeExamplesPreprocessor(md, self.getConfigs()), "generate_code_example", 525
        )
        md.preprocessors.register(
            APIDescriptionPreprocessor(md, self.getConfigs()), "generate_api_description", 530
        )


class APICodeExamplesPreprocessor(Preprocessor):
    def __init__(self, md: markdown.Markdown, config: Mapping[str, Any]) -> None:
        super().__init__(md)
        self.api_url = config["api_url"]

    def run(self, lines: List[str]) -> List[str]:
        done = False
        while not done:
            for line in lines:
                loc = lines.index(line)
                match = MACRO_REGEXP.search(line)

                if match:
                    language, options = parse_language_and_options(match.group(2))
                    function = match.group(3)
                    key = match.group(4)
                    argument = match.group(6)
                    if self.api_url is None:
                        raise AssertionError("Cannot render curl API examples without API URL set.")
                    options["api_url"] = self.api_url

                    if key == "fixture":
                        if argument:
                            text = self.render_fixture(function, name=argument)
                    elif key == "example":
                        if argument == "admin_config=True":
                            text = SUPPORTED_LANGUAGES[language]["render"](
                                function, admin_config=True
                            )
                        else:
                            text = SUPPORTED_LANGUAGES[language]["render"](function, **options)

                    # The line that contains the directive to include the macro
                    # may be preceded or followed by text or tags, in that case
                    # we need to make sure that any preceding or following text
                    # stays the same.
                    line_split = MACRO_REGEXP.split(line, maxsplit=0)
                    preceding = line_split[0]
                    following = line_split[-1]
                    text = [preceding, *text, following]
                    lines = lines[:loc] + text + lines[loc + 1 :]
                    break
            else:
                done = True
        return lines

    def render_fixture(self, function: str, name: Optional[str] = None) -> List[str]:
        fixture = []

        path, method = function.rsplit(":", 1)
        fixture_dict = get_openapi_fixture(path, method, name)
        fixture_json = json.dumps(fixture_dict, indent=4, sort_keys=True, separators=(",", ": "))

        fixture.append("``` json")
        fixture.extend(fixture_json.splitlines())
        fixture.append("```")

        return fixture


class APIDescriptionPreprocessor(Preprocessor):
    def __init__(self, md: markdown.Markdown, config: Mapping[str, Any]) -> None:
        super().__init__(md)
        self.api_url = config["api_url"]

    def run(self, lines: List[str]) -> List[str]:
        done = False
        while not done:
            for line in lines:
                loc = lines.index(line)
                match = MACRO_REGEXP_DESC.search(line)

                if match:
                    function = match.group(2)
                    text = self.render_description(function)
                    # The line that contains the directive to include the macro
                    # may be preceded or followed by text or tags, in that case
                    # we need to make sure that any preceding or following text
                    # stays the same.
                    line_split = MACRO_REGEXP_DESC.split(line, maxsplit=0)
                    preceding = line_split[0]
                    following = line_split[-1]
                    text = [preceding, *text, following]
                    lines = lines[:loc] + text + lines[loc + 1 :]
                    break
            else:
                done = True
        return lines

    def render_description(self, function: str) -> List[str]:
        description: List[str] = []
        path, method = function.rsplit(":", 1)
        description_dict = get_openapi_description(path, method)
        description_dict = description_dict.replace("{{api_url}}", self.api_url)
        description.extend(description_dict.splitlines())
        return description


def makeExtension(*args: Any, **kwargs: str) -> APIMarkdownExtension:
    return APIMarkdownExtension(*args, **kwargs)