2020-09-15 00:24:01 +02:00
|
|
|
# Zulip's OpenAPI-based API documentation system is documented at
|
|
|
|
# https://zulip.readthedocs.io/en/latest/documentation/api.html
|
|
|
|
#
|
|
|
|
# This file contains helper functions to interact with the OpenAPI
|
|
|
|
# definitions and validate that Zulip's implementation matches what is
|
|
|
|
# described in our documentation.
|
|
|
|
|
2021-06-07 22:14:34 +02:00
|
|
|
import json
|
2018-05-15 19:28:42 +02:00
|
|
|
import os
|
2020-06-13 17:59:46 +02:00
|
|
|
import re
|
2021-07-03 07:37:59 +02:00
|
|
|
from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union
|
2018-05-15 19:28:42 +02:00
|
|
|
|
2022-01-12 03:08:52 +01:00
|
|
|
import orjson
|
2020-10-22 23:45:38 +02:00
|
|
|
from jsonschema.exceptions import ValidationError as JsonSchemaValidationError
|
2022-10-06 09:57:41 +02:00
|
|
|
from openapi_core import Spec
|
2022-01-12 03:08:52 +01:00
|
|
|
from openapi_core.testing import MockRequest, MockResponse
|
|
|
|
from openapi_core.unmarshalling.schemas.exceptions import InvalidSchemaValue
|
2022-10-06 09:57:41 +02:00
|
|
|
from openapi_core.validation.request import openapi_request_validator
|
|
|
|
from openapi_core.validation.response import openapi_response_validator
|
2020-07-01 19:07:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
# Absolute path to the canonical OpenAPI definition file for Zulip's API.
OPENAPI_SPEC_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), "../openapi/zulip.yaml")
)
|
2018-05-15 19:28:42 +02:00
|
|
|
|
2020-06-13 17:59:46 +02:00
|
|
|
# A list of endpoint-methods such that the endpoint
# has documentation but not with this particular method.
EXCLUDE_UNDOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = {
    ("/realm/emoji/{emoji_name}", "delete"),
    ("/users", "patch"),
}
|
2020-07-01 19:07:31 +02:00
|
|
|
# Consists of endpoints with some documentation remaining.
# These are skipped but return true as the validator cannot exclude objects
EXCLUDE_DOCUMENTED_ENDPOINTS: Set[Tuple[str, str]] = set()
|
2020-08-12 04:54:48 +02:00
|
|
|
|
2023-02-02 04:35:24 +01:00
|
|
|
|
2020-08-12 01:35:02 +02:00
|
|
|
# Most of our code expects allOf to be preprocessed away because that is what
|
|
|
|
# yamole did. Its algorithm for doing so is not standards compliant, but we
|
|
|
|
# replicate it here.
|
|
|
|
def naively_merge(a: Dict[str, object], b: Dict[str, object]) -> Dict[str, object]:
    """Merge *b* into a copy of *a*, replicating yamole's (non-standards-
    compliant) allOf merge semantics: "example" values and brand-new keys
    always come from *b*; conflicting lists are concatenated; conflicting
    dicts are merged recursively; any other conflict keeps *a*'s value.
    """
    merged: Dict[str, object] = dict(a)
    for key, incoming in b.items():
        if key == "example" or key not in merged:
            # "example" is always overridden; new keys are simply copied in.
            merged[key] = incoming
            continue
        existing = merged[key]
        if isinstance(incoming, list):
            assert isinstance(existing, list)
            merged[key] = [*existing, *incoming]
        elif isinstance(incoming, dict):
            assert isinstance(existing, dict)
            merged[key] = naively_merge(existing, incoming)
    return merged
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-12 01:35:02 +02:00
|
|
|
def naively_merge_allOf(obj: object) -> object:
    """Recursively collapse allOf constructs in any nested structure."""
    if isinstance(obj, dict):
        return naively_merge_allOf_dict(obj)
    if isinstance(obj, list):
        return [naively_merge_allOf(item) for item in obj]
    # Scalars (and anything else) pass through untouched.
    return obj
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-12 01:35:02 +02:00
|
|
|
def naively_merge_allOf_dict(obj: Dict[str, object]) -> Dict[str, object]:
    """Collapse an allOf list in *obj* (if present) into a single dict,
    merging each subschema with naively_merge, and recurse into values.
    """
    if "allOf" not in obj:
        # No allOf at this level: just recurse into every value.
        return {key: naively_merge_allOf(value) for key, value in obj.items()}
    remainder = dict(obj)
    subschemas = remainder.pop("allOf")
    assert isinstance(subschemas, list)
    merged = naively_merge_allOf_dict(remainder)
    for subschema in subschemas:
        assert isinstance(subschema, dict)
        merged = naively_merge(merged, naively_merge_allOf_dict(subschema))
    return merged
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
class OpenAPISpec:
    """Lazy, mtime-cached wrapper around Zulip's OpenAPI YAML file.

    Parses the spec on first access and re-parses it whenever the file's
    modification time changes, exposing three views: the raw (allOf-merged,
    $ref-resolved) dict, a regex->endpoint lookup table for parameterized
    paths, and an openapi_core Spec object.
    """

    def __init__(self, openapi_path: str) -> None:
        # Path to the YAML spec; parsing is deferred until first use.
        self.openapi_path = openapi_path
        # mtime of the file at the last (re)load; None means never loaded.
        self.mtime: Optional[float] = None
        # Parsed spec with $refs replaced and allOf merged away.
        self._openapi: Dict[str, Any] = {}
        # Maps a path regex (for parameterized endpoints) to its endpoint.
        self._endpoints_dict: Dict[str, str] = {}
        # openapi_core Spec object built from the raw parsed YAML.
        self._spec: Optional[Spec] = None

    def check_reload(self) -> None:
        """(Re)parse the spec file if it changed since the last load."""
        # Because importing yaml takes significant time, and we only
        # use python-yaml for our API docs, importing it lazily here
        # is a significant optimization to `manage.py` startup.
        #
        # There is a bit of a race here...we may have two processes
        # accessing this module level object and both trying to
        # populate self.data at the same time. Hopefully this will
        # only cause some extra processing at startup and not data
        # corruption.
        import yaml
        from jsonref import JsonRef

        with open(self.openapi_path) as f:
            mtime = os.fstat(f.fileno()).st_mtime
            # Using == rather than >= to cover the corner case of users placing an
            # earlier version than the current one
            if self.mtime == mtime:
                return

            openapi = yaml.load(f, Loader=yaml.CSafeLoader)

        spec = Spec.create(openapi)
        self._spec = spec
        self._openapi = naively_merge_allOf_dict(JsonRef.replace_refs(openapi))
        self.create_endpoints_dict()
        # Record the mtime last, so a partial load is retried next time.
        self.mtime = mtime

    def create_endpoints_dict(self) -> None:
        """Build the regex -> endpoint table for parameterized paths."""
        # Algorithm description:
        # We have 2 types of endpoints
        # 1.with path arguments 2. without path arguments
        # In validate_against_openapi_schema we directly check
        # if we have a without path endpoint, since it does not
        # require regex. Hence they are not part of the regex dict
        # and now we are left with only:
        # endpoint with path arguments.
        # Now for this case, the regex has been created carefully,
        # numeric arguments are matched with [0-9] only and
        # emails are matched with their regex. This is why there are zero
        # collisions. Hence if this regex matches
        # an incorrect endpoint then there is some backend problem.
        # For example if we have users/{name}/presence then it will
        # conflict with users/me/presence even in the backend.
        # Care should be taken though that if we have special strings
        # such as email they must be substituted with proper regex.

        email_regex = r"([a-zA-Z0-9_\-\.]+)@([a-zA-Z0-9_\-\.]+)\.([a-zA-Z]{2,5})"
        self._endpoints_dict = {}
        for endpoint in self._openapi["paths"]:
            if "{" not in endpoint:
                # Endpoints without path arguments don't need a regex.
                continue
            path_regex = "^" + endpoint + "$"
            # Numeric arguments have id at their end
            # so find such arguments and replace them with numeric
            # regex
            path_regex = re.sub(r"{[^}]*id}", r"[0-9]*", path_regex)
            # Email arguments end with email
            path_regex = re.sub(r"{[^}]*email}", email_regex, path_regex)
            # All other types of arguments are supposed to be
            # all-encompassing string.
            path_regex = re.sub(r"{[^}]*}", r"[^\/]*", path_regex)
            path_regex = path_regex.replace(r"/", r"\/")
            self._endpoints_dict[path_regex] = endpoint

    def openapi(self) -> Dict[str, Any]:
        """Reload the OpenAPI file if it has been modified after the last time
        it was read, and then return the parsed data.
        """
        self.check_reload()
        assert len(self._openapi) > 0
        return self._openapi

    def endpoints_dict(self) -> Dict[str, str]:
        """Reload the OpenAPI file if it has been modified after the last time
        it was read, and then return the parsed data.
        """
        self.check_reload()
        assert len(self._endpoints_dict) > 0
        return self._endpoints_dict

    def spec(self) -> Spec:
        """Reload the OpenAPI file if it has been modified after the last time
        it was read, and then return the openapi_core validator object. Similar
        to preceding functions. Used for proper access to OpenAPI objects.
        """
        self.check_reload()
        assert self._spec is not None
        return self._spec
|
2022-01-12 03:08:52 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-31 19:41:17 +02:00
|
|
|
class SchemaError(Exception):
    """Raised when data fails validation against our OpenAPI schema."""

    pass
|
2018-05-15 19:28:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-08-07 23:40:07 +02:00
|
|
|
# Module-level shared instance; the spec file is parsed lazily on first use.
openapi_spec = OpenAPISpec(OPENAPI_SPEC_PATH)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
def get_schema(endpoint: str, method: str, status_code: str) -> Dict[str, Any]:
    """Return the response schema for the given endpoint/method/status code.

    status_code is either a plain 3-character code like "200", or
    "200_1"-style, where the suffix selects a variant from the schema's
    oneOf list.  A plain code whose schema uses oneOf defaults to the
    first variant, since at places where multiple schemas are defined
    they only differ in example, so either can be used.
    """

    def response_schema(code: str) -> Dict[str, Any]:
        # Shared deep lookup, previously duplicated at every call site.
        return openapi_spec.openapi()["paths"][endpoint][method.lower()]["responses"][code][
            "content"
        ]["application/json"]["schema"]

    if len(status_code) == 3 and "oneOf" in response_schema(status_code):
        # Default to the first oneOf variant.
        status_code += "_0"
    if len(status_code) == 3:
        return response_schema(status_code)
    # "NNN_i" form: parse the whole suffix (not just the first digit, as
    # the previous code did), so variant indexes >= 10 work correctly.
    subschema_index = int(status_code[4:])
    return response_schema(status_code[0:3])["oneOf"][subschema_index]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
def get_openapi_fixture(endpoint: str, method: str, status_code: str = "200") -> Dict[str, Any]:
    """Fetch a fixture from the full spec object."""
    schema = get_schema(endpoint, method, status_code)
    return schema["example"]
|
2018-05-15 19:28:42 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-06-07 22:14:34 +02:00
|
|
|
def get_openapi_fixture_description(endpoint: str, method: str, status_code: str = "200") -> str:
    """Fetch the description of a fixture from the full spec object."""
    return get_schema(endpoint, method, status_code)["description"]
|
|
|
|
|
|
|
|
|
2021-06-21 12:53:05 +02:00
|
|
|
def get_curl_include_exclude(endpoint: str, method: str) -> List[Dict[str, Any]]:
    """Fetch all the kinds of parameters required for curl examples."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    if "x-curl-examples-parameters" in operation:
        return operation["x-curl-examples-parameters"]["oneOf"]
    # No curl-example configuration: exclude nothing.
    return [{"type": "exclude", "parameters": {"enum": [""]}}]
|
|
|
|
|
|
|
|
|
2021-06-11 20:07:45 +02:00
|
|
|
def check_requires_administrator(endpoint: str, method: str) -> bool:
    """Fetch if the endpoint requires admin config."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation.get("x-requires-administrator", False)
|
|
|
|
|
|
|
|
|
2021-06-24 15:59:47 +02:00
|
|
|
def check_additional_imports(endpoint: str, method: str) -> Optional[List[str]]:
    """Fetch the additional imports required for an endpoint."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation.get("x-python-examples-extra-imports", None)
|
|
|
|
|
|
|
|
|
2021-06-21 21:56:18 +02:00
|
|
|
def get_responses_description(endpoint: str, method: str) -> str:
    """Fetch responses description of an endpoint."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation.get("x-response-description", "")
|
|
|
|
|
|
|
|
|
2021-06-21 22:22:27 +02:00
|
|
|
def get_parameters_description(endpoint: str, method: str) -> str:
    """Fetch parameters description of an endpoint."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation.get("x-parameter-description", "")
|
|
|
|
|
|
|
|
|
2021-07-13 17:33:43 +02:00
|
|
|
def generate_openapi_fixture(endpoint: str, method: str) -> List[str]:
    """Generate fixture to be rendered"""
    rendered: List[str] = []
    responses = openapi_spec.openapi()["paths"][endpoint][method.lower()]["responses"]
    for status_code in sorted(responses):
        schema = responses[status_code]["content"]["application/json"]["schema"]
        # A oneOf schema produces one fixture per variant.
        subschema_count = len(schema["oneOf"]) if "oneOf" in schema else 1
        for subschema_index in range(subschema_count):
            if subschema_count == 1:
                subschema_status_code = status_code
            else:
                subschema_status_code = f"{status_code}_{subschema_index}"
            fixture_dict = get_openapi_fixture(endpoint, method, subschema_status_code)
            fixture_description = get_openapi_fixture_description(
                endpoint, method, subschema_status_code
            ).strip()
            fixture_json = json.dumps(
                fixture_dict, indent=4, sort_keys=True, separators=(",", ": ")
            )

            rendered.extend(fixture_description.splitlines())
            rendered.append("``` json")
            rendered.extend(fixture_json.splitlines())
            rendered.append("```")
    return rendered
|
|
|
|
|
|
|
|
|
2020-04-28 12:13:46 +02:00
|
|
|
def get_openapi_description(endpoint: str, method: str) -> str:
    """Fetch a description from the full spec object."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation["description"]
|
2020-04-28 12:13:46 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-05-22 13:11:23 +02:00
|
|
|
def get_openapi_summary(endpoint: str, method: str) -> str:
    """Fetch a summary from the full spec object."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    return operation["summary"]
|
|
|
|
|
|
|
|
|
2021-05-23 09:46:10 +02:00
|
|
|
def get_endpoint_from_operationid(operationid: str) -> Tuple[str, str]:
    """Return the (endpoint, method) pair documented with this operationId."""
    paths = openapi_spec.openapi()["paths"]
    for endpoint, methods in paths.items():
        for method, operation in methods.items():
            if operation.get("operationId") == operationid:
                return (endpoint, method)
    raise AssertionError("No such page exists in OpenAPI data.")
|
|
|
|
|
|
|
|
|
2019-07-08 14:08:02 +02:00
|
|
|
def get_openapi_paths() -> Set[str]:
    """Return every documented endpoint path."""
    return {path for path in openapi_spec.openapi()["paths"]}
|
2019-07-08 14:08:02 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def get_openapi_parameters(
    endpoint: str, method: str, include_url_parameters: bool = True
) -> List[Dict[str, Any]]:
    """Return the documented parameters of an endpoint, optionally
    excluding parameters that are part of the URL path."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    # We do a `.get()` for this last bit to distinguish documented
    # endpoints with no parameters (empty list) from undocumented
    # endpoints (KeyError exception).
    parameters = operation.get("parameters", [])
    if include_url_parameters:
        return parameters
    # Skip parameters defined in the URL.
    return [parameter for parameter in parameters if parameter["in"] != "path"]
|
2018-05-31 19:41:17 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-03 07:37:59 +02:00
|
|
|
def get_openapi_return_values(endpoint: str, method: str) -> Dict[str, Any]:
    """Return the properties of the 200-response schema for an endpoint."""
    operation = openapi_spec.openapi()["paths"][endpoint][method.lower()]
    response_schema = operation["responses"]["200"]["content"]["application/json"]["schema"]
    # In cases where we have used oneOf, the schemas only differ in examples
    # So we can choose any.
    if "oneOf" in response_schema:
        response_schema = response_schema["oneOf"][0]
    return response_schema["properties"]
|
2020-08-12 04:54:48 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
def find_openapi_endpoint(path: str) -> Optional[str]:
    """Map a concrete request path to its parameterized documented endpoint,
    or None if no documented endpoint's regex matches."""
    for path_regex, endpoint in openapi_spec.endpoints_dict().items():
        if re.match(path_regex, path) is not None:
            return endpoint
    return None
|
2020-06-02 18:04:03 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-27 16:22:31 +02:00
|
|
|
def get_event_type(event: Dict[str, Any]) -> str:
    """Return the "type:op" key identifying an event's variant ("type:" if
    the event has no op)."""
    op = event.get("op", "")
    return f"{event['type']}:{op}"
|
2020-07-27 16:22:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-27 16:22:31 +02:00
|
|
|
def fix_events(content: Dict[str, Any]) -> None:
    """Remove undocumented events from events array. This is a makeshift
    function so that further documentation of `/events` can happen with
    only zulip.yaml changes and minimal other changes. It should be removed
    as soon as `/events` documentation is complete.
    """
    # 'user' is deprecated so remove its occurrences from the events array
    for event in content["events"]:
        if "user" in event:
            del event["user"]
|
2020-07-27 16:22:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2022-01-12 02:14:00 +01:00
|
|
|
def prune_type_schema_by_type(schema: Dict[str, Any], type: str) -> bool:
    """Return True if this "type"-field schema cannot match *type* (its enum
    excludes it, or some allOf component does)."""
    if "enum" in schema and type not in schema["enum"]:
        return True
    if "allOf" in schema:
        return any(prune_type_schema_by_type(component, type) for component in schema["allOf"])
    return False
|
|
|
|
|
|
|
|
|
|
|
|
def prune_schema_by_type(schema: Dict[str, Any], type: str) -> bool:
    """Return True if this event schema cannot match an event of *type*,
    judged by its properties.type constraint or any allOf component."""
    has_type_property = "properties" in schema and "type" in schema["properties"]
    if has_type_property and prune_type_schema_by_type(schema["properties"]["type"], type):
        return True
    if "allOf" in schema:
        return any(prune_schema_by_type(component, type) for component in schema["allOf"])
    return False
|
|
|
|
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def validate_against_openapi_schema(
    content: Dict[str, Any],
    path: str,
    method: str,
    status_code: str,
    display_brief_error: bool = False,
) -> bool:
    """Compare a "content" dict with the defined schema for a specific method
    in an endpoint. Return true if validated and false if skipped.

    Raises SchemaError (with a detailed, possibly abbreviated message) if
    the response does not match the documented schema.
    """

    # This first set of checks are primarily training wheels that we
    # hope to eliminate over time as we improve our API documentation.

    # No 500 responses have been documented, so skip them
    if status_code.startswith("5"):
        return False
    if path not in openapi_spec.openapi()["paths"].keys():
        # Parameterized paths (e.g. /users/{user_id}) are resolved via regex.
        endpoint = find_openapi_endpoint(path)
        # If it doesn't match it hasn't been documented yet.
        if endpoint is None:
            return False
    else:
        endpoint = path
    # Excluded endpoint/methods
    if (endpoint, method) in EXCLUDE_UNDOCUMENTED_ENDPOINTS:
        return False
    # Return true for endpoints with only response documentation remaining
    if (endpoint, method) in EXCLUDE_DOCUMENTED_ENDPOINTS:
        return True
    # Check if the response matches its code
    if status_code.startswith("2") and (
        content.get("result", "success").lower() not in ["success", "partially_completed"]
    ):
        raise SchemaError("Response is not 200 but is validating against 200 schema")
    # Code is not declared but appears in various 400 responses. If
    # common, it can be added to 400 response schema
    if status_code.startswith("4"):
        # This return statement should ideally be not here. But since
        # we have not defined 400 responses for various paths this has
        # been added as all 400 have the same schema. When all 400
        # response have been defined this should be removed.
        return True

    if endpoint == "/events" and method == "get":
        # This a temporary function for checking only documented events
        # as all events haven't been documented yet.
        # TODO: Remove this after all events have been documented.
        fix_events(content)

    # Build mock request/response objects and let openapi_core validate
    # the re-serialized response body against the spec.
    mock_request = MockRequest("http://localhost:9991/", method, "/api/v1" + path)
    mock_response = MockResponse(
        # TODO: Use original response content instead of re-serializing it.
        orjson.dumps(content).decode(),
        status_code=int(status_code),
    )
    result = openapi_response_validator.validate(openapi_spec.spec(), mock_request, mock_response)
    try:
        result.raise_for_errors()
    except InvalidSchemaValue as isv:
        schema_errors = list(isv.schema_errors)
        message = f"{len(schema_errors)} response validation error(s) at {method} /api/v1{path} ({status_code}):"
        for error in schema_errors:
            if display_brief_error and isinstance(error, JsonSchemaValidationError):
                # display_brief_error is designed to avoid printing 1000 lines
                # of output when the schema to validate is extremely large
                # (E.g. the several dozen format variants for individual
                # events returned by GET /events) and instead just display the
                # specific variant we expect to match the response.
                brief_error_validator_value = [
                    validator_value
                    for validator_value in error.validator_value
                    if not prune_schema_by_type(validator_value, error.instance["type"])
                ]
                brief_error_display_schema = error.schema.copy()
                if "oneOf" in brief_error_display_schema:
                    brief_error_display_schema["oneOf"] = [
                        i_schema
                        for i_schema in error.schema["oneOf"]
                        if not prune_schema_by_type(i_schema, error.instance["type"])
                    ]

                # Field list from https://python-jsonschema.readthedocs.io/en/stable/errors/
                error = JsonSchemaValidationError(
                    message=error.message,
                    validator=error.validator,
                    path=error.path,
                    instance=error.instance,
                    schema_path=error.schema_path,
                    schema=brief_error_display_schema,
                    validator_value=brief_error_validator_value,
                    cause=error.cause,
                )
            # Some endpoints have long, descriptive OpenAPI schemas
            # which, when printed to the console, do not assist with
            # debugging, so we omit some of the error information.
            if path in ["/register"] and isinstance(error, JsonSchemaValidationError):
                error.schema = "OpenAPI schema omitted due to length of output."
                if len(error.instance) > 100:
                    error.instance = "Error instance omitted due to length of output."
            message += f"\n\n{type(error).__name__}: {error}"
        message += (
            "\n\nFor help debugging these errors see: "
            "https://zulip.readthedocs.io/en/latest/documentation/api.html#debugging-schema-validation-errors"
        )
        raise SchemaError(message) from None

    return True
|
2020-06-02 18:04:03 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-01 19:07:31 +02:00
|
|
|
def validate_schema(schema: Dict[str, Any]) -> None:
    """Check if opaque objects are present in the OpenAPI spec; this is an
    important part of our policy for ensuring every detail of Zulip's
    API responses is correct.

    This is done by checking for the presence of the
    `additionalProperties` attribute for all objects (dictionaries).
    """
    if "oneOf" in schema:
        # Validate every variant; the type checks below don't apply here.
        for variant in schema["oneOf"]:
            validate_schema(variant)
        return
    if schema["type"] == "array":
        validate_schema(schema["items"])
        return
    if schema["type"] != "object":
        # Scalars need no additionalProperties declaration.
        return
    if "additionalProperties" not in schema:
        raise SchemaError(
            "additionalProperties needs to be defined for objects to make sure they have no"
            " additional properties left to be documented."
        )
    for property_schema in schema.get("properties", {}).values():
        validate_schema(property_schema)
    if schema["additionalProperties"]:
        validate_schema(schema["additionalProperties"])
|
2018-05-31 19:41:17 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-06-26 16:18:27 +02:00
|
|
|
def likely_deprecated_parameter(parameter_description: str) -> bool:
    """Heuristically detect from its description whether a parameter is
    documented as deprecated."""
    deprecation_markers = ("**Changes**: Deprecated", "**Deprecated**")
    return any(marker in parameter_description for marker in deprecation_markers)
|
2020-07-09 20:51:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2021-07-03 07:37:59 +02:00
|
|
|
def check_deprecated_consistency(argument: Mapping[str, Any], description: str) -> None:
    """Assert that the `deprecated` flag and a deprecation note in the
    description are present together or absent together."""
    looks_deprecated = likely_deprecated_parameter(description)
    if looks_deprecated:
        assert argument["deprecated"]
    if "deprecated" in argument:
        assert looks_deprecated
|
|
|
|
|
|
|
|
|
2020-07-09 20:51:31 +02:00
|
|
|
# Skip those JSON endpoints whose query parameters are different from
# their `/api/v1` counterpart. This is a legacy code issue that we
# plan to fix by changing the implementation.
SKIP_JSON: Set[Tuple[str, str]] = {
    ("/fetch_api_key", "post"),
}
|
2020-07-09 20:51:31 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
|
|
|
def validate_request(
    url: str,
    method: str,
    data: Union[str, bytes, Mapping[str, Any]],
    http_headers: Dict[str, str],
    json_url: bool,
    status_code: str,
    intentionally_undocumented: bool = False,
) -> None:
    """Validate a test-suite HTTP request against the OpenAPI spec.

    Raises SchemaError with a detailed explanation when the request's
    parameters are inconsistent with the documented schema and the
    mismatch is neither a 4xx response nor marked intentionally
    undocumented.
    """
    # Some JSON endpoints have different parameters compared to
    # their `/api/v1` counterparts.
    if json_url and (url, method) in SKIP_JSON:
        return

    # TODO: Add support for file upload endpoints that lack the /json/
    # or /api/v1/ prefix.
    if url == "/user_uploads" or url.startswith("/realm/emoji/"):
        return

    # Now using the openapi_core APIs, validate the request schema
    # against the OpenAPI documentation.
    assert isinstance(data, dict)
    mock_request = MockRequest(
        "http://localhost:9991/", method, "/api/v1" + url, headers=http_headers, args=data
    )
    result = openapi_request_validator.validate(openapi_spec.spec(), mock_request)
    errors = list(result.errors)

    # If no errors are raised, then validation is successful
    if not errors:
        return

    # Requests that do not validate against the OpenAPI spec must either:
    # * Have returned a 400 (bad request) error
    # * Have returned a 200 (success) with this request marked as intentionally
    #   undocumented behavior.
    if status_code.startswith("4"):
        return
    if status_code.startswith("2") and intentionally_undocumented:
        return

    # Show a block error message explaining the options for fixing it.
    msg = f"""

Error! The OpenAPI schema for {method} {url} is not consistent
with the parameters passed in this HTTP request. Consider:

* Updating the OpenAPI schema defined in zerver/openapi/zulip.yaml
* Adjusting the test to pass valid parameters. If the test
  fails due to intentionally_undocumented features, you need to pass
  `intentionally_undocumented=True` to self.client_{method.lower()} or
  self.api_{method.lower()} to document your intent.

See https://zulip.readthedocs.io/en/latest/documentation/api.html for help.

The errors logged by the OpenAPI validator are below:\n"""
    for error in errors:
        msg += f"* {str(error)}\n"
    raise SchemaError(msg)
|