2020-06-11 00:54:34 +02:00
|
|
|
import inspect
|
2020-06-20 19:25:32 +02:00
|
|
|
import os
|
2019-07-04 18:12:53 +02:00
|
|
|
import re
|
2019-07-10 13:23:25 +02:00
|
|
|
import sys
|
2020-06-13 06:26:41 +02:00
|
|
|
from collections import abc
|
2020-06-11 00:54:34 +02:00
|
|
|
from typing import (
|
|
|
|
Any,
|
|
|
|
Callable,
|
|
|
|
Dict,
|
|
|
|
Iterable,
|
|
|
|
List,
|
|
|
|
Mapping,
|
|
|
|
Optional,
|
|
|
|
Sequence,
|
|
|
|
Set,
|
|
|
|
Tuple,
|
|
|
|
Union,
|
|
|
|
)
|
|
|
|
from unittest.mock import MagicMock, patch
|
2018-05-31 19:41:17 +02:00
|
|
|
|
2020-06-20 19:25:32 +02:00
|
|
|
import yaml
|
2019-07-10 13:23:25 +02:00
|
|
|
from django.http import HttpResponse
|
2020-07-01 19:07:31 +02:00
|
|
|
from jsonschema.exceptions import ValidationError
|
2019-06-06 22:22:21 +02:00
|
|
|
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.lib.request import _REQ, arguments_map
|
2020-09-22 03:55:32 +02:00
|
|
|
from zerver.lib.rest import rest_dispatch
|
2018-05-31 19:41:17 +02:00
|
|
|
from zerver.lib.test_classes import ZulipTestCase
|
2020-06-11 00:54:34 +02:00
|
|
|
from zerver.openapi.markdown_extension import (
|
|
|
|
generate_curl_example,
|
|
|
|
parse_language_and_options,
|
|
|
|
render_curl_example,
|
|
|
|
)
|
2020-02-23 18:10:42 +01:00
|
|
|
from zerver.openapi.openapi import (
|
2020-06-11 00:54:34 +02:00
|
|
|
OPENAPI_SPEC_PATH,
|
|
|
|
OpenAPISpec,
|
|
|
|
SchemaError,
|
2020-08-12 04:54:48 +02:00
|
|
|
find_openapi_endpoint,
|
2020-06-11 00:54:34 +02:00
|
|
|
get_openapi_fixture,
|
|
|
|
get_openapi_parameters,
|
|
|
|
get_openapi_paths,
|
|
|
|
openapi_spec,
|
|
|
|
to_python_type,
|
|
|
|
validate_against_openapi_schema,
|
2020-07-09 20:51:31 +02:00
|
|
|
validate_request,
|
2020-07-01 19:07:31 +02:00
|
|
|
validate_schema,
|
2018-05-31 19:41:17 +02:00
|
|
|
)
|
2020-09-22 03:55:32 +02:00
|
|
|
from zerver.tornado.views import get_events, get_events_backend
|
2018-05-31 19:41:17 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
# Endpoint/method/response triple used as a fixture by the
# get_openapi_fixture/get_openapi_parameters tests below.
TEST_ENDPOINT = "/messages/{message_id}"
TEST_METHOD = "patch"
# OpenAPI response keys are HTTP status codes spelled as strings.
TEST_RESPONSE_BAD_REQ = "400"
TEST_RESPONSE_SUCCESS = "200"
|
2018-05-31 19:41:17 +02:00
|
|
|
|
2019-07-10 13:23:25 +02:00
|
|
|
# Map from OpenAPI scalar type names to the Python types we use to
# represent them when comparing against function signatures.
VARMAP = {
    "integer": int,
    "string": str,
    "boolean": bool,
    "object": dict,
    "NoneType": type(None),
}


def schema_type(schema: Dict[str, Any]) -> Union[type, Tuple[type, object]]:
    """Return the Python type corresponding to an OpenAPI schema fragment.

    Arrays are represented as a (list, item_type) pair; every other
    type maps to a single Python type via VARMAP.
    """
    if "oneOf" in schema:
        # Hack: Just use the type of the first value
        # Ideally, we'd turn this into a Union type.
        return schema_type(schema["oneOf"][0])
    data_type = schema["type"]
    if data_type == "array":
        return (list, schema_type(schema["items"]))
    return VARMAP[data_type]
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2018-05-31 19:41:17 +02:00
|
|
|
class OpenAPIToolsTest(ZulipTestCase):
    """Make sure that the tools we use to handle our OpenAPI specification
    (located in zerver/openapi/openapi.py) work as expected.

    These tools are mostly dedicated to fetching parts of the -already parsed-
    specification, and comparing them to objects returned by our REST API.
    """

    def test_get_openapi_fixture(self) -> None:
        # The 400-response fixture for PATCH /messages/{message_id} in
        # the spec must be exactly this error payload.
        actual = get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_BAD_REQ)
        expected = {
            "code": "BAD_REQUEST",
            "msg": "You don't have permission to edit this message",
            "result": "error",
        }
        self.assertEqual(actual, expected)

    def test_get_openapi_parameters(self) -> None:
        actual = get_openapi_parameters(TEST_ENDPOINT, TEST_METHOD)
        # The endpoint declares other parameters too; we only require
        # that this documented path parameter is among them.
        expected_item = {
            "name": "message_id",
            "in": "path",
            "description": "The target message's ID.\n",
            "example": 42,
            "required": True,
            "schema": {"type": "integer"},
        }
        assert expected_item in actual

    def test_validate_against_openapi_schema(self) -> None:
        # An extra, undocumented key must be rejected.
        with self.assertRaises(
            ValidationError, msg="Additional properties are not allowed ('foo' was unexpected)"
        ):
            bad_content: Dict[str, object] = {
                "msg": "",
                "result": "success",
                "foo": "bar",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # A documented key of the wrong type must be rejected.
        with self.assertRaises(ValidationError, msg=("42 is not of type string")):
            bad_content = {
                "msg": 42,
                "result": "success",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # A missing required key must be rejected.
        with self.assertRaises(ValidationError, msg='Expected to find the "msg" required key'):
            bad_content = {
                "result": "success",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # No exceptions should be raised here.
        good_content = {
            "msg": "",
            "result": "success",
        }
        validate_against_openapi_schema(
            good_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
        )

        # Overwrite the exception list with a mocked one
        test_dict: Dict[str, Any] = {}

        # Check that validate_against_openapi_schema correctly
        # descends into 'deep' objects and arrays. Test 1 should
        # pass, Test 2 has a 'deep' extraneous key and Test 3 has a
        # 'deep' opaque object. Also the parameters are a heterogeneous
        # mix of arrays and objects to verify that our descent logic
        # correctly gets to the the deeply nested objects.
        with open(os.path.join(os.path.dirname(OPENAPI_SPEC_PATH), "testing.yaml")) as test_file:
            test_dict = yaml.safe_load(test_file)
        # Temporarily register the testing schemas under a fake path in
        # the shared spec; removed again in the finally block below.
        openapi_spec.openapi()["paths"]["testing"] = test_dict
        try:
            validate_against_openapi_schema(
                (test_dict["test1"]["responses"]["200"]["content"]["application/json"]["example"]),
                "testing",
                "test1",
                "200",
            )
            with self.assertRaises(
                ValidationError, msg='Extraneous key "str4" in response\'s content'
            ):
                validate_against_openapi_schema(
                    (
                        test_dict["test2"]["responses"]["200"]["content"]["application/json"][
                            "example"
                        ]
                    ),
                    "testing",
                    "test2",
                    "200",
                )
            with self.assertRaises(SchemaError, msg='Opaque object "obj"'):
                # Checks for opaque objects
                validate_schema(
                    (
                        test_dict["test3"]["responses"]["200"]["content"]["application/json"][
                            "schema"
                        ]
                    )
                )
        finally:
            openapi_spec.openapi()["paths"].pop("testing", None)

    def test_to_python_type(self) -> None:
        # to_python_type must translate every OpenAPI type name to the
        # corresponding Python type.
        TYPES = {
            "string": str,
            "number": float,
            "integer": int,
            "boolean": bool,
            "array": list,
            "object": dict,
        }

        for oa_type, py_type in TYPES.items():
            self.assertEqual(to_python_type(oa_type), py_type)

    def test_live_reload(self) -> None:
        # Force the reload by making the last update date < the file's last
        # modified date
        openapi_spec.mtime = 0
        get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD)

        # Check that the file has been reloaded by verifying that the last
        # update date isn't zero anymore
        self.assertNotEqual(openapi_spec.mtime, 0)

        # Now verify calling it again doesn't call reload
        old_openapi = openapi_spec.openapi()
        get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD)
        new_openapi = openapi_spec.openapi()
        self.assertIs(old_openapi, new_openapi)
|
2019-06-06 22:22:21 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2019-06-06 22:22:21 +02:00
|
|
|
class OpenAPIArgumentsTest(ZulipTestCase):
|
2019-07-07 08:54:19 +02:00
|
|
|
# This will be filled during test_openapi_arguments:
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
    checked_endpoints: Set[str] = set()

    # Endpoints that lack OpenAPI documentation; they are subtracted from
    # the documented-path set in check_for_non_existant_openapi_endpoints
    # so the consistency test tolerates them.
    pending_endpoints = {
        #### TODO: These endpoints are a priority to document:
        "/realm/presence",
        "/streams/{stream_id}/members",
        "/streams/{stream_id}/delete_topic",
        "/users/me/presence",
        "/users/me/alert_words",
        "/users/me/status",
        #### These realm administration settings are valuable to document:
        # Delete a file uploaded by current user.
        "/attachments/{attachment_id}",
        # List data exports for organization (GET) or request one (POST)
        "/export/realm",
        # Delete a data export.
        "/export/realm/{export_id}",
        # Manage default streams and default stream groups
        "/default_streams",
        "/default_stream_groups/create",
        "/default_stream_groups/{group_id}",
        "/default_stream_groups/{group_id}/streams",
        # Administer invitations
        "/invites",
        "/invites/multiuse",
        "/invites/{prereg_id}",
        "/invites/{prereg_id}/resend",
        "/invites/multiuse/{invite_id}",
        # Single-stream settings alternative to the bulk endpoint
        # users/me/subscriptions/properties; probably should just be a
        # section of the same page.
        "/users/me/subscriptions/{stream_id}",
        # Real-time-events endpoint
        "/real-time",
        # Rest error handling endpoint
        "/rest-error-handling",
        # Zulip outgoing webhook payload
        "/zulip-outgoing-webhook",
        #### Mobile-app only endpoints; important for mobile developers.
        # Mobile interface for fetching API keys
        "/fetch_api_key",
        # Already documented; need to fix tracking bug
        "/dev_fetch_api_key",
        # Mobile interface for development environment login
        "/dev_list_users",
        # Registration for iOS/Android mobile push notifications.
        "/users/me/android_gcm_reg_id",
        "/users/me/apns_device_token",
        #### These personal settings endpoints have modest value to document:
        "/settings",
        "/users/me/avatar",
        "/users/me/api_key/regenerate",
        # Much more valuable would be an org admin bulk-upload feature.
        "/users/me/profile_data",
        #### Should be documented as part of interactive bots documentation
        "/bot_storage",
        "/submessage",
        "/zcommand",
        #### These "organization settings" endpoint have modest value to document:
        "/realm",
        "/realm/domains",
        "/realm/domains/{domain}",
        "/bots",
        "/bots/{bot_id}",
        "/bots/{bot_id}/api_key/regenerate",
        #### These "organization settings" endpoints have low value to document:
        "/realm/profile_fields/{field_id}",
        "/realm/icon",
        "/realm/logo",
        "/realm/deactivate",
        "/realm/subdomain/{subdomain}",
        #### Other low value endpoints
        # Used for dead desktop app to test connectivity. To delete.
        "/generate_204",
        # Used for failed approach with dead Android app.
        "/fetch_google_client_id",
        # API for video calls we're planning to remove/replace.
        "/calls/zoom/create",
    }
|
2019-10-22 01:43:54 +02:00
|
|
|
|
|
|
|
# Endpoints where the documentation is currently failing our
|
|
|
|
# consistency tests. We aim to keep this list empty.
|
2021-02-12 08:19:30 +01:00
|
|
|
buggy_documentation_endpoints: Set[str] = set([])
|
2019-07-07 08:54:19 +02:00
|
|
|
|
2019-07-09 08:28:29 +02:00
|
|
|
def convert_regex_to_url_pattern(self, regex_pattern: str) -> str:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Convert regular expressions style URL patterns to their
|
|
|
|
corresponding OpenAPI style formats. All patterns are
|
|
|
|
expected to start with ^ and end with $.
|
|
|
|
Examples:
|
|
|
|
1. /messages/{message_id} <-> r'^messages/(?P<message_id>[0-9]+)$'
|
|
|
|
2. /events <-> r'^events$'
|
|
|
|
3. '/realm/domains' <-> r'/realm\\/domains$'
|
2019-07-09 08:28:29 +02:00
|
|
|
"""
|
2019-08-20 00:15:11 +02:00
|
|
|
|
2020-05-07 13:56:49 +02:00
|
|
|
# Handle the presence-email code which has a non-slashes syntax.
|
2021-02-12 08:20:45 +01:00
|
|
|
regex_pattern = regex_pattern.replace("[^/]*", ".*").replace("[^/]+", ".*")
|
2019-08-20 00:15:11 +02:00
|
|
|
|
2019-07-09 08:28:29 +02:00
|
|
|
self.assertTrue(regex_pattern.startswith("^"))
|
|
|
|
self.assertTrue(regex_pattern.endswith("$"))
|
2021-02-12 08:20:45 +01:00
|
|
|
url_pattern = "/" + regex_pattern[1:][:-1]
|
2019-07-09 08:28:29 +02:00
|
|
|
url_pattern = re.sub(r"\(\?P<(\w+)>[^/]+\)", r"{\1}", url_pattern)
|
2021-02-12 08:20:45 +01:00
|
|
|
url_pattern = url_pattern.replace("\\", "")
|
2019-07-09 08:28:29 +02:00
|
|
|
return url_pattern
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def ensure_no_documentation_if_intentionally_undocumented(
|
|
|
|
self, url_pattern: str, method: str, msg: Optional[str] = None
|
|
|
|
) -> None:
|
2019-07-09 08:28:29 +02:00
|
|
|
try:
|
|
|
|
get_openapi_parameters(url_pattern, method)
|
2019-07-19 07:02:10 +02:00
|
|
|
if not msg: # nocoverage
|
2020-06-13 08:57:35 +02:00
|
|
|
msg = f"""
|
2019-07-20 20:16:47 +02:00
|
|
|
We found some OpenAPI documentation for {method} {url_pattern},
|
2020-10-23 02:43:28 +02:00
|
|
|
so maybe we shouldn't mark it as intentionally undocumented in the URLs.
|
2020-06-13 08:57:35 +02:00
|
|
|
"""
|
2019-07-20 20:16:47 +02:00
|
|
|
raise AssertionError(msg) # nocoverage
|
2019-07-09 08:28:29 +02:00
|
|
|
except KeyError:
|
|
|
|
return
|
|
|
|
|
|
|
|
def check_for_non_existant_openapi_endpoints(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Here, we check to see if every endpoint documented in the OpenAPI
|
2019-07-09 08:28:29 +02:00
|
|
|
documentation actually exists in urls.py and thus in actual code.
|
|
|
|
Note: We define this as a helper called at the end of
|
|
|
|
test_openapi_arguments instead of as a separate test to ensure that
|
|
|
|
this test is only executed after test_openapi_arguments so that it's
|
2021-02-12 08:19:30 +01:00
|
|
|
results can be used here in the set operations."""
|
2019-07-09 08:28:29 +02:00
|
|
|
openapi_paths = set(get_openapi_paths())
|
|
|
|
undocumented_paths = openapi_paths - self.checked_endpoints
|
|
|
|
undocumented_paths -= self.buggy_documentation_endpoints
|
|
|
|
undocumented_paths -= self.pending_endpoints
|
|
|
|
try:
|
|
|
|
self.assertEqual(len(undocumented_paths), 0)
|
|
|
|
except AssertionError: # nocoverage
|
|
|
|
msg = "The following endpoints have been documented but can't be found in urls.py:"
|
|
|
|
for undocumented_path in undocumented_paths:
|
2020-06-09 00:25:09 +02:00
|
|
|
msg += f"\n + {undocumented_path}"
|
2019-07-09 08:28:29 +02:00
|
|
|
raise AssertionError(msg)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def get_type_by_priority(
|
|
|
|
self, types: Sequence[Union[type, Tuple[type, object]]]
|
|
|
|
) -> Union[type, Tuple[type, object]]:
|
2019-08-04 15:55:32 +02:00
|
|
|
priority = {list: 1, dict: 2, str: 3, int: 4, bool: 5}
|
|
|
|
tyiroirp = {1: list, 2: dict, 3: str, 4: int, 5: bool}
|
|
|
|
val = 6
|
2019-07-10 13:23:25 +02:00
|
|
|
for t in types:
|
2019-08-10 00:30:34 +02:00
|
|
|
if isinstance(t, tuple):
|
2019-08-04 15:55:32 +02:00
|
|
|
return t # e.g. (list, dict) or (list ,str)
|
2019-08-10 00:30:34 +02:00
|
|
|
v = priority.get(t, 6)
|
2019-07-10 13:23:25 +02:00
|
|
|
if v < val:
|
|
|
|
val = v
|
|
|
|
return tyiroirp.get(val, types[0])
|
|
|
|
|
2019-08-10 00:30:34 +02:00
|
|
|
def get_standardized_argument_type(self, t: Any) -> Union[type, Tuple[type, object]]:
|
2021-02-12 08:19:30 +01:00
|
|
|
"""Given a type from the typing module such as List[str] or Union[str, int],
|
2019-07-10 13:23:25 +02:00
|
|
|
convert it into a corresponding Python type. Unions are mapped to a canonical
|
|
|
|
choice among the options.
|
|
|
|
E.g. typing.Union[typing.List[typing.Dict[str, typing.Any]], NoneType]
|
|
|
|
needs to be mapped to list."""
|
|
|
|
|
2020-09-02 05:05:20 +02:00
|
|
|
origin = getattr(t, "__origin__", None)
|
|
|
|
if sys.version_info < (3, 7): # nocoverage
|
|
|
|
if origin == List:
|
|
|
|
origin = list
|
|
|
|
elif origin == Dict:
|
|
|
|
origin = dict
|
|
|
|
elif origin == Iterable:
|
|
|
|
origin = abc.Iterable
|
|
|
|
elif origin == Mapping:
|
|
|
|
origin = abc.Mapping
|
|
|
|
elif origin == Sequence:
|
|
|
|
origin = abc.Sequence
|
2020-02-11 07:20:25 +01:00
|
|
|
|
2019-07-10 13:23:25 +02:00
|
|
|
if not origin:
|
|
|
|
# Then it's most likely one of the fundamental data types
|
|
|
|
# I.E. Not one of the data types from the "typing" module.
|
|
|
|
return t
|
|
|
|
elif origin == Union:
|
2020-09-02 05:05:20 +02:00
|
|
|
subtypes = [self.get_standardized_argument_type(st) for st in t.__args__]
|
2019-07-10 13:23:25 +02:00
|
|
|
return self.get_type_by_priority(subtypes)
|
2020-09-02 05:05:20 +02:00
|
|
|
elif origin in [list, abc.Iterable, abc.Sequence]:
|
2020-06-13 06:26:41 +02:00
|
|
|
[st] = t.__args__
|
|
|
|
return (list, self.get_standardized_argument_type(st))
|
2020-09-02 05:05:20 +02:00
|
|
|
elif origin in [dict, abc.Mapping]:
|
2019-08-04 15:55:32 +02:00
|
|
|
return dict
|
2020-06-13 06:26:41 +02:00
|
|
|
raise AssertionError(f"Unknown origin {origin}")
|
2019-07-10 13:23:25 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def render_openapi_type_exception(
|
|
|
|
self,
|
|
|
|
function: Callable[..., HttpResponse],
|
|
|
|
openapi_params: Set[Tuple[str, Union[type, Tuple[type, object]]]],
|
|
|
|
function_params: Set[Tuple[str, Union[type, Tuple[type, object]]]],
|
|
|
|
diff: Set[Tuple[str, Union[type, Tuple[type, object]]]],
|
|
|
|
) -> None: # nocoverage
|
|
|
|
"""Print a *VERY* clear and verbose error message for when the types
|
|
|
|
(between the OpenAPI documentation and the function declaration) don't match."""
|
2019-07-10 13:23:25 +02:00
|
|
|
|
2020-06-13 08:57:35 +02:00
|
|
|
msg = f"""
|
2019-07-10 13:23:25 +02:00
|
|
|
The types for the request parameters in zerver/openapi/zulip.yaml
|
2020-06-13 08:57:35 +02:00
|
|
|
do not match the types declared in the implementation of {function.__name__}.\n"""
|
2021-02-12 08:20:45 +01:00
|
|
|
msg += "=" * 65 + "\n"
|
2021-02-12 08:19:30 +01:00
|
|
|
msg += "{:<10s}{:^30s}{:>10s}\n".format(
|
|
|
|
"Parameter", "OpenAPI Type", "Function Declaration Type"
|
|
|
|
)
|
2021-02-12 08:20:45 +01:00
|
|
|
msg += "=" * 65 + "\n"
|
2019-07-10 13:23:25 +02:00
|
|
|
opvtype = None
|
|
|
|
fdvtype = None
|
|
|
|
for element in diff:
|
|
|
|
vname = element[0]
|
|
|
|
for element in openapi_params:
|
|
|
|
if element[0] == vname:
|
|
|
|
opvtype = element[1]
|
|
|
|
break
|
|
|
|
for element in function_params:
|
|
|
|
if element[0] == vname:
|
|
|
|
fdvtype = element[1]
|
|
|
|
break
|
2020-06-09 00:25:09 +02:00
|
|
|
msg += f"{vname:<10s}{str(opvtype):^30s}{str(fdvtype):>10s}\n"
|
2019-07-10 13:23:25 +02:00
|
|
|
raise AssertionError(msg)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
def check_argument_types(
    self, function: Callable[..., HttpResponse], openapi_parameters: List[Dict[str, Any]]
) -> None:
    """We construct for both the OpenAPI data and the function's definition a set of
    tuples of the form (var_name, type) and then compare those sets to see if the
    OpenAPI data defines a different type than that actually accepted by the function.
    If so, we print out the exact differences for convenient debugging and raise an
    AssertionError via render_openapi_type_exception."""
    openapi_params: Set[Tuple[str, Union[type, Tuple[type, object]]]] = set()
    # Parameters documented with a `content` body (JSON-encoded) are kept
    # apart from plain query parameters; they get special matching below.
    json_params: Dict[str, Union[type, Tuple[type, object]]] = {}
    for element in openapi_parameters:
        name: str = element["name"]
        schema = {}
        if "content" in element:
            # The only content-type we use in our API is application/json.
            assert "schema" in element["content"]["application/json"]
            # If content_type is application/json, then the
            # parameter needs to be handled specially, as REQ can
            # either return the application/json as a string or it
            # can either decode it and return the required
            # elements. For example `to` array in /messages: POST
            # is processed by REQ as a string and then its type is
            # checked in the view code.
            #
            # Meanwhile `profile_data` in /users/{user_id}: GET is
            # taken as array of objects. So treat them separately.
            schema = element["content"]["application/json"]["schema"]
            # NOTE: schema_type is a module-level helper (defined elsewhere
            # in this file) mapping an OpenAPI schema to a Python type.
            json_params[name] = schema_type(schema)
            continue
        else:
            schema = element["schema"]
        openapi_params.add((name, schema_type(schema)))

    function_params: Set[Tuple[str, Union[type, Tuple[type, object]]]] = set()

    # Iterate through the decorators to find the original
    # function, wrapped by has_request_variables, so we can parse
    # its arguments.
    while getattr(function, "__wrapped__", None):
        function = getattr(function, "__wrapped__", None)
        # Tell mypy this is never None.
        assert function is not None

    # Now, we do inference mapping each REQ parameter's
    # declaration details to the Python/mypy types for the
    # arguments passed to it.
    #
    # Because the mypy types are the types used inside the inner
    # function (after the original data is processed by any
    # validators, converters, etc.), they will not always match
    # the API-level argument types. The main case where this
    # happens is when a `converter` is used that changes the types
    # of its parameters.
    for pname, defval in inspect.signature(function).parameters.items():
        defval = defval.default
        if isinstance(defval, _REQ):
            # TODO: The below inference logic in cases where
            # there's a converter function declared is incorrect.
            # Theoretically, we could restructure the converter
            # function model so that we can check what type it
            # expects to be passed to make validation here
            # possible.

            vtype = self.get_standardized_argument_type(function.__annotations__[pname])
            vname = defval.post_var_name
            assert vname is not None
            if vname in json_params:
                # Here we have two cases. If the REQ type is
                # string then there is no point in comparing as
                # JSON can always be returned as string. Ideally,
                # we wouldn't use REQ for a JSON object without a
                # validator in these cases, but it does happen.
                #
                # If the REQ type is not string then, insert the
                # REQ and OpenAPI data types of the variable in
                # the respective sets so that they can be dealt
                # with later. In either case remove the variable
                # from `json_params`.
                if vtype == str:
                    json_params.pop(vname, None)
                    continue
                else:
                    openapi_params.add((vname, json_params[vname]))
                    json_params.pop(vname, None)
            function_params.add((vname, vtype))

    # After the above operations `json_params` should be empty
    # (every documented JSON parameter matched a REQ parameter).
    assert len(json_params) == 0
    diff = openapi_params - function_params
    if diff:  # nocoverage
        self.render_openapi_type_exception(function, openapi_params, function_params, diff)
|
|
|
|
|
2019-06-06 22:22:21 +02:00
|
|
|
def test_openapi_arguments(self) -> None:
    """This end-to-end API documentation test compares the arguments
    defined in the actual code using @has_request_variables and
    REQ(), with the arguments declared in our API documentation
    for every API endpoint in Zulip.

    First, we import the fancy-Django version of zproject/urls.py
    by doing this, each has_request_variables wrapper around each
    imported view function gets called to generate the wrapped
    view function and thus filling the global arguments_map variable.
    Basically, we're exploiting code execution during import.

    Then we need to import some view modules not already imported in
    urls.py. We use this different syntax because of the linters complaining
    of an unused import (which is correct, but we do this for triggering the
    has_request_variables decorator).

    At the end, we perform a reverse mapping test that verifies that
    every URL pattern defined in the OpenAPI documentation actually exists
    in code.
    """

    # Imported locally on purpose: the import itself populates the global
    # arguments_map (see docstring above).
    from zproject import urls as urlconf

    # We loop through all the API patterns, looking in particular
    # for those using the rest_dispatch decorator; we then parse
    # its mapping of (HTTP_METHOD -> FUNCTION).
    for p in urlconf.v1_api_and_json_patterns + urlconf.v1_api_mobile_patterns:
        if p.callback is not rest_dispatch:
            # Endpoints not using rest_dispatch don't have extra data.
            # Treat the plain callback as the GET handler.
            methods_endpoints = dict(
                GET=p.callback,
            )
        else:
            methods_endpoints = p.default_args

        # since the module was already imported and is now residing in
        # memory, we won't actually face any performance penalties here.
        for method, value in methods_endpoints.items():
            if callable(value):
                function: Callable[..., HttpResponse] = value
                tags: Set[str] = set()
            else:
                # rest_dispatch entries may be (function, tags) tuples.
                function, tags = value

            if function is get_events:
                # Work around the fact that the registered
                # get_events view function isn't where we do
                # @has_request_variables.
                #
                # TODO: Make this configurable via an optional argument
                # to has_request_variables, e.g.
                # @has_request_variables(view_func_name="zerver.tornado.views.get_events")
                function = get_events_backend

            function_name = f"{function.__module__}.{function.__name__}"

            # Our accounting logic in the `has_request_variables()`
            # code means we have the list of all arguments
            # accepted by every view function in arguments_map.
            accepted_arguments = set(arguments_map[function_name])

            regex_pattern = p.pattern.regex.pattern
            url_pattern = self.convert_regex_to_url_pattern(regex_pattern)

            if "intentionally_undocumented" in tags:
                self.ensure_no_documentation_if_intentionally_undocumented(url_pattern, method)
                continue

            if url_pattern in self.pending_endpoints:
                # HACK: After all pending_endpoints have been resolved, we should remove
                # this segment and the "msg" part of the `ensure_no_...` method.
                msg = f"""
We found some OpenAPI documentation for {method} {url_pattern},
so maybe we shouldn't include it in pending_endpoints.
"""
                self.ensure_no_documentation_if_intentionally_undocumented(
                    url_pattern, method, msg
                )
                continue

            try:
                # Don't include OpenAPI parameters that live in
                # the path; these are not extracted by REQ.
                openapi_parameters = get_openapi_parameters(
                    url_pattern, method, include_url_parameters=False
                )
            except Exception:  # nocoverage
                raise AssertionError(f"Could not find OpenAPI docs for {method} {url_pattern}")

            # We now have everything we need to understand the
            # function as defined in our urls.py:
            #
            # * method is the HTTP method, e.g. GET, POST, or PATCH
            #
            # * p.pattern.regex.pattern is the URL pattern; might require
            # some processing to match with OpenAPI rules
            #
            # * accepted_arguments is the full set of arguments
            # this method accepts (from the REQ declarations in
            # code).
            #
            # * The documented parameters for the endpoint as recorded in our
            # OpenAPI data in zerver/openapi/zulip.yaml.
            #
            # We now compare these to confirm that the documented
            # argument list matches what actually appears in the
            # codebase.

            openapi_parameter_names = {parameter["name"] for parameter in openapi_parameters}

            if len(accepted_arguments - openapi_parameter_names) > 0:  # nocoverage
                print("Undocumented parameters for", url_pattern, method, function_name)
                print(" +", openapi_parameter_names)
                print(" -", accepted_arguments)
                assert url_pattern in self.buggy_documentation_endpoints
            elif len(openapi_parameter_names - accepted_arguments) > 0:  # nocoverage
                print("Documented invalid parameters for", url_pattern, method, function_name)
                print(" -", openapi_parameter_names)
                print(" +", accepted_arguments)
                assert url_pattern in self.buggy_documentation_endpoints
            else:
                # Names agree; also verify the documented types match the
                # implementation before recording the endpoint as checked.
                self.assertEqual(openapi_parameter_names, accepted_arguments)
                self.check_argument_types(function, openapi_parameters)
                self.checked_endpoints.add(url_pattern)

    self.check_for_non_existant_openapi_endpoints()
|
2019-07-29 15:46:48 +02:00
|
|
|
|
2019-08-04 08:14:08 +02:00
|
|
|
|
|
|
|
class ModifyExampleGenerationTestCase(ZulipTestCase):
    """Exercise parse_language_and_options on the various header formats
    used by our Markdown code-example fences (plain language, scalar
    modifiers, list modifiers, and mixtures thereof)."""

    def test_no_mod_argument(self) -> None:
        # A bare language name yields an empty options dict.
        self.assertEqual(parse_language_and_options("python"), ("python", {}))

    def test_single_simple_mod_argument(self) -> None:
        # One scalar modifier; both quote styles are accepted for strings.
        cases = [
            ("curl, mod=1", {"mod": 1}),
            ("curl, mod='somevalue'", {"mod": "somevalue"}),
            ('curl, mod="somevalue"', {"mod": "somevalue"}),
        ]
        for header, expected_options in cases:
            self.assertEqual(parse_language_and_options(header), ("curl", expected_options))

    def test_multiple_simple_mod_argument(self) -> None:
        # Several scalar modifiers in one header.
        cases = [
            ("curl, mod1=1, mod2='a'", {"mod1": 1, "mod2": "a"}),
            (
                "curl, mod1=\"asdf\", mod2='thing', mod3=3",
                {"mod1": "asdf", "mod2": "thing", "mod3": 3},
            ),
        ]
        for header, expected_options in cases:
            self.assertEqual(parse_language_and_options(header), ("curl", expected_options))

    def test_single_list_mod_argument(self) -> None:
        # A list-valued modifier; quote styles may even be mixed inside one list.
        headers = [
            "curl, exclude=['param1', 'param2']",
            'curl, exclude=["param1", "param2"]',
            "curl, exclude=['param1', \"param2\"]",
        ]
        for header in headers:
            self.assertEqual(
                parse_language_and_options(header),
                ("curl", {"exclude": ["param1", "param2"]}),
            )

    def test_multiple_list_mod_argument(self) -> None:
        # Two list-valued modifiers in one header.
        parsed = parse_language_and_options(
            "curl, exclude=['param1', \"param2\"], special=['param3']"
        )
        self.assertEqual(
            parsed, ("curl", {"exclude": ["param1", "param2"], "special": ["param3"]})
        )

    def test_multiple_mixed_mod_arguments(self) -> None:
        # Lists and scalars together, with every quoting combination.
        parsed = parse_language_and_options(
            'curl, exclude=["asdf", \'sdfg\'], other_key=\'asdf\', more_things="asdf", another_list=[1, "2"]'
        )
        expected = (
            "curl",
            {
                "exclude": ["asdf", "sdfg"],
                "other_key": "asdf",
                "more_things": "asdf",
                "another_list": [1, "2"],
            },
        )
        self.assertEqual(parsed, expected)
|
2019-08-04 08:14:08 +02:00
|
|
|
|
|
|
|
|
2019-07-29 15:46:48 +02:00
|
|
|
class TestCurlExampleGeneration(ZulipTestCase):
|
|
|
|
|
|
|
|
spec_mock_without_examples = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-07-29 15:46:48 +02:00
|
|
|
"paths": {
|
|
|
|
"/mark_stream_as_read": {
|
|
|
|
"post": {
|
|
|
|
"description": "Mark all the unread messages in a stream as read.",
|
|
|
|
"parameters": [
|
|
|
|
{
|
|
|
|
"name": "stream_id",
|
|
|
|
"in": "query",
|
|
|
|
"description": "The ID of the stream whose messages should be marked as read.",
|
|
|
|
"schema": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"type": "integer",
|
2019-07-29 15:46:48 +02:00
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
2019-07-29 15:46:48 +02:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"name": "bool_param",
|
|
|
|
"in": "query",
|
|
|
|
"description": "Just a boolean parameter.",
|
|
|
|
"schema": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"type": "boolean",
|
2019-07-29 15:46:48 +02:00
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
|
|
|
},
|
2019-07-29 15:46:48 +02:00
|
|
|
],
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2019-07-29 15:46:48 +02:00
|
|
|
}
|
|
|
|
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
spec_mock_with_invalid_method: Dict[str, object] = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-07-29 15:46:48 +02:00
|
|
|
"paths": {
|
|
|
|
"/endpoint": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"brew": {}, # the data is irrelevant as is should be rejected.
|
|
|
|
},
|
|
|
|
},
|
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
|
|
|
}
|
2019-07-29 15:46:48 +02:00
|
|
|
|
|
|
|
spec_mock_using_object = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-07-29 15:46:48 +02:00
|
|
|
"paths": {
|
|
|
|
"/endpoint": {
|
2019-10-03 15:02:51 +02:00
|
|
|
"get": {
|
|
|
|
"description": "Get some info.",
|
|
|
|
"parameters": [
|
|
|
|
{
|
|
|
|
"name": "param1",
|
|
|
|
"in": "query",
|
|
|
|
"description": "An object",
|
2020-06-27 19:23:50 +02:00
|
|
|
"content": {
|
|
|
|
"application/json": {
|
2021-02-12 08:19:30 +01:00
|
|
|
"schema": {"type": "object"},
|
2020-06-27 19:23:50 +02:00
|
|
|
"example": {
|
|
|
|
"key": "value",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
2020-06-27 19:23:50 +02:00
|
|
|
}
|
2019-10-03 15:02:51 +02:00
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2019-10-03 15:02:51 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
spec_mock_using_param_in_path = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-10-03 15:02:51 +02:00
|
|
|
"paths": {
|
|
|
|
"/endpoint/{param1}": {
|
2019-07-29 15:46:48 +02:00
|
|
|
"get": {
|
|
|
|
"description": "Get some info.",
|
|
|
|
"parameters": [
|
|
|
|
{
|
|
|
|
"name": "param1",
|
|
|
|
"in": "path",
|
2019-10-03 15:59:28 +02:00
|
|
|
"description": "Param in path",
|
|
|
|
"schema": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"type": "integer",
|
2019-10-03 15:59:28 +02:00
|
|
|
},
|
|
|
|
"example": 35,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
2019-10-03 15:59:28 +02:00
|
|
|
},
|
|
|
|
{
|
|
|
|
"name": "param2",
|
|
|
|
"in": "query",
|
2019-07-29 15:46:48 +02:00
|
|
|
"description": "An object",
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
2020-06-27 19:23:50 +02:00
|
|
|
"content": {
|
|
|
|
"application/json": {
|
2021-02-12 08:19:30 +01:00
|
|
|
"schema": {"type": "object"},
|
2020-06-27 19:23:50 +02:00
|
|
|
"example": {
|
|
|
|
"key": "value",
|
2021-02-12 08:19:30 +01:00
|
|
|
},
|
2020-06-27 19:23:50 +02:00
|
|
|
}
|
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
},
|
|
|
|
],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2019-07-29 15:46:48 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
spec_mock_using_object_without_example = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-07-29 15:46:48 +02:00
|
|
|
"paths": {
|
|
|
|
"/endpoint": {
|
|
|
|
"get": {
|
|
|
|
"description": "Get some info.",
|
|
|
|
"parameters": [
|
|
|
|
{
|
|
|
|
"name": "param1",
|
2019-10-03 15:02:51 +02:00
|
|
|
"in": "query",
|
2019-07-29 15:46:48 +02:00
|
|
|
"description": "An object",
|
|
|
|
"schema": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"type": "object",
|
2019-07-29 15:46:48 +02:00
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2019-07-29 15:46:48 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
spec_mock_using_array_without_example = {
|
2019-12-04 12:27:15 +01:00
|
|
|
"security": [{"basicAuth": []}],
|
2019-07-29 15:46:48 +02:00
|
|
|
"paths": {
|
|
|
|
"/endpoint": {
|
|
|
|
"get": {
|
|
|
|
"description": "Get some info.",
|
|
|
|
"parameters": [
|
|
|
|
{
|
|
|
|
"name": "param1",
|
2019-10-03 15:02:51 +02:00
|
|
|
"in": "query",
|
2019-07-29 15:46:48 +02:00
|
|
|
"description": "An array",
|
|
|
|
"schema": {
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"type": "array",
|
2019-07-29 15:46:48 +02:00
|
|
|
},
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"required": True,
|
|
|
|
},
|
|
|
|
],
|
|
|
|
},
|
|
|
|
},
|
|
|
|
},
|
2019-07-29 15:46:48 +02:00
|
|
|
}
|
|
|
|
|
2019-08-16 21:17:01 +02:00
|
|
|
def curl_example(self, endpoint: str, method: str, *args: Any, **kwargs: Any) -> List[str]:
|
2021-02-12 08:19:30 +01:00
|
|
|
return generate_curl_example(endpoint, method, "http://localhost:9991/api", *args, **kwargs)
|
2019-08-16 21:17:01 +02:00
|
|
|
|
2019-07-29 15:46:48 +02:00
|
|
|
def test_generate_and_render_curl_example(self) -> None:
|
2019-08-16 21:17:01 +02:00
|
|
|
generated_curl_example = self.curl_example("/get_stream_id", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
|
|
|
"```curl",
|
2019-08-07 10:55:41 +02:00
|
|
|
"curl -sSX GET -G http://localhost:9991/api/v1/get_stream_id \\",
|
2019-07-29 15:46:48 +02:00
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode stream=Denmark",
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
|
|
|
|
|
|
|
def test_generate_and_render_curl_example_with_nonexistant_endpoints(self) -> None:
|
|
|
|
with self.assertRaises(KeyError):
|
2019-08-16 21:17:01 +02:00
|
|
|
self.curl_example("/mark_this_stream_as_read", "POST")
|
2019-07-29 15:46:48 +02:00
|
|
|
with self.assertRaises(KeyError):
|
2019-08-16 21:17:01 +02:00
|
|
|
self.curl_example("/mark_stream_as_read", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
|
|
|
|
def test_generate_and_render_curl_without_auth(self) -> None:
|
2019-08-16 21:17:01 +02:00
|
|
|
generated_curl_example = self.curl_example("/dev_fetch_api_key", "POST")
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
|
|
|
"```curl",
|
2019-08-07 10:55:41 +02:00
|
|
|
"curl -sSX POST http://localhost:9991/api/v1/dev_fetch_api_key \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode username=iago@zulip.com",
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2019-07-29 15:46:48 +02:00
|
|
|
def test_generate_and_render_curl_with_default_examples(self, spec_mock: MagicMock) -> None:
|
|
|
|
spec_mock.return_value = self.spec_mock_without_examples
|
2019-08-16 21:17:01 +02:00
|
|
|
generated_curl_example = self.curl_example("/mark_stream_as_read", "POST")
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
|
|
|
"```curl",
|
2019-08-07 10:55:41 +02:00
|
|
|
"curl -sSX POST http://localhost:9991/api/v1/mark_stream_as_read \\",
|
2019-12-04 12:27:15 +01:00
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode stream_id=1 \\",
|
|
|
|
" --data-urlencode bool_param=false",
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2019-07-29 15:46:48 +02:00
|
|
|
def test_generate_and_render_curl_with_invalid_method(self, spec_mock: MagicMock) -> None:
|
|
|
|
spec_mock.return_value = self.spec_mock_with_invalid_method
|
|
|
|
with self.assertRaises(ValueError):
|
2019-08-16 21:17:01 +02:00
|
|
|
self.curl_example("/endpoint", "BREW") # see: HTCPCP
|
2019-07-29 15:46:48 +02:00
|
|
|
|
|
|
|
def test_generate_and_render_curl_with_array_example(self) -> None:
|
2019-08-16 21:17:01 +02:00
|
|
|
generated_curl_example = self.curl_example("/messages", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"```curl",
|
|
|
|
"curl -sSX GET -G http://localhost:9991/api/v1/messages \\",
|
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode anchor=42 \\",
|
|
|
|
" --data-urlencode num_before=4 \\",
|
|
|
|
" --data-urlencode num_after=8 \\",
|
|
|
|
' --data-urlencode \'narrow=[{"operand": "Denmark", "operator": "stream"}]\' \\',
|
|
|
|
" --data-urlencode client_gravatar=true \\",
|
|
|
|
" --data-urlencode apply_markdown=false \\",
|
|
|
|
" --data-urlencode use_first_unread_anchor=true",
|
2021-02-12 08:20:45 +01:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2019-07-29 15:46:48 +02:00
|
|
|
def test_generate_and_render_curl_with_object(self, spec_mock: MagicMock) -> None:
|
|
|
|
spec_mock.return_value = self.spec_mock_using_object
|
2019-08-16 21:17:01 +02:00
|
|
|
generated_curl_example = self.curl_example("/endpoint", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"```curl",
|
|
|
|
"curl -sSX GET -G http://localhost:9991/api/v1/endpoint \\",
|
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
' --data-urlencode \'param1={"key": "value"}\'',
|
2021-02-12 08:20:45 +01:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
2019-10-03 15:02:51 +02:00
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2021-02-12 08:19:30 +01:00
|
|
|
def test_generate_and_render_curl_with_object_without_example(
|
|
|
|
self, spec_mock: MagicMock
|
|
|
|
) -> None:
|
2019-07-29 15:46:48 +02:00
|
|
|
spec_mock.return_value = self.spec_mock_using_object_without_example
|
|
|
|
with self.assertRaises(ValueError):
|
2019-08-16 21:17:01 +02:00
|
|
|
self.curl_example("/endpoint", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2021-02-12 08:19:30 +01:00
|
|
|
def test_generate_and_render_curl_with_array_without_example(
|
|
|
|
self, spec_mock: MagicMock
|
|
|
|
) -> None:
|
2019-07-29 15:46:48 +02:00
|
|
|
spec_mock.return_value = self.spec_mock_using_array_without_example
|
|
|
|
with self.assertRaises(ValueError):
|
2019-08-16 21:17:01 +02:00
|
|
|
self.curl_example("/endpoint", "GET")
|
2019-07-29 15:46:48 +02:00
|
|
|
|
2020-08-12 04:54:48 +02:00
|
|
|
@patch("zerver.openapi.openapi.OpenAPISpec.openapi")
|
2019-10-03 15:59:28 +02:00
|
|
|
def test_generate_and_render_curl_with_param_in_path(self, spec_mock: MagicMock) -> None:
|
|
|
|
spec_mock.return_value = self.spec_mock_using_param_in_path
|
|
|
|
generated_curl_example = self.curl_example("/endpoint/{param1}", "GET")
|
|
|
|
expected_curl_example = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"```curl",
|
|
|
|
"curl -sSX GET -G http://localhost:9991/api/v1/endpoint/35 \\",
|
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
' --data-urlencode \'param2={"key": "value"}\'',
|
2021-02-12 08:20:45 +01:00
|
|
|
"```",
|
2019-10-03 15:59:28 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
|
|
|
|
2019-07-29 15:46:48 +02:00
|
|
|
def test_generate_and_render_curl_wrapper(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
generated_curl_example = render_curl_example(
|
|
|
|
"/get_stream_id:GET:email:key", api_url="https://zulip.example.com/api"
|
|
|
|
)
|
2019-07-29 15:46:48 +02:00
|
|
|
expected_curl_example = [
|
|
|
|
"```curl",
|
2019-08-07 10:55:41 +02:00
|
|
|
"curl -sSX GET -G https://zulip.example.com/api/v1/get_stream_id \\",
|
2019-07-29 15:46:48 +02:00
|
|
|
" -u email:key \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode stream=Denmark",
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
"```",
|
2019-07-29 15:46:48 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
2019-08-04 08:14:08 +02:00
|
|
|
|
|
|
|
def test_generate_and_render_curl_example_with_excludes(self) -> None:
|
2021-02-12 08:19:30 +01:00
|
|
|
generated_curl_example = self.curl_example(
|
|
|
|
"/messages", "GET", exclude=["client_gravatar", "apply_markdown"]
|
|
|
|
)
|
2019-08-04 08:14:08 +02:00
|
|
|
expected_curl_example = [
|
2021-02-12 08:20:45 +01:00
|
|
|
"```curl",
|
|
|
|
"curl -sSX GET -G http://localhost:9991/api/v1/messages \\",
|
|
|
|
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
|
2020-11-04 02:49:09 +01:00
|
|
|
" --data-urlencode anchor=42 \\",
|
|
|
|
" --data-urlencode num_before=4 \\",
|
|
|
|
" --data-urlencode num_after=8 \\",
|
|
|
|
' --data-urlencode \'narrow=[{"operand": "Denmark", "operator": "stream"}]\' \\',
|
|
|
|
" --data-urlencode use_first_unread_anchor=true",
|
2021-02-12 08:20:45 +01:00
|
|
|
"```",
|
2019-08-04 08:14:08 +02:00
|
|
|
]
|
|
|
|
self.assertEqual(generated_curl_example, expected_curl_example)
|
2020-05-20 19:53:41 +02:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-20 19:53:41 +02:00
|
|
|
class OpenAPIAttributesTest(ZulipTestCase):
    def test_attributes(self) -> None:
        """
        Checks:
        * All endpoints have `operationId` and `tag` attributes.
        * All example responses match their schema.
        * That no opaque object exists.
        """
        EXCLUDE = ["/real-time"]
        # Fix: the original list carried a duplicate "users" entry;
        # membership testing is unaffected by the deduplication.
        VALID_TAGS = [
            "users",
            "server_and_organizations",
            "authentication",
            "real_time_events",
            "streams",
            "messages",
            "webhooks",
        ]
        paths = OpenAPISpec(OPENAPI_SPEC_PATH).openapi()["paths"]
        for path, path_item in paths.items():
            if path in EXCLUDE:
                continue
            for method, operation in path_item.items():
                # Check if every file has an operationId
                assert "operationId" in operation
                assert "tags" in operation
                tag = operation["tags"][0]
                assert tag in VALID_TAGS
                for status_code, response in operation["responses"].items():
                    schema = response["content"]["application/json"]["schema"]
                    if "oneOf" in schema:
                        # Each oneOf variant carries its own example,
                        # validated under the "<status>_<index>" key.
                        for subschema_index, subschema in enumerate(schema["oneOf"]):
                            validate_schema(subschema)
                            assert validate_against_openapi_schema(
                                subschema["example"],
                                path,
                                method,
                                status_code + "_" + str(subschema_index),
                            )
                        continue
                    validate_schema(schema)
                    assert validate_against_openapi_schema(
                        schema["example"], path, method, status_code
                    )
|
|
|
|
|
2020-06-13 17:59:46 +02:00
|
|
|
|
|
|
|
class OpenAPIRegexTest(ZulipTestCase):
    def test_regex(self) -> None:
        """
        Calls a few documented and undocumented endpoints and checks whether they
        find a match or not.
        """
        # Some undocumented endpoints that closely resemble documented
        # ones must not match any OpenAPI path.
        for undocumented_url in (
            "/users/me/presence",
            "/users/me/subscriptions/23",
            "/users/iago/subscriptions/23",
            "/messages/matches_narrow",
        ):
            assert find_openapi_endpoint(undocumented_url) is None
        # Making sure documented endpoints are matched correctly.
        documented_matches = {
            "/users/23/subscriptions/21": "/users/{user_id}/subscriptions/{stream_id}",
            "/users/iago@zulip.com/presence": "/users/{user_id_or_email}/presence",
            "/users/iago@zulip.com": "/users/{email}",
            "/messages/23": "/messages/{message_id}",
            "/realm/emoji/realm_emoji_1": "/realm/emoji/{emoji_name}",
        }
        for url, templated_path in documented_matches.items():
            assert find_openapi_endpoint(url) == templated_path
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-07-09 20:51:31 +02:00
|
|
|
|
|
|
|
class OpenAPIRequestValidatorTest(ZulipTestCase):
    def test_validator(self) -> None:
        """
        Test to make sure the request validator works properly.

        The tests cover both directions: valid requests must not be
        flagged, and invalid requests must be marked as such.
        """
        # `/users/me/subscriptions` doesn't require any parameters, so an
        # empty request validates cleanly.
        validate_request("/users/me/subscriptions", "get", {}, {}, False, "200")
        # `/messages` POST does not work on an empty response.
        with self.assertRaises(SchemaError):
            validate_request("/messages", "post", {}, {}, False, "200")
        # 400 responses are allowed to fail validation.
        validate_request("/messages", "post", {}, {}, False, "400")
        # `intentionally_undocumented` allows validation errors on
        # 200 responses.
        validate_request(
            "/dev_fetch_api_key",
            "post",
            {},
            {},
            False,
            "200",
            intentionally_undocumented=True,
        )
|