2020-06-11 00:54:34 +02:00
import inspect
2020-06-20 19:25:32 +02:00
import os
2020-06-13 06:26:41 +02:00
from collections import abc
2022-04-27 02:30:13 +02:00
from typing import (
Any ,
Callable ,
Dict ,
List ,
2023-08-17 02:34:42 +02:00
Mapping ,
2022-04-27 02:30:13 +02:00
Optional ,
Sequence ,
Set ,
Tuple ,
Union ,
get_args ,
get_origin ,
)
2020-06-11 00:54:34 +02:00
from unittest . mock import MagicMock , patch
2018-05-31 19:41:17 +02:00
2020-06-20 19:25:32 +02:00
import yaml
2019-07-10 13:23:25 +02:00
from django . http import HttpResponse
2023-08-12 05:24:45 +02:00
from django . urls import URLPattern
2021-12-23 22:27:05 +01:00
from django . utils import regex_helper
2023-08-12 00:03:37 +02:00
from pydantic import TypeAdapter
2019-06-06 22:22:21 +02:00
2020-06-11 00:54:34 +02:00
from zerver . lib . request import _REQ , arguments_map
2020-09-22 03:55:32 +02:00
from zerver . lib . rest import rest_dispatch
2018-05-31 19:41:17 +02:00
from zerver . lib . test_classes import ZulipTestCase
2023-08-12 00:03:37 +02:00
from zerver . lib . typed_endpoint import parse_view_func_signature
2021-08-10 17:18:43 +02:00
from zerver . lib . utils import assert_is_not_none
2021-09-30 00:10:12 +02:00
from zerver . openapi . markdown_extension import generate_curl_example , render_curl_example
2020-02-23 18:10:42 +01:00
from zerver . openapi . openapi import (
2020-06-11 00:54:34 +02:00
OPENAPI_SPEC_PATH ,
OpenAPISpec ,
2024-02-01 05:43:43 +01:00
Parameter ,
2020-06-11 00:54:34 +02:00
SchemaError ,
2020-08-12 04:54:48 +02:00
find_openapi_endpoint ,
2020-06-11 00:54:34 +02:00
get_openapi_fixture ,
get_openapi_parameters ,
get_openapi_paths ,
openapi_spec ,
validate_against_openapi_schema ,
2020-07-09 20:51:31 +02:00
validate_request ,
2020-07-01 19:07:31 +02:00
validate_schema ,
2018-05-31 19:41:17 +02:00
)
2020-09-22 03:55:32 +02:00
from zerver . tornado . views import get_events , get_events_backend
2018-05-31 19:41:17 +02:00
2021-02-12 08:20:45 +01:00
# A stable, fully documented endpoint used as the fixture target for the
# OpenAPI tooling tests below (PATCH /messages/{message_id}).
TEST_ENDPOINT = "/messages/{message_id}"
TEST_METHOD = "patch"
TEST_RESPONSE_BAD_REQ = "400"
TEST_RESPONSE_SUCCESS = "200"

# Maps OpenAPI scalar type names to the Python types they correspond to;
# used by schema_type() as the terminal lookup for non-composite schemas.
VARMAP = {
    "integer": int,
    "string": str,
    "boolean": bool,
    "object": dict,
    "NoneType": type(None),
}
2018-05-31 19:41:17 +02:00
2021-02-12 08:19:30 +01:00
2023-08-17 02:34:42 +02:00
def schema_type(
    schema: Dict[str, Any], defs: Mapping[str, Any] = {}
) -> Union[type, Tuple[type, object]]:
    """Map an OpenAPI/JSON-schema fragment to a Python type.

    Composite schemas are unwrapped recursively; arrays become a
    (list, element_type) pair, and scalar type names are resolved via
    VARMAP. ``defs`` supplies the targets for ``$ref`` lookups.
    """
    if "oneOf" in schema:
        # Hack: just use the type of the first alternative.
        # Ideally, we'd turn this into a Union type.
        return schema_type(schema["oneOf"][0], defs)
    if "anyOf" in schema:
        return schema_type(schema["anyOf"][0], defs)
    if schema.get("contentMediaType") == "application/json":
        # JSON-encoded parameter: the interesting type is the content schema.
        return schema_type(schema["contentSchema"], defs)
    if "$ref" in schema:
        return schema_type(defs[schema["$ref"]], defs)
    if schema["type"] == "array":
        return (list, schema_type(schema["items"], defs))
    return VARMAP[schema["type"]]
2021-02-12 08:19:30 +01:00
2018-05-31 19:41:17 +02:00
class OpenAPIToolsTest(ZulipTestCase):
    """Make sure that the tools we use to handle our OpenAPI specification
    (located in zerver/openapi/openapi.py) work as expected.

    These tools are mostly dedicated to fetching parts of the -already parsed-
    specification, and comparing them to objects returned by our REST API.
    """

    def test_get_openapi_fixture(self) -> None:
        # The fixture for a documented error response should match the
        # example recorded in the OpenAPI spec for this endpoint/method.
        actual = get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_BAD_REQ)
        expected = {
            "code": "BAD_REQUEST",
            "msg": "You don't have permission to edit this message",
            "result": "error",
        }
        self.assertEqual(actual, expected)

    def test_get_openapi_parameters(self) -> None:
        # The documented parameters for this endpoint should include the
        # message_id path parameter with exactly this schema.
        actual = get_openapi_parameters(TEST_ENDPOINT, TEST_METHOD)
        expected_item = Parameter(
            kind="path",
            name="message_id",
            description="The target message's ID.\n",
            json_encoded=False,
            value_schema={"type": "integer"},
            example=43,
            required=True,
            deprecated=False,
        )
        assert expected_item in actual

    def test_validate_against_openapi_schema(self) -> None:
        # An undocumented extra key must be rejected.
        with self.assertRaisesRegex(
            SchemaError, r"Additional properties are not allowed \('foo' was unexpected\)"
        ):
            bad_content: Dict[str, object] = {
                "msg": "",
                "result": "success",
                "foo": "bar",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # A value of the wrong type must be rejected.
        with self.assertRaisesRegex(SchemaError, r"42 is not of type 'string'"):
            bad_content = {
                "msg": 42,
                "result": "success",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # A missing required key must be rejected.
        with self.assertRaisesRegex(SchemaError, r"'msg' is a required property"):
            bad_content = {
                "result": "success",
            }
            validate_against_openapi_schema(
                bad_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
            )

        # No exceptions should be raised here.
        good_content = {
            "msg": "",
            "result": "success",
        }
        validate_against_openapi_schema(
            good_content, TEST_ENDPOINT, TEST_METHOD, TEST_RESPONSE_SUCCESS
        )

        # Overwrite the exception list with a mocked one
        test_dict: Dict[str, Any] = {}
        # Check that validate_against_openapi_schema correctly
        # descends into 'deep' objects and arrays. Test 1 should
        # pass, Test 2 has a 'deep' extraneous key and Test 3 has a
        # 'deep' opaque object. Also the parameters are a heterogeneous
        # mix of arrays and objects to verify that our descent logic
        # correctly gets to the deeply nested objects.
        test_filename = os.path.join(os.path.dirname(OPENAPI_SPEC_PATH), "testing.yaml")
        with open(test_filename) as test_file:
            test_dict = yaml.safe_load(test_file)
        with patch("zerver.openapi.openapi.openapi_spec", OpenAPISpec(test_filename)):
            validate_against_openapi_schema(
                {
                    "top_array": [
                        {"obj": {"str3": "test"}},
                        [{"str1": "success", "str2": "success"}],
                    ],
                },
                "/test1",
                "get",
                "200",
            )
            with self.assertRaisesRegex(
                SchemaError,
                r"\{'obj': \{'str3': 'test', 'str4': 'extraneous'\}\} is not valid under any of the given schemas",
            ):
                validate_against_openapi_schema(
                    {
                        "top_array": [
                            {"obj": {"str3": "test", "str4": "extraneous"}},
                            [{"str1": "success", "str2": "success"}],
                        ],
                    },
                    "/test2",
                    "get",
                    "200",
                )
            with self.assertRaisesRegex(
                SchemaError,
                r"additionalProperties needs to be defined for objects to make sure they have no additional properties left to be documented\.",
            ):
                # Checks for opaque objects
                validate_schema(
                    test_dict["paths"]["/test3"]["get"]["responses"]["200"]["content"][
                        "application/json"
                    ]["schema"]
                )

    def test_live_reload(self) -> None:
        # Force the reload by making the last update date < the file's last
        # modified date
        openapi_spec.mtime = 0
        get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD)
        # Check that the file has been reloaded by verifying that the last
        # update date isn't zero anymore
        self.assertNotEqual(openapi_spec.mtime, 0)

        # Now verify calling it again doesn't call reload
        old_openapi = openapi_spec.openapi()
        get_openapi_fixture(TEST_ENDPOINT, TEST_METHOD)
        new_openapi = openapi_spec.openapi()
        # Identity check: a second fetch must reuse the same parsed object.
        self.assertIs(old_openapi, new_openapi)
2019-06-06 22:22:21 +02:00
2021-02-12 08:19:30 +01:00
2019-06-06 22:22:21 +02:00
class OpenAPIArgumentsTest(ZulipTestCase):
    # This will be filled during test_openapi_arguments:
    checked_endpoints: Set[str] = set()
    # Endpoints that exist in urls.py but are not yet documented in the
    # OpenAPI spec; each entry below is intentionally exempted from the
    # consistency check (with notes on documentation priority).
    pending_endpoints = {
        #### TODO: These endpoints are a priority to document:
        "/users/me/presence",
        # These are a priority to document but don't match our normal URL schemes
        # and thus may be complicated to document with our current tooling.
        # (No /api/v1/ or /json prefix).
        "/avatar/{email_or_id}",
        ## This one is in zulip.yaml, but not the actual docs.
        # "/api/v1/user_uploads/{realm_id_str}/(unknown)",
        ## And this one isn't, and isn't really representable
        # "/user_uploads/{realm_id_str}/(unknown)",
        #### These realm administration settings are valuable to document:
        # List data exports for organization (GET) or request one (POST)
        "/export/realm",
        # Delete a data export.
        "/export/realm/{export_id}",
        # Manage default streams and default stream groups
        "/default_stream_groups/create",
        "/default_stream_groups/{group_id}",
        "/default_stream_groups/{group_id}/streams",
        # Administer invitations
        "/invites/{prereg_id}/resend",
        # Single-stream settings alternative to the bulk endpoint
        # users/me/subscriptions/properties; probably should just be a
        # section of the same page.
        "/users/me/subscriptions/{stream_id}",
        #### Mobile-app only endpoints; important for mobile developers.
        # Mobile interface for development environment login
        "/dev_list_users",
        #### These personal settings endpoints have modest value to document:
        "/users/me/avatar",
        "/users/me/api_key/regenerate",
        # Much more valuable would be an org admin bulk-upload feature.
        "/users/me/profile_data",
        #### Should be documented as part of interactive bots documentation
        "/bot_storage",
        "/submessage",
        "/zcommand",
        #### These "organization settings" endpoint have modest value to document:
        "/realm",
        "/realm/domains",
        "/realm/domains/{domain}",
        "/bots",
        "/bots/{bot_id}",
        "/bots/{bot_id}/api_key/regenerate",
        #### These "organization settings" endpoints have low value to document:
        "/realm/profile_fields/{field_id}",
        "/realm/icon",
        "/realm/logo",
        "/realm/deactivate",
        "/realm/subdomain/{subdomain}",
        # API for Zoom video calls. Unclear if this can support other apps.
        "/calls/zoom/create",
        #### The following are fake endpoints that live in our zulip.yaml
        #### for tooling convenience reasons, and should eventually be moved.
        # Real-time-events endpoint
        "/real-time",
        # Rest error handling endpoint
        "/rest-error-handling",
        # Zulip outgoing webhook payload
        "/zulip-outgoing-webhook",
        "/jwt/fetch_api_key",
    }

    # Endpoints in the API documentation that don't use rest_dispatch
    # and only use the POST method; used in test_openapi_arguments.
    documented_post_only_endpoints = {
        "fetch_api_key",
        "dev_fetch_api_key",
    }

    # Endpoints where the documentation is currently failing our
    # consistency tests. We aim to keep this list empty.
    buggy_documentation_endpoints: Set[str] = set()
2019-07-07 08:54:19 +02:00
2021-02-12 08:19:30 +01:00
def ensure_no_documentation_if_intentionally_undocumented (
self , url_pattern : str , method : str , msg : Optional [ str ] = None
) - > None :
2019-07-09 08:28:29 +02:00
try :
get_openapi_parameters ( url_pattern , method )
2019-07-19 07:02:10 +02:00
if not msg : # nocoverage
2020-06-13 08:57:35 +02:00
msg = f """
2019-07-20 20:16:47 +02:00
We found some OpenAPI documentation for { method } { url_pattern } ,
2020-10-23 02:43:28 +02:00
so maybe we shouldn ' t mark it as intentionally undocumented in the URLs.
2020-06-13 08:57:35 +02:00
"""
2019-07-20 20:16:47 +02:00
raise AssertionError ( msg ) # nocoverage
2019-07-09 08:28:29 +02:00
except KeyError :
return
2022-02-08 00:13:33 +01:00
def check_for_non_existent_openapi_endpoints ( self ) - > None :
2021-02-12 08:19:30 +01:00
""" Here, we check to see if every endpoint documented in the OpenAPI
2019-07-09 08:28:29 +02:00
documentation actually exists in urls . py and thus in actual code .
Note : We define this as a helper called at the end of
test_openapi_arguments instead of as a separate test to ensure that
this test is only executed after test_openapi_arguments so that it ' s
2021-02-12 08:19:30 +01:00
results can be used here in the set operations . """
2019-07-09 08:28:29 +02:00
openapi_paths = set ( get_openapi_paths ( ) )
undocumented_paths = openapi_paths - self . checked_endpoints
undocumented_paths - = self . buggy_documentation_endpoints
undocumented_paths - = self . pending_endpoints
try :
2021-05-17 05:41:32 +02:00
self . assert_length ( undocumented_paths , 0 )
2019-07-09 08:28:29 +02:00
except AssertionError : # nocoverage
msg = " The following endpoints have been documented but can ' t be found in urls.py: "
for undocumented_path in undocumented_paths :
2020-06-09 00:25:09 +02:00
msg + = f " \n + { undocumented_path } "
2019-07-09 08:28:29 +02:00
raise AssertionError ( msg )
2021-02-12 08:19:30 +01:00
def get_type_by_priority (
self , types : Sequence [ Union [ type , Tuple [ type , object ] ] ]
) - > Union [ type , Tuple [ type , object ] ] :
2019-08-04 15:55:32 +02:00
priority = { list : 1 , dict : 2 , str : 3 , int : 4 , bool : 5 }
tyiroirp = { 1 : list , 2 : dict , 3 : str , 4 : int , 5 : bool }
val = 6
2019-07-10 13:23:25 +02:00
for t in types :
2019-08-10 00:30:34 +02:00
if isinstance ( t , tuple ) :
2023-02-23 01:40:19 +01:00
return t # e.g. (list, dict) or (list, str)
2019-08-10 00:30:34 +02:00
v = priority . get ( t , 6 )
2019-07-10 13:23:25 +02:00
if v < val :
val = v
return tyiroirp . get ( val , types [ 0 ] )
2019-08-10 00:30:34 +02:00
def get_standardized_argument_type ( self , t : Any ) - > Union [ type , Tuple [ type , object ] ] :
2021-02-12 08:19:30 +01:00
""" Given a type from the typing module such as List[str] or Union[str, int],
2019-07-10 13:23:25 +02:00
convert it into a corresponding Python type . Unions are mapped to a canonical
choice among the options .
E . g . typing . Union [ typing . List [ typing . Dict [ str , typing . Any ] ] , NoneType ]
needs to be mapped to list . """
2022-04-27 02:30:13 +02:00
origin = get_origin ( t )
2020-02-11 07:20:25 +01:00
2022-04-27 02:30:13 +02:00
if origin is None :
2019-07-10 13:23:25 +02:00
# Then it's most likely one of the fundamental data types
# I.E. Not one of the data types from the "typing" module.
return t
elif origin == Union :
2022-04-27 02:30:13 +02:00
subtypes = [ self . get_standardized_argument_type ( st ) for st in get_args ( t ) ]
2019-07-10 13:23:25 +02:00
return self . get_type_by_priority ( subtypes )
2021-04-30 00:15:33 +02:00
elif origin in [ list , abc . Sequence ] :
2022-04-27 02:30:13 +02:00
[ st ] = get_args ( t )
2020-06-13 06:26:41 +02:00
return ( list , self . get_standardized_argument_type ( st ) )
2020-09-02 05:05:20 +02:00
elif origin in [ dict , abc . Mapping ] :
2019-08-04 15:55:32 +02:00
return dict
2020-06-13 06:26:41 +02:00
raise AssertionError ( f " Unknown origin { origin } " )
2019-07-10 13:23:25 +02:00
2021-02-12 08:19:30 +01:00
def render_openapi_type_exception (
self ,
function : Callable [ . . . , HttpResponse ] ,
openapi_params : Set [ Tuple [ str , Union [ type , Tuple [ type , object ] ] ] ] ,
function_params : Set [ Tuple [ str , Union [ type , Tuple [ type , object ] ] ] ] ,
diff : Set [ Tuple [ str , Union [ type , Tuple [ type , object ] ] ] ] ,
) - > None : # nocoverage
""" Print a *VERY* clear and verbose error message for when the types
( between the OpenAPI documentation and the function declaration ) don ' t match. " " "
2019-07-10 13:23:25 +02:00
2020-06-13 08:57:35 +02:00
msg = f """
2019-07-10 13:23:25 +02:00
The types for the request parameters in zerver / openapi / zulip . yaml
2020-06-13 08:57:35 +02:00
do not match the types declared in the implementation of { function . __name__ } . \n """
2021-02-12 08:20:45 +01:00
msg + = " = " * 65 + " \n "
2023-06-06 22:07:28 +02:00
msg + = " {:<10} {:^30} {:>10} \n " . format (
2021-05-10 07:02:14 +02:00
" parameter " , " OpenAPI type " , " function declaration type "
2021-02-12 08:19:30 +01:00
)
2021-02-12 08:20:45 +01:00
msg + = " = " * 65 + " \n "
2019-07-10 13:23:25 +02:00
opvtype = None
fdvtype = None
for element in diff :
vname = element [ 0 ]
for element in openapi_params :
if element [ 0 ] == vname :
opvtype = element [ 1 ]
break
for element in function_params :
if element [ 0 ] == vname :
fdvtype = element [ 1 ]
break
2023-06-06 22:07:28 +02:00
msg + = f " { vname : <10 } { opvtype !s: ^30 } { fdvtype !s: >10 } \n "
2019-07-10 13:23:25 +02:00
raise AssertionError ( msg )
2023-08-12 00:03:37 +02:00
    def validate_json_schema(
        self, function: Callable[..., HttpResponse], openapi_parameters: List[Parameter]
    ) -> None:
        """Validate the Pydantic-generated JSON schema of a typed_endpoint
        view function against our OpenAPI definitions."""
        USE_JSON_CONTENT_TYPE_HINT = f"""
    The view function {{param_name}} should accept JSON input.
    Consider wrapping the type annotation of the parameter in Json.
    For example:

        from pydantic import Json
        ...

        @typed_endpoint
        def {function.__name__}(
            request: HttpRequest,
            *,
            {{param_name}}: Json[{{param_type}}] = ...,
        ) -> ...:
    """
        # The set of tuples containing the var name and type pairs extracted
        # from the function signature.
        function_params = set()
        # The set of tuples containing the var name and type pairs extracted
        # from OpenAPI.
        openapi_params = set()
        # The names of request variables that should have a content type of
        # application/json according to our OpenAPI definitions.
        json_request_var_names = set()
        for openapi_parameter in openapi_parameters:
            # We differentiate JSON and non-JSON parameters here. Because
            # application/json is the only content type to be verify in the API,
            # we assume that as long as "content" is present in the OpenAPI
            # spec, the content type should be JSON.
            expected_request_var_name = openapi_parameter.name
            if openapi_parameter.json_encoded:
                json_request_var_names.add(expected_request_var_name)
            openapi_params.add(
                (expected_request_var_name, schema_type(openapi_parameter.value_schema))
            )

        for actual_param in parse_view_func_signature(function).parameters:
            # Generate the JSON schema for this parameter's annotation;
            # ref_template="{model}" keeps $ref keys as bare model names so
            # they match the keys in the $defs mapping below.
            actual_param_schema = TypeAdapter(actual_param.param_type).json_schema(
                ref_template="{model}"
            )
            defs_mapping = actual_param_schema.get("$defs", {})
            # The content type of the JSON schema generated from the
            # function parameter type annotation should have content type
            # matching that of our OpenAPI spec. If not so, hint that the
            # Json[T] wrapper might be missing from the type annotation.
            if actual_param.request_var_name in json_request_var_names:
                self.assertEqual(
                    actual_param_schema.get("contentMediaType"),
                    "application/json",
                    USE_JSON_CONTENT_TYPE_HINT.format(
                        param_name=actual_param.param_name,
                        param_type=actual_param.param_type,
                    ),
                )
                # actual_param_schema is a json_schema. Reference:
                # https://docs.pydantic.dev/latest/api/json_schema/#pydantic.json_schema.GenerateJsonSchema.json_schema
                actual_param_schema = actual_param_schema["contentSchema"]
            elif "contentMediaType" in actual_param_schema:
                function_schema_type = schema_type(actual_param_schema, defs_mapping)
                # We do not specify that the content type of int or bool
                # parameters should be JSON encoded, while our code does expect
                # that. In this case, we exempt this parameter from the content
                # type check.
                self.assertIn(
                    function_schema_type,
                    (int, bool),
                    f'\nUnexpected content type {actual_param_schema["contentMediaType"]} on function parameter {actual_param.param_name}, which does not match the OpenAPI definition.',
                )
            function_params.add(
                (actual_param.request_var_name, schema_type(actual_param_schema, defs_mapping))
            )

        diff = openapi_params - function_params
        if diff:  # nocoverage
            self.render_openapi_type_exception(function, openapi_params, function_params, diff)
2021-02-12 08:19:30 +01:00
def check_argument_types (
2024-02-01 05:43:43 +01:00
self , function : Callable [ . . . , HttpResponse ] , openapi_parameters : List [ Parameter ]
2021-02-12 08:19:30 +01:00
) - > None :
""" We construct for both the OpenAPI data and the function ' s definition a set of
2019-07-10 13:23:25 +02:00
tuples of the form ( var_name , type ) and then compare those sets to see if the
OpenAPI data defines a different type than that actually accepted by the function .
Otherwise , we print out the exact differences for convenient debugging and raise an
2021-02-12 08:19:30 +01:00
AssertionError . """
2023-08-12 00:03:37 +02:00
# Iterate through the decorators to find the original function, wrapped
# by has_request_variables/typed_endpoint, so we can parse its
# arguments.
use_endpoint_decorator = False
while ( wrapped := getattr ( function , " __wrapped__ " , None ) ) is not None :
# TODO: Remove this check once we replace has_request_variables with
# typed_endpoint.
if getattr ( function , " use_endpoint " , False ) :
use_endpoint_decorator = True
function = wrapped
if use_endpoint_decorator :
return self . validate_json_schema ( function , openapi_parameters )
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
openapi_params : Set [ Tuple [ str , Union [ type , Tuple [ type , object ] ] ] ] = set ( )
2020-09-02 08:14:51 +02:00
json_params : Dict [ str , Union [ type , Tuple [ type , object ] ] ] = { }
2024-02-01 05:43:43 +01:00
for openapi_parameter in openapi_parameters :
name = openapi_parameter . name
if openapi_parameter . json_encoded :
2020-06-27 19:23:50 +02:00
# If content_type is application/json, then the
# parameter needs to be handled specially, as REQ can
# either return the application/json as a string or it
# can either decode it and return the required
# elements. For example `to` array in /messages: POST
# is processed by REQ as a string and then its type is
# checked in the view code.
#
# Meanwhile `profile_data` in /users/{user_id}: GET is
2020-08-11 01:47:44 +02:00
# taken as array of objects. So treat them separately.
2024-02-01 05:43:43 +01:00
json_params [ name ] = schema_type ( openapi_parameter . value_schema )
2020-05-11 16:26:33 +02:00
continue
2024-02-01 05:43:43 +01:00
openapi_params . add ( ( name , schema_type ( openapi_parameter . value_schema ) ) )
2019-08-04 15:55:32 +02:00
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
function_params : Set [ Tuple [ str , Union [ type , Tuple [ type , object ] ] ] ] = set ( )
2019-07-10 13:23:25 +02:00
2020-07-01 01:34:35 +02:00
for pname , defval in inspect . signature ( function ) . parameters . items ( ) :
2019-07-10 13:23:25 +02:00
defval = defval . default
2020-07-01 01:34:35 +02:00
if isinstance ( defval , _REQ ) :
2019-07-10 13:23:25 +02:00
# TODO: The below inference logic in cases where
# there's a converter function declared is incorrect.
# Theoretically, we could restructure the converter
# function model so that we can check what type it
# excepts to be passed to make validation here
# possible.
2020-07-01 01:34:35 +02:00
vtype = self . get_standardized_argument_type ( function . __annotations__ [ pname ] )
vname = defval . post_var_name
assert vname is not None
2020-06-27 19:23:50 +02:00
if vname in json_params :
2024-05-20 22:16:21 +02:00
# Here we have two cases. If the REQ type is
2020-06-27 19:23:50 +02:00
# string then there is no point in comparing as
# JSON can always be returned as string. Ideally,
# we wouldn't use REQ for a JSON object without a
# validator in these cases, but it does happen.
#
# If the REQ type is not string then, insert the
2020-10-23 02:43:28 +02:00
# REQ and OpenAPI data types of the variable in
2020-06-27 19:23:50 +02:00
# the respective sets so that they can be dealt
# with later. In either case remove the variable
# from `json_params`.
2024-03-01 03:07:34 +01:00
if vtype is str :
2020-06-27 19:23:50 +02:00
json_params . pop ( vname , None )
continue
else :
openapi_params . add ( ( vname , json_params [ vname ] ) )
json_params . pop ( vname , None )
2019-07-10 13:23:25 +02:00
function_params . add ( ( vname , vtype ) )
2020-06-27 19:23:50 +02:00
# After the above operations `json_params` should be empty.
2021-02-12 08:19:30 +01:00
assert len ( json_params ) == 0
2019-07-10 13:23:25 +02:00
diff = openapi_params - function_params
if diff : # nocoverage
self . render_openapi_type_exception ( function , openapi_params , function_params , diff )
2023-08-12 05:24:45 +02:00
def check_openapi_arguments_for_view (
self ,
pattern : URLPattern ,
function_name : str ,
function : Callable [ . . . , HttpResponse ] ,
method : str ,
tags : Set [ str ] ,
) - > None :
# Our accounting logic in the `has_request_variables()`
# code means we have the list of all arguments
# accepted by every view function in arguments_map.
accepted_arguments = set ( arguments_map [ function_name ] )
regex_pattern = pattern . pattern . regex . pattern
for url_format , url_params in regex_helper . normalize ( regex_pattern ) :
url_pattern = " / " + url_format % { param : f " {{ { param } }} " for param in url_params }
if " intentionally_undocumented " in tags :
self . ensure_no_documentation_if_intentionally_undocumented ( url_pattern , method )
continue
if url_pattern in self . pending_endpoints :
# HACK: After all pending_endpoints have been resolved, we should remove
# this segment and the "msg" part of the `ensure_no_...` method.
msg = f """
We found some OpenAPI documentation for { method } { url_pattern } ,
so maybe we shouldn ' t include it in pending_endpoints.
"""
self . ensure_no_documentation_if_intentionally_undocumented ( url_pattern , method , msg )
continue
try :
# Don't include OpenAPI parameters that live in
# the path; these are not extracted by REQ.
openapi_parameters = get_openapi_parameters (
url_pattern , method , include_url_parameters = False
)
except Exception : # nocoverage
raise AssertionError ( f " Could not find OpenAPI docs for { method } { url_pattern } " )
# We now have everything we need to understand the
# function as defined in our urls.py:
#
# * method is the HTTP method, e.g. GET, POST, or PATCH
#
# * p.pattern.regex.pattern is the URL pattern; might require
# some processing to match with OpenAPI rules
#
# * accepted_arguments is the full set of arguments
# this method accepts (from the REQ declarations in
# code).
#
# * The documented parameters for the endpoint as recorded in our
# OpenAPI data in zerver/openapi/zulip.yaml.
#
# We now compare these to confirm that the documented
# argument list matches what actually appears in the
# codebase.
2024-02-01 05:43:43 +01:00
openapi_parameter_names = { parameter . name for parameter in openapi_parameters }
2023-08-12 05:24:45 +02:00
if len ( accepted_arguments - openapi_parameter_names ) > 0 : # nocoverage
print ( " Undocumented parameters for " , url_pattern , method , function_name )
print ( " + " , openapi_parameter_names )
print ( " - " , accepted_arguments )
assert url_pattern in self . buggy_documentation_endpoints
elif len ( openapi_parameter_names - accepted_arguments ) > 0 : # nocoverage
print (
" Documented invalid parameters for " ,
url_pattern ,
method ,
function_name ,
)
print ( " - " , openapi_parameter_names )
print ( " + " , accepted_arguments )
assert url_pattern in self . buggy_documentation_endpoints
else :
self . assertEqual ( openapi_parameter_names , accepted_arguments )
self . check_argument_types ( function , openapi_parameters )
self . checked_endpoints . add ( url_pattern )
2019-06-06 22:22:21 +02:00
def test_openapi_arguments(self) -> None:
    """This end-to-end API documentation test compares the arguments
    defined in the actual code using @has_request_variables and
    REQ(), with the arguments declared in our API documentation
    for every API endpoint in Zulip.

    First, we import the fancy-Django version of zproject/urls.py
    by doing this, each has_request_variables wrapper around each
    imported view function gets called to generate the wrapped
    view function and thus filling the global arguments_map variable.
    Basically, we're exploiting code execution during import.

    Then we need to import some view modules not already imported in
    urls.py. We use this different syntax because of the linters complaining
    of an unused import (which is correct, but we do this for triggering the
    has_request_variables decorator).

    At the end, we perform a reverse mapping test that verifies that
    every URL pattern defined in the OpenAPI documentation actually exists
    in code.
    """
    from zproject import urls as urlconf

    # We loop through all the API patterns, looking in particular
    # for those using the rest_dispatch decorator; we then parse
    # its mapping of (HTTP_METHOD -> FUNCTION).
    for p in urlconf.v1_api_and_json_patterns + urlconf.v1_api_mobile_patterns:
        methods_endpoints: Dict[str, Any] = {}
        if p.callback is not rest_dispatch:
            # Endpoints not using rest_dispatch don't have extra data.
            if str(p.pattern) in self.documented_post_only_endpoints:
                methods_endpoints = dict(POST=p.callback)
            else:
                methods_endpoints = dict(GET=p.callback)
        else:
            methods_endpoints = assert_is_not_none(p.default_args)

        # since the module was already imported and is now residing in
        # memory, we won't actually face any performance penalties here.
        for method, value in methods_endpoints.items():
            if callable(value):
                function: Callable[..., HttpResponse] = value
                tags: Set[str] = set()
            else:
                function, tags = value

            if function is get_events:
                # Work around the fact that the registered
                # get_events view function isn't where we do
                # @has_request_variables.
                #
                # TODO: Make this configurable via an optional argument
                # to has_request_variables, e.g.
                # @has_request_variables(view_func_name="zerver.tornado.views.get_events")
                function = get_events_backend

            function_name = f"{function.__module__}.{function.__name__}"

            with self.subTest(function_name):
                self.check_openapi_arguments_for_view(p, function_name, function, method, tags)

    self.check_for_non_existent_openapi_endpoints()
2019-07-29 15:46:48 +02:00
2019-08-04 08:14:08 +02:00
2019-07-29 15:46:48 +02:00
class TestCurlExampleGeneration ( ZulipTestCase ) :
# Minimal OpenAPI spec fixture: one POST endpoint with two plain
# (non-example) query parameters, used to test curl example generation
# when the spec provides no example values.
spec_mock_without_examples = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/mark_stream_as_read": {
            "post": {
                "description": "Mark all the unread messages in a stream as read.",
                "parameters": [
                    {
                        "name": "stream_id",
                        "in": "query",
                        "description": "The ID of the stream whose messages should be marked as read.",
                        "schema": {
                            "type": "integer",
                        },
                        "required": True,
                    },
                    {
                        "name": "bool_param",
                        "in": "query",
                        "description": "Just a boolean parameter.",
                        "schema": {
                            "type": "boolean",
                        },
                        "required": True,
                    },
                ],
            },
        },
    },
}
python: Convert assignment type annotations to Python 3.6 style.
This commit was split by tabbott; this piece covers the vast majority
of files in Zulip, but excludes scripts/, tools/, and puppet/ to help
ensure we at least show the right error messages for Xenial systems.
We can likely further refine the remaining pieces with some testing.
Generated by com2ann, with whitespace fixes and various manual fixes
for runtime issues:
- invoiced_through: Optional[LicenseLedger] = models.ForeignKey(
+ invoiced_through: Optional["LicenseLedger"] = models.ForeignKey(
-_apns_client: Optional[APNsClient] = None
+_apns_client: Optional["APNsClient"] = None
- notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- signup_notifications_stream: Optional[Stream] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
+ signup_notifications_stream: Optional["Stream"] = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE)
- author: Optional[UserProfile] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
+ author: Optional["UserProfile"] = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE)
- bot_owner: Optional[UserProfile] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
+ bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL)
- default_sending_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
- default_events_register_stream: Optional[Stream] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_sending_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
+ default_events_register_stream: Optional["Stream"] = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE)
-descriptors_by_handler_id: Dict[int, ClientDescriptor] = {}
+descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {}
-worker_classes: Dict[str, Type[QueueProcessingWorker]] = {}
-queues: Dict[str, Dict[str, Type[QueueProcessingWorker]]] = {}
+worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {}
+queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {}
-AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional[LDAPSearch] = None
+AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-22 01:09:50 +02:00
# Spec fixture with a bogus HTTP method name ("brew"); the generator
# should reject it, so the operation data itself is irrelevant.
spec_mock_with_invalid_method: Dict[str, object] = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/endpoint": {
            "brew": {},  # the data is irrelevant, as it should be rejected.
        },
    },
}
2019-07-29 15:46:48 +02:00
# Spec fixture with a JSON-encoded object parameter (declared via
# "content" -> "application/json" rather than a plain "schema"),
# including an example value for curl generation.
spec_mock_using_object = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/endpoint": {
            "get": {
                "description": "Get some info.",
                "parameters": [
                    {
                        "name": "param1",
                        "in": "query",
                        "description": "An object",
                        "content": {
                            "application/json": {
                                "schema": {"type": "object"},
                                "example": {
                                    "key": "value",
                                },
                            }
                        },
                        "required": True,
                    },
                ],
            },
        },
    },
}
# Spec fixture with both a path parameter (integer, with example 35) and a
# query parameter documented as an object via a JSON "content" block.
spec_mock_using_param_in_path = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/endpoint/{param1}": {
            "get": {
                "description": "Get some info.",
                "parameters": [
                    {
                        "name": "param1",
                        "in": "path",
                        "description": "Param in path",
                        "schema": {
                            "type": "integer",
                        },
                        "example": 35,
                        "required": True,
                    },
                    {
                        "name": "param2",
                        "in": "query",
                        "description": "An object",
                        "required": True,
                        "content": {
                            "application/json": {
                                "schema": {"type": "object"},
                                "example": {
                                    "key": "value",
                                },
                            }
                        },
                    },
                ],
            },
        },
    },
}
# Spec fixture with an object-typed query parameter that has NO example;
# used to verify that curl example generation raises an error for it.
spec_mock_using_object_without_example = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/endpoint": {
            "get": {
                "description": "Get some info.",
                "parameters": [
                    {
                        "name": "param1",
                        "in": "query",
                        "description": "An object",
                        "schema": {
                            "type": "object",
                        },
                        "required": True,
                    },
                ],
            },
        },
    },
}
# Spec fixture with an array-typed query parameter that has NO example;
# used to verify that curl example generation raises an error for it.
spec_mock_using_array_without_example = {
    "security": [{"basicAuth": []}],
    "paths": {
        "/endpoint": {
            "get": {
                "description": "Get some info.",
                "parameters": [
                    {
                        "name": "param1",
                        "in": "query",
                        "description": "An array",
                        "schema": {
                            "type": "array",
                        },
                        "required": True,
                    },
                ],
            },
        },
    },
}
2019-08-16 21:17:01 +02:00
def curl_example(self, endpoint: str, method: str, *args: Any, **kwargs: Any) -> List[str]:
    """Generate a curl example for `endpoint` against the local dev API URL."""
    api_url = "http://localhost:9991/api"
    return generate_curl_example(endpoint, method, api_url, *args, **kwargs)
2019-08-16 21:17:01 +02:00
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_example(self) -> None:
    # A documented GET endpoint should render as a `curl -G` invocation
    # with basic auth and url-encoded query data.
    expected = [
        "```curl",
        "curl -sSX GET -G http://localhost:9991/api/v1/get_stream_id \\",
        "    -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
        "    --data-urlencode stream=Denmark",
        "```",
    ]
    self.assertEqual(self.curl_example("/get_stream_id", "GET"), expected)
2022-02-08 00:13:33 +01:00
def test_generate_and_render_curl_example_with_nonexistent_endpoints(self) -> None:
    # Endpoint/method pairs absent from the OpenAPI spec raise KeyError.
    undocumented_cases = (
        ("/mark_this_stream_as_read", "POST"),
        ("/mark_stream_as_read", "GET"),
    )
    for endpoint, method in undocumented_cases:
        with self.assertRaises(KeyError):
            self.curl_example(endpoint, method)
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_without_auth(self) -> None:
    # Endpoints without a security requirement must not include the `-u` flag.
    expected = [
        "```curl",
        "curl -sSX POST http://localhost:9991/api/v1/dev_fetch_api_key \\",
        "    --data-urlencode username=iago@zulip.com",
        "```",
    ]
    self.assertEqual(self.curl_example("/dev_fetch_api_key", "POST"), expected)
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_with_default_examples ( self , spec_mock : MagicMock ) - > None :
spec_mock . return_value = self . spec_mock_without_examples
2019-08-16 21:17:01 +02:00
generated_curl_example = self . curl_example ( " /mark_stream_as_read " , " POST " )
2019-07-29 15:46:48 +02:00
expected_curl_example = [
" ```curl " ,
2019-08-07 10:55:41 +02:00
" curl -sSX POST http://localhost:9991/api/v1/mark_stream_as_read \\ " ,
2019-12-04 12:27:15 +01:00
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\ " ,
2020-11-04 02:49:09 +01:00
" --data-urlencode stream_id=1 \\ " ,
" --data-urlencode bool_param=false " ,
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys
last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
m = re.match(
r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
)
if m:
filename, row_str, col_str, err = m.groups()
row, col = int(row_str), int(col_str)
if filename == last_filename:
assert last_row != row
else:
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
with open(filename) as f:
lines = f.readlines()
last_filename = filename
last_row = row
line = lines[row - 1]
if err in ["C812", "C815"]:
lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
elif err in ["C819"]:
assert line[col - 2] == ","
lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
with open(last_filename, "w") as f:
f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
" ``` " ,
2019-07-29 15:46:48 +02:00
]
self . assertEqual ( generated_curl_example , expected_curl_example )
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_with_invalid_method ( self , spec_mock : MagicMock ) - > None :
spec_mock . return_value = self . spec_mock_with_invalid_method
with self . assertRaises ( ValueError ) :
2019-08-16 21:17:01 +02:00
self . curl_example ( " /endpoint " , " BREW " ) # see: HTCPCP
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_with_array_example(self) -> None:
    # Array-valued parameters (narrow) are rendered as a JSON literal
    # wrapped in single quotes for the shell.
    expected = [
        "```curl",
        "curl -sSX GET -G http://localhost:9991/api/v1/messages \\",
        "    -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
        "    --data-urlencode anchor=43 \\",
        "    --data-urlencode include_anchor=false \\",
        "    --data-urlencode num_before=4 \\",
        "    --data-urlencode num_after=8 \\",
        '    --data-urlencode \'narrow=[{"operand": "Denmark", "operator": "channel"}]\' \\',
        "    --data-urlencode client_gravatar=false \\",
        "    --data-urlencode apply_markdown=false \\",
        "    --data-urlencode use_first_unread_anchor=true",
        "```",
    ]
    self.assertEqual(self.curl_example("/messages", "GET"), expected)
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_with_object ( self , spec_mock : MagicMock ) - > None :
spec_mock . return_value = self . spec_mock_using_object
2019-08-16 21:17:01 +02:00
generated_curl_example = self . curl_example ( " /endpoint " , " GET " )
2019-07-29 15:46:48 +02:00
expected_curl_example = [
2021-02-12 08:20:45 +01:00
" ```curl " ,
" curl -sSX GET -G http://localhost:9991/api/v1/endpoint \\ " ,
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\ " ,
2020-11-04 02:49:09 +01:00
' --data-urlencode \' param1= { " key " : " value " } \' ' ,
2021-02-12 08:20:45 +01:00
" ``` " ,
2019-07-29 15:46:48 +02:00
]
self . assertEqual ( generated_curl_example , expected_curl_example )
2019-10-03 15:02:51 +02:00
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2021-02-12 08:19:30 +01:00
def test_generate_and_render_curl_with_object_without_example (
self , spec_mock : MagicMock
) - > None :
2019-07-29 15:46:48 +02:00
spec_mock . return_value = self . spec_mock_using_object_without_example
with self . assertRaises ( ValueError ) :
2019-08-16 21:17:01 +02:00
self . curl_example ( " /endpoint " , " GET " )
2019-07-29 15:46:48 +02:00
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2021-02-12 08:19:30 +01:00
def test_generate_and_render_curl_with_array_without_example (
self , spec_mock : MagicMock
) - > None :
2019-07-29 15:46:48 +02:00
spec_mock . return_value = self . spec_mock_using_array_without_example
with self . assertRaises ( ValueError ) :
2019-08-16 21:17:01 +02:00
self . curl_example ( " /endpoint " , " GET " )
2019-07-29 15:46:48 +02:00
2020-08-12 04:54:48 +02:00
@patch ( " zerver.openapi.openapi.OpenAPISpec.openapi " )
2019-10-03 15:59:28 +02:00
def test_generate_and_render_curl_with_param_in_path ( self , spec_mock : MagicMock ) - > None :
spec_mock . return_value = self . spec_mock_using_param_in_path
generated_curl_example = self . curl_example ( " /endpoint/ {param1} " , " GET " )
expected_curl_example = [
2021-02-12 08:20:45 +01:00
" ```curl " ,
" curl -sSX GET -G http://localhost:9991/api/v1/endpoint/35 \\ " ,
" -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\ " ,
2020-11-04 02:49:09 +01:00
' --data-urlencode \' param2= { " key " : " value " } \' ' ,
2021-02-12 08:20:45 +01:00
" ``` " ,
2019-10-03 15:59:28 +02:00
]
self . assertEqual ( generated_curl_example , expected_curl_example )
2019-07-29 15:46:48 +02:00
def test_generate_and_render_curl_wrapper(self) -> None:
    # render_curl_example parses the "endpoint:method:email:key" argument
    # string and forwards to the generator with the given api_url.
    actual = render_curl_example(
        "/get_stream_id:GET:email:key", api_url="https://zulip.example.com/api"
    )
    expected = [
        "```curl",
        "curl -sSX GET -G https://zulip.example.com/api/v1/get_stream_id \\",
        "    -u email:key \\",
        "    --data-urlencode stream=Denmark",
        "```",
    ]
    self.assertEqual(actual, expected)
2019-08-04 08:14:08 +02:00
def test_generate_and_render_curl_example_with_excludes(self) -> None:
    # Parameters listed in `exclude` are omitted from the generated command.
    actual = self.curl_example(
        "/messages", "GET", exclude=["client_gravatar", "apply_markdown"]
    )
    expected = [
        "```curl",
        "curl -sSX GET -G http://localhost:9991/api/v1/messages \\",
        "    -u BOT_EMAIL_ADDRESS:BOT_API_KEY \\",
        "    --data-urlencode anchor=43 \\",
        "    --data-urlencode include_anchor=false \\",
        "    --data-urlencode num_before=4 \\",
        "    --data-urlencode num_after=8 \\",
        '    --data-urlencode \'narrow=[{"operand": "Denmark", "operator": "channel"}]\' \\',
        "    --data-urlencode use_first_unread_anchor=true",
        "```",
    ]
    self.assertEqual(actual, expected)
2020-05-20 19:53:41 +02:00
2021-02-12 08:19:30 +01:00
2020-05-20 19:53:41 +02:00
class OpenAPIAttributesTest(ZulipTestCase):
    def test_attributes(self) -> None:
        """
        Checks:
        * All endpoints have `operationId` and `tag` attributes.
        * All example responses match their schema.
        * All example events in `/get-events` match an event schema.
        * That no opaque object exists.
        """
        # Endpoints skipped entirely (not regular OpenAPI operations).
        EXCLUDE = ["/real-time"]
        # Every operation's first tag must come from this closed set.
        VALID_TAGS = [
            "users",
            "server_and_organizations",
            "authentication",
            "real_time_events",
            "channels",
            "messages",
            "drafts",
            "webhooks",
            "scheduled_messages",
            "mobile",
            "invites",
        ]
        paths = OpenAPISpec(OPENAPI_SPEC_PATH).openapi()["paths"]
        for path, path_item in paths.items():
            if path in EXCLUDE:
                continue
            for method, operation in path_item.items():
                assert "operationId" in operation
                assert "tags" in operation
                tag = operation["tags"][0]
                assert tag in VALID_TAGS
                for status_code, response in operation["responses"].items():
                    schema = response["content"]["application/json"]["schema"]
                    # Validate the documented examples for each event type
                    # in api/get-events for the documented event schemas.
                    if path == "/events" and method == "get" and status_code == "200":
                        for event_type in schema["properties"]["events"]["items"]["oneOf"]:
                            # Wrap each event example in a minimal successful
                            # /events response payload before validating.
                            event_array = [event_type["example"]]
                            content = {
                                "queue_id": "fb67bf8a-c031-47cc-84cf-ed80accacda8",
                                "events": event_array,
                                "msg": "",
                                "result": "success",
                            }
                            assert validate_against_openapi_schema(
                                content, path, method, status_code
                            )
                    if "oneOf" in schema:
                        # Validate each alternative schema and its example
                        # separately, then skip the single-schema path below.
                        for subschema in schema["oneOf"]:
                            validate_schema(subschema)
                            assert validate_against_openapi_schema(
                                subschema["example"],
                                path,
                                method,
                                status_code,
                            )
                        continue
                    validate_schema(schema)
                    assert validate_against_openapi_schema(
                        schema["example"], path, method, status_code
                    )
2020-06-13 17:59:46 +02:00
class OpenAPIRegexTest(ZulipTestCase):
    def test_regex(self) -> None:
        """
        Calls a few documented and undocumented endpoints and checks whether they
        find a match or not.
        """
        # Some of the undocumented endpoints which are very similar to
        # some of the documented endpoints.
        for undocumented_url in (
            "/users/me/presence",
            "/users/me/subscriptions/23",
            "/users/iago/subscriptions/23",
            "/messages/matches_narrow",
        ):
            assert find_openapi_endpoint(undocumented_url) is None
        # Making sure documented endpoints are matched correctly.
        documented_cases = {
            "/users/23/subscriptions/21": "/users/{user_id}/subscriptions/{stream_id}",
            "/users/iago@zulip.com/presence": "/users/{user_id_or_email}/presence",
            "/users/iago@zulip.com": "/users/{email}",
            "/messages/23": "/messages/{message_id}",
            "/realm/emoji/realm_emoji_1": "/realm/emoji/{emoji_name}",
        }
        for url, expected_pattern in documented_cases.items():
            assert find_openapi_endpoint(url) == expected_pattern
2021-02-12 08:19:30 +01:00
2020-07-09 20:51:31 +02:00
class OpenAPIRequestValidatorTest(ZulipTestCase):
    def test_validator(self) -> None:
        """
        Test to make sure the request validator works properly
        The tests cover both cases such as catching valid requests marked
        as invalid and making sure invalid requests are marked properly
        """
        # `/users/me/subscriptions` doesn't require any parameters
        validate_request("/users/me/subscriptions", "get", {}, {}, False, "200")
        with self.assertRaises(SchemaError):
            # `/messages` POST does not work on an empty response
            validate_request("/messages", "post", {}, {}, False, "200")
        # 400 responses are allowed to fail validation.
        validate_request("/messages", "post", {}, {}, False, "400")
        # `intentionally_undocumented` allows validation errors on
        # 200 responses.
        validate_request(
            "/dev_fetch_api_key", "post", {}, {}, False, "200", intentionally_undocumented=True
        )