mirror of https://github.com/zulip/zulip.git
python: Strip leading and trailing spaces from docstrings.

This is enforced by Black ≥ 21.4b0.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent ad0be6cea1
commit d0c6f4f400
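For context: Black 21.4b0 and later normalize docstring whitespace, stripping the padding just inside the opening and closing quotes, which is the only change every hunk below makes. A minimal before/after sketch (the check_queue_* functions are hypothetical, not part of this commit); running `black` over the affected files produces exactly this kind of rewrite:

    # Written by hand, with padding inside the quotes:
    def check_queue_before(count: int) -> bool:
        """ Return True when the queue looks backed up. """
        return count > 50

    # After reformatting with Black >= 21.4b0, only the docstring changes:
    def check_queue_after(count: int) -> bool:
        """Return True when the queue looks backed up."""
        return count > 50

Only whitespace inside the quotes is touched; the docstring text itself is unchanged, so the whole diff is mechanical.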
@@ -110,7 +110,7 @@ CRITICAL_COUNT_THRESHOLD_DEFAULT = 50
 
 
 def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any]]:
-    """ Do a simple queue size check for queues whose workers don't publish stats files."""
+    """Do a simple queue size check for queues whose workers don't publish stats files."""
 
     results = []
     for queue, count in queue_counts_dict.items():
@@ -6829,7 +6829,7 @@ def do_remove_realm_playground(realm: Realm, realm_playground: RealmPlayground)
 
 def get_occupied_streams(realm: Realm) -> QuerySet:
     # TODO: Make a generic stub for QuerySet
-    """ Get streams with subscribers """
+    """Get streams with subscribers"""
     exists_expression = Exists(
         Subscription.objects.filter(
             active=True,
@@ -600,7 +600,7 @@ class InlineImageProcessor(markdown.treeprocessors.Treeprocessor):
 
 
 class BacktickInlineProcessor(markdown.inlinepatterns.BacktickInlineProcessor):
-    """ Return a `<code>` element containing the matching text. """
+    """Return a `<code>` element containing the matching text."""
 
     def handleMatch(  # type: ignore[override] # supertype incompatible with supersupertype
         self, m: Match[str], data: str
@@ -1480,7 +1480,7 @@ def unicode_emoji_to_codepoint(unicode_emoji: str) -> str:
 
 
 class EmoticonTranslation(markdown.inlinepatterns.Pattern):
-    """ Translates emoticons like `:)` into emoji like `:smile:`. """
+    """Translates emoticons like `:)` into emoji like `:smile:`."""
 
     def handleMatch(self, match: Match[str]) -> Optional[Element]:
         db_data = self.md.zulip_db_data
@@ -1640,7 +1640,7 @@ class OListProcessor(sane_lists.SaneOListProcessor):
 
 
 class UListProcessor(sane_lists.SaneUListProcessor):
-    """ Unordered lists, but with 2-space indent """
+    """Unordered lists, but with 2-space indent"""
 
     def __init__(self, parser: BlockParser) -> None:
         parser.md.tab_length = 2
@@ -1736,7 +1736,7 @@ class MarkdownListPreprocessor(markdown.preprocessors.Preprocessor):
     LI_RE = re.compile(r"^[ ]*([*+-]|\d\.)[ ]+(.*)", re.MULTILINE)
 
     def run(self, lines: List[str]) -> List[str]:
-        """ Insert a newline between a paragraph and ulist if missing """
+        """Insert a newline between a paragraph and ulist if missing"""
         inserts = 0
         in_code_fence: bool = False
         open_fences: List[Fence] = []
@@ -1792,7 +1792,7 @@ def prepare_linkifier_pattern(source: str) -> str:
 # Given a regular expression pattern, linkifies groups that match it
 # using the provided format string to construct the URL.
 class LinkifierPattern(markdown.inlinepatterns.Pattern):
-    """ Applied a given linkifier to the input """
+    """Applied a given linkifier to the input"""
 
     def __init__(
         self,
@@ -157,7 +157,7 @@ class FencedCodeExtension(Extension):
             self.setConfig(key, value)
 
     def extendMarkdown(self, md: Markdown) -> None:
-        """ Add FencedBlockPreprocessor to the Markdown instance. """
+        """Add FencedBlockPreprocessor to the Markdown instance."""
        md.registerExtension(self)
        processor = FencedBlockPreprocessor(
            md, run_content_validators=self.config["run_content_validators"][0]
@@ -379,7 +379,7 @@ class FencedBlockPreprocessor(Preprocessor):
         self.handlers.pop()
 
     def run(self, lines: Iterable[str]) -> List[str]:
-        """ Match and store Fenced Code Blocks in the HtmlStash. """
+        """Match and store Fenced Code Blocks in the HtmlStash."""
 
         output: List[str] = []
 
@@ -502,7 +502,7 @@ class FencedBlockPreprocessor(Preprocessor):
         return self.md.htmlStash.store(code)
 
     def _escape(self, txt: str) -> str:
-        """ basic html escaping """
+        """basic html escaping"""
         txt = txt.replace("&", "&amp;")
         txt = txt.replace("<", "&lt;")
         txt = txt.replace(">", "&gt;")
@@ -38,7 +38,7 @@ ROW_HTML = """\
 
 class EmoticonTranslationsHelpExtension(Extension):
     def extendMarkdown(self, md: Markdown) -> None:
-        """ Add SettingHelpExtension to the Markdown instance. """
+        """Add SettingHelpExtension to the Markdown instance."""
         md.registerExtension(self)
         md.preprocessors.register(EmoticonTranslation(), "emoticon_translations", -505)
 
@@ -71,7 +71,7 @@ LINK_TYPE_HANDLERS = {
 
 class RelativeLinksHelpExtension(Extension):
     def extendMarkdown(self, md: Markdown) -> None:
-        """ Add RelativeLinksHelpExtension to the Markdown instance. """
+        """Add RelativeLinksHelpExtension to the Markdown instance."""
         md.registerExtension(self)
         md.preprocessors.register(RelativeLinks(), "help_relative_links", 520)
 
@@ -96,7 +96,7 @@ settings_markdown = """
 
 class SettingHelpExtension(Extension):
     def extendMarkdown(self, md: Markdown) -> None:
-        """ Add SettingHelpExtension to the Markdown instance. """
+        """Add SettingHelpExtension to the Markdown instance."""
         md.registerExtension(self)
         md.preprocessors.register(Setting(), "setting", 515)
 
@@ -180,7 +180,7 @@ def destroy_test_databases(worker_id: Optional[int] = None) -> None:
             # delete that database, we need to not pass a number
             # argument to destroy_test_db.
             if worker_id is not None:
-                """Modified from the Django original to """
+                """Modified from the Django original to"""
                 database_id = get_database_id(worker_id)
                 connection.creation.destroy_test_db(suffix=database_id)
             else:
@@ -337,7 +337,7 @@ def render_curl_example(
     exclude: Optional[List[str]] = None,
     include: Optional[List[str]] = None,
 ) -> List[str]:
-    """ A simple wrapper around generate_curl_example. """
+    """A simple wrapper around generate_curl_example."""
     parts = function.split(":")
     endpoint = parts[0]
     method = parts[1]
@@ -189,7 +189,7 @@ class DraftCreationTests(ZulipTestCase):
         self.create_and_check_drafts_for_error(draft_dicts, "Timestamp must not be negative.")
 
     def test_create_non_stream_draft_with_no_recipient(self) -> None:
-        """ When "to" is an empty list, the type should become "" as well. """
+        """When "to" is an empty list, the type should become "" as well."""
         draft_dicts = [
             {
                 "type": "private",
@@ -298,7 +298,7 @@ class TestArchiveMessagesGeneral(ArchiveMessagesTestingBase):
         self._verify_archive_data([msg_id], usermsg_ids)
 
     def test_cross_realm_personal_message_archiving(self) -> None:
-        """Check that cross-realm personal messages get correctly archived. """
+        """Check that cross-realm personal messages get correctly archived."""
         msg_ids = [self._send_cross_realm_personal_message() for i in range(1, 7)]
         usermsg_ids = self._get_usermessage_ids(msg_ids)
         # Make the message expired on the recipient's realm:
@@ -92,7 +92,7 @@ def convert_lines_to_traceback_string(lines: Optional[List[str]]) -> str:
 
 
 def handle_event_payload(event: Dict[str, Any]) -> Tuple[str, str]:
-    """ Handle either an exception type event or a message type event payload."""
+    """Handle either an exception type event or a message type event payload."""
     # We shouldn't support the officially deprecated Raven series of SDKs.
     if int(event["version"]) < 7:
         raise UnsupportedWebhookEventType("Raven SDK")
@@ -167,7 +167,7 @@ def handle_event_payload(event: Dict[str, Any]) -> Tuple[str, str]:
 def handle_issue_payload(
     action: str, issue: Dict[str, Any], actor: Dict[str, Any]
 ) -> Tuple[str, str]:
-    """ Handle either an issue type event. """
+    """Handle either an issue type event."""
     subject = issue["title"]
     datetime = issue["lastSeen"].split(".")[0].replace("T", " ")
 
@@ -157,14 +157,14 @@ return_type = Tuple[Optional[Dict[str, Any]], Optional[Dict[str, Any]]]
 
 
 def get_old_and_new_values(change_type: str, message: Mapping[str, Any]) -> return_type:
-    """ Parses the payload and finds previous and current value of change_type."""
+    """Parses the payload and finds previous and current value of change_type."""
     old = message["change"]["diff"][change_type].get("from")
     new = message["change"]["diff"][change_type].get("to")
     return old, new
 
 
 def parse_comment(message: Mapping[str, Any]) -> Dict[str, Any]:
-    """ Parses the comment to issue, task or US. """
+    """Parses the comment to issue, task or US."""
     return {
         "event": "commented",
         "type": message["type"],
@@ -177,7 +177,7 @@ def parse_comment(message: Mapping[str, Any]) -> Dict[str, Any]:
 
 
 def parse_create_or_delete(message: Mapping[str, Any]) -> Dict[str, Any]:
-    """ Parses create or delete event. """
+    """Parses create or delete event."""
     if message["type"] == "relateduserstory":
         return {
             "type": message["type"],
@@ -202,7 +202,7 @@ def parse_create_or_delete(message: Mapping[str, Any]) -> Dict[str, Any]:
 
 
 def parse_change_event(change_type: str, message: Mapping[str, Any]) -> Optional[Dict[str, Any]]:
-    """ Parses change event. """
+    """Parses change event."""
     evt: Dict[str, Any] = {}
     values: Dict[str, Any] = {
         "user": get_owner_name(message),
@@ -285,7 +285,7 @@ def parse_webhook_test(message: Mapping[str, Any]) -> Dict[str, Any]:
 
 
 def parse_message(message: Mapping[str, Any]) -> List[Dict[str, Any]]:
-    """ Parses the payload by delegating to specialized functions. """
+    """Parses the payload by delegating to specialized functions."""
     events = []
     if message["action"] in ["create", "delete"]:
         events.append(parse_create_or_delete(message))
@@ -304,7 +304,7 @@ def parse_message(message: Mapping[str, Any]) -> List[Dict[str, Any]]:
 
 
 def generate_content(data: Mapping[str, Any]) -> str:
-    """ Gets the template string and formats it with parsed data. """
+    """Gets the template string and formats it with parsed data."""
     template = templates[data["type"]][data["event"]]
     content = template.format(**data["values"])
     return content
@@ -385,7 +385,7 @@ class EmailAuthBackend(ZulipAuthMixin):
         realm: Realm,
         return_data: Optional[Dict[str, Any]] = None,
     ) -> Optional[UserProfile]:
-        """ Authenticate a user based on email address as the user name. """
+        """Authenticate a user based on email address as the user name."""
         if not password_auth_enabled(realm):
             if return_data is not None:
                 return_data["password_auth_disabled"] = True