mypy: Type simple generators as Iterator, not Iterable.

A generator that yields values without receiving or returning them is an
Iterator. Although every Iterator happens to be iterable, Iterable is a
confusing annotation for generators because a generator is only iterable
once.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
parent c242d176ba
commit 3ffed617a2
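To illustrate the distinction the commit message draws, here is a minimal standalone sketch (not part of this commit; the function name is invented): a generator function that only yields values supports next() directly and is exhausted after a single pass, which is exactly what Iterator promises and Iterable does not.

from typing import Iterator

def count_up_to(n: int) -> Iterator[int]:
    # A simple generator: yields values, never receives or returns any.
    i = 0
    while i < n:
        yield i
        i += 1

nums = count_up_to(3)
print(next(nums))   # 0 -- an Iterator supports next() directly
print(list(nums))   # [1, 2] -- the rest of the single pass
print(list(nums))   # [] -- a generator is only iterable once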
@@ -1,6 +1,6 @@
 import json
 import re
-from typing import Callable, Iterable, List, Optional, Union
+from typing import Callable, Iterator, List, Optional, Union

 import scrapy
 from scrapy.http import Request, Response
@@ -105,8 +105,8 @@ class BaseDocumentationSpider(scrapy.Spider):

         return callback

-    def _make_requests(self, url: str) -> Iterable[Request]:
-        callback: Callable[[Response], Optional[Iterable[Request]]] = self.parse
+    def _make_requests(self, url: str) -> Iterator[Request]:
+        callback: Callable[[Response], Optional[Iterator[Request]]] = self.parse
         dont_filter = False
         method = 'GET'
         if self._is_external_url(url):
@@ -120,11 +120,11 @@ class BaseDocumentationSpider(scrapy.Spider):
         yield Request(url, method=method, callback=callback, dont_filter=dont_filter,
                       errback=self.error_callback)

-    def start_requests(self) -> Iterable[Request]:
+    def start_requests(self) -> Iterator[Request]:
         for url in self.start_urls:
             yield from self._make_requests(url)

-    def parse(self, response: Response) -> Iterable[Request]:
+    def parse(self, response: Response) -> Iterator[Request]:
         self.log(response)

         if getattr(self, 'validate_html', False):
@@ -142,7 +142,7 @@ class BaseDocumentationSpider(scrapy.Spider):
                                       canonicalize=False).extract_links(response):
             yield from self._make_requests(link.url)

-    def retry_request_with_get(self, request: Request) -> Iterable[Request]:
+    def retry_request_with_get(self, request: Request) -> Iterator[Request]:
         request.method = 'GET'
         request.dont_filter = True
         yield request
@@ -150,7 +150,7 @@ class BaseDocumentationSpider(scrapy.Spider):
     def exclude_error(self, url: str) -> bool:
         return url in EXCLUDED_URLS

-    def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterable[Request]]]:
+    def error_callback(self, failure: Failure) -> Optional[Union[Failure, Iterator[Request]]]:
         if failure.check(HttpError):
             response = failure.value.response
             if self.exclude_error(response.url):
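For context on the spider hunks above, a pared-down example of the pattern the annotations describe (a hypothetical spider; only the scrapy names are real): start_requests and parse contain yield, so each call returns a single-use generator of Request objects, and Iterator[Request] states that precisely.

from typing import Iterator

import scrapy
from scrapy.http import Request, Response

class ExampleSpider(scrapy.Spider):
    # Hypothetical minimal spider, not part of the Zulip documentation crawler.
    name = 'example'
    start_urls = ['https://example.com/']

    def start_requests(self) -> Iterator[Request]:
        for url in self.start_urls:
            yield Request(url, callback=self.parse)

    def parse(self, response: Response) -> Iterator[Request]:
        for href in response.css('a::attr(href)').getall():
            yield response.follow(href, callback=self.parse)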
@@ -2,7 +2,19 @@ import logging
 import os
 import random
 import shutil
-from typing import AbstractSet, Any, Callable, Dict, Iterable, List, Optional, Set, Tuple, TypeVar
+from typing import (
+    AbstractSet,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    Iterator,
+    List,
+    Optional,
+    Set,
+    Tuple,
+    TypeVar,
+)

 import requests
 import ujson
@@ -576,7 +588,7 @@ def write_avatar_png(avatar_folder: str,

 ListJobData = TypeVar('ListJobData')
 def run_parallel_wrapper(f: Callable[[ListJobData], None], full_items: List[ListJobData],
-                         threads: int=6) -> Iterable[Tuple[int, List[ListJobData]]]:
+                         threads: int=6) -> Iterator[Tuple[int, List[ListJobData]]]:
     logging.info("Distributing %s items across %s threads", len(full_items), threads)

     def wrapping_function(items: List[ListJobData]) -> int:
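run_parallel_wrapper is likewise a generator, yielding (index, chunk) tuples. A simplified stand-in (an invented helper, not the real implementation) shows the shape that Iterator[Tuple[int, List[ListJobData]]] describes:

from typing import Iterator, List, Tuple, TypeVar

T = TypeVar('T')

def batched(items: List[T], size: int) -> Iterator[Tuple[int, List[T]]]:
    # Hypothetical helper: yield (batch_index, batch) pairs.
    for index, start in enumerate(range(0, len(items), size)):
        yield index, items[start:start + size]

for index, batch in batched(list(range(7)), 3):
    print(index, batch)   # (0, [0, 1, 2]) then (1, [3, 4, 5]) then (2, [6])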
@@ -7,7 +7,7 @@ import re
 import string
 from itertools import zip_longest
 from time import sleep
-from typing import Any, Callable, Iterable, List, Optional, Sequence, Set, Tuple, TypeVar
+from typing import Any, Callable, Iterator, List, Optional, Sequence, Set, Tuple, TypeVar

 from django.conf import settings

@@ -122,7 +122,7 @@ def has_api_key_format(key: str) -> bool:
 def query_chunker(queries: List[Any],
                   id_collector: Optional[Set[int]]=None,
                   chunk_size: int=1000,
-                  db_chunk_size: Optional[int]=None) -> Iterable[Any]:
+                  db_chunk_size: Optional[int]=None) -> Iterator[Any]:
     '''
     This merges one or more Django ascending-id queries into
     a generator that returns chunks of chunk_size row objects
@@ -149,7 +149,7 @@ def query_chunker(queries: List[Any],
     else:
         id_collector = set()

-    def chunkify(q: Any, i: int) -> Iterable[Tuple[int, int, Any]]:
+    def chunkify(q: Any, i: int) -> Iterator[Tuple[int, int, Any]]:
         q = q.order_by('id')
         min_id = -1
         while True:
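The nested chunkify in query_chunker is also a generator. A much-simplified stand-in that walks plain dicts instead of Django querysets (an assumption for illustration, not the actual implementation) shows the (id, query_index, row) tuples its Iterator[Tuple[int, int, Any]] annotation refers to:

from typing import Any, Dict, Iterator, List, Tuple

def chunkify(rows: List[Dict[str, Any]], i: int,
             chunk_size: int = 2) -> Iterator[Tuple[int, int, Any]]:
    # Simplified stand-in: yield (id, query_index, row) in ascending-id order,
    # one chunk_size-sized batch at a time.
    rows = sorted(rows, key=lambda row: row['id'])
    min_id = -1
    while True:
        chunk = [row for row in rows if row['id'] > min_id][:chunk_size]
        if not chunk:
            return
        for row in chunk:
            yield row['id'], i, row
        min_id = chunk[-1]['id']

rows = [{'id': 3}, {'id': 1}, {'id': 2}]
print(list(chunkify(rows, 0)))   # [(1, 0, {'id': 1}), (2, 0, {'id': 2}), (3, 0, {'id': 3})]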
@@ -34,7 +34,7 @@ import json
 import os
 import re
 from argparse import ArgumentParser
-from typing import Any, Dict, Iterable, List, Mapping
+from typing import Any, Dict, Iterable, Iterator, List, Mapping

 from django.core.management.commands import makemessages
 from django.template.base import BLOCK_TAG_END, BLOCK_TAG_START
@@ -205,7 +205,7 @@ class Command(makemessages.Command):
     def get_base_path(self) -> str:
         return self.frontend_output

-    def get_output_paths(self) -> Iterable[str]:
+    def get_output_paths(self) -> Iterator[str]:
         base_path = self.get_base_path()
         locales = self.get_locales()
         for path in [os.path.join(base_path, locale) for locale in locales]:
@@ -181,7 +181,7 @@ class QueryUtilTest(ZulipTestCase):
             id_collector=all_msg_ids,
             chunk_size=10,  # use a different size each time
         )
-        first_chunk = next(chunker)  # type: ignore[call-overload]
+        first_chunk = next(chunker)
         self.assertEqual(len(first_chunk), 10)
         self.assertEqual(len(all_msg_ids), 10)
         expected_msg = Message.objects.all()[0:10][5]
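The test hunk is where the change pays off: with query_chunker annotated as returning an Iterator, next(chunker) type-checks and the # type: ignore[call-overload] can be dropped, because typing.Iterator guarantees __next__ while typing.Iterable only guarantees __iter__. A standalone sketch (not from the test suite):

from typing import Iterable, Iterator

def gen() -> Iterator[int]:
    yield 1

it: Iterator[int] = gen()
print(next(it))             # fine: Iterator guarantees __next__

things: Iterable[int] = [1, 2, 3]
# next(things)              # rejected by mypy (and a TypeError at runtime for a
#                           # plain list): Iterable does not guarantee __next__
print(next(iter(things)))   # an Iterable must be turned into an iterator first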