from typing import Dict, Iterable, Tuple, Callable, TypeVar, Iterator

import os
import pty
import sys
import errno

JobData = TypeVar('JobData')


def run_parallel(job: Callable[[JobData], int],
                 data: Iterable[JobData],
                 threads: int = 6) -> Iterator[Tuple[int, JobData]]:
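    """Fork one child process per item in ``data``, running ``job(item)`` in each.

    At most ``threads`` children run at once. Yields ``(exit_status, item)``
    pairs as children finish, and stops launching new jobs once any child
    exits with a nonzero status.
    """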
    pids: Dict[int, JobData] = {}

    def wait_for_one() -> Tuple[int, JobData]:
        while True:
            try:
                (pid, status) = os.wait()
                return status, pids.pop(pid)
            except KeyError:
                # os.wait() reaped a child we didn't start; keep waiting.
                pass

    for item in data:
        pid = os.fork()
        if pid == 0:
            # Child: detach from the parent's stdin, then run the job and
            # exit with its return value as the process's exit status.
            sys.stdin.close()
            try:
                os.close(pty.STDIN_FILENO)
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise
            sys.stdin = open("/dev/null")
            os._exit(job(item))

        # Parent: record the child and throttle to `threads` concurrent jobs.
        pids[pid] = item
        threads = threads - 1

        if threads == 0:
            (status, item) = wait_for_one()
            threads += 1
            yield (status, item)
            if status != 0:
                # Stop if any error occurred
                break

    # Drain the remaining children; ECHILD means there are none left.
    while True:
        try:
            (status, item) = wait_for_one()
            yield (status, item)
        except OSError as e:
            if e.errno == errno.ECHILD:
                break
            else:
                raise
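
# Hypothetical usage sketch (illustrative only, not part of the test below):
# run an external command per item with a few workers. `compress` and the
# file names are made up for the example.
#
#     import subprocess
#
#     def compress(path: str) -> int:
#         return subprocess.call(["gzip", "-k", path])
#
#     for status, path in run_parallel(compress, ["a.log", "b.log"], threads=4):
#         if status != 0:
#             print("gzip failed on", path)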


if __name__ == "__main__":
    # run some unit tests
    import time

    jobs = [10, 19, 18, 6, 14, 12, 8, 2, 1, 13, 3, 17, 9, 11, 5, 16, 7, 15, 4]
    expected_output = [6, 10, 12, 2, 1, 14, 8, 3, 18, 19, 5, 9, 13, 11, 4, 7, 17, 16, 15]

    def wait_and_print(x: int) -> int:
        time.sleep(x * 0.1)
        return 0

    output = []
    for (status, job) in run_parallel(wait_and_print, jobs):
        output.append(job)

    if output == expected_output:
        print("Successfully passed test!")
    else:
        print("Failed test!")
        print(jobs)
        print(expected_output)
        print(output)