zulip/zerver/management/commands/enqueue_file.py

import sys
from argparse import ArgumentParser
from typing import IO, Any

import orjson
from django.core.management.base import BaseCommand

from zerver.lib.queue import queue_json_publish

def error(*args: Any) -> None:
    # Fallback processor passed to queue_json_publish() below: when RabbitMQ
    # is not in use, fail loudly rather than silently dropping the event.
    raise Exception('We cannot enqueue because settings.USING_RABBITMQ is False.')

class Command(BaseCommand):
help = """Read JSON lines from a file and enqueue them to a worker queue.
Each line in the file should either be a JSON payload or two tab-separated
fields, the second of which is a JSON payload. (The latter is to accommodate
the format of error files written by queue workers that catch exceptions--their
first field is a timestamp that we ignore.)

You can use "-" to represent stdin.
"""

    def add_arguments(self, parser: ArgumentParser) -> None:
        parser.add_argument('queue_name', metavar='<queue>',
                            help="name of worker queue to enqueue to")
        parser.add_argument('file_name', metavar='<file>',
                            help="name of file containing JSON lines")

    def handle(self, *args: Any, **options: str) -> None:
        queue_name = options['queue_name']
        file_name = options['file_name']

        if file_name == '-':
            f: IO[str] = sys.stdin
        else:
            f = open(file_name)

        while True:
            line = f.readline()
            if not line:
                break

            line = line.strip()
            try:
                payload = line.split('\t')[1]
            except IndexError:
                payload = line

            print(f'Queueing to queue {queue_name}: {payload}')

            # Verify that payload is valid json.
            data = orjson.loads(payload)

            # This is designed to use the `error` method rather than
            # the call_consume_in_tests flow.
            queue_json_publish(queue_name, data, error)
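
# A minimal sketch of driving this command programmatically instead of via the
# CLI, assuming a Django environment with Zulip's settings loaded; the queue
# name "my_queue" and the file path here are hypothetical:
#
#     from django.core.management import call_command
#     call_command("enqueue_file", "my_queue", "/tmp/events.jsonl")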