from typing import Dict, List

from django.db import connection
from psycopg2.sql import SQL

from zerver.models import Recipient


class StreamRecipientMap:
    '''
    This class maps stream_id -> recipient_id and vice versa.

    It is useful for bulk operations. Call the populate_* methods
    to initialize the data structures. You should try to avoid
    excessive queries by finding ids up front, but you can call
    this repeatedly, and it will only look up new ids.

    You should ONLY use this class for READ operations.

    Note that this class uses raw SQL, because we want to highly
    optimize page loads.
    '''
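
    # Illustrative usage sketch (the recipient ids below are made up):
    #
    #     recip_map = StreamRecipientMap()
    #     recip_map.populate_for_recipient_ids([101, 102, 103])
    #     stream_id = recip_map.stream_id_for(101)
    #     recipient_id = recip_map.recipient_id_for(stream_id)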

    def __init__(self) -> None:
        self.recip_to_stream: Dict[int, int] = dict()
        self.stream_to_recip: Dict[int, int] = dict()

    def populate_with(self, *, stream_id: int, recipient_id: int) -> None:
        # We use * to enforce using named arguments when calling this function,
        # to avoid confusion about the ordering of the two integers.
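        # For example (with made-up ids):
        #     populate_with(stream_id=7, recipient_id=42)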
        self.recip_to_stream[recipient_id] = stream_id
        self.stream_to_recip[stream_id] = recipient_id

    def populate_for_recipient_ids(self, recipient_ids: List[int]) -> None:
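        # Skip recipient ids we have already looked up, so repeated calls
        # only query the database for new ids.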
        recipient_ids = sorted([
            recip_id for recip_id in recipient_ids
            if recip_id not in self.recip_to_stream
        ])

        if not recipient_ids:
            return

        # See the class docstring for why we use raw SQL here.
        query = SQL('''
            SELECT
                zerver_recipient.id as recipient_id,
                zerver_recipient.type_id as stream_id
            FROM
                zerver_recipient
            WHERE
                zerver_recipient.type = %(STREAM)s
            AND
                zerver_recipient.id in %(recipient_ids)s
            ''')
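
        # %(STREAM)s and %(recipient_ids)s are bound from the params dict passed
        # to cursor.execute() below; passing the ids as a tuple lets psycopg2
        # render them as a parenthesized list for the IN clause.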
        cursor = connection.cursor()
        cursor.execute(query, {
            "STREAM": Recipient.STREAM,
            "recipient_ids": tuple(recipient_ids),
        })
        rows = cursor.fetchall()
        cursor.close()
        for recip_id, stream_id in rows:
            self.recip_to_stream[recip_id] = stream_id
            self.stream_to_recip[stream_id] = recip_id

    def recipient_id_for(self, stream_id: int) -> int:
        return self.stream_to_recip[stream_id]

    def stream_id_for(self, recip_id: int) -> int:
        return self.recip_to_stream[recip_id]

    def recipient_to_stream_id_dict(self) -> Dict[int, int]:
        return self.recip_to_stream