from typing import Dict, List, Optional, Set, Tuple, TypedDict

from django_stubs_ext import ValuesQuerySet

from zerver.lib.cache import (
    bulk_cached_fetch,
    cache_with_key,
    display_recipient_bulk_get_users_by_id_cache_key,
    display_recipient_cache_key,
    transformed_bulk_cached_fetch,
)
from zerver.lib.types import DisplayRecipientT, UserDisplayRecipient
from zerver.models import Recipient, Stream, UserProfile, bulk_get_huddle_user_ids

display_recipient_fields = [
    "id",
    "email",
    "full_name",
    "is_mirror_dummy",
]


class TinyStreamResult(TypedDict):
    id: int
    name: str


def get_display_recipient_cache_key(
    recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]
) -> str:
    return display_recipient_cache_key(recipient_id)
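
# The cache key above depends only on recipient_id; the unused recipient_type and
# recipient_type_id parameters keep the signature in line with
# get_display_recipient_remote_cache below, so that cache_with_key can call this key
# function with the same arguments as the decorated function. The decorated result
# is cached for 3600 * 24 * 7 seconds (one week).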


@cache_with_key(get_display_recipient_cache_key, timeout=3600 * 24 * 7)
def get_display_recipient_remote_cache(
    recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]
) -> DisplayRecipientT:
    """
    returns: an appropriate object describing the recipient.  For a
    stream this will be the stream name as a string.  For a huddle or
    personal, it will be an array of dicts about each recipient.
    """
    if recipient_type == Recipient.STREAM:
        assert recipient_type_id is not None
        stream = Stream.objects.values("name").get(id=recipient_type_id)
        return stream["name"]

    # The main priority for ordering here is being deterministic.
    # Right now, we order by ID, which matches the ordering of user
    # names in the left sidebar.
    user_profile_list = (
        UserProfile.objects.filter(
            subscription__recipient_id=recipient_id,
        )
        .order_by("id")
        .values(*display_recipient_fields)
    )
    return list(user_profile_list)
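
# Illustrative examples (hypothetical data) of the two shapes returned above:
#   stream recipient            -> "general"
#   personal / huddle recipient -> [{"id": 10, "email": "user10@example.com",
#                                    "full_name": "Some User", "is_mirror_dummy": False}, ...]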


def user_dict_id_fetcher(user_dict: UserDisplayRecipient) -> int:
    return user_dict["id"]


def bulk_get_user_profile_by_id(uids: List[int]) -> Dict[int, UserDisplayRecipient]:
    return bulk_cached_fetch(
        # Use a separate cache key to protect us from conflicts with
        # the get_user_profile_by_id cache, since we fetch only a few
        # fields here.
        cache_key_function=display_recipient_bulk_get_users_by_id_cache_key,
        query_function=lambda ids: list(
            UserProfile.objects.filter(id__in=ids).values(*display_recipient_fields)
        ),
        object_ids=uids,
        id_fetcher=user_dict_id_fetcher,
    )
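
# A sketch of the expected behavior of bulk_get_user_profile_by_id, assuming the usual
# bulk_cached_fetch contract (cache hits are returned directly; only missing IDs go
# through query_function) and hypothetical IDs:
#   bulk_get_user_profile_by_id([1, 2])
#   -> {1: {"id": 1, "email": "...", "full_name": "...", "is_mirror_dummy": False},
#       2: {...}}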


def bulk_fetch_display_recipients(
    recipient_tuples: Set[Tuple[int, int, int]],
) -> Dict[int, DisplayRecipientT]:
    """
    Takes a set of tuples of the form (recipient_id, recipient_type, recipient_type_id).
    Returns a dict mapping recipient_id to the corresponding display_recipient.
    """
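
    # Illustrative call with hypothetical IDs:
    #   bulk_fetch_display_recipients({(7, Recipient.STREAM, 3), (8, Recipient.PERSONAL, 12)})
    #   -> {7: "some-stream-name", 8: [{"id": 12, "email": "...", ...}]}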

    # Build dict mapping recipient id to (type, type_id) of the corresponding recipient:
    recipient_id_to_type_pair_dict = {
        recipient[0]: (recipient[1], recipient[2]) for recipient in recipient_tuples
    }
    # And the inverse mapping:
    type_pair_to_recipient_id_dict = {
        (recipient[1], recipient[2]): recipient[0] for recipient in recipient_tuples
    }
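    # For example (hypothetical data), {(7, Recipient.STREAM, 3)} would yield
    # recipient_id_to_type_pair_dict == {7: (Recipient.STREAM, 3)} and
    # type_pair_to_recipient_id_dict == {(Recipient.STREAM, 3): 7}.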

    stream_recipients = {
        recipient for recipient in recipient_tuples if recipient[1] == Recipient.STREAM
    }
    personal_and_huddle_recipients = recipient_tuples - stream_recipients

    def stream_query_function(
        recipient_ids: List[int],
    ) -> ValuesQuerySet[Stream, TinyStreamResult]:
        stream_ids = [
            recipient_id_to_type_pair_dict[recipient_id][1] for recipient_id in recipient_ids
        ]
        return Stream.objects.filter(id__in=stream_ids).values("name", "id")

    def stream_id_fetcher(stream: TinyStreamResult) -> int:
        return type_pair_to_recipient_id_dict[(Recipient.STREAM, stream["id"])]

    def stream_cache_transformer(stream: TinyStreamResult) -> str:
        return stream["name"]
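
    # Roughly how the helpers above are consumed by transformed_bulk_cached_fetch below
    # (a sketch of the contract, inferred from this call site): query_function receives
    # the recipient_ids that were not found in the cache, id_fetcher maps each fetched
    # row back to its recipient_id, and cache_transformer reduces the row to the value
    # that gets cached and returned (here, just the stream name).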

    # ItemT = Stream, CacheItemT = str (name), ObjKT = int (recipient_id)
    stream_display_recipients: Dict[int, str] = transformed_bulk_cached_fetch(
        cache_key_function=display_recipient_cache_key,
        query_function=stream_query_function,
        object_ids=[recipient[0] for recipient in stream_recipients],
        id_fetcher=stream_id_fetcher,
        cache_transformer=stream_cache_transformer,
    )

    # Now we have to create display_recipients for personal and huddle messages.
    # We do this via transformed_bulk_cached_fetch, supplying appropriate functions to it.

    def personal_and_huddle_query_function(
        recipient_ids: List[int],
    ) -> List[Tuple[int, List[UserDisplayRecipient]]]:
        """
        Return a list of tuples of the form (recipient_id, [list of UserProfiles])
        where [list of UserProfiles] has the users corresponding to the recipient:
        the receiving user in the Recipient.PERSONAL case, or the users in the
        huddle in the Recipient.HUDDLE case.
        This is a pretty hacky return value, but it needs to be in this form
        for this function to work as the query_function in transformed_bulk_cached_fetch.
        """
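
        # Illustrative return value (hypothetical IDs): a personal recipient maps to a
        # single-user list, a huddle recipient to all of its users, e.g.
        #   [(8, [{"id": 12, ...}]), (9, [{"id": 20, ...}, {"id": 21, ...}])]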

        recipients = [
            Recipient(
                id=recipient_id,
                type=recipient_id_to_type_pair_dict[recipient_id][0],
                type_id=recipient_id_to_type_pair_dict[recipient_id][1],
            )
            for recipient_id in recipient_ids
        ]

        # Find all user ids whose UserProfiles we will need to fetch:
        user_ids_to_fetch: Set[int] = set()
        huddle_user_ids: Dict[int, List[int]] = {}
        huddle_user_ids = bulk_get_huddle_user_ids(
            [recipient for recipient in recipients if recipient.type == Recipient.HUDDLE]
        )
        for recipient in recipients:
            if recipient.type == Recipient.PERSONAL:
                user_ids_to_fetch.add(recipient.type_id)
            else:
                user_ids_to_fetch = user_ids_to_fetch.union(huddle_user_ids[recipient.id])
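        # At this point huddle_user_ids maps each huddle recipient_id to the list of
        # user ids in that huddle, and user_ids_to_fetch covers every user we need.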

        # Fetch the needed UserProfiles:
        user_profiles: Dict[int, UserDisplayRecipient] = bulk_get_user_profile_by_id(
            list(user_ids_to_fetch)
        )

        # Build the return value:
        result: List[Tuple[int, List[UserDisplayRecipient]]] = []
        for recipient in recipients:
            if recipient.type == Recipient.PERSONAL:
                result.append((recipient.id, [user_profiles[recipient.type_id]]))
            else:
                result.append(
                    (
                        recipient.id,
                        [user_profiles[user_id] for user_id in huddle_user_ids[recipient.id]],
                    )
                )

        return result

    def personal_and_huddle_cache_transformer(
        db_object: Tuple[int, List[UserDisplayRecipient]],
    ) -> List[UserDisplayRecipient]:
        """
        Takes an element of the list returned by the query_function and maps it
        to the final display_recipient list.
        """
        user_profile_list = db_object[1]
        display_recipient = user_profile_list

        return display_recipient

    def personal_and_huddle_id_fetcher(db_object: Tuple[int, List[UserDisplayRecipient]]) -> int:
        # db_object is a tuple, with recipient_id in the first position
        return db_object[0]

    # ItemT = Tuple[int, List[UserDisplayRecipient]] (recipient_id, list of corresponding users)
    # CacheItemT = List[UserDisplayRecipient] (display_recipient list)
    # ObjKT = int (recipient_id)
    personal_and_huddle_display_recipients: Dict[
        int, List[UserDisplayRecipient]
    ] = transformed_bulk_cached_fetch(
        cache_key_function=display_recipient_cache_key,
        query_function=personal_and_huddle_query_function,
        object_ids=[recipient[0] for recipient in personal_and_huddle_recipients],
        id_fetcher=personal_and_huddle_id_fetcher,
        cache_transformer=personal_and_huddle_cache_transformer,
    )

    # Glue the dicts together and return:
    return {**stream_display_recipients, **personal_and_huddle_display_recipients}