python: Use trailing commas consistently.

Automatically generated by the following script, based on the output
of lint with flake8-comma:

import re
import sys

last_filename = None
last_row = None
lines = []

for msg in sys.stdin:
    m = re.match(
        r"\x1b\[35mflake8    \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
    )
    if m:
        filename, row_str, col_str, err = m.groups()
        row, col = int(row_str), int(col_str)

        if filename == last_filename:
            assert last_row != row
        else:
            if last_filename is not None:
                with open(last_filename, "w") as f:
                    f.writelines(lines)

            with open(filename) as f:
                lines = f.readlines()
            last_filename = filename
        last_row = row

        line = lines[row - 1]
        if err in ["C812", "C815"]:
            # Missing trailing comma: insert one at the reported
            # 1-based column.
            lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
        elif err in ["C819"]:
            # Prohibited trailing comma: remove it and any spaces
            # that follow it.
            assert line[col - 2] == ","
            lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")

if last_filename is not None:
    with open(last_filename, "w") as f:
        f.writelines(lines)
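
For reference, a minimal self-contained sketch of the two rewrites the
script performs. The sample lines and reported columns below are
hypothetical, chosen only to exercise the same slicing arithmetic; they
are not taken from real flake8 output.

# C812/C815 (missing trailing comma): insert "," at the reported
# 1-based column.
line = "    second_arg\n"
col = 15  # hypothetical column, just past "second_arg"
assert line[: col - 1] + "," + line[col - 1 :] == "    second_arg,\n"

# C819 (prohibited trailing comma): the comma sits one character
# before the reported column; drop it and any spaces after it.
line = "names = [canonical_name, ]\n"
col = 25  # hypothetical column
assert line[col - 2] == ","
assert line[: col - 2] + line[col - 1 :].lstrip(" ") == "names = [canonical_name]\n"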

Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
Anders Kaseorg authored 2020-04-09 20:23:40 -07:00; committed by Tim Abbott
parent b114eb2f10
commit 69730a78cc
310 changed files with 1789 additions and 1789 deletions

View File

@@ -325,9 +325,9 @@ def sql_data_collector(
 def do_pull_minutes_active(property: str, start_time: datetime, end_time: datetime,
 realm: Optional[Realm] = None) -> int:
 user_activity_intervals = UserActivityInterval.objects.filter(
-end__gt=start_time, start__lt=end_time
+end__gt=start_time, start__lt=end_time,
 ).select_related(
-'user_profile'
+'user_profile',
 ).values_list(
 'user_profile_id', 'user_profile__realm_id', 'start', 'end')
@@ -660,7 +660,7 @@ def get_count_stats(realm: Optional[Realm]=None) -> Dict[str, CountStat]:
 sql_data_collector(
 RealmCount, count_realm_active_humans_query(realm), None),
 CountStat.DAY,
-dependencies=['active_users_audit:is_bot:day', '15day_actives::day'])
+dependencies=['active_users_audit:is_bot:day', '15day_actives::day']),
 ]
 return OrderedDict([(stat.property, stat) for stat in count_stats_])

View File

@@ -24,7 +24,7 @@ def analyze_activity(options: Dict[str, Any]) -> None:
 continue
 total_duration += duration
-print("%-*s%s" % (37, user_profile.email, duration,))
+print("%-*s%s" % (37, user_profile.email, duration))
 print(f"\nTotal Duration: {total_duration}")
 print(f"\nTotal Duration in minutes: {total_duration.total_seconds() / 60.}")

View File

@@ -16,7 +16,7 @@ states = {
 0: "OK",
 1: "WARNING",
 2: "CRITICAL",
-3: "UNKNOWN"
+3: "UNKNOWN",
 }
 class Command(BaseCommand):

View File

@@ -57,7 +57,7 @@ class InstallationCount(BaseCount):
 UniqueConstraint(
 fields=["property", "end_time"],
 condition=Q(subgroup__isnull=True),
-name='unique_installation_count_null_subgroup')
+name='unique_installation_count_null_subgroup'),
 ]
 def __str__(self) -> str:
@@ -76,7 +76,7 @@ class RealmCount(BaseCount):
 UniqueConstraint(
 fields=["realm", "property", "end_time"],
 condition=Q(subgroup__isnull=True),
-name='unique_realm_count_null_subgroup')
+name='unique_realm_count_null_subgroup'),
 ]
 index_together = ["property", "end_time"]
@@ -97,7 +97,7 @@ class UserCount(BaseCount):
 UniqueConstraint(
 fields=["user", "property", "end_time"],
 condition=Q(subgroup__isnull=True),
-name='unique_user_count_null_subgroup')
+name='unique_user_count_null_subgroup'),
 ]
 # This index dramatically improves the performance of
 # aggregating from users to realms
@@ -120,7 +120,7 @@ class StreamCount(BaseCount):
 UniqueConstraint(
 fields=["stream", "property", "end_time"],
 condition=Q(subgroup__isnull=True),
-name='unique_stream_count_null_subgroup')
+name='unique_stream_count_null_subgroup'),
 ]
 # This index dramatically improves the performance of
 # aggregating from streams to realms

View File

@@ -403,7 +403,7 @@ class TestSupportEndpoint(ZulipTestCase):
 self.assert_in_success_response(['<span class="label">user</span>\n', '<h3>King Hamlet</h3>',
 '<b>Email</b>: hamlet@zulip.com', '<b>Is active</b>: True<br>',
 '<b>Admins</b>: desdemona@zulip.com, iago@zulip.com\n',
-'class="copy-button" data-copytext="desdemona@zulip.com, iago@zulip.com"'
+'class="copy-button" data-copytext="desdemona@zulip.com, iago@zulip.com"',
 ], result)
 def check_zulip_realm_query_result(result: HttpResponse) -> None:
@@ -448,19 +448,19 @@ class TestSupportEndpoint(ZulipTestCase):
 self.assert_in_success_response(['<span class="label">preregistration user</span>\n',
 '<span class="label">realm creation</span>\n',
 '<b>Link</b>: http://zulip.testserver/accounts/do_confirm/',
-'<b>Expires in</b>: 1\xa0day<br>\n'
+'<b>Expires in</b>: 1\xa0day<br>\n',
 ], result)
 def check_multiuse_invite_link_query_result(result: HttpResponse) -> None:
 self.assert_in_success_response(['<span class="label">multiuse invite</span>\n',
 '<b>Link</b>: http://zulip.testserver/join/',
-'<b>Expires in</b>: 1\xa0week, 3'
+'<b>Expires in</b>: 1\xa0week, 3',
 ], result)
 def check_realm_reactivation_link_query_result(result: HttpResponse) -> None:
 self.assert_in_success_response(['<span class="label">realm reactivation</span>\n',
 '<b>Link</b>: http://zulip.testserver/reactivate/',
-'<b>Expires in</b>: 1\xa0day'
+'<b>Expires in</b>: 1\xa0day',
 ], result)
 self.login('cordelia')

View File

@@ -401,7 +401,7 @@ def make_table(title: str, cols: List[str], rows: List[Any], has_row_class: bool
 content = loader.render_to_string(
 'analytics/ad_hoc_query.html',
-dict(data=data)
+dict(data=data),
 )
 return content
@@ -590,7 +590,7 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
 realm_admins: Dict[str, List[str]] = defaultdict(list)
 for up in UserProfile.objects.select_related("realm").filter(
 role=UserProfile.ROLE_REALM_ADMINISTRATOR,
-is_active=True
+is_active=True,
 ):
 realm_admins[up.realm.string_id].append(up.delivery_email)
@@ -674,7 +674,7 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str:
 content = loader.render_to_string(
 'analytics/realm_summary_table.html',
 dict(rows=rows, num_active_sites=num_active_sites,
-now=now.strftime('%Y-%m-%dT%H:%M:%SZ'))
+now=now.strftime('%Y-%m-%dT%H:%M:%SZ')),
 )
 return content
@@ -688,18 +688,18 @@ def user_activity_intervals() -> Tuple[mark_safe, Dict[str, float]]:
 all_intervals = UserActivityInterval.objects.filter(
 end__gte=day_start,
-start__lte=day_end
+start__lte=day_end,
 ).select_related(
 'user_profile',
-'user_profile__realm'
+'user_profile__realm',
 ).only(
 'start',
 'end',
 'user_profile__delivery_email',
-'user_profile__realm__string_id'
+'user_profile__realm__string_id',
 ).order_by(
 'user_profile__realm__string_id',
-'user_profile__delivery_email'
+'user_profile__delivery_email',
 )
 by_string_id = lambda row: row.user_profile.realm.string_id
@@ -735,7 +735,7 @@ def sent_messages_report(realm: str) -> str:
 cols = [
 'Date',
 'Humans',
-'Bots'
+'Bots',
 ]
 query = SQL('''
@@ -833,7 +833,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
 return dict(
 content=content,
-title=title
+title=title,
 )
 pages = []
@@ -868,7 +868,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
 'User id',
 'Name',
 'Hits',
-'Last time'
+'Last time',
 ]
 pages.append(get_page(query, cols, title))
@@ -898,7 +898,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
 'Realm',
 'Client',
 'Hits',
-'Last time'
+'Last time',
 ]
 pages.append(get_page(query, cols, title))
@@ -936,7 +936,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
 'Realm',
 'Client',
 'Hits',
-'Last time'
+'Last time',
 ]
 pages.append(get_page(query, cols, title))
@@ -974,7 +974,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]:
 'Client',
 'Realm',
 'Hits',
-'Last time'
+'Last time',
 ]
 pages.append(get_page(query, cols, title))
@@ -1184,7 +1184,7 @@ def get_user_activity_records_for_realm(realm: str, is_bot: bool) -> QuerySet:
 records = UserActivity.objects.filter(
 user_profile__realm__string_id=realm,
 user_profile__is_active=True,
-user_profile__is_bot=is_bot
+user_profile__is_bot=is_bot,
 )
 records = records.order_by("user_profile__delivery_email", "-last_visit")
 records = records.select_related('user_profile', 'client').only(*fields)
@@ -1196,11 +1196,11 @@ def get_user_activity_records_for_email(email: str) -> List[QuerySet]:
 'query',
 'client__name',
 'count',
-'last_visit'
+'last_visit',
 ]
 records = UserActivity.objects.filter(
-user_profile__delivery_email=email
+user_profile__delivery_email=email,
 )
 records = records.order_by("-last_visit")
 records = records.select_related('user_profile', 'client').only(*fields)
@@ -1211,7 +1211,7 @@ def raw_user_activity_table(records: List[QuerySet]) -> str:
 'query',
 'client',
 'count',
-'last_visit'
+'last_visit',
 ]
 def row(record: QuerySet) -> List[Any]:
@@ -1219,7 +1219,7 @@ def raw_user_activity_table(records: List[QuerySet]) -> str:
 record.query,
 record.client.name,
 record.count,
-format_date_for_activity_reports(record.last_visit)
+format_date_for_activity_reports(record.last_visit),
 ]
 rows = list(map(row, records))
@@ -1238,13 +1238,13 @@ def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, An
 if action not in summary:
 summary[action] = dict(
 count=record.count,
-last_visit=record.last_visit
+last_visit=record.last_visit,
 )
 else:
 summary[action]['count'] += record.count
 summary[action]['last_visit'] = max(
 summary[action]['last_visit'],
-record.last_visit
+record.last_visit,
 )
 if records:

View File

@@ -245,14 +245,14 @@ class StripeTestCase(ZulipTestCase):
 # sanity check our 8 expected users are active
 self.assertEqual(
 UserProfile.objects.filter(realm=realm, is_active=True).count(),
-8
+8,
 )
 # Make sure we have active users outside our realm (to make
 # sure relevant queries restrict on realm).
 self.assertEqual(
 UserProfile.objects.exclude(realm=realm).filter(is_active=True).count(),
-10
+10,
 )
 # Our seat count excludes our guest user and bot, and
@@ -633,14 +633,14 @@ class StripeTest(StripeTestCase):
 update_license_ledger_if_needed(realm, self.now)
 self.assertEqual(
 LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
-(12, 12)
+(12, 12),
 )
 with patch('corporate.lib.stripe.get_latest_seat_count', return_value=15):
 update_license_ledger_if_needed(realm, self.next_month)
 self.assertEqual(
 LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
-(15, 15)
+(15, 15),
 )
 invoice_plans_as_needed(self.next_month)
@@ -662,7 +662,7 @@ class StripeTest(StripeTestCase):
 "amount_due": 15 * 80 * 100, "amount_paid": 0, "amount_remaining": 15 * 80 * 100,
 "auto_advance": True, "billing": "charge_automatically", "collection_method": "charge_automatically",
 "customer_email": self.example_email("hamlet"), "discount": None, "paid": False, "status": "open",
-"total": 15 * 80 * 100
+"total": 15 * 80 * 100,
 }
 for key, value in invoice_params.items():
 self.assertEqual(invoices[0].get(key), value)
@@ -673,7 +673,7 @@ class StripeTest(StripeTestCase):
 "plan": None, "quantity": 15, "subscription": None, "discountable": False,
 "period": {
 "start": datetime_to_timestamp(free_trial_end_date),
-"end": datetime_to_timestamp(add_months(free_trial_end_date, 12))
+"end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
 },
 }
 for key, value in invoice_item_params.items():
@@ -687,14 +687,14 @@ class StripeTest(StripeTestCase):
 update_license_ledger_if_needed(realm, add_months(free_trial_end_date, 10))
 self.assertEqual(
 LicenseLedger.objects.order_by('-id').values_list('licenses', 'licenses_at_next_renewal').first(),
-(19, 19)
+(19, 19),
 )
 invoice_plans_as_needed(add_months(free_trial_end_date, 10))
 invoices = [invoice for invoice in stripe.Invoice.list(customer=stripe_customer.id)]
 self.assertEqual(len(invoices), 2)
 invoice_params = {
 "amount_due": 5172, "auto_advance": True, "billing": "charge_automatically",
-"collection_method": "charge_automatically", "customer_email": "hamlet@zulip.com"
+"collection_method": "charge_automatically", "customer_email": "hamlet@zulip.com",
 }
 invoice_items = [invoice_item for invoice_item in invoices[0].get("lines")]
 self.assertEqual(len(invoice_items), 1)
@@ -703,8 +703,8 @@ class StripeTest(StripeTestCase):
 "discountable": False, "quantity": 4,
 "period": {
 "start": datetime_to_timestamp(add_months(free_trial_end_date, 10)),
-"end": datetime_to_timestamp(add_months(free_trial_end_date, 12))
-}
+"end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
+},
 }
 invoice_plans_as_needed(add_months(free_trial_end_date, 12))
@@ -774,7 +774,7 @@ class StripeTest(StripeTestCase):
 'Zulip Standard', 'Free Trial', str(self.seat_count),
 'You are using', f'{self.seat_count} of {123} licenses',
 'Your plan will be upgraded to', 'March 2, 2012',
-f'{80 * 123:,.2f}', 'Billed by invoice'
+f'{80 * 123:,.2f}', 'Billed by invoice',
 ]:
 self.assert_in_response(substring, response)
@@ -798,7 +798,7 @@ class StripeTest(StripeTestCase):
 "amount_due": 123 * 80 * 100, "amount_paid": 0, "amount_remaining": 123 * 80 * 100,
 "auto_advance": True, "billing": "send_invoice", "collection_method": "send_invoice",
 "customer_email": self.example_email("hamlet"), "discount": None, "paid": False, "status": "open",
-"total": 123 * 80 * 100
+"total": 123 * 80 * 100,
 }
 for key, value in invoice_params.items():
 self.assertEqual(invoices[0].get(key), value)
@@ -809,7 +809,7 @@ class StripeTest(StripeTestCase):
 "plan": None, "quantity": 123, "subscription": None, "discountable": False,
 "period": {
 "start": datetime_to_timestamp(free_trial_end_date),
-"end": datetime_to_timestamp(add_months(free_trial_end_date, 12))
+"end": datetime_to_timestamp(add_months(free_trial_end_date, 12)),
 },
 }
 for key, value in invoice_item_params.items():

View File

@@ -266,7 +266,7 @@ latex_documents = [
 # (source start file, name, description, authors, manual section).
 man_pages = [
 (master_doc, 'zulip-contributor-docs', 'Zulip Documentation',
-[author], 1)
+[author], 1),
 ]
 # If true, show URL addresses after external links.

View File

@@ -24,5 +24,5 @@ class Migration(migrations.Migration):
 operations = [
 migrations.RunPython(rebuild_pgroonga_index,
-reverse_code=migrations.RunPython.noop)
+reverse_code=migrations.RunPython.noop),
 ]

View File

@@ -32,5 +32,5 @@ DROP INDEX zerver_message_search_pgroonga;
 CREATE INDEX CONCURRENTLY zerver_message_search_pgroonga ON zerver_message
 USING pgroonga(search_pgroonga pgroonga.text_full_text_search_ops);
-"""])
+"""]),
 ]

View File

@@ -82,7 +82,7 @@ states = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, timestamp: Any = None, msg: Optional[str] = None) -> None:

View File

@@ -14,7 +14,7 @@ states = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, msg: str) -> "NoReturn":

View File

@@ -18,7 +18,7 @@ states = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, num: str) -> None:

View File

@@ -9,7 +9,7 @@ states = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, msg: str) -> None:

View File

@@ -18,7 +18,7 @@ states: Dict[str, int] = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, output: str) -> None:

View File

@@ -32,7 +32,7 @@ states: Dict[str, int] = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> None:
@@ -40,8 +40,8 @@ def report(state: str, short_msg: str, too_old: Optional[Set[Any]] = None) -> No
 if too_old:
 too_old_data = "\nLast call to get_message for recently out of date mirrors:\n" + "\n".join(
 ["{:>16}: {}".format(user.user_profile.email,
-user.last_visit.strftime("%Y-%m-%d %H:%M %Z")
-) for user in too_old]
+user.last_visit.strftime("%Y-%m-%d %H:%M %Z"),
+) for user in too_old],
 )
 print(f"{state}: {short_msg}{too_old_data}")

View File

@@ -19,7 +19,7 @@ states: Dict[str, int] = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,
-"UNKNOWN": 3
+"UNKNOWN": 3,
 }
 def report(state: str, data: str, last_check: float) -> None:

View File

@@ -36,7 +36,7 @@ states = {
 0: "OK",
 1: "WARNING",
 2: "CRITICAL",
-3: "UNKNOWN"
+3: "UNKNOWN",
 }
 MAX_SECONDS_TO_CLEAR_FOR_BURSTS: DefaultDict[str, int] = defaultdict(

View File

@@ -10,7 +10,7 @@ if ZULIP_PATH not in sys.path:
 from scripts.lib.zulip_tools import os_families, overwrite_symlink, run, parse_os_release
 from scripts.lib.setup_venv import (
-setup_virtualenv, get_venv_dependencies
+setup_virtualenv, get_venv_dependencies,
 )
 parser = argparse.ArgumentParser(description="Create a production virtualenv with caching")

View File

@@ -10,7 +10,7 @@ if ZULIP_PATH not in sys.path:
 from scripts.lib.zulip_tools import os_families, run, parse_os_release
 from scripts.lib.setup_venv import (
-setup_virtualenv, THUMBOR_VENV_DEPENDENCIES, YUM_THUMBOR_VENV_DEPENDENCIES
+setup_virtualenv, THUMBOR_VENV_DEPENDENCIES, YUM_THUMBOR_VENV_DEPENDENCIES,
 )
 parser = argparse.ArgumentParser(description="Create a thumbor virtualenv with caching")

View File

@@ -94,7 +94,7 @@ def process_response_error(e: HTTPError) -> None:
 def send_email_mirror(
-rcpt_to: str, shared_secret: str, host: str, url: str, test: bool, verify_ssl: bool
+rcpt_to: str, shared_secret: str, host: str, url: str, test: bool, verify_ssl: bool,
 ) -> None:
 if not rcpt_to:
 print("5.1.1 Bad destination mailbox address: No missed message email address.")
@@ -112,7 +112,7 @@ def send_email_mirror(
 request_data = {
 "recipient": rcpt_to,
-"msg_text": msg_text
+"msg_text": msg_text,
 }
 if test:
 exit(0)

View File

@@ -23,7 +23,7 @@ def get_yarn_args(production: bool) -> List[str]:
 return yarn_args
 def generate_sha1sum_node_modules(
-setup_dir: Optional[str] = None, production: bool = DEFAULT_PRODUCTION
+setup_dir: Optional[str] = None, production: bool = DEFAULT_PRODUCTION,
 ) -> str:
 if setup_dir is None:
 setup_dir = os.path.realpath(os.getcwd())
@@ -69,7 +69,7 @@ def setup_node_modules(
 def do_yarn_install(
 target_path: str,
 yarn_args: List[str],
-success_stamp: str
+success_stamp: str,
 ) -> None:
 os.makedirs(target_path, exist_ok=True)
 shutil.copy('package.json', target_path)

View File

@@ -98,9 +98,9 @@ YUM_THUMBOR_VENV_DEPENDENCIES = [
 def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
 if vendor == 'ubuntu' and os_version == '20.04':
-return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format("2"), ]
+return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format("2")]
 elif "debian" in os_families():
-return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format(""), ]
+return VENV_DEPENDENCIES + [PYTHON_DEV_DEPENDENCY.format("")]
 elif "rhel" in os_families():
 return REDHAT_VENV_DEPENDENCIES
 elif "fedora" in os_families():
@@ -238,7 +238,7 @@ def get_logfile_name(venv_path: str) -> str:
 return "{}/setup-venv.log".format(venv_path)
 def create_log_entry(
-target_log: str, parent: str, copied_packages: Set[str], new_packages: Set[str]
+target_log: str, parent: str, copied_packages: Set[str], new_packages: Set[str],
 ) -> None:
 venv_path = os.path.dirname(target_log)

View File

@@ -84,7 +84,7 @@ def parse_cache_script_args(description: str) -> argparse.Namespace:
 def get_deploy_root() -> str:
 return os.path.realpath(
-os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))
+os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..")),
 )
 def get_deployment_version(extract_path: str) -> str:
@@ -271,7 +271,7 @@ def get_caches_to_be_purged(caches_dir: str, caches_in_use: Set[str], threshold_
 return caches_to_purge
 def purge_unused_caches(
-caches_dir: str, caches_in_use: Set[str], cache_type: str, args: argparse.Namespace
+caches_dir: str, caches_in_use: Set[str], cache_type: str, args: argparse.Namespace,
 ) -> None:
 all_caches = {os.path.join(caches_dir, cache) for cache in os.listdir(caches_dir)}
 caches_to_purge = get_caches_to_be_purged(caches_dir, caches_in_use, args.threshold_days)

View File

@@ -17,7 +17,7 @@ states = {
 0: "OK",
 1: "WARNING",
 2: "CRITICAL",
-3: "UNKNOWN"
+3: "UNKNOWN",
 }
 if 'USER' in os.environ and not os.environ['USER'] in ['root', 'rabbitmq']:

View File

@@ -18,5 +18,5 @@ pylibmc.Client(
 binary=True,
 username=settings.MEMCACHED_USERNAME,
 password=settings.MEMCACHED_PASSWORD,
-behaviors=settings.CACHES["default"]["OPTIONS"] # type: ignore[index] # settings not typed properly
+behaviors=settings.CACHES["default"]["OPTIONS"], # type: ignore[index] # settings not typed properly
 ).flush_all()

View File

@@ -149,7 +149,7 @@ def generate_secrets(development: bool = False) -> None:
 with open(filename, "a") as f:
 f.write(
 "# Set a Redis password based on zulip-secrets.conf\n"
-"requirepass '%s'\n" % (redis_password,)
+"requirepass '%s'\n" % (redis_password,),
 )
 break

View File

@@ -70,7 +70,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
 assert not any("|" in name or "|" in path for name, path in paths)
 transform_args = [
 r"--transform=s|^zulip-backup/{}(/.*)?$|{}\1|x".format(
-re.escape(name), path.replace("\\", r"\\")
+re.escape(name), path.replace("\\", r"\\"),
 )
 for name, path in paths
 ]
@@ -89,12 +89,12 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
 run(
 [
 os.path.join(
-settings.DEPLOY_ROOT, "scripts", "setup", "terminate-psql-sessions"
+settings.DEPLOY_ROOT, "scripts", "setup", "terminate-psql-sessions",
 ),
 "zulip",
 "zulip",
 "zulip_base",
-]
+],
 )
 as_postgres = ["su", "-s", "/usr/bin/env", "-", "--", POSTGRES_USER]
 run(as_postgres + ["dropdb", "--if-exists", "--", db_name])
@@ -130,7 +130,7 @@ def restore_backup(tarball_file: IO[bytes]) -> None:
 [
 os.path.join(settings.DEPLOY_ROOT, "scripts", "zulip-puppet-apply"),
 "-f",
-]
+],
 )
 # Now, restore the the database backup using pg_restore. This

View File

@@ -12,7 +12,7 @@ def get_start_url() -> List[str]:
 start_file = os.path.join(dir_path, os.path.join(*[os.pardir] * 4),
 "docs/_build/html/index.html")
 return [
-pathlib.Path(os.path.abspath(start_file)).as_uri()
+pathlib.Path(os.path.abspath(start_file)).as_uri(),
 ]

View File

@@ -236,5 +236,5 @@ else:
 parser.error(
 "Could not find configuration for integration. "
 "You can specify a fixture file to use, using the --fixture flag. "
-"Or add a configuration to zerver.lib.integrations.DOC_SCREENSHOT_CONFIG"
+"Or add a configuration to zerver.lib.integrations.DOC_SCREENSHOT_CONFIG",
 )

View File

@@ -69,7 +69,7 @@ WORD_SET = {
 'testing', 'tested', # 'tests' excluded to reduce false negative
 'truncates', 'truncating', 'truncated',
 'updates', 'updating', 'updated',
-'uses', 'using', 'used'
+'uses', 'using', 'used',
 }
 imperative_forms = sorted([
@@ -135,7 +135,7 @@ class ImperativeMood(LineRule):
 violation = RuleViolation(self.id, self.error_msg.format(
 word=first_word,
 imperative=imperative,
-title=commit.message.title
+title=commit.message.title,
 ))
 violations.append(violation)

View File

@@ -85,7 +85,7 @@ def pretty_print_html(html: str, num_spaces: int = 4) -> str:
 adjustment=adjustment,
 indenting=True,
 adjust_offset_until=token.line,
-ignore_lines=[]
+ignore_lines=[],
 )
 if token.kind in ('handlebars_start', 'django_start'):
 info.update(dict(depth=new_depth - 1, indenting=False))
@@ -98,7 +98,7 @@ def pretty_print_html(html: str, num_spaces: int = 4) -> str:
 tag=token.tag,
 token_kind=token.kind,
 extra_indent=stack[-1]['extra_indent'],
-ignore_lines=[]
+ignore_lines=[],
 )
 stack.append(info)
 elif (token.kind in ('html_end', 'handlebars_end', 'html_singleton_end',

View File

@@ -55,7 +55,7 @@ try:
 os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
 os.symlink(
 os.path.join(ZULIP_PATH, 'README.md'),
-os.path.join(VAR_DIR_PATH, 'zulip-test-symlink')
+os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'),
 )
 os.remove(os.path.join(VAR_DIR_PATH, 'zulip-test-symlink'))
 except OSError:
@@ -130,7 +130,7 @@ COMMON_DEPENDENCIES = [
 "libxss1",
 "fonts-freefont-ttf",
 "libappindicator1",
-"xdg-utils"
+"xdg-utils",
 # Puppeteer dependencies end here.
 ]
@@ -152,7 +152,7 @@ COMMON_YUM_DEPENDENCIES = COMMON_DEPENDENCIES + [
 "freetype",
 "freetype-devel",
 "fontconfig-devel",
-"libstdc++"
+"libstdc++",
 ] + YUM_THUMBOR_VENV_DEPENDENCIES
 BUILD_PGROONGA_FROM_SOURCE = False
@@ -168,7 +168,7 @@ if vendor == 'debian' and os_version in [] or vendor == 'ubuntu' and os_version
 "libgroonga-dev",
 "libmsgpack-dev",
 "clang-9",
-"llvm-9-dev"
+"llvm-9-dev",
 ]
 ] + VENV_DEPENDENCIES
 elif "debian" in os_families():
@@ -244,7 +244,7 @@ def install_apt_deps(deps_to_install: List[str]) -> None:
 "env", "DEBIAN_FRONTEND=noninteractive",
 "apt-get", "-y", "install", "--no-install-recommends",
 ]
-+ deps_to_install
++ deps_to_install,
 )
 def install_yum_deps(deps_to_install: List[str]) -> None:
@@ -411,7 +411,7 @@ def main(options: argparse.Namespace) -> "NoReturn":
 provision_inner,
 *(["--force"] if options.is_force else []),
 *(["--build-release-tarball-only"] if options.is_build_release_tarball_only else []),
-]
+],
 )
 if __name__ == "__main__":

View File

@@ -128,7 +128,7 @@ def need_to_run_build_pygments_data() -> bool:
 return is_digest_obsolete(
 "build_pygments_data_hash",
 build_pygments_data_paths(),
-[pygments_version]
+[pygments_version],
 )
 def need_to_run_compilemessages() -> bool:
@@ -155,7 +155,7 @@ def need_to_run_configure_rabbitmq(settings_list: List[str]) -> bool:
 obsolete = is_digest_obsolete(
 'last_configure_rabbitmq_hash',
 configure_rabbitmq_paths(),
-settings_list
+settings_list,
 )
 if obsolete:
@@ -204,7 +204,7 @@ def main(options: argparse.Namespace) -> int:
 write_new_digest(
 'build_pygments_data_hash',
 build_pygments_data_paths(),
-[pygments_version]
+[pygments_version],
 )
 else:
 print("No need to run `tools/setup/build_pygments_data`.")
@@ -240,7 +240,7 @@ def main(options: argparse.Namespace) -> int:
 write_new_digest(
 'last_configure_rabbitmq_hash',
 configure_rabbitmq_paths(),
-[settings.RABBITMQ_PASSWORD]
+[settings.RABBITMQ_PASSWORD],
 )
 else:
 print("No need to run `scripts/setup/configure-rabbitmq.")

View File

@@ -160,8 +160,8 @@ def tokenize(text: str) -> List[Token]:
 e.message,
 state.line,
 state.col,
-e.line_content
-)
+e.line_content,
+),
 )
 line_span = len(s.split('\n'))
@@ -171,7 +171,7 @@ def tokenize(text: str) -> List[Token]:
 tag=tag.strip(),
 line=state.line,
 col=state.col,
-line_span=line_span
+line_span=line_span,
 )
 tokens.append(token)
 advance(len(s))
@@ -183,7 +183,7 @@ def tokenize(text: str) -> List[Token]:
 tag=tag,
 line=state.line,
 col=state.col,
-line_span=1
+line_span=1,
 )
 tokens.append(token)

View File

@@ -51,7 +51,7 @@ def server_is_up(server: "subprocess.Popen[bytes]", log_file: Optional[str]) ->
 @contextmanager
 def test_server_running(force: bool=False, external_host: str='testserver',
-log_file: Optional[str]=None, dots: bool=False, use_db: bool=True
+log_file: Optional[str]=None, dots: bool=False, use_db: bool=True,
 ) -> Iterator[None]:
 log = sys.stdout
 if log_file:

View File

@@ -60,13 +60,13 @@ shebang_rules: List["Rule"] = [
 'description': "Use `#!/usr/bin/env foo` instead of `#!/path/foo`"
 " for interpreters other than sh."},
 {'pattern': '^#!/usr/bin/env python$',
-'description': "Use `#!/usr/bin/env python3` instead of `#!/usr/bin/env python`."}
+'description': "Use `#!/usr/bin/env python3` instead of `#!/usr/bin/env python`."},
 ]
 trailing_whitespace_rule: "Rule" = {
 'pattern': r'\s+$',
 'strip': '\n',
-'description': 'Fix trailing whitespace'
+'description': 'Fix trailing whitespace',
 }
 whitespace_rules: List["Rule"] = [
 # This linter should be first since bash_rules depends on it.
@@ -446,8 +446,8 @@ bash_rules = RuleList(
 'include_only': {'scripts/'},
 'exclude': {
 'scripts/lib/install',
-'scripts/setup/configure-rabbitmq'
-}, },
+'scripts/setup/configure-rabbitmq',
+}},
 *whitespace_rules[0:1],
 ],
 shebang_rules=shebang_rules,
@@ -554,7 +554,7 @@ html_rules: List["Rule"] = whitespace_rules + prose_style_rules + [
 {'pattern': r'title="[^{\:]',
 'exclude_line': {
 ('templates/zerver/app/markdown_help.html',
-'<td class="rendered_markdown"><img alt=":heart:" class="emoji" src="/static/generated/emoji/images/emoji/heart.png" title=":heart:" /></td>')
+'<td class="rendered_markdown"><img alt=":heart:" class="emoji" src="/static/generated/emoji/images/emoji/heart.png" title=":heart:" /></td>'),
 },
 'exclude': {"templates/zerver/emails", "templates/analytics/realm_details.html", "templates/analytics/support.html"},
 'description': "`title` value should be translatable."},
@@ -691,7 +691,7 @@ json_rules = RuleList(
 {'pattern': r'":["\[\{]',
 'exclude': {'zerver/webhooks/', 'zerver/tests/fixtures/'},
 'description': 'Require space after : in JSON'},
-]
+],
 )
 markdown_docs_length_exclude = {
@@ -737,7 +737,7 @@ markdown_rules = RuleList(
 ],
 max_length=120,
 length_exclude=markdown_docs_length_exclude,
-exclude_files_in='templates/zerver/help/'
+exclude_files_in='templates/zerver/help/',
 )
 help_markdown_rules = RuleList(

View File

@@ -203,7 +203,7 @@ def fetch_request(url: str, callback: Any, **kwargs: Any) -> "Generator[Callable
 connect_timeout=240.0,
 request_timeout=240.0,
 decompress_response=False,
-**kwargs
+**kwargs,
 )
 client = httpclient.AsyncHTTPClient()
 # wait for response
@@ -218,7 +218,7 @@ class BaseHandler(web.RequestHandler):
 target_port: int
 def _add_request_headers(
-self, exclude_lower_headers_list: Optional[List[str]] = None
+self, exclude_lower_headers_list: Optional[List[str]] = None,
 ) -> httputil.HTTPHeaders:
 exclude_lower_headers_list = exclude_lower_headers_list or []
 headers = httputil.HTTPHeaders()
@@ -284,7 +284,7 @@ class BaseHandler(web.RequestHandler):
 headers=self._add_request_headers(["upgrade-insecure-requests"]),
 follow_redirects=False,
 body=getattr(self.request, 'body'),
-allow_nonstandard_methods=True
+allow_nonstandard_methods=True,
 )
 except httpclient.HTTPError as e:
 if hasattr(e, 'response') and e.response:
@@ -329,7 +329,7 @@ class Application(web.Application):
 (r"/api/v1/events.*", TornadoHandler),
 (r"/webpack.*", WebPackHandler),
 (r"/thumbor.*", ThumborHandler if using_thumbor() else ErrorHandler),
-(r"/.*", DjangoHandler)
+(r"/.*", DjangoHandler),
 ]
 super().__init__(handlers, enable_logging=enable_logging)

View File

@@ -89,7 +89,7 @@ def main() -> None:
 args = parser.parse_args()
 prepare_sorting_info()
-output_data = [column_names, ]
+output_data = [column_names]
 explanation_lines: List[str] = []
 with open(args.input_file_path) as fp:
 for line in fp.readlines():

View File

@@ -132,7 +132,7 @@ def generate_emoji_code_to_emoji_names_maps() -> None:
 if emoji_code in reverse_unified_reactions_map:
 reverse_unified_reactions_map[emoji_code].append(name)
 else:
-reverse_unified_reactions_map[emoji_code] = [name, ]
+reverse_unified_reactions_map[emoji_code] = [name]
 for emoji_code in reverse_unified_reactions_map:
 emoji_code_to_gemoji_names[emoji_code] = ", ".join(reverse_unified_reactions_map[emoji_code])
@@ -146,7 +146,7 @@ def generate_emoji_code_to_emoji_names_maps() -> None:
 for emoji_code in EMOJI_NAME_MAPS:
 canonical_name = EMOJI_NAME_MAPS[emoji_code]["canonical_name"]
 aliases = EMOJI_NAME_MAPS[emoji_code]["aliases"]
-names = [canonical_name, ]
+names = [canonical_name]
 names.extend(aliases)
 emoji_code_to_zulip_names[emoji_code] = ", ".join(names)

View File

@@ -57,7 +57,7 @@ def check_valid_emoji_name(emoji_name: str) -> None:
 def check_emoji_names(canonical_name: str, aliases: List[str]) -> None:
 if canonical_name == 'X':
 return
-names_to_check = [canonical_name, ] + aliases
+names_to_check = [canonical_name] + aliases
 for name in names_to_check:
 check_valid_emoji_name(name)
 check_uniqueness(name)

View File

@@ -51,7 +51,7 @@ with test_server_running(force=options.force, external_host='zulipdev.com:9981')
 client = Client(
 email=email,
 api_key=api_key,
-site=site
+site=site,
 )
 # Prepare a generic bot client for curl testing
@@ -62,7 +62,7 @@ with test_server_running(force=options.force, external_host='zulipdev.com:9981')
 bot_client = Client(
 email=email,
 api_key=api_key,
-site=site
+site=site,
 )
 # Prepare the non-admin client
@@ -73,7 +73,7 @@ with test_server_running(force=options.force, external_host='zulipdev.com:9981')
 nonadmin_client = Client(
 email=email,
 api_key=api_key,
-site=site
+site=site,
 )
 test_the_api(client, nonadmin_client)
@@ -84,7 +84,7 @@ with test_server_running(force=options.force, external_host='zulipdev.com:9981')
 client = Client(
 email=email,
 api_key='X'*32,
-site=site
+site=site,
 )
 test_invalid_api_key(client)

View File

@@ -279,7 +279,7 @@ def enforce_proper_coverage(coverage_json: Any) -> bool:
 glob.glob('static/js/*.ts') +
 glob.glob('static/shared/js/*.js') +
 glob.glob('static/shared/js/*.ts') +
-glob.glob('static/js/billing/*.js')
+glob.glob('static/js/billing/*.js'),
 )
 enforce_fully_covered = all_js_files - EXEMPT_FILES

View File

@@ -39,7 +39,7 @@ def build_for_dev_server(host: str, port: str, minify: bool, disable_host_check:
 # We add the hot flag using the cli because it takes care
 # of addition to entry points and adding the plugin
 # automatically
-'--hot'
+'--hot',
 ]
 if minify:
 webpack_args.append('--optimize-minimize')
@@ -99,7 +99,7 @@ def build_for_most_tests() -> None:
 entries[entry] = [{
 "name": f"{entry}.js",
 "publicPath": f"http://localhost:3000/webpack-stub/{entry}-stubentry.js",
-"path": f"/stubfolder/{entry}-stubfile.js"
+"path": f"/stubfolder/{entry}-stubfile.js",
 }]
 stat_data = {
 "status": "done",

View File

@@ -84,5 +84,5 @@ for msg in result['messages']:
 filename = f"zulip-{options.stream}.json"
 with open(filename, 'wb') as f:
 f.write(json.dumps(messages, indent=0, sort_keys=False).encode('utf-8'))
-print("%d messages exported to %s" % (len(messages), filename,))
+print("%d messages exported to %s" % (len(messages), filename))
 sys.exit(0)

View File

@@ -188,7 +188,7 @@ def login_context(request: HttpRequest) -> Dict[str, Any]:
 # by the desktop client. We expand it with IDs of the <button> elements corresponding
 # to the authentication methods.
 context['page_params'] = dict(
-external_authentication_methods = get_external_method_dicts(realm)
+external_authentication_methods = get_external_method_dicts(realm),
 )
 for auth_dict in context['page_params']['external_authentication_methods']:
 auth_dict['button_id_suffix'] = "auth_button_{}".format(auth_dict['name'])

View File

@@ -534,7 +534,7 @@ def get_hipchat_sender_id(realm_id: int,
 return None
 mirror_user = user_handler.get_mirror_user(
 realm_id=realm_id,
-name=message_dict['sender']['name']
+name=message_dict['sender']['name'],
 )
 sender_id = mirror_user['id']
 return sender_id
@@ -544,7 +544,7 @@ def get_hipchat_sender_id(realm_id: int,
 return None
 mirror_user = user_handler.get_mirror_user(
 realm_id=realm_id,
-name=message_dict['sender']['id']
+name=message_dict['sender']['id'],
 )
 sender_id = mirror_user['id']
 return sender_id

View File

@@ -42,7 +42,7 @@ class AttachmentHandler:
 target_path = os.path.join(
 str(realm_id),
 'HipChatImportAttachment',
-path
+path,
 )
 if target_path in self.info_dict:
@@ -130,7 +130,7 @@ class AttachmentHandler:
 os.makedirs(os.path.join(uploads_folder, str(realm_id)), exist_ok=True)
 attachment = dict(
-zerver_attachment=attachments
+zerver_attachment=attachments,
 )
 create_converted_data_files(uploads_records, output_dir, '/uploads/records.json')

View File

@@ -428,7 +428,7 @@ def build_stream(date_created: Any, realm_id: int, name: str,
 def build_huddle(huddle_id: int) -> ZerverFieldsT:
 huddle = Huddle(
-id=huddle_id
+id=huddle_id,
 )
 return model_to_dict(huddle)
@@ -635,7 +635,7 @@ def build_realm_emoji(realm_id: int,
 name=name,
 id=id,
 file_name=file_name,
-)
+),
 )
 def process_emojis(zerver_realmemoji: List[ZerverFieldsT], emoji_dir: str,

View File

@@ -430,7 +430,7 @@ def process_posts(num_teams: int,
 sender_id=sender_id,
 content=content,
 date_sent=int(post_dict['create_at'] / 1000),
-reactions=reactions
+reactions=reactions,
 )
 if "channel" in post_dict:
 message_dict["channel_name"] = post_dict["channel"]

View File

@@ -231,8 +231,8 @@ def build_customprofile_field(customprofile_field: List[ZerverFieldsT], fields:
 customprofilefield = CustomProfileField(
 id=custom_profile_field_id,
 name=field_name,
-field_type=1 # For now this is defaulted to 'SHORT_TEXT'
+field_type=1, # For now this is defaulted to 'SHORT_TEXT'
 # Processing is done in the function 'process_customprofilefields'
 )
 customprofilefield_dict = model_to_dict(customprofilefield,
@@ -699,7 +699,7 @@ def channel_message_to_zerver_message(realm_id: int,
 # Slack's channel join/leave notices are spammy
 "channel_join",
 "channel_leave",
-"channel_name"
+"channel_name",
 ]:
 continue
@@ -885,7 +885,7 @@ def get_attachment_path_and_content(fileinfo: ZerverFieldsT, realm_id: int) -> T
 # in sync with 'exports.py' function 'import_message_data'
 format(random.randint(0, 255), 'x'),
 random_name(18),
-sanitize_name(fileinfo['name'])
+sanitize_name(fileinfo['name']),
 ])
 attachment_path = f'/user_uploads/{s3_path}'
 content = '[{}]({})'.format(fileinfo['title'], attachment_path)

View File

@ -259,7 +259,7 @@ def access_user_by_api_key(request: HttpRequest, api_key: str, email: Optional[s
def log_exception_to_webhook_logger( def log_exception_to_webhook_logger(
request: HttpRequest, user_profile: UserProfile, request: HttpRequest, user_profile: UserProfile,
request_body: Optional[str]=None, request_body: Optional[str]=None,
unexpected_event: Optional[bool]=False unexpected_event: Optional[bool]=False,
) -> None: ) -> None:
if request_body is not None: if request_body is not None:
payload = request_body payload = request_body
@ -318,7 +318,7 @@ def full_webhook_client_name(raw_client_name: Optional[str]=None) -> Optional[st
# Use this for webhook views that don't get an email passed in. # Use this for webhook views that don't get an email passed in.
def api_key_only_webhook_view( def api_key_only_webhook_view(
webhook_client_name: str, webhook_client_name: str,
notify_bot_owner_on_invalid_json: Optional[bool]=True notify_bot_owner_on_invalid_json: Optional[bool]=True,
) -> Callable[[ViewFuncT], ViewFuncT]: ) -> Callable[[ViewFuncT], ViewFuncT]:
# TODO The typing here could be improved by using the Extended Callable types: # TODO The typing here could be improved by using the Extended Callable types:
# https://mypy.readthedocs.io/en/latest/kinds_of_types.html#extended-callable-types # https://mypy.readthedocs.io/en/latest/kinds_of_types.html#extended-callable-types
@ -456,12 +456,12 @@ def zulip_login_required(
actual_decorator = user_passes_test( actual_decorator = user_passes_test(
logged_in_and_active, logged_in_and_active,
login_url=login_url, login_url=login_url,
redirect_field_name=redirect_field_name redirect_field_name=redirect_field_name,
) )
otp_required_decorator = zulip_otp_required( otp_required_decorator = zulip_otp_required(
redirect_field_name=redirect_field_name, redirect_field_name=redirect_field_name,
login_url=login_url login_url=login_url,
) )
if function: if function:
@@ -621,7 +621,7 @@ def process_as_post(view_func: ViewFuncT) -> ViewFuncT:
request.META, request.META,
BytesIO(request.body), BytesIO(request.body),
request.upload_handlers, request.upload_handlers,
request.encoding request.encoding,
).parse() ).parse()
else: else:
request.POST = QueryDict(request.body, encoding=request.encoding) request.POST = QueryDict(request.body, encoding=request.encoding)


@@ -240,7 +240,7 @@ class ZulipPasswordResetForm(PasswordResetForm):
from_email: Optional[str]=None, from_email: Optional[str]=None,
request: HttpRequest=None, request: HttpRequest=None,
html_email_template_name: Optional[str]=None, html_email_template_name: Optional[str]=None,
extra_email_context: Optional[Dict[str, Any]]=None extra_email_context: Optional[Dict[str, Any]]=None,
) -> None: ) -> None:
""" """
If the email address has an account in the target realm, If the email address has an account in the target realm,


@@ -1,6 +1,6 @@
from typing import ( from typing import (
AbstractSet, Any, Callable, Dict, Iterable, List, Mapping, MutableMapping, AbstractSet, Any, Callable, Dict, Iterable, List, Mapping, MutableMapping,
Optional, Sequence, Set, Tuple, Union, cast Optional, Sequence, Set, Tuple, Union, cast,
) )
from typing_extensions import TypedDict from typing_extensions import TypedDict
@@ -240,7 +240,7 @@ def bot_owner_user_ids(user_profile: UserProfile) -> Set[int]:
user_profile.default_events_register_stream and user_profile.default_events_register_stream and
user_profile.default_events_register_stream.invite_only) user_profile.default_events_register_stream.invite_only)
if is_private_bot: if is_private_bot:
return {user_profile.bot_owner_id, } return {user_profile.bot_owner_id}
else: else:
users = {user.id for user in user_profile.realm.get_human_admin_users()} users = {user.id for user in user_profile.realm.get_human_admin_users()}
users.add(user_profile.bot_owner_id) users.add(user_profile.bot_owner_id)
@@ -282,8 +282,8 @@ def notify_new_user(user_profile: UserProfile) -> None:
signup_notifications_stream, signup_notifications_stream,
"signups", "signups",
"@_**%s|%s** just signed up for Zulip. (total: %i)" % ( "@_**%s|%s** just signed up for Zulip. (total: %i)" % (
user_profile.full_name, user_profile.id, user_count user_profile.full_name, user_profile.id, user_count,
) ),
) )
# We also send a notification to the Zulip administrative realm # We also send a notification to the Zulip administrative realm
@@ -300,7 +300,7 @@ def notify_new_user(user_profile: UserProfile) -> None:
user_profile.full_name, user_profile.full_name,
user_profile.email, user_profile.email,
user_count, user_count,
) ),
) )
except Stream.DoesNotExist: except Stream.DoesNotExist:
@@ -395,7 +395,7 @@ def process_new_human_user(user_profile: UserProfile,
user_profile.realm, user_profile.realm,
get_system_bot(settings.NOTIFICATION_BOT), get_system_bot(settings.NOTIFICATION_BOT),
prereg_user.referred_by, prereg_user.referred_by,
f"{user_profile.full_name} <`{user_profile.email}`> accepted your invitation to join Zulip!" f"{user_profile.full_name} <`{user_profile.email}`> accepted your invitation to join Zulip!",
) )
# Mark any other PreregistrationUsers that are STATUS_ACTIVE as # Mark any other PreregistrationUsers that are STATUS_ACTIVE as
# inactive so we can keep track of the PreregistrationUser we # inactive so we can keep track of the PreregistrationUser we
@@ -523,7 +523,7 @@ def do_create_user(email: str, password: Optional[str], realm: Realm, full_name:
realm=user_profile.realm, modified_user=user_profile, realm=user_profile.realm, modified_user=user_profile,
event_type=RealmAuditLog.USER_CREATED, event_time=event_time, event_type=RealmAuditLog.USER_CREATED, event_time=event_time,
extra_data=ujson.dumps({ extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
})) }))
do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'], do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
user_profile.is_bot, event_time) user_profile.is_bot, event_time)
@@ -554,7 +554,7 @@ def do_activate_user(user_profile: UserProfile) -> None:
realm=user_profile.realm, modified_user=user_profile, realm=user_profile.realm, modified_user=user_profile,
event_type=RealmAuditLog.USER_ACTIVATED, event_time=event_time, event_type=RealmAuditLog.USER_ACTIVATED, event_time=event_time,
extra_data=ujson.dumps({ extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
})) }))
do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'], do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
user_profile.is_bot, event_time) user_profile.is_bot, event_time)
@@ -574,7 +574,7 @@ def do_reactivate_user(user_profile: UserProfile, acting_user: Optional[UserProf
realm=user_profile.realm, modified_user=user_profile, acting_user=acting_user, realm=user_profile.realm, modified_user=user_profile, acting_user=acting_user,
event_type=RealmAuditLog.USER_REACTIVATED, event_time=event_time, event_type=RealmAuditLog.USER_REACTIVATED, event_time=event_time,
extra_data=ujson.dumps({ extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
})) }))
do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'], do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
user_profile.is_bot, event_time) user_profile.is_bot, event_time)
@@ -638,7 +638,7 @@ def do_set_realm_authentication_methods(realm: Realm,
type="realm", type="realm",
op="update_dict", op="update_dict",
property='default', property='default',
data=dict(authentication_methods=realm.authentication_methods_dict()) data=dict(authentication_methods=realm.authentication_methods_dict()),
) )
send_event(realm, event, active_user_ids(realm.id)) send_event(realm, event, active_user_ids(realm.id))
@@ -652,7 +652,7 @@ def do_set_realm_message_editing(realm: Realm,
realm.save(update_fields=['allow_message_editing', realm.save(update_fields=['allow_message_editing',
'allow_community_topic_editing', 'allow_community_topic_editing',
'message_content_edit_limit_seconds', 'message_content_edit_limit_seconds',
] ],
) )
event = dict( event = dict(
type="realm", type="realm",
@@ -683,7 +683,7 @@ def do_set_realm_notifications_stream(realm: Realm, stream: Stream, stream_id: i
type="realm", type="realm",
op="update", op="update",
property="notifications_stream_id", property="notifications_stream_id",
value=stream_id value=stream_id,
) )
send_event(realm, event, active_user_ids(realm.id)) send_event(realm, event, active_user_ids(realm.id))
@@ -695,7 +695,7 @@ def do_set_realm_signup_notifications_stream(realm: Realm, stream: Stream,
type="realm", type="realm",
op="update", op="update",
property="signup_notifications_stream_id", property="signup_notifications_stream_id",
value=stream_id value=stream_id,
) )
send_event(realm, event, active_user_ids(realm.id)) send_event(realm, event, active_user_ids(realm.id))
@@ -719,7 +719,7 @@ def do_deactivate_realm(realm: Realm, acting_user: Optional[UserProfile]=None) -
RealmAuditLog.objects.create( RealmAuditLog.objects.create(
realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, event_time=event_time, realm=realm, event_type=RealmAuditLog.REALM_DEACTIVATED, event_time=event_time,
acting_user=acting_user, extra_data=ujson.dumps({ acting_user=acting_user, extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
})) }))
ScheduledEmail.objects.filter(realm=realm).delete() ScheduledEmail.objects.filter(realm=realm).delete()
@@ -741,7 +741,7 @@ def do_reactivate_realm(realm: Realm) -> None:
RealmAuditLog.objects.create( RealmAuditLog.objects.create(
realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED, event_time=event_time, realm=realm, event_type=RealmAuditLog.REALM_REACTIVATED, event_time=event_time,
extra_data=ujson.dumps({ extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
})) }))
def do_change_realm_subdomain(realm: Realm, new_subdomain: str) -> None: def do_change_realm_subdomain(realm: Realm, new_subdomain: str) -> None:
@@ -790,7 +790,7 @@ def do_deactivate_user(user_profile: UserProfile,
realm=user_profile.realm, modified_user=user_profile, acting_user=acting_user, realm=user_profile.realm, modified_user=user_profile, acting_user=acting_user,
event_type=RealmAuditLog.USER_DEACTIVATED, event_time=event_time, event_type=RealmAuditLog.USER_DEACTIVATED, event_time=event_time,
extra_data=ujson.dumps({ extra_data=ujson.dumps({
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm) RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
})) }))
do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'], do_increment_logging_stat(user_profile.realm, COUNT_STATS['active_users_log:is_bot:day'],
user_profile.is_bot, event_time, increment=-1) user_profile.is_bot, event_time, increment=-1)
@@ -914,7 +914,7 @@ def do_start_email_change_process(user_profile: UserProfile, new_email: str) ->
context.update({ context.update({
'old_email': old_email, 'old_email': old_email,
'new_email': new_email, 'new_email': new_email,
'activate_url': activation_url 'activate_url': activation_url,
}) })
language = user_profile.default_language language = user_profile.default_language
send_email('zerver/emails/confirm_new_email', to_emails=[new_email], send_email('zerver/emails/confirm_new_email', to_emails=[new_email],
@@ -1117,7 +1117,7 @@ def get_recipient_info(recipient: Recipient,
query = query_for_ids( query = query_for_ids(
query=query, query=query,
user_ids=sorted(list(user_ids)), user_ids=sorted(list(user_ids)),
field='id' field='id',
) )
rows = list(query) rows = list(query)
else: else:
@@ -1150,16 +1150,16 @@ def get_recipient_info(recipient: Recipient,
active_user_ids = get_ids_for(lambda r: True) active_user_ids = get_ids_for(lambda r: True)
push_notify_user_ids = get_ids_for( push_notify_user_ids = get_ids_for(
lambda r: r['enable_online_push_notifications'] lambda r: r['enable_online_push_notifications'],
) )
# Service bots don't get UserMessage rows. # Service bots don't get UserMessage rows.
um_eligible_user_ids = get_ids_for( um_eligible_user_ids = get_ids_for(
lambda r: not is_service_bot(r) lambda r: not is_service_bot(r),
) )
long_term_idle_user_ids = get_ids_for( long_term_idle_user_ids = get_ids_for(
lambda r: r['long_term_idle'] lambda r: r['long_term_idle'],
) )
# These two bot data structures need to filter from the full set # These two bot data structures need to filter from the full set
@@ -1193,7 +1193,7 @@ def get_recipient_info(recipient: Recipient,
um_eligible_user_ids=um_eligible_user_ids, um_eligible_user_ids=um_eligible_user_ids,
long_term_idle_user_ids=long_term_idle_user_ids, long_term_idle_user_ids=long_term_idle_user_ids,
default_bot_user_ids=default_bot_user_ids, default_bot_user_ids=default_bot_user_ids,
service_bot_tuples=service_bot_tuples service_bot_tuples=service_bot_tuples,
) )
return info return info
@@ -1323,7 +1323,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str,
stream_id = message['message'].recipient.type_id stream_id = message['message'].recipient.type_id
stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget( stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
stream_id=stream_id, stream_id=stream_id,
topic_name=message['message'].topic_name() topic_name=message['message'].topic_name(),
) )
else: else:
stream_topic = None stream_topic = None
@@ -1409,7 +1409,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str,
stream_push_user_ids = message['stream_push_user_ids'], stream_push_user_ids = message['stream_push_user_ids'],
stream_email_user_ids = message['stream_email_user_ids'], stream_email_user_ids = message['stream_email_user_ids'],
mentioned_user_ids=mentioned_user_ids, mentioned_user_ids=mentioned_user_ids,
mark_as_read=mark_as_read mark_as_read=mark_as_read,
) )
for um in user_messages: for um in user_messages:
@@ -1531,7 +1531,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str,
"message": wide_message_dict, "message": wide_message_dict,
"trigger": event['trigger'], "trigger": event['trigger'],
"user_profile_id": event["user_profile_id"], "user_profile_id": event["user_profile_id"],
} },
) )
# Note that this does not preserve the order of message ids # Note that this does not preserve the order of message ids
@@ -2050,7 +2050,7 @@ def check_schedule_message(sender: UserProfile, client: Client,
topic_name: Optional[str], message_content: str, topic_name: Optional[str], message_content: str,
delivery_type: str, deliver_at: datetime.datetime, delivery_type: str, deliver_at: datetime.datetime,
realm: Optional[Realm]=None, realm: Optional[Realm]=None,
forwarder_user_profile: Optional[UserProfile]=None forwarder_user_profile: Optional[UserProfile]=None,
) -> int: ) -> int:
addressee = Addressee.legacy_build( addressee = Addressee.legacy_build(
sender, sender,
@@ -2334,7 +2334,7 @@ def _internal_prep_message(realm: Realm,
def internal_prep_stream_message( def internal_prep_stream_message(
realm: Realm, sender: UserProfile, realm: Realm, sender: UserProfile,
stream: Stream, topic: str, content: str stream: Stream, topic: str, content: str,
) -> Optional[Dict[str, Any]]: ) -> Optional[Dict[str, Any]]:
""" """
See _internal_prep_message for details of how this works. See _internal_prep_message for details of how this works.
@@ -2350,7 +2350,7 @@ def internal_prep_stream_message(
def internal_prep_stream_message_by_name( def internal_prep_stream_message_by_name(
realm: Realm, sender: UserProfile, realm: Realm, sender: UserProfile,
stream_name: str, topic: str, content: str stream_name: str, topic: str, content: str,
) -> Optional[Dict[str, Any]]: ) -> Optional[Dict[str, Any]]:
""" """
See _internal_prep_message for details of how this works. See _internal_prep_message for details of how this works.
@@ -2400,7 +2400,7 @@ def internal_send_stream_message(
message = internal_prep_stream_message( message = internal_prep_stream_message(
realm, sender, stream, realm, sender, stream,
topic, content topic, content,
) )
if message is None: if message is None:
@@ -2410,11 +2410,11 @@ def internal_send_stream_message(
def internal_send_stream_message_by_name( def internal_send_stream_message_by_name(
realm: Realm, sender: UserProfile, realm: Realm, sender: UserProfile,
stream_name: str, topic: str, content: str stream_name: str, topic: str, content: str,
) -> Optional[int]: ) -> Optional[int]:
message = internal_prep_stream_message_by_name( message = internal_prep_stream_message_by_name(
realm, sender, stream_name, realm, sender, stream_name,
topic, content topic, content,
) )
if message is None: if message is None:
@@ -2591,7 +2591,7 @@ def get_subscribers_query(stream: Stream, requesting_user: Optional[UserProfile]
# want to be able to easily reactivate them with their old subscriptions. This # want to be able to easily reactivate them with their old subscriptions. This
# is why the query here has to look at the UserProfile.is_active flag. # is why the query here has to look at the UserProfile.is_active flag.
subscriptions = get_active_subscriptions_for_stream_id(stream.id).filter( subscriptions = get_active_subscriptions_for_stream_id(stream.id).filter(
user_profile__is_active=True user_profile__is_active=True,
) )
return subscriptions return subscriptions
@@ -3039,7 +3039,7 @@ def log_subscription_property_change(user_email: str, stream_name: str, property
log_event(event) log_event(event)
def do_change_subscription_property(user_profile: UserProfile, sub: Subscription, def do_change_subscription_property(user_profile: UserProfile, sub: Subscription,
stream: Stream, property_name: str, value: Any stream: Stream, property_name: str, value: Any,
) -> None: ) -> None:
database_property_name = property_name database_property_name = property_name
event_property_name = property_name event_property_name = property_name
@@ -3147,16 +3147,16 @@ def do_change_bot_owner(user_profile: UserProfile, bot_owner: UserProfile,
bot=dict( bot=dict(
user_id=user_profile.id, user_id=user_profile.id,
)), )),
{previous_owner.id, }) {previous_owner.id})
# Do not send update event for previous bot owner. # Do not send update event for previous bot owner.
update_users = update_users - {previous_owner.id, } update_users = update_users - {previous_owner.id}
# Notify the new owner that the bot has been added. # Notify the new owner that the bot has been added.
if not bot_owner.is_realm_admin: if not bot_owner.is_realm_admin:
add_event = created_bot_event(user_profile) add_event = created_bot_event(user_profile)
send_event(user_profile.realm, add_event, {bot_owner.id, }) send_event(user_profile.realm, add_event, {bot_owner.id})
# Do not send update event for bot_owner. # Do not send update event for bot_owner.
update_users = update_users - {bot_owner.id, } update_users = update_users - {bot_owner.id}
send_event(user_profile.realm, send_event(user_profile.realm,
dict(type='realm_bot', dict(type='realm_bot',
@@ -3235,7 +3235,7 @@ def notify_avatar_url_change(user_profile: UserProfile) -> None:
avatar_version=user_profile.avatar_version, avatar_version=user_profile.avatar_version,
# Even clients using client_gravatar don't need the email, # Even clients using client_gravatar don't need the email,
# since we're sending the URL anyway. # since we're sending the URL anyway.
user_id=user_profile.id user_id=user_profile.id,
) )
send_event(user_profile.realm, send_event(user_profile.realm,
@@ -3418,7 +3418,7 @@ def do_change_stream_invite_only(stream: Stream, invite_only: bool,
history_public_to_subscribers = get_default_value_for_history_public_to_subscribers( history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
stream.realm, stream.realm,
invite_only, invite_only,
history_public_to_subscribers history_public_to_subscribers,
) )
stream.invite_only = invite_only stream.invite_only = invite_only
stream.history_public_to_subscribers = history_public_to_subscribers stream.history_public_to_subscribers = history_public_to_subscribers
@@ -3530,7 +3530,7 @@ def do_rename_stream(stream: Stream,
'user_name': user_profile.full_name, 'user_name': user_profile.full_name,
'user_id': user_profile.id, 'user_id': user_profile.id,
'old_stream_name': old_name, 'old_stream_name': old_name,
'new_stream_name': new_name} 'new_stream_name': new_name},
) )
# Even though the token doesn't change, the web client needs to update the # Even though the token doesn't change, the web client needs to update the
# email forwarding address to display the correctly-escaped new name. # email forwarding address to display the correctly-escaped new name.
@@ -3548,7 +3548,7 @@ def do_change_stream_description(stream: Stream, new_description: str) -> None:
name=stream.name, name=stream.name,
stream_id=stream.id, stream_id=stream.id,
value=new_description, value=new_description,
rendered_description=stream.rendered_description rendered_description=stream.rendered_description,
) )
send_event(stream.realm, event, can_access_stream_user_ids(stream)) send_event(stream.realm, event, can_access_stream_user_ids(stream))
@@ -3605,7 +3605,7 @@ def do_create_realm(string_id: str, name: str,
sender, sender,
signups_stream, signups_stream,
topic, topic,
signup_message signup_message,
) )
except Stream.DoesNotExist: # nocoverage except Stream.DoesNotExist: # nocoverage
# If the signups stream hasn't been created in the admin # If the signups stream hasn't been created in the admin
@@ -3683,14 +3683,14 @@ def lookup_default_stream_groups(default_stream_group_names: List[str],
def notify_default_streams(realm: Realm) -> None: def notify_default_streams(realm: Realm) -> None:
event = dict( event = dict(
type="default_streams", type="default_streams",
default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm.id)) default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm.id)),
) )
send_event(realm, event, active_non_guest_user_ids(realm.id)) send_event(realm, event, active_non_guest_user_ids(realm.id))
def notify_default_stream_groups(realm: Realm) -> None: def notify_default_stream_groups(realm: Realm) -> None:
event = dict( event = dict(
type="default_stream_groups", type="default_stream_groups",
default_stream_groups=default_stream_groups_to_dicts_sorted(get_default_stream_groups(realm)) default_stream_groups=default_stream_groups_to_dicts_sorted(get_default_stream_groups(realm)),
) )
send_event(realm, event, active_non_guest_user_ids(realm.id)) send_event(realm, event, active_non_guest_user_ids(realm.id))
@@ -3866,13 +3866,13 @@ def do_update_user_presence(user_profile: UserProfile,
defaults = dict( defaults = dict(
timestamp=log_time, timestamp=log_time,
status=status, status=status,
realm_id=user_profile.realm_id realm_id=user_profile.realm_id,
) )
(presence, created) = UserPresence.objects.get_or_create( (presence, created) = UserPresence.objects.get_or_create(
user_profile = user_profile, user_profile = user_profile,
client = client, client = client,
defaults = defaults defaults = defaults,
) )
stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10) stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
@@ -3992,13 +3992,13 @@ def do_mark_all_as_read(user_profile: UserProfile, client: Client) -> int:
log_statsd_event('bankruptcy') log_statsd_event('bankruptcy')
msgs = UserMessage.objects.filter( msgs = UserMessage.objects.filter(
user_profile=user_profile user_profile=user_profile,
).extra( ).extra(
where=[UserMessage.where_unread()] where=[UserMessage.where_unread()],
) )
count = msgs.update( count = msgs.update(
flags=F('flags').bitor(UserMessage.flags.read) flags=F('flags').bitor(UserMessage.flags.read),
) )
event = dict( event = dict(
@@ -4006,7 +4006,7 @@ def do_mark_all_as_read(user_profile: UserProfile, client: Client) -> int:
operation='add', operation='add',
flag='read', flag='read',
messages=[], # we don't send messages, since the client reloads anyway messages=[], # we don't send messages, since the client reloads anyway
all=True all=True,
) )
send_event(user_profile.realm, event, [user_profile.id]) send_event(user_profile.realm, event, [user_profile.id])
@@ -4028,7 +4028,7 @@ def do_mark_stream_messages_as_read(user_profile: UserProfile,
log_statsd_event('mark_stream_as_read') log_statsd_event('mark_stream_as_read')
msgs = UserMessage.objects.filter( msgs = UserMessage.objects.filter(
user_profile=user_profile user_profile=user_profile,
) )
recipient = stream.recipient recipient = stream.recipient
@@ -4041,13 +4041,13 @@ def do_mark_stream_messages_as_read(user_profile: UserProfile,
) )
msgs = msgs.extra( msgs = msgs.extra(
where=[UserMessage.where_unread()] where=[UserMessage.where_unread()],
) )
message_ids = list(msgs.values_list('message__id', flat=True)) message_ids = list(msgs.values_list('message__id', flat=True))
count = msgs.update( count = msgs.update(
flags=F('flags').bitor(UserMessage.flags.read) flags=F('flags').bitor(UserMessage.flags.read),
) )
event = dict( event = dict(
@@ -4191,7 +4191,7 @@ def get_user_info_for_message_updates(message_id: int) -> MessageUpdateUserInfoR
# decision we change in the future. # decision we change in the future.
query = UserMessage.objects.filter( query = UserMessage.objects.filter(
message=message_id, message=message_id,
flags=~UserMessage.flags.historical flags=~UserMessage.flags.historical,
).values('user_profile_id', 'flags') ).values('user_profile_id', 'flags')
rows = list(query) rows = list(query)
@@ -4284,7 +4284,7 @@ def do_update_embedded_data(user_profile: UserProfile,
def user_info(um: UserMessage) -> Dict[str, Any]: def user_info(um: UserMessage) -> Dict[str, Any]:
return { return {
'id': um.user_profile_id, 'id': um.user_profile_id,
'flags': um.flags_list() 'flags': um.flags_list(),
} }
send_event(user_profile.realm, event, list(map(user_info, ums))) send_event(user_profile.realm, event, list(map(user_info, ums)))
@@ -4435,7 +4435,7 @@ def do_update_message(user_profile: UserProfile, message: Message,
propagate_mode=propagate_mode, propagate_mode=propagate_mode,
orig_topic_name=orig_topic_name, orig_topic_name=orig_topic_name,
topic_name=topic_name, topic_name=topic_name,
new_stream=new_stream new_stream=new_stream,
) )
changed_messages += messages_list changed_messages += messages_list
@@ -4458,13 +4458,13 @@ def do_update_message(user_profile: UserProfile, message: Message,
def user_info(um: UserMessage) -> Dict[str, Any]: def user_info(um: UserMessage) -> Dict[str, Any]:
return { return {
'id': um.user_profile_id, 'id': um.user_profile_id,
'flags': um.flags_list() 'flags': um.flags_list(),
} }
def subscriber_info(user_id: int) -> Dict[str, Any]: def subscriber_info(user_id: int) -> Dict[str, Any]:
return { return {
'id': user_id, 'id': user_id,
'flags': ['read'] 'flags': ['read'],
} }
# The following blocks arranges that users who are subscribed to a # The following blocks arranges that users who are subscribed to a
@@ -4542,7 +4542,7 @@ def do_delete_messages(realm: Realm, messages: Iterable[Message]) -> None:
# there are no UserMessage objects associated with the # there are no UserMessage objects associated with the
# message. # message.
events_and_users_to_notify.append( events_and_users_to_notify.append(
(event, message_id_to_notifiable_users.get(message.id, [])) (event, message_id_to_notifiable_users.get(message.id, [])),
) )
move_messages_to_archive(message_ids, realm=realm) move_messages_to_archive(message_ids, realm=realm)
@@ -4694,7 +4694,7 @@ def gather_subscriptions_helper(user_profile: UserProfile,
all_streams, all_streams,
user_profile, user_profile,
streams_subscribed_map, streams_subscribed_map,
stream_recipient stream_recipient,
) )
else: else:
# If we're not including subscribers, always return None, # If we're not including subscribers, always return None,
@@ -4871,7 +4871,7 @@ def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
rows = UserPresence.objects.filter( rows = UserPresence.objects.filter(
user_profile_id__in=user_ids, user_profile_id__in=user_ids,
status=UserPresence.ACTIVE, status=UserPresence.ACTIVE,
timestamp__gte=recent timestamp__gte=recent,
).exclude(client__name="ZulipMobile").distinct('user_profile_id').values('user_profile_id') ).exclude(client__name="ZulipMobile").distinct('user_profile_id').values('user_profile_id')
active_user_ids = {row['user_profile_id'] for row in rows} active_user_ids = {row['user_profile_id'] for row in rows}
idle_user_ids = user_ids - active_user_ids idle_user_ids = user_ids - active_user_ids
@@ -4916,7 +4916,7 @@ def estimate_recent_invites(realms: Iterable[Realm], *, days: int) -> int:
recent_invites = RealmCount.objects.filter( recent_invites = RealmCount.objects.filter(
realm__in=realms, realm__in=realms,
property='invites_sent::day', property='invites_sent::day',
end_time__gte=timezone_now() - datetime.timedelta(days=days) end_time__gte=timezone_now() - datetime.timedelta(days=days),
).aggregate(Sum('value'))['value__sum'] ).aggregate(Sum('value'))['value__sum']
if recent_invites is None: if recent_invites is None:
return 0 return 0
@@ -5271,7 +5271,7 @@ def get_occupied_streams(realm: Realm) -> QuerySet:
exists_expression = Exists( exists_expression = Exists(
Subscription.objects.filter(active=True, user_profile__is_active=True, Subscription.objects.filter(active=True, user_profile__is_active=True,
user_profile__realm=realm, user_profile__realm=realm,
recipient_id=OuterRef('recipient_id')) recipient_id=OuterRef('recipient_id')),
) )
occupied_streams = Stream.objects.filter(realm=realm, deactivated=False) \ occupied_streams = Stream.objects.filter(realm=realm, deactivated=False) \
.annotate(occupied=exists_expression).filter(occupied=True) .annotate(occupied=exists_expression).filter(occupied=True)
@@ -5285,7 +5285,7 @@ def get_web_public_streams(realm: Realm) -> List[Dict[str, Any]]:
def do_get_streams( def do_get_streams(
user_profile: UserProfile, include_public: bool=True, user_profile: UserProfile, include_public: bool=True,
include_subscribed: bool=True, include_all_active: bool=False, include_subscribed: bool=True, include_all_active: bool=False,
include_default: bool=False, include_owner_subscribed: bool=False include_default: bool=False, include_owner_subscribed: bool=False,
) -> List[Dict[str, Any]]: ) -> List[Dict[str, Any]]:
if include_all_active and not user_profile.is_api_super_user: if include_all_active and not user_profile.is_api_super_user:
raise JsonableError(_("User not authorized for this query")) raise JsonableError(_("User not authorized for this query"))
@@ -5490,7 +5490,7 @@ def notify_user_update_custom_profile_data(user_profile: UserProfile,
send_event(user_profile.realm, event, active_user_ids(user_profile.realm.id)) send_event(user_profile.realm, event, active_user_ids(user_profile.realm.id))
def do_update_user_custom_profile_data_if_changed(user_profile: UserProfile, def do_update_user_custom_profile_data_if_changed(user_profile: UserProfile,
data: List[Dict[str, Union[int, str, List[int]]]] data: List[Dict[str, Union[int, str, List[int]]]],
) -> None: ) -> None:
with transaction.atomic(): with transaction.atomic():
for field in data: for field in data:
@@ -5518,7 +5518,7 @@ def do_update_user_custom_profile_data_if_changed(user_profile: UserProfile,
"type": field_value.field.field_type}) "type": field_value.field.field_type})
def check_remove_custom_profile_field_value(user_profile: UserProfile, def check_remove_custom_profile_field_value(user_profile: UserProfile,
field_id: Union[int, str, List[int]] field_id: Union[int, str, List[int]],
) -> None: ) -> None:
try: try:
field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id) field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
@@ -5584,7 +5584,7 @@ def do_update_outgoing_webhook_service(bot_profile: UserProfile,
bot=dict(user_id=bot_profile.id, bot=dict(user_id=bot_profile.id,
services = [dict(base_url=service.base_url, services = [dict(base_url=service.base_url,
interface=service.interface, interface=service.interface,
token=service.token,)], token=service.token)],
), ),
), ),
bot_owner_user_ids(bot_profile)) bot_owner_user_ids(bot_profile))
@@ -5616,7 +5616,7 @@ def get_service_dicts_for_bot(user_profile_id: str) -> List[Dict[str, Any]]:
elif user_profile.bot_type == UserProfile.EMBEDDED_BOT: elif user_profile.bot_type == UserProfile.EMBEDDED_BOT:
try: try:
service_dicts = [{'config_data': get_bot_config(user_profile), service_dicts = [{'config_data': get_bot_config(user_profile),
'service_name': services[0].name 'service_name': services[0].name,
}] }]
# A ConfigError just means that there are no config entries for user_profile. # A ConfigError just means that there are no config entries for user_profile.
except ConfigError: except ConfigError:
@@ -5650,7 +5650,7 @@ def get_service_dicts_for_bots(bot_dicts: List[Dict[str, Any]],
if bot_profile_id in embedded_bot_configs.keys(): if bot_profile_id in embedded_bot_configs.keys():
bot_config = embedded_bot_configs[bot_profile_id] bot_config = embedded_bot_configs[bot_profile_id]
service_dicts = [{'config_data': bot_config, service_dicts = [{'config_data': bot_config,
'service_name': services[0].name 'service_name': services[0].name,
}] }]
service_dicts_by_uid[bot_profile_id] = service_dicts service_dicts_by_uid[bot_profile_id] = service_dicts
return service_dicts_by_uid return service_dicts_by_uid
@@ -5734,7 +5734,7 @@ def do_set_zoom_token(user: UserProfile, token: Optional[Dict[str, object]]) ->
user.zoom_token = token user.zoom_token = token
user.save(update_fields=["zoom_token"]) user.save(update_fields=["zoom_token"])
send_event( send_event(
user.realm, dict(type="has_zoom_token", value=token is not None), [user.id] user.realm, dict(type="has_zoom_token", value=token is not None), [user.id],
) )
def notify_realm_export(user_profile: UserProfile) -> None: def notify_realm_export(user_profile: UserProfile) -> None:
@@ -5764,6 +5764,6 @@ def get_topic_messages(user_profile: UserProfile, stream: Stream,
topic_name: str) -> List[Message]: topic_name: str) -> List[Message]:
query = UserMessage.objects.filter( query = UserMessage.objects.filter(
user_profile=user_profile, user_profile=user_profile,
message__recipient=stream.recipient message__recipient=stream.recipient,
).order_by("id") ).order_by("id")
return [um.message for um in filter_by_topic_name_via_message(query, topic_name)] return [um.message for um in filter_by_topic_name_via_message(query, topic_name)]


@@ -75,7 +75,7 @@ class EmbeddedBotHandler:
if message['type'] == 'stream': if message['type'] == 'stream':
internal_send_stream_message_by_name( internal_send_stream_message_by_name(
self.user_profile.realm, self.user_profile, self.user_profile.realm, self.user_profile,
message['to'], message['topic'], message['content'] message['to'], message['topic'], message['content'],
) )
return return


@@ -100,13 +100,13 @@ EMOJI_REGEX = r'(?P<syntax>:[\w\-\+]+:)'
def verbose_compile(pattern: str) -> Any: def verbose_compile(pattern: str) -> Any:
return re.compile( return re.compile(
"^(.*?)%s(.*?)$" % (pattern,), "^(.*?)%s(.*?)$" % (pattern,),
re.DOTALL | re.UNICODE | re.VERBOSE re.DOTALL | re.UNICODE | re.VERBOSE,
) )
def normal_compile(pattern: str) -> Any: def normal_compile(pattern: str) -> Any:
return re.compile( return re.compile(
r"^(.*?)%s(.*)$" % (pattern,), r"^(.*?)%s(.*)$" % (pattern,),
re.DOTALL | re.UNICODE re.DOTALL | re.UNICODE,
) )
STREAM_LINK_REGEX = r""" STREAM_LINK_REGEX = r"""
@@ -318,7 +318,7 @@ class ElementPair:
self.value = value self.value = value
def walk_tree_with_family(root: Element, def walk_tree_with_family(root: Element,
processor: Callable[[Element], Optional[_T]] processor: Callable[[Element], Optional[_T]],
) -> List[ResultWithFamily[_T]]: ) -> List[ResultWithFamily[_T]]:
results = [] results = []
@@ -339,12 +339,12 @@ def walk_tree_with_family(root: Element,
grandparent=grandparent, grandparent=grandparent,
parent=currElementPair.value, parent=currElementPair.value,
child=child, child=child,
in_blockquote=has_blockquote_ancestor(currElementPair) in_blockquote=has_blockquote_ancestor(currElementPair),
) )
results.append(ResultWithFamily( results.append(ResultWithFamily(
family=family, family=family,
result=result result=result,
)) ))
return results return results
@@ -547,7 +547,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
class_attr: str="message_inline_image", class_attr: str="message_inline_image",
data_id: Optional[str]=None, data_id: Optional[str]=None,
insertion_index: Optional[int]=None, insertion_index: Optional[int]=None,
already_thumbnailed: Optional[bool]=False already_thumbnailed: Optional[bool]=False,
) -> None: ) -> None:
desc = desc if desc is not None else "" desc = desc if desc is not None else ""
@@ -576,10 +576,10 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
# consistency in what gets passed to /thumbnail # consistency in what gets passed to /thumbnail
url = url.lstrip('/') url = url.lstrip('/')
img.set("src", "/thumbnail?url={}&size=thumbnail".format( img.set("src", "/thumbnail?url={}&size=thumbnail".format(
urllib.parse.quote(url, safe='') urllib.parse.quote(url, safe=''),
)) ))
img.set('data-src-fullsize', "/thumbnail?url={}&size=full".format( img.set('data-src-fullsize', "/thumbnail?url={}&size=full".format(
urllib.parse.quote(url, safe='') urllib.parse.quote(url, safe=''),
)) ))
else: else:
img.set("src", url) img.set("src", url)
@@ -1133,7 +1133,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor):
if image_source is not None: if image_source is not None:
found_url = ResultWithFamily( found_url = ResultWithFamily(
family=found_url.family, family=found_url.family,
result=(image_source, image_source) result=(image_source, image_source),
) )
self.handle_image_inlining(root, found_url) self.handle_image_inlining(root, found_url)
continue continue
@@ -1822,7 +1822,7 @@ class Bugdown(markdown.Markdown):
"Realm-specific filters for realm_filters_key %s" % (kwargs['realm'],)], "Realm-specific filters for realm_filters_key %s" % (kwargs['realm'],)],
"realm": [kwargs['realm'], "Realm id"], "realm": [kwargs['realm'], "Realm id"],
"code_block_processor_disabled": [kwargs['code_block_processor_disabled'], "code_block_processor_disabled": [kwargs['code_block_processor_disabled'],
"Disabled for email gateway"] "Disabled for email gateway"],
} }
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
@@ -2014,7 +2014,7 @@ def build_engine(realm_filters: List[Tuple[str, str, int]],
tables.makeExtension(), tables.makeExtension(),
codehilite.makeExtension( codehilite.makeExtension(
linenums=False, linenums=False,
guess_lang=False guess_lang=False,
), ),
]) ])
return engine return engine
@@ -2104,7 +2104,7 @@ def get_email_info(realm_id: int, emails: Set[str]) -> Dict[str, FullNameInfo]:
} }
rows = UserProfile.objects.filter( rows = UserProfile.objects.filter(
realm_id=realm_id realm_id=realm_id,
).filter( ).filter(
functools.reduce(lambda a, b: a | b, q_list), functools.reduce(lambda a, b: a | b, q_list),
).values( ).values(


@@ -24,7 +24,7 @@ class MarkdownArgumentsTableGenerator(Extension):
def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None: def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None:
md.preprocessors.add( md.preprocessors.add(
'generate_api_arguments', APIArgumentsTablePreprocessor(md, self.getConfigs()), '_begin' 'generate_api_arguments', APIArgumentsTablePreprocessor(md, self.getConfigs()), '_begin',
) )


@@ -15,7 +15,7 @@ class MarkdownReturnValuesTableGenerator(Extension):
def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None: def extendMarkdown(self, md: markdown.Markdown, md_globals: Dict[str, Any]) -> None:
md.preprocessors.add( md.preprocessors.add(
'generate_return_values', APIReturnValuesTablePreprocessor(md, self.getConfigs()), '_begin' 'generate_return_values', APIReturnValuesTablePreprocessor(md, self.getConfigs()), '_begin',
) )


@@ -133,8 +133,8 @@ class FencedCodeExtension(markdown.Extension):
self.config = { self.config = {
'run_content_validators': [ 'run_content_validators': [
config.get('run_content_validators', False), config.get('run_content_validators', False),
'Boolean specifying whether to run content validation code in CodeHandler' 'Boolean specifying whether to run content validation code in CodeHandler',
] ],
} }
for key, value in config.items(): for key, value in config.items():


@@ -15,7 +15,7 @@ class MarkdownIncludeCustom(MarkdownInclude):
md.preprocessors.add( md.preprocessors.add(
'include_wrapper', 'include_wrapper',
IncludeCustomPreprocessor(md, self.getConfigs()), IncludeCustomPreprocessor(md, self.getConfigs()),
'_begin' '_begin',
) )
class IncludeCustomPreprocessor(IncludePreprocessor): class IncludeCustomPreprocessor(IncludePreprocessor):
@@ -39,7 +39,7 @@ class IncludeCustomPreprocessor(IncludePreprocessor):
filename = os.path.expanduser(filename) filename = os.path.expanduser(filename)
if not os.path.isabs(filename): if not os.path.isabs(filename):
filename = os.path.normpath( filename = os.path.normpath(
os.path.join(self.base_path, filename) os.path.join(self.base_path, filename),
) )
try: try:
with open(filename, encoding=self.encoding) as r: with open(filename, encoding=self.encoding) as r:


@@ -10,7 +10,7 @@ class NestedCodeBlocksRenderer(Extension):
md.treeprocessors.add( md.treeprocessors.add(
'nested_code_blocks', 'nested_code_blocks',
NestedCodeBlocksRendererTreeProcessor(md, self.getConfigs()), NestedCodeBlocksRendererTreeProcessor(md, self.getConfigs()),
'_end' '_end',
) )
class NestedCodeBlocksRendererTreeProcessor(markdown.treeprocessors.Treeprocessor): class NestedCodeBlocksRendererTreeProcessor(markdown.treeprocessors.Treeprocessor):
@@ -33,7 +33,7 @@ class NestedCodeBlocksRendererTreeProcesso
return None return None
def get_nested_code_blocks( def get_nested_code_blocks(
self, code_tags: List[ResultWithFamily[Tuple[str, Optional[str]]]] self, code_tags: List[ResultWithFamily[Tuple[str, Optional[str]]]],
) -> List[ResultWithFamily[Tuple[str, Optional[str]]]]: ) -> List[ResultWithFamily[Tuple[str, Optional[str]]]]:
nested_code_blocks = [] nested_code_blocks = []
for code_tag in code_tags: for code_tag in code_tags:
@@ -59,7 +59,7 @@ class NestedCodeBlocksRendererTreeProcesso
def replace_element( def replace_element(
self, parent: Optional[Element], self, parent: Optional[Element],
replacement: Element, replacement: Element,
element_to_replace: Element element_to_replace: Element,
) -> None: ) -> None:
if parent is None: if parent is None:
return return


@@ -124,7 +124,7 @@ def bulk_create_streams(realm: Realm,
history_public_to_subscribers=options["history_public_to_subscribers"], history_public_to_subscribers=options["history_public_to_subscribers"],
is_web_public=options.get("is_web_public", False), is_web_public=options.get("is_web_public", False),
is_in_zephyr_realm=realm.is_zephyr_mirror_realm, is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
) ),
) )
# Sort streams by name before creating them so that we can have a # Sort streams by name before creating them so that we can have a
# reliable ordering of `stream_id` across different python versions. # reliable ordering of `stream_id` across different python versions.


@@ -117,7 +117,7 @@ def get_cache_backend(cache_name: Optional[str]) -> BaseCache:
def get_cache_with_key( def get_cache_with_key(
keyfunc: Callable[..., str], keyfunc: Callable[..., str],
cache_name: Optional[str]=None cache_name: Optional[str]=None,
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]: ) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
""" """
The main goal of this function getting value from the cache like in the "cache_with_key". The main goal of this function getting value from the cache like in the "cache_with_key".
@@ -145,7 +145,7 @@ def get_cache_with_key(
def cache_with_key( def cache_with_key(
keyfunc: Callable[..., str], cache_name: Optional[str]=None, keyfunc: Callable[..., str], cache_name: Optional[str]=None,
timeout: Optional[int]=None, with_statsd_key: Optional[str]=None timeout: Optional[int]=None, with_statsd_key: Optional[str]=None,
) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]: ) -> Callable[[Callable[..., ReturnT]], Callable[..., ReturnT]]:
"""Decorator which applies Django caching to a function. """Decorator which applies Django caching to a function.
@@ -382,7 +382,7 @@ def generic_bulk_cached_fetch(
cache_keys[object_id] = cache_key_function(object_id) cache_keys[object_id] = cache_key_function(object_id)
cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many( cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many(
[cache_keys[object_id] for object_id in object_ids] [cache_keys[object_id] for object_id in object_ids],
) )
cached_objects: Dict[str, CacheItemT] = {} cached_objects: Dict[str, CacheItemT] = {}
@@ -445,7 +445,7 @@ realm_user_dict_fields: List[str] = [
'avatar_source', 'avatar_version', 'is_active', 'avatar_source', 'avatar_version', 'is_active',
'role', 'is_bot', 'realm_id', 'timezone', 'role', 'is_bot', 'realm_id', 'timezone',
'date_joined', 'bot_owner_id', 'delivery_email', 'date_joined', 'bot_owner_id', 'delivery_email',
'bot_type' 'bot_type',
] ]
def realm_user_dicts_cache_key(realm_id: int) -> str: def realm_user_dicts_cache_key(realm_id: int) -> str:


@@ -18,7 +18,7 @@ def generate_dev_ldap_dir(mode: str, num_users: int=8) -> Dict[str, Dict[str, An
name = 'LDAP User %d' % (i,) name = 'LDAP User %d' % (i,)
email = 'ldapuser%d@zulip.com' % (i,) email = 'ldapuser%d@zulip.com' % (i,)
phone_number = '999999999%d' % (i,) phone_number = '999999999%d' % (i,)
birthdate = '19%02d-%02d-%02d' % (i, i, i,) birthdate = '19%02d-%02d-%02d' % (i, i, i)
ldap_data.append((name, email, phone_number, birthdate)) ldap_data.append((name, email, phone_number, birthdate))
profile_images = [open(path, "rb").read() for path in profile_images = [open(path, "rb").read() for path in
@@ -28,26 +28,26 @@ def generate_dev_ldap_dir(mode: str, num_users: int=8) -> Dict[str, Dict[str, An
email = user_data[1].lower() email = user_data[1].lower()
email_username = email.split('@')[0] email_username = email.split('@')[0]
common_data = { common_data = {
'cn': [user_data[0], ], 'cn': [user_data[0]],
'userPassword': [email_username, ], 'userPassword': [email_username],
'phoneNumber': [user_data[2], ], 'phoneNumber': [user_data[2]],
'birthDate': [user_data[3], ], 'birthDate': [user_data[3]],
} }
if mode == 'a': if mode == 'a':
ldap_dir['uid=' + email + ',ou=users,dc=zulip,dc=com'] = dict( ldap_dir['uid=' + email + ',ou=users,dc=zulip,dc=com'] = dict(
uid=[email, ], uid=[email],
thumbnailPhoto=[profile_images[i % len(profile_images)], ], thumbnailPhoto=[profile_images[i % len(profile_images)]],
userAccountControl=[LDAP_USER_ACCOUNT_CONTROL_NORMAL, ], userAccountControl=[LDAP_USER_ACCOUNT_CONTROL_NORMAL],
**common_data) **common_data)
elif mode == 'b': elif mode == 'b':
ldap_dir['uid=' + email_username + ',ou=users,dc=zulip,dc=com'] = dict( ldap_dir['uid=' + email_username + ',ou=users,dc=zulip,dc=com'] = dict(
uid=[email_username, ], uid=[email_username],
jpegPhoto=[profile_images[i % len(profile_images)], ], jpegPhoto=[profile_images[i % len(profile_images)]],
**common_data) **common_data)
elif mode == 'c': elif mode == 'c':
ldap_dir['uid=' + email_username + ',ou=users,dc=zulip,dc=com'] = dict( ldap_dir['uid=' + email_username + ',ou=users,dc=zulip,dc=com'] = dict(
uid=[email_username, ], uid=[email_username],
email=[email, ], email=[email],
**common_data) **common_data)
return ldap_dir return ldap_dir


@@ -173,7 +173,7 @@ def handle_digest_email(user_profile_id: int, cutoff: float,
# Start building email template data. # Start building email template data.
context.update({ context.update({
'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest") 'unsubscribe_link': one_click_unsubscribe_link(user_profile, "digest"),
}) })
home_view_streams = Subscription.objects.filter( home_view_streams = Subscription.objects.filter(
@@ -225,7 +225,7 @@ def exclude_subscription_modified_streams(user_profile: UserProfile,
events = [ events = [
RealmAuditLog.SUBSCRIPTION_CREATED, RealmAuditLog.SUBSCRIPTION_CREATED,
RealmAuditLog.SUBSCRIPTION_ACTIVATED, RealmAuditLog.SUBSCRIPTION_ACTIVATED,
RealmAuditLog.SUBSCRIPTION_DEACTIVATED RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
] ]
# Streams where the user's subscription was changed # Streams where the user's subscription was changed


@@ -36,7 +36,7 @@ def get_display_recipient_remote_cache(recipient_id: int, recipient_type: int,
# Right now, we order by ID, which matches the ordering of user # Right now, we order by ID, which matches the ordering of user
# names in the left sidebar. # names in the left sidebar.
user_profile_list = UserProfile.objects.filter( user_profile_list = UserProfile.objects.filter(
subscription__recipient_id=recipient_id subscription__recipient_id=recipient_id,
).order_by('id').values(*display_recipient_fields) ).order_by('id').values(*display_recipient_fields)
return list(user_profile_list) return list(user_profile_list)
@@ -52,10 +52,10 @@ def bulk_get_user_profile_by_id(uids: List[int]) -> Dict[int, UserDisplayRecipie
query_function=lambda ids: list( query_function=lambda ids: list(
UserProfile.objects.filter(id__in=ids).values(*display_recipient_fields)), UserProfile.objects.filter(id__in=ids).values(*display_recipient_fields)),
object_ids=uids, object_ids=uids,
id_fetcher=user_dict_id_fetcher id_fetcher=user_dict_id_fetcher,
) )
def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]] def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]],
) -> Dict[int, DisplayRecipientT]: ) -> Dict[int, DisplayRecipientT]:
""" """
Takes set of tuples of the form (recipient_id, recipient_type, recipient_type_id) Takes set of tuples of the form (recipient_id, recipient_type, recipient_type_id)
@@ -102,7 +102,7 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]]
# Now we have to create display_recipients for personal and huddle messages. # Now we have to create display_recipients for personal and huddle messages.
# We do this via generic_bulk_cached_fetch, supplying apprioprate functions to it. # We do this via generic_bulk_cached_fetch, supplying apprioprate functions to it.
def personal_and_huddle_query_function(recipient_ids: List[int] def personal_and_huddle_query_function(recipient_ids: List[int],
) -> List[Tuple[int, List[UserDisplayRecipient]]]: ) -> List[Tuple[int, List[UserDisplayRecipient]]]:
""" """
Return a list of tuples of the form (recipient_id, [list of UserProfiles]) Return a list of tuples of the form (recipient_id, [list of UserProfiles])
@@ -116,7 +116,7 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]]
recipients = [Recipient( recipients = [Recipient(
id=recipient_id, id=recipient_id,
type=recipient_id_to_type_pair_dict[recipient_id][0], type=recipient_id_to_type_pair_dict[recipient_id][0],
type_id=recipient_id_to_type_pair_dict[recipient_id][1] type_id=recipient_id_to_type_pair_dict[recipient_id][1],
) for recipient_id in recipient_ids] ) for recipient_id in recipient_ids]
# Find all user ids whose UserProfiles we will need to fetch: # Find all user ids whose UserProfiles we will need to fetch:
@@ -144,7 +144,7 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]]
return result return result
def personal_and_huddle_cache_transformer(db_object: Tuple[int, List[UserDisplayRecipient]] def personal_and_huddle_cache_transformer(db_object: Tuple[int, List[UserDisplayRecipient]],
) -> List[UserDisplayRecipient]: ) -> List[UserDisplayRecipient]:
""" """
Takes an element of the list returned by the query_function, maps it to the final Takes an element of the list returned by the query_function, maps it to the final
@ -167,7 +167,7 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]]
query_function=personal_and_huddle_query_function, query_function=personal_and_huddle_query_function,
object_ids=[recipient[0] for recipient in personal_and_huddle_recipients], object_ids=[recipient[0] for recipient in personal_and_huddle_recipients],
id_fetcher=personal_and_huddle_id_fetcher, id_fetcher=personal_and_huddle_id_fetcher,
cache_transformer=personal_and_huddle_cache_transformer cache_transformer=personal_and_huddle_cache_transformer,
) )
# Glue the dicts together and return: # Glue the dicts together and return:


@@ -69,7 +69,7 @@ def report_to_zulip(error_message: str) -> None:
error_bot, error_bot,
error_stream, error_stream,
"email mirror error", "email mirror error",
f"""~~~\n{error_message}\n~~~""" f"""~~~\n{error_message}\n~~~""",
) )
def log_and_report(email_message: EmailMessage, error_message: str, to: Optional[str]) -> None: def log_and_report(email_message: EmailMessage, error_message: str, to: Optional[str]) -> None:
@@ -114,7 +114,7 @@ def get_usable_missed_message_address(address: str) -> MissedMessageEmailAddress
try: try:
mm_address = MissedMessageEmailAddress.objects.select_related().get( mm_address = MissedMessageEmailAddress.objects.select_related().get(
email_token=token, email_token=token,
timestamp__gt=timezone_now() - timedelta(seconds=MissedMessageEmailAddress.EXPIRY_SECONDS) timestamp__gt=timezone_now() - timedelta(seconds=MissedMessageEmailAddress.EXPIRY_SECONDS),
) )
except MissedMessageEmailAddress.DoesNotExist: except MissedMessageEmailAddress.DoesNotExist:
raise ZulipEmailForwardError("Missed message address expired or doesn't exist.") raise ZulipEmailForwardError("Missed message address expired or doesn't exist.")
@@ -371,7 +371,7 @@ def process_missed_message(to: str, message: EmailMessage) -> None:
stream = get_stream_by_id_in_realm(recipient.type_id, user_profile.realm) stream = get_stream_by_id_in_realm(recipient.type_id, user_profile.realm)
internal_send_stream_message( internal_send_stream_message(
user_profile.realm, user_profile, stream, user_profile.realm, user_profile, stream,
topic, body topic, body,
) )
recipient_str = stream.name recipient_str = stream.name
elif recipient.type == Recipient.PERSONAL: elif recipient.type == Recipient.PERSONAL:
@@ -429,15 +429,15 @@ def mirror_email_message(data: Dict[str, str]) -> Dict[str, str]:
except ZulipEmailForwardError as e: except ZulipEmailForwardError as e:
return { return {
"status": "error", "status": "error",
"msg": f"5.1.1 Bad destination mailbox address: {e}" "msg": f"5.1.1 Bad destination mailbox address: {e}",
} }
queue_json_publish( queue_json_publish(
"email_mirror", "email_mirror",
{ {
"message": data['msg_text'], "message": data['msg_text'],
"rcpt_to": rcpt_to "rcpt_to": rcpt_to,
} },
) )
return {"status": "success"} return {"status": "success"}


@@ -305,7 +305,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile,
if len(recipients) != 1: if len(recipients) != 1:
raise ValueError( raise ValueError(
'All missed_messages must have the same recipient and topic %r' % 'All missed_messages must have the same recipient and topic %r' %
(recipients,) (recipients,),
) )
# This link is no longer a part of the email, but keeping the code in case # This link is no longer a part of the email, but keeping the code in case
@@ -495,7 +495,7 @@ def handle_missedmessage_emails(user_profile_id: int,
for m in messages_by_bucket[bucket_tup]: for m in messages_by_bucket[bucket_tup]:
unique_messages[m.id] = dict( unique_messages[m.id] = dict(
message=m, message=m,
trigger=message_ids.get(m.id) trigger=message_ids.get(m.id),
) )
do_send_missedmessage_events_reply_in_zulip( do_send_missedmessage_events_reply_in_zulip(
user_profile, user_profile,


@@ -17,7 +17,7 @@ if not os.path.exists(emoji_codes_path):  # nocoverage
# prod-static/serve/generated/emoji won't exist yet. # prod-static/serve/generated/emoji won't exist yet.
emoji_codes_path = os.path.join( emoji_codes_path = os.path.join(
os.path.dirname(__file__), os.path.dirname(__file__),
"../../static/generated/emoji/emoji_codes.json" "../../static/generated/emoji/emoji_codes.json",
) )
with open(emoji_codes_path) as fp: with open(emoji_codes_path) as fp:


@@ -83,7 +83,7 @@ def zulip_browser_error(report: Dict[str, Any]) -> None:
error_bot, error_bot,
errors_stream, errors_stream,
format_email_subject(email_subject), format_email_subject(email_subject),
body body,
) )
def notify_server_error(report: Dict[str, Any], skip_error_zulip: Optional[bool]=False) -> None: def notify_server_error(report: Dict[str, Any], skip_error_zulip: Optional[bool]=False) -> None:
@@ -125,7 +125,7 @@ def zulip_server_error(report: Dict[str, Any]) -> None:
error_bot, error_bot,
errors_stream, errors_stream,
format_email_subject(email_subject), format_email_subject(email_subject),
message message,
) )
def email_server_error(report: Dict[str, Any]) -> None: def email_server_error(report: Dict[str, Any]) -> None:


@@ -7,7 +7,7 @@ from django.utils.translation import ugettext as _
from django.conf import settings from django.conf import settings
from importlib import import_module from importlib import import_module
from typing import ( from typing import (
Any, Callable, Dict, Iterable, Optional, Sequence, Set Any, Callable, Dict, Iterable, Optional, Sequence, Set,
) )
session_engine = import_module(settings.SESSION_ENGINE) session_engine = import_module(settings.SESSION_ENGINE)
@@ -56,7 +56,7 @@ from zerver.models import (
Client, Message, Realm, UserProfile, UserMessage, Client, Message, Realm, UserProfile, UserMessage,
realm_filters_for_realm, realm_filters_for_realm,
custom_profile_fields_for_realm, get_realm_domains, custom_profile_fields_for_realm, get_realm_domains,
get_default_stream_groups, CustomProfileField, Stream get_default_stream_groups, CustomProfileField, Stream,
) )
from zproject.backends import email_auth_enabled, password_auth_enabled from zproject.backends import email_auth_enabled, password_auth_enabled
from version import ZULIP_VERSION, API_FEATURE_LEVEL from version import ZULIP_VERSION, API_FEATURE_LEVEL
@ -255,7 +255,7 @@ def fetch_initial_state_data(user_profile: UserProfile,
for integration in WEBHOOK_INTEGRATIONS: for integration in WEBHOOK_INTEGRATIONS:
realm_incoming_webhook_bots.append({ realm_incoming_webhook_bots.append({
'name': integration.name, 'name': integration.name,
'config': {c[1]: c[0] for c in integration.config_options} 'config': {c[1]: c[0] for c in integration.config_options},
}) })
state['realm_incoming_webhook_bots'] = realm_incoming_webhook_bots state['realm_incoming_webhook_bots'] = realm_incoming_webhook_bots
@ -375,7 +375,7 @@ def apply_event(state: Dict[str, Any],
conversations[recipient_id] = dict( conversations[recipient_id] = dict(
user_ids=sorted([user_dict['id'] for user_dict in user_ids=sorted([user_dict['id'] for user_dict in
event['message']['display_recipient'] if event['message']['display_recipient'] if
user_dict['id'] != user_profile.id]) user_dict['id'] != user_profile.id]),
) )
conversations[recipient_id]['max_message_id'] = event['message']['id'] conversations[recipient_id]['max_message_id'] = event['message']['id']
return return
@ -482,11 +482,11 @@ def apply_event(state: Dict[str, Any],
if 'rendered_value' in person['custom_profile_field']: if 'rendered_value' in person['custom_profile_field']:
p['profile_data'][custom_field_id] = { p['profile_data'][custom_field_id] = {
'value': custom_field_new_value, 'value': custom_field_new_value,
'rendered_value': person['custom_profile_field']['rendered_value'] 'rendered_value': person['custom_profile_field']['rendered_value'],
} }
else: else:
p['profile_data'][custom_field_id] = { p['profile_data'][custom_field_id] = {
'value': custom_field_new_value 'value': custom_field_new_value,
} }
elif event['type'] == 'realm_bot': elif event['type'] == 'realm_bot':
@ -926,7 +926,7 @@ def post_process_state(user_profile: UserProfile, ret: Dict[str, Any],
# Reformat recent_private_conversations to be a list of dictionaries, rather than a dict. # Reformat recent_private_conversations to be a list of dictionaries, rather than a dict.
ret['recent_private_conversations'] = sorted([ ret['recent_private_conversations'] = sorted([
dict( dict(
**value **value,
) for (recipient_id, value) in ret['raw_recent_private_conversations'].items() ) for (recipient_id, value) in ret['raw_recent_private_conversations'].items()
], key = lambda x: -x["max_message_id"]) ], key = lambda x: -x["max_message_id"])
del ret['raw_recent_private_conversations'] del ret['raw_recent_private_conversations']

View File

@@ -463,7 +463,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona
         config.custom_fetch(
             response=response,
             config=config,
-            context=context
+            context=context,
         )
         if config.custom_tables:
             for t in config.custom_tables:
@@ -533,7 +533,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona
         config.post_process_data(
             response=response,
             config=config,
-            context=context
+            context=context,
         )
     # Now walk our children. It's extremely important to respect
@@ -551,7 +551,7 @@ def get_realm_config() -> Config:
     realm_config = Config(
         table='zerver_realm',
-        is_seeded=True
+        is_seeded=True,
     )
     Config(
@@ -593,7 +593,7 @@ def get_realm_config() -> Config:
         table='zerver_client',
         model=Client,
         virtual_parent=realm_config,
-        use_all=True
+        use_all=True,
     )
     user_profile_config = Config(
@@ -740,7 +740,7 @@ def get_realm_config() -> Config:
         id_source=('_stream_recipient', 'type_id'),
         source_filter=lambda r: r['type'] == Recipient.STREAM,
         exclude=['email_token'],
-        post_process_data=sanity_check_stream_data
+        post_process_data=sanity_check_stream_data,
     )
     #
@@ -773,7 +773,7 @@ def get_realm_config() -> Config:
             '_user_subscription',
             '_stream_subscription',
             '_huddle_subscription',
-        ]
+        ],
     )
     return realm_config
@@ -999,7 +999,7 @@ def export_partial_message_files(realm: Realm,
     # were specified as being allowed to be exported. "Them"
     # refers to other users.
     user_ids_for_us = get_ids(
-        response['zerver_userprofile']
+        response['zerver_userprofile'],
     )
     ids_of_our_possible_senders = get_ids(
         response['zerver_userprofile'] +
@@ -1408,7 +1408,7 @@ def export_emoji_from_local(realm: Realm, local_dir: Path, output_dir: Path) ->
     for realm_emoji in RealmEmoji.objects.filter(realm_id=realm.id):
         emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
             realm_id=realm.id,
-            emoji_file_name=realm_emoji.file_name
+            emoji_file_name=realm_emoji.file_name,
         )
         # Use 'mark_sanitized' to work around false positive caused by Pysa
@@ -1490,7 +1490,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int,
         response=response,
         config=realm_config,
         seed_object=realm,
-        context=dict(realm=realm, exportable_user_ids=exportable_user_ids)
+        context=dict(realm=realm, exportable_user_ids=exportable_user_ids),
     )
     logging.info('...DONE with get_realm_config() data')
@@ -1584,7 +1584,7 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path,
         os.path.join(settings.DEPLOY_ROOT, "manage.py"),
         'export_usermessage_batch',
         '--path', str(output_dir),
-        '--thread', str(shard_id)
+        '--thread', str(shard_id),
     ]
     if consent_message_id is not None:
         arguments.extend(['--consent-message-id', str(consent_message_id)])

View File

@@ -94,7 +94,7 @@ def fix_unsubscribed(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'get recipients',
-        find_recipients
+        find_recipients,
     )
     if not recipient_ids:
@@ -129,7 +129,7 @@ def fix_unsubscribed(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'finding unread messages for non-active streams',
-        find
+        find,
     )
     if not user_message_ids:
@@ -140,7 +140,7 @@ def fix_unsubscribed(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'fixing unread messages for non-active streams',
-        fix
+        fix,
     )
 def fix_pre_pointer(cursor: CursorObj, user_profile: UserProfile) -> None:
@@ -176,7 +176,7 @@ def fix_pre_pointer(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'find_non_muted_recipients',
-        find_non_muted_recipients
+        find_non_muted_recipients,
     )
     if not recipient_ids:
@@ -218,7 +218,7 @@ def fix_pre_pointer(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'finding pre-pointer messages that are not muted',
-        find_old_ids
+        find_old_ids,
     )
     if not user_message_ids:
@@ -229,7 +229,7 @@ def fix_pre_pointer(cursor: CursorObj, user_profile: UserProfile) -> None:
     get_timing(
         'fixing unread messages for pre-pointer non-muted messages',
-        fix
+        fix,
     )
 def fix(user_profile: UserProfile) -> None:

View File

@@ -60,7 +60,7 @@ def get_language_list_for_templates(default_language: str) -> List[Dict[str, Dic
             'name': name,
             'code': lang['code'],
             'percent': percent,
-            'selected': selected
+            'selected': selected,
         }
         formatted_list.append(item)

View File

@@ -653,7 +653,7 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av
             relative_path = "/".join([
                 str(record['realm_id']),
                 random_name(18),
-                sanitize_name(os.path.basename(record['path']))
+                sanitize_name(os.path.basename(record['path'])),
             ])
             path_maps['attachment_path'][record['s3_path']] = relative_path
@@ -1038,7 +1038,7 @@ def do_import_realm(import_dir: Path, subdomain: str, processes: int=1) -> Realm
         #
         # Longer-term, the plan is to eliminate pointer as a concept.
         first_unread_message = UserMessage.objects.filter(user_profile=user_profile).extra(
-            where=[UserMessage.where_unread()]
+            where=[UserMessage.where_unread()],
         ).order_by("message_id").first()
         if first_unread_message is not None:
             user_profile.pointer = first_unread_message.message_id

View File

@@ -74,7 +74,7 @@ class Integration:
             if category not in CATEGORIES:
                 raise KeyError(  # nocoverage
                     'INTEGRATIONS: ' + name + ' - category \'' +
-                    category + '\' is not a key in CATEGORIES.'
+                    category + '\' is not a key in CATEGORIES.',
                 )
         self.categories = list(map((lambda c: CATEGORIES[c]), categories))
@@ -172,7 +172,7 @@ class WebhookIntegration(Integration):
             display_name=display_name,
             stream_name=stream_name,
             legacy=legacy,
-            config_options=config_options
+            config_options=config_options,
         )
         if function is None:
@@ -242,7 +242,7 @@ class HubotIntegration(Integration):
             name, name, categories,
             logo=logo, display_name=display_name,
             doc = 'zerver/integrations/hubot_common.md',
-            legacy=legacy
+            legacy=legacy,
         )
 class EmbeddedBotIntegration(Integration):
@@ -273,7 +273,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         'alertmanager',
         ['monitoring'],
         display_name='Prometheus AlertManager',
-        logo='images/integrations/logos/prometheus.svg'
+        logo='images/integrations/logos/prometheus.svg',
     ),
     WebhookIntegration('ansibletower', ['deployment'], display_name='Ansible Tower'),
     WebhookIntegration('appfollow', ['customer-support'], display_name='AppFollow'),
@@ -286,14 +286,14 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         ['version-control'],
         logo='images/integrations/logos/bitbucket.svg',
         display_name='Bitbucket Server',
-        stream_name='bitbucket'
+        stream_name='bitbucket',
     ),
     WebhookIntegration(
         'bitbucket2',
         ['version-control'],
         logo='images/integrations/logos/bitbucket.svg',
         display_name='Bitbucket',
-        stream_name='bitbucket'
+        stream_name='bitbucket',
     ),
     WebhookIntegration(
         'bitbucket',
@@ -301,7 +301,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         display_name='Bitbucket',
         secondary_line_text='(Enterprise)',
         stream_name='commits',
-        legacy=True
+        legacy=True,
     ),
     WebhookIntegration('buildbot', ['continuous-integration'], display_name='Buildbot'),
     WebhookIntegration('circleci', ['continuous-integration'], display_name='CircleCI'),
@@ -315,7 +315,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         ['customer-support'],
         logo='images/integrations/logos/deskcom.png',
         display_name='Desk.com',
-        stream_name='desk'
+        stream_name='desk',
     ),
     WebhookIntegration('dropbox', ['productivity'], display_name='Dropbox'),
     WebhookIntegration('errbit', ['monitoring'], display_name='Errbit'),
@@ -329,7 +329,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         display_name='GitHub',
         logo='images/integrations/logos/github.svg',
         function='zerver.webhooks.github.view.api_github_webhook',
-        stream_name='github'
+        stream_name='github',
     ),
     WebhookIntegration('gitlab', ['version-control'], display_name='GitLab'),
     WebhookIntegration('gocd', ['continuous-integration'], display_name='GoCD'),
@@ -347,7 +347,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         'ifttt',
         ['meta-integration'],
         function='zerver.webhooks.ifttt.view.api_iftt_app_webhook',
-        display_name='IFTTT'
+        display_name='IFTTT',
     ),
     WebhookIntegration('insping', ['monitoring'], display_name='Insping'),
     WebhookIntegration('intercom', ['customer-support'], display_name='Intercom'),
@@ -361,7 +361,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         ['monitoring'],
         display_name='Opbeat',
         stream_name='opbeat',
-        function='zerver.webhooks.opbeat.view.api_opbeat_webhook'
+        function='zerver.webhooks.opbeat.view.api_opbeat_webhook',
     ),
     WebhookIntegration('opsgenie', ['meta-integration', 'monitoring']),
     WebhookIntegration('pagerduty', ['monitoring'], display_name='PagerDuty'),
@@ -394,7 +394,7 @@ WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [
         'yo',
         ['communication'],
         function='zerver.webhooks.yo.view.api_yo_app_webhook',
-        display_name='Yo App'
+        display_name='Yo App',
     ),
     WebhookIntegration('wordpress', ['marketing'], display_name='WordPress'),
     WebhookIntegration('zapier', ['meta-integration']),
@@ -411,7 +411,7 @@ INTEGRATIONS: Dict[str, Integration] = {
         'capistrano',
         ['deployment'],
         display_name='Capistrano',
-        doc='zerver/integrations/capistrano.md'
+        doc='zerver/integrations/capistrano.md',
     ),
     'codebase': Integration('codebase', 'codebase', ['version-control'],
                             doc='zerver/integrations/codebase.md'),
@@ -428,7 +428,7 @@ INTEGRATIONS: Dict[str, Integration] = {
         'google-calendar',
         ['productivity'],
         display_name='Google Calendar',
-        doc='zerver/integrations/google-calendar.md'
+        doc='zerver/integrations/google-calendar.md',
     ),
     'hubot': Integration('hubot', 'hubot', ['meta-integration', 'bots'], doc='zerver/integrations/hubot.md'),
     'irc': Integration('irc', 'irc', ['communication'], display_name='IRC',
@@ -438,7 +438,7 @@ INTEGRATIONS: Dict[str, Integration] = {
         'jenkins',
         ['continuous-integration'],
         secondary_line_text='(or Hudson)',
-        doc='zerver/integrations/jenkins.md'
+        doc='zerver/integrations/jenkins.md',
     ),
     'jira-plugin': Integration(
         'jira-plugin',
@@ -449,7 +449,7 @@ INTEGRATIONS: Dict[str, Integration] = {
         display_name='JIRA',
         doc='zerver/integrations/jira-plugin.md',
         stream_name='jira',
-        legacy=True
+        legacy=True,
     ),
     'matrix': Integration('matrix', 'matrix', ['communication'],
                           doc='zerver/integrations/matrix.md'),
@@ -490,7 +490,7 @@ INTEGRATIONS: Dict[str, Integration] = {
         display_name='Trello',
         doc='zerver/integrations/trello-plugin.md',
         stream_name='trello',
-        legacy=True
+        legacy=True,
     ),
     'twitter': Integration('twitter', 'twitter', ['customer-support', 'marketing'],
                            # _ needed to get around adblock plus

View File

@@ -53,7 +53,7 @@ from zerver.models import (
     Reaction,
     get_usermessage_by_message_id,
     MAX_MESSAGE_LENGTH,
-    MAX_TOPIC_NAME_LENGTH
+    MAX_TOPIC_NAME_LENGTH,
 )
 from typing import Any, Dict, List, Optional, Set, Tuple, Sequence
@@ -236,7 +236,7 @@ class MessageDict:
             new_obj,
             apply_markdown=apply_markdown,
             client_gravatar=client_gravatar,
-            keep_rendered_content=keep_rendered_content
+            keep_rendered_content=keep_rendered_content,
         )
         return new_obj
@@ -532,7 +532,7 @@ class MessageDict:
             (
                 obj['recipient_id'],
                 obj['recipient_type'],
-                obj['recipient_type_id']
+                obj['recipient_type_id'],
             ) for obj in objs
         }
         display_recipients = bulk_fetch_display_recipients(recipient_tuples)
@@ -732,13 +732,13 @@ def do_render_markdown(message: Message,
         sent_by_bot=sent_by_bot,
         translate_emoticons=translate_emoticons,
         mention_data=mention_data,
-        email_gateway=email_gateway
+        email_gateway=email_gateway,
     )
     return rendered_content
 def huddle_users(recipient_id: int) -> str:
     display_recipient: DisplayRecipientT = get_display_recipient_by_id(
-        recipient_id, Recipient.HUDDLE, None
+        recipient_id, Recipient.HUDDLE, None,
     )
     # str is for streams.
@@ -814,7 +814,7 @@ def get_inactive_recipient_ids(user_profile: UserProfile) -> List[int]:
     rows = get_stream_subscriptions_for_user(user_profile).filter(
         active=False,
     ).values(
-        'recipient_id'
+        'recipient_id',
     )
     inactive_recipient_ids = [
         row['recipient_id']
@@ -826,7 +826,7 @@ def get_muted_stream_ids(user_profile: UserProfile) -> List[int]:
         active=True,
         is_muted=True,
     ).values(
-        'recipient__type_id'
+        'recipient__type_id',
     )
     muted_stream_ids = [
         row['recipient__type_id']
@@ -837,9 +837,9 @@ def get_starred_message_ids(user_profile: UserProfile) -> List[int]:
     return list(UserMessage.objects.filter(
         user_profile=user_profile,
     ).extra(
-        where=[UserMessage.where_starred()]
+        where=[UserMessage.where_starred()],
     ).order_by(
-        'message_id'
+        'message_id',
     ).values_list('message_id', flat=True)[0:10000])
 def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
@@ -847,11 +847,11 @@ def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult:
     excluded_recipient_ids = get_inactive_recipient_ids(user_profile)
     user_msgs = UserMessage.objects.filter(
-        user_profile=user_profile
+        user_profile=user_profile,
     ).exclude(
-        message__recipient_id__in=excluded_recipient_ids
+        message__recipient_id__in=excluded_recipient_ids,
     ).extra(
-        where=[UserMessage.where_unread()]
+        where=[UserMessage.where_unread()],
     ).values(
         'message_id',
         'message__sender_id',

View File

@@ -32,7 +32,7 @@ def setup_realm_internal_bots(realm: Realm) -> None:
     bots = UserProfile.objects.filter(
         realm=realm,
         email__in=[bot_info[1] for bot_info in internal_bots],
-        bot_owner__isnull=True
+        bot_owner__isnull=True,
     )
     for bot in bots:
         bot.bot_owner = bot
@@ -147,7 +147,7 @@ def send_initial_realm_messages(realm: Realm) -> None:
     ]
     messages = [internal_prep_stream_message_by_name(
-        realm, welcome_bot, message['stream'], message['topic'], message['content']
+        realm, welcome_bot, message['stream'], message['topic'], message['content'],
     ) for message in welcome_messages]
     message_ids = do_send_messages(messages)

View File

@@ -54,7 +54,7 @@ class GenericOutgoingWebhookService(OutgoingWebhookServiceInterface):
             event['message'],
             apply_markdown=False,
             client_gravatar=False,
-            keep_rendered_content=True
+            keep_rendered_content=True,
         )
         request_data = {"data": event['command'],
@@ -325,7 +325,7 @@ def do_rest_call(base_url: str,
     except requests.exceptions.RequestException as e:
         response_message = ("An exception of type *%s* occurred for message `%s`! "
                             "See the Zulip server logs for more information." % (
-                                type(e).__name__, event["command"],))
+                                type(e).__name__, event["command"]))
         logging.exception(f"Outhook trigger failed:\n {e}")
         fail_with_message(event, response_message)
         notify_bot_owner(event, exception=e)

View File

@@ -28,7 +28,7 @@ def get_status_dicts_for_rows(all_rows: List[Dict[str, Any]],
     # here prevents us from having to assume the caller is playing nice.
     all_rows = sorted(
         all_rows,
-        key = lambda row: (row['user_profile__id'], row['timestamp'])
+        key = lambda row: (row['user_profile__id'], row['timestamp']),
     )
     if slim_presence:
@@ -164,10 +164,10 @@ def get_status_dict_by_realm(realm_id: int, slim_presence: bool = False) -> Dict
     presence_rows = list(query)
     mobile_query = PushDeviceToken.objects.distinct(
-        'user_id'
+        'user_id',
     ).values_list(
         'user_id',
-        flat=True
+        flat=True,
     )
     user_profile_ids = [presence_row['user_profile__id'] for presence_row in presence_rows]
@@ -183,7 +183,7 @@ def get_status_dict_by_realm(realm_id: int, slim_presence: bool = False) -> Dict
     mobile_query = query_for_ids(
         query=mobile_query,
         user_ids=user_profile_ids,
-        field='user_id'
+        field='user_id',
     )
     mobile_user_ids = set(mobile_query)

View File

@@ -50,7 +50,7 @@ class RateLimitedObject(ABC):
             entity=self,
             secs_to_freedom=time,
             remaining=0,
-            over_limit=ratelimited
+            over_limit=ratelimited,
         ))
         # Abort this request if the user is over their rate limits
         if ratelimited:
@@ -95,7 +95,7 @@ class RateLimitedObject(ABC):
         for "no rules".
         """
         rules_list = self.rules()
-        return rules_list or [(1, 9999), ]
+        return rules_list or [(1, 9999)]
     @abstractmethod
     def key(self) -> str:

View File

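The rate-limiter file above also shows the rule's flip side: `[(1, 9999), ]` became `[(1, 9999)]`. When the closing bracket stays on the same line as the last element, the C819 check prohibits the trailing comma rather than requiring it. A minimal sketch of both forms (hypothetical variable, not from the diff):

    rules = [(1, 9999)]  # single-line literal: no trailing comma (C819)
    rules = [
        (1, 9999),       # closing bracket on its own line: comma required (C812)
    ]
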
@@ -42,7 +42,7 @@ def put_dict_in_redis(redis_client: redis.StrictRedis, key_format: str,
     return key
-def get_dict_from_redis(redis_client: redis.StrictRedis, key_format: str, key: str
+def get_dict_from_redis(redis_client: redis.StrictRedis, key_format: str, key: str,
                         ) -> Optional[Dict[str, Any]]:
     # This function requires inputting the intended key_format to validate
     # that the key fits it, as an additionally security measure. This protects

View File

@@ -27,19 +27,19 @@ models_with_message_key: List[Dict[str, Any]] = [
         'class': Reaction,
         'archive_class': ArchivedReaction,
         'table_name': 'zerver_reaction',
-        'archive_table_name': 'zerver_archivedreaction'
+        'archive_table_name': 'zerver_archivedreaction',
     },
     {
         'class': SubMessage,
         'archive_class': ArchivedSubMessage,
         'table_name': 'zerver_submessage',
-        'archive_table_name': 'zerver_archivedsubmessage'
+        'archive_table_name': 'zerver_archivedsubmessage',
     },
     {
         'class': UserMessage,
         'archive_class': ArchivedUserMessage,
         'table_name': 'zerver_usermessage',
-        'archive_table_name': 'zerver_archivedusermessage'
+        'archive_table_name': 'zerver_archivedusermessage',
     },
 ]
@@ -68,7 +68,7 @@ def move_rows(
     sql_args.update(kwargs)
     with connection.cursor() as cursor:
         cursor.execute(
-            raw_query.format(**sql_args)
+            raw_query.format(**sql_args),
         )
         if returning_id:
             return [id for (id,) in cursor.fetchall()]  # return list of row ids
@@ -153,7 +153,7 @@ def move_expired_messages_to_archive_by_recipient(recipient: Recipient,
     )
 def move_expired_personal_and_huddle_messages_to_archive(realm: Realm,
-                                                         chunk_size: int=MESSAGE_BATCH_SIZE
+                                                         chunk_size: int=MESSAGE_BATCH_SIZE,
                                                          ) -> int:
     # This function will archive appropriate messages and their related objects.
     cross_realm_bot_ids = [
@@ -324,7 +324,7 @@ def archive_stream_messages(realm: Realm, streams: List[Stream], chunk_size: int
     message_count = 0
     for recipient in recipients:
         message_count += archive_messages_by_recipient(
-            recipient, retention_policy_dict[recipient.type_id], realm, chunk_size
+            recipient, retention_policy_dict[recipient.type_id], realm, chunk_size,
        )
     logger.info("Done. Archived %s messages.", message_count)
@@ -521,7 +521,7 @@ def restore_all_data_from_archive(restore_manual_transactions: bool=True) -> Non
     if restore_manual_transactions:
         restore_data_from_archive_by_transactions(
-            ArchiveTransaction.objects.exclude(restored=True).filter(type=ArchiveTransaction.MANUAL)
+            ArchiveTransaction.objects.exclude(restored=True).filter(type=ArchiveTransaction.MANUAL),
         )
 def clean_archived_data() -> None:

View File

@@ -59,7 +59,7 @@ class FromAddress:
 def build_email(template_prefix: str, to_user_ids: Optional[List[int]]=None,
                 to_emails: Optional[List[str]]=None, from_name: Optional[str]=None,
                 from_address: Optional[str]=None, reply_to_email: Optional[str]=None,
-                language: Optional[str]=None, context: Optional[Dict[str, Any]]=None
+                language: Optional[str]=None, context: Optional[Dict[str, Any]]=None,
                 ) -> EmailMultiAlternatives:
     # Callers should pass exactly one of to_user_id and to_email.
     assert (to_user_ids is None) ^ (to_emails is None)

View File

@@ -228,7 +228,7 @@ def do_soft_deactivate_users(users: List[UserProfile]) -> List[UserProfile]:
             realm=user.realm,
             modified_user=user,
             event_type=RealmAuditLog.USER_SOFT_DEACTIVATED,
-            event_time=event_time
+            event_time=event_time,
         )
         realm_logs.append(log)
         users_soft_deactivated.append(user)
@@ -265,7 +265,7 @@ def reactivate_user_if_soft_deactivated(user_profile: UserProfile) -> Union[User
             realm=user_profile.realm,
             modified_user=user_profile,
             event_type=RealmAuditLog.USER_SOFT_ACTIVATED,
-            event_time=timezone_now()
+            event_time=timezone_now(),
         )
         logger.info('Soft Reactivated user %s', user_profile.id)
     return user_profile
@@ -309,6 +309,6 @@ def get_soft_deactivated_users_for_catch_up(filter_kwargs: Any) -> List[UserProf
         long_term_idle=True,
         is_active=True,
         is_bot=False,
-        **filter_kwargs
+        **filter_kwargs,
     )
     return users_to_catch_up

View File

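One subtlety in the file above: the last hunk adds a trailing comma after a `**filter_kwargs` unpacking inside a call. A comma in that position is a syntax error on Python 2 but valid on Python 3.5 and newer, which is presumably why flake8-comma reports it under a separate code (C815) from the generic C812. A self-contained sketch of the pattern, with hypothetical names:

    def get_users(**filter_kwargs: object) -> None:
        ...

    get_users(
        long_term_idle=True,
        is_bot=False,
        **dict(realm_id=1),  # trailing comma after ** unpacking needs Python 3.5+
    )
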
@@ -21,7 +21,7 @@ def get_active_subscriptions_for_stream_ids(stream_ids: List[int]) -> QuerySet:
     return Subscription.objects.filter(
         recipient__type=Recipient.STREAM,
         recipient__type_id__in=stream_ids,
-        active=True
+        active=True,
     )
 def get_subscribed_stream_ids_for_user(user_profile: UserProfile) -> QuerySet:

View File

@@ -18,7 +18,7 @@ from django.db.models.query import QuerySet
 def get_default_value_for_history_public_to_subscribers(
         realm: Realm,
         invite_only: bool,
-        history_public_to_subscribers: Optional[bool]
+        history_public_to_subscribers: Optional[bool],
 ) -> bool:
     if invite_only:
         if history_public_to_subscribers is None:
@@ -63,8 +63,8 @@ def create_stream_if_needed(realm: Realm,
             invite_only=invite_only,
             stream_post_policy=stream_post_policy,
             history_public_to_subscribers=history_public_to_subscribers,
-            is_in_zephyr_realm=realm.is_zephyr_mirror_realm
-        )
+            is_in_zephyr_realm=realm.is_zephyr_mirror_realm,
+        ),
     )
     if created:
@@ -95,7 +95,7 @@ def create_streams_if_needed(realm: Realm,
             invite_only=stream_dict.get("invite_only", False),
             stream_post_policy=stream_dict.get("stream_post_policy", Stream.STREAM_POST_POLICY_EVERYONE),
             history_public_to_subscribers=stream_dict.get("history_public_to_subscribers"),
-            stream_description=stream_dict.get("description", "")
+            stream_description=stream_dict.get("description", ""),
         )
     if created:

View File

@@ -34,7 +34,7 @@ from zerver.lib.actions import (
 )
 from zerver.lib.streams import (
     create_stream_if_needed,
-    get_default_value_for_history_public_to_subscribers
+    get_default_value_for_history_public_to_subscribers,
 )
 from zerver.lib.stream_subscription import (
     get_stream_subscriptions_for_user,
@@ -61,7 +61,7 @@ from zerver.models import (
     Stream,
     Subscription,
     UserProfile,
-    get_realm_stream
+    get_realm_stream,
 )
 from zilencer.models import get_remote_server_by_uuid
 from zerver.decorator import do_two_factor_login
@@ -251,7 +251,7 @@ class ZulipTestCase(TestCase):
         webhook_bot='webhook-bot@zulip.com',
         welcome_bot='welcome-bot@zulip.com',
         outgoing_webhook_bot='outgoing-webhook@zulip.com',
-        default_bot='default-bot@zulip.com'
+        default_bot='default-bot@zulip.com',
     )
     mit_user_map = dict(
@@ -262,7 +262,7 @@ class ZulipTestCase(TestCase):
     lear_user_map = dict(
         cordelia="cordelia@zulip.com",
-        king="king@lear.org"
+        king="king@lear.org",
     )
     # Non-registered test users
@@ -367,7 +367,7 @@ class ZulipTestCase(TestCase):
                 username=email,
                 password=password,
                 realm=realm,
-            )
+            ),
         )
     def assert_login_failure(self,
@@ -379,7 +379,7 @@ class ZulipTestCase(TestCase):
                 username=email,
                 password=password,
                 realm=realm,
-            )
+            ),
         )
     def login_user(self, user_profile: UserProfile) -> None:
@@ -530,7 +530,7 @@ class ZulipTestCase(TestCase):
         return check_send_message(
             from_user, sending_client, 'private', recipient_list, None,
-            content
+            content,
         )
     def send_huddle_message(self,
@@ -545,7 +545,7 @@ class ZulipTestCase(TestCase):
         return check_send_message(
             from_user, sending_client, 'private', to_user_ids, None,
-            content
+            content,
         )
     def send_stream_message(self, sender: UserProfile, stream_name: str, content: str="test content",
@@ -663,14 +663,14 @@ class ZulipTestCase(TestCase):
     def webhook_fixture_data(self, type: str, action: str, file_type: str='json') -> str:
         fn = os.path.join(
             os.path.dirname(__file__),
-            f"../webhooks/{type}/fixtures/{action}.{file_type}"
+            f"../webhooks/{type}/fixtures/{action}.{file_type}",
         )
         return open(fn).read()
     def fixture_file_name(self, file_name: str, type: str='') -> str:
         return os.path.join(
             os.path.dirname(__file__),
-            f"../tests/fixtures/{type}/{file_name}"
+            f"../tests/fixtures/{type}/{file_name}",
         )
     def fixture_data(self, file_name: str, type: str='') -> str:
@@ -840,7 +840,7 @@ class ZulipTestCase(TestCase):
         for dn, attrs in directory.items():
             if 'uid' in attrs:
                 # Generate a password for the ldap account:
-                attrs['userPassword'] = [self.ldap_password(attrs['uid'][0]), ]
+                attrs['userPassword'] = [self.ldap_password(attrs['uid'][0])]
         # Load binary attributes. If in "directory", an attribute as its value
         # has a string starting with "file:", the rest of the string is assumed
@@ -849,7 +849,7 @@ class ZulipTestCase(TestCase):
             for attr, value in attrs.items():
                 if isinstance(value, str) and value.startswith("file:"):
                     with open(value[5:], 'rb') as f:
-                        attrs[attr] = [f.read(), ]
+                        attrs[attr] = [f.read()]
         ldap_patcher = mock.patch('django_auth_ldap.config.ldap.initialize')
         self.mock_initialize = ldap_patcher.start()
@@ -873,7 +873,7 @@ class ZulipTestCase(TestCase):
         else:
             data = attr_value
-        self.mock_ldap.directory[dn][attr_name] = [data, ]
+        self.mock_ldap.directory[dn][attr_name] = [data]
     def ldap_username(self, username: str) -> str:
         """

View File

@@ -69,7 +69,7 @@ class Database:
         self.migration_status_file = 'migration_status_' + platform
         self.migration_status_path = os.path.join(
             UUID_VAR_DIR,
-            self.migration_status_file
+            self.migration_status_file,
         )
         self.migration_digest_file = "migrations_hash_" + database_name
@@ -290,7 +290,7 @@ def get_migration_status(**options: Any) -> str:
     app_label = options['app_label'] if options.get('app_label') else None
     db = options.get('database', DEFAULT_DB_ALIAS)
     out = StringIO()
-    command_args = ['--list', ]
+    command_args = ['--list']
     if app_label:
         command_args.append(app_label)

View File

@@ -1,7 +1,7 @@
 from contextlib import contextmanager
 from typing import (
     Any, Callable, Dict, Generator, Iterable, Iterator, List, Mapping,
-    Optional, Tuple, Union, IO, TypeVar, TYPE_CHECKING
+    Optional, Tuple, Union, IO, TypeVar, TYPE_CHECKING,
 )
 from django.urls import URLResolver
@@ -555,7 +555,7 @@ def use_db_models(method: Callable[..., None]) -> Callable[..., None]:  # nocove
         UserHotspot=UserHotspot,
         UserMessage=UserMessage,
         UserPresence=UserPresence,
-        UserProfile=UserProfile
+        UserProfile=UserProfile,
     )
     zerver_test_helpers_patch = mock.patch.multiple(
         'zerver.lib.test_helpers',

View File

@@ -21,7 +21,7 @@ from zerver.lib.rate_limiter import bounce_redis_key_prefix_for_testing
 from zerver.lib.sqlalchemy_utils import get_sqlalchemy_connection
 from zerver.lib.test_helpers import (
     write_instrumentation_reports,
-    append_instrumentation_data
+    append_instrumentation_data,
 )
 import os

View File

@@ -12,7 +12,7 @@ ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__f
 sys.path.append(ZULIP_PATH)
 from zthumbor.loaders.helpers import (
-    THUMBOR_S3_TYPE, THUMBOR_LOCAL_FILE_TYPE, THUMBOR_EXTERNAL_TYPE
+    THUMBOR_S3_TYPE, THUMBOR_LOCAL_FILE_TYPE, THUMBOR_EXTERNAL_TYPE,
 )
 from zerver.lib.camo import get_camo_url
@@ -63,7 +63,7 @@ def generate_thumbnail_url(path: str,
         height=height,
         smart=smart_crop_enabled,
         filters=apply_filters,
-        image_url=image_url
+        image_url=image_url,
     )
     if settings.THUMBOR_URL == 'http://127.0.0.1:9995':

View File

@@ -165,7 +165,7 @@ def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dic
     for canonical_topic, (max_message_id, topic_name) in canonical_topic_names.items():
         history.append(dict(
             name=topic_name,
-            max_id=max_message_id)
+            max_id=max_message_id),
         )
     return sorted(history, key=lambda x: -x['max_id'])

View File

@@ -8,14 +8,14 @@ from zerver.lib.timestamp import datetime_to_timestamp
 from zerver.models import (
     get_stream,
     MutedTopic,
-    UserProfile
+    UserProfile,
 )
 from sqlalchemy.sql import (
     and_,
     column,
     not_,
     or_,
-    Selectable
+    Selectable,
 )
 from django.utils.timezone import now as timezone_now
@@ -26,7 +26,7 @@ def get_topic_mutes(user_profile: UserProfile) -> List[List[Union[str, float]]]:
     ).values(
         'stream__name',
         'topic_name',
-        'date_muted'
+        'date_muted',
     )
     return [
         [row['stream__name'], row['topic_name'], datetime_to_timestamp(row['date_muted'])]
@@ -73,7 +73,7 @@ def remove_topic_mute(user_profile: UserProfile, stream_id: int, topic_name: str
     row = MutedTopic.objects.get(
         user_profile=user_profile,
         stream_id=stream_id,
-        topic_name__iexact=topic_name
+        topic_name__iexact=topic_name,
     )
     row.delete()
@@ -99,7 +99,7 @@ def exclude_topic_mutes(conditions: List[Selectable],
     query = query.values(
         'recipient_id',
-        'topic_name'
+        'topic_name',
     )
     rows = list(query)
@@ -121,7 +121,7 @@ def build_topic_mute_checker(user_profile: UserProfile) -> Callable[[int, str],
         user_profile=user_profile,
     ).values(
         'recipient_id',
-        'topic_name'
+        'topic_name',
     )
     rows = list(rows)

View File

@@ -69,7 +69,7 @@ def transfer_emoji_to_s3(processes: int) -> None:
         return 0  # nocoverage
     emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
         realm_id=realm_emoji.realm.id,
-        emoji_file_name=realm_emoji.file_name
+        emoji_file_name=realm_emoji.file_name,
     )
     emoji_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", emoji_path) + ".original"
     try:

View File

@@ -292,7 +292,7 @@ def upload_image_to_s3(
     key = bucket.Object(file_name)
     metadata = {
         "user_profile_id": str(user_profile.id),
-        "realm_id": str(user_profile.realm_id)
+        "realm_id": str(user_profile.realm_id),
     }
     content_disposition = ''
@@ -378,7 +378,7 @@ class S3UploadBackend(ZulipUploadBackend):
         s3_file_name = "/".join([
             str(target_realm.id),
             random_name(18),
-            sanitize_name(uploaded_file_name)
+            sanitize_name(uploaded_file_name),
         ])
         url = f"/user_uploads/{s3_file_name}"
@@ -387,7 +387,7 @@ class S3UploadBackend(ZulipUploadBackend):
             s3_file_name,
            content_type,
             user_profile,
-            file_data
+            file_data,
         )
         create_attachment(uploaded_file_name, s3_file_name, user_profile, uploaded_file_size)
@@ -415,7 +415,7 @@ class S3UploadBackend(ZulipUploadBackend):
             s3_file_name + "-medium.png",
             "image/png",
             target_user_profile,
-            resized_medium
+            resized_medium,
         )
         resized_data = resize_avatar(image_data)
@@ -565,7 +565,7 @@ class S3UploadBackend(ZulipUploadBackend):
             s3_file_name + "-medium.png",
             "image/png",
             user_profile,
-            resized_medium
+            resized_medium,
         )
     def ensure_basic_avatar_image(self, user_profile: UserProfile) -> None:  # nocoverage
@@ -585,7 +585,7 @@ class S3UploadBackend(ZulipUploadBackend):
             s3_file_name,
             "image/png",
             user_profile,
-            resized_avatar
+            resized_avatar,
         )
     def upload_emoji_image(self, emoji_file: File, emoji_file_name: str,
@@ -594,7 +594,7 @@ class S3UploadBackend(ZulipUploadBackend):
         bucket_name = settings.S3_AVATAR_BUCKET
         emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
             realm_id=user_profile.realm_id,
-            emoji_file_name=emoji_file_name
+            emoji_file_name=emoji_file_name,
         )
         image_data = emoji_file.read()
@@ -640,9 +640,9 @@ class S3UploadBackend(ZulipUploadBackend):
             'get_object',
             Params={
                 'Bucket': bucket.name,
-                'Key': key.key
+                'Key': key.key,
             },
-            ExpiresIn=0
+            ExpiresIn=0,
         )
         return public_url
@@ -710,7 +710,7 @@ class LocalUploadBackend(ZulipUploadBackend):
             str(user_profile.realm_id),
             format(random.randint(0, 255), 'x'),
             random_name(18),
-            sanitize_name(uploaded_file_name)
+            sanitize_name(uploaded_file_name),
         ])
         write_local_file('files', path, file_data)
@@ -832,7 +832,7 @@ class LocalUploadBackend(ZulipUploadBackend):
                            user_profile: UserProfile) -> None:
         emoji_path = RealmEmoji.PATH_ID_TEMPLATE.format(
             realm_id= user_profile.realm_id,
-            emoji_file_name=emoji_file_name
+            emoji_file_name=emoji_file_name,
         )
         image_data = emoji_file.read()

View File

@@ -13,7 +13,7 @@ def get_user_info_dict(realm_id: int) -> Dict[int, Dict[str, Any]]:
         user_profile__is_active=True,
     ).exclude(
         Q(status=UserStatus.NORMAL) &
-        Q(status_text='')
+        Q(status_text=''),
     ).values(
         'user_profile_id',
         'status',

View File

@@ -181,7 +181,7 @@ def user_ids_to_users(user_ids: List[int], realm: Realm) -> List[UserProfile]:
     user_profiles_by_id: Dict[int, UserProfile] = generic_bulk_cached_fetch(
         cache_key_function=user_profile_by_id_cache_key,
         query_function=fetch_users_by_id,
-        object_ids=user_ids
+        object_ids=user_ids,
     )
     found_user_ids = user_profiles_by_id.keys()
@@ -307,7 +307,7 @@ def format_user_row(realm: Realm, acting_user: UserProfile, row: Dict[str, Any],
         avatar_source=row['avatar_source'],
         avatar_version=row['avatar_version'],
         medium=False,
-        client_gravatar=client_gravatar,)
+        client_gravatar=client_gravatar)
     is_admin = is_administrator_role(row['role'])
     is_owner = row['role'] == UserProfile.ROLE_REALM_OWNER
@@ -398,11 +398,11 @@ def get_custom_profile_field_values(custom_profile_field_values:
         if profile_field.field.is_renderable():
             profiles_by_user_id[user_id][profile_field.field_id] = {
                 "value": profile_field.value,
-                "rendered_value": profile_field.rendered_value
+                "rendered_value": profile_field.rendered_value,
             }
         else:
             profiles_by_user_id[user_id][profile_field.field_id] = {
-                "value": profile_field.value
+                "value": profile_field.value,
             }
     return profiles_by_user_id
@@ -440,6 +440,6 @@ def get_raw_user_data(realm: Realm, acting_user: UserProfile, client_gravatar: b
             acting_user = acting_user,
             row=row,
             client_gravatar= client_gravatar,
-            custom_profile_field_data = custom_profile_field_data
+            custom_profile_field_data = custom_profile_field_data,
         )
     return result

View File

@@ -36,7 +36,7 @@ def notify_bot_owner_about_invalid_json(user_profile: UserProfile,
                                         webhook_client_name: str) -> None:
     send_rate_limited_pm_notification_to_bot_owner(
         user_profile, user_profile.realm,
-        INVALID_JSON_MESSAGE.format(webhook_name=webhook_client_name).strip()
+        INVALID_JSON_MESSAGE.format(webhook_name=webhook_client_name).strip(),
     )
 class MissingHTTPEventHeader(JsonableError):
@@ -55,7 +55,7 @@ def check_send_webhook_message(
         request: HttpRequest, user_profile: UserProfile,
         topic: str, body: str, stream: Optional[str]=REQ(default=None),
         user_specified_topic: Optional[str]=REQ("topic", default=None),
-        unquote_url_parameters: Optional[bool]=False
+        unquote_url_parameters: Optional[bool]=False,
 ) -> None:
     if stream is None:

View File

@@ -62,14 +62,14 @@ def get_push_commits_event_message(user_name: str, compare_url: Optional[str],
         return PUSH_DELETE_BRANCH_MESSAGE_TEMPLATE.format(
             user_name=user_name,
             compare_url=compare_url,
-            branch_name=branch_name
+            branch_name=branch_name,
         )
     if not commits_data and not deleted:
         return PUSH_LOCAL_BRANCH_WITHOUT_COMMITS_MESSAGE_TEMPLATE.format(
             user_name=user_name,
             compare_url=compare_url,
-            branch_name=branch_name
+            branch_name=branch_name,
         )
     pushed_message_template = PUSH_PUSHED_TEXT_WITH_URL if compare_url else PUSH_PUSHED_TEXT_WITHOUT_URL
@@ -109,7 +109,7 @@ def get_force_push_commits_event_message(user_name: str, url: str, branch_name:
         user_name=user_name,
         url=url,
         branch_name=branch_name,
-        head=head
+        head=head,
     )
 def get_create_branch_event_message(user_name: str, url: Optional[str], branch_name: str) -> str:
@@ -172,7 +172,7 @@ def get_pull_request_event_message(user_name: str, action: str, url: str, number
     if target_branch and base_branch:
         branch_info = PULL_REQUEST_BRANCH_INFO_TEMPLATE.format(
             target=target_branch,
-            base=base_branch
+            base=base_branch,
         )
         main_message = f"{main_message} {branch_info}"
@@ -228,7 +228,7 @@ def get_push_tag_event_message(user_name: str,
     message = PUSH_TAGS_MESSAGE_TEMPLATE.format(
         user_name=user_name,
         action=action,
-        tag=tag_part
+        tag=tag_part,
     )
     if tag_name[-1] not in string.punctuation:
@@ -245,13 +245,13 @@ def get_commits_comment_action_message(user_name: str,
         user_name=user_name,
         action=action,
         sha=get_short_sha(sha),
-        url=commit_url
+        url=commit_url,
     )
     punctuation = ':' if message else '.'
     content = f'{content}{punctuation}'
     if message:
         content += CONTENT_MESSAGE_TEMPLATE.format(
-            message=message
+            message=message,
         )
     return content
@@ -262,16 +262,16 @@ def get_commits_content(commits_data: List[Dict[str, Any]], is_truncated: Option
         commits_content += COMMIT_ROW_TEMPLATE.format(
             commit_short_sha=get_short_sha(commit['sha']),
             commit_url=commit.get('url'),
-            commit_msg=commit['message'].partition('\n')[0]
+            commit_msg=commit['message'].partition('\n')[0],
         )
     if len(commits_data) > COMMITS_LIMIT:
         commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
-            commits_number=len(commits_data) - COMMITS_LIMIT
+            commits_number=len(commits_data) - COMMITS_LIMIT,
         )
     elif is_truncated:
         commits_content += COMMITS_MORE_THAN_LIMIT_TEMPLATE.format(
-            commits_number=''
+            commits_number='',
         ).replace(' ', ' ')
     return commits_content.rstrip()
@@ -282,7 +282,7 @@ def get_release_event_message(user_name: str, action: str,
         action=action,
         tagname=tagname,
         release_name=release_name,
-        url=url
+        url=url,
     )
     return content
@@ -298,7 +298,7 @@ def get_all_committers(commits_data: List[Dict[str, Any]]) -> List[Tuple[str, in
     # Sort by commit count, breaking ties alphabetically.
     committers_items: List[Tuple[str, int]] = sorted(
-        list(committers.items()), key=lambda item: (-item[1], item[0])
+        list(committers.items()), key=lambda item: (-item[1], item[0]),
     )
     committers_values: List[int] = [c_i[1] for c_i in committers_items]
Some files were not shown because too many files have changed in this diff.