ruff: Fix UP006 Use `list` instead of `List` for type annotation.
Signed-off-by: Anders Kaseorg <anders@zulip.com>

commit e08a24e47f
parent c2214b3904
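
For readers unfamiliar with the rule: UP006 ("non-pep585-annotation") is one of ruff's pyupgrade-derived checks. It rewrites the capitalized typing aliases (List, Dict, Tuple, Type, Set) to the builtin generics standardized by PEP 585, which Python accepts in annotations from 3.9 onward. The change is mechanical and behavior-preserving, and is typically applied with a command like `ruff check --select UP006 --fix`. A minimal before/after sketch of the transformation (an illustrative example, not code from this commit):

# Before: container aliases imported from typing.
from typing import Dict, List, Tuple

def tally(rows: List[Tuple[str, int]]) -> Dict[str, int]:
    totals: Dict[str, int] = {}
    for label, value in rows:
        totals[label] = totals.get(label, 0) + value
    return totals

# After UP006: builtin generics, with the typing import pruned to match.
def tally(rows: list[tuple[str, int]]) -> dict[str, int]:
    totals: dict[str, int] = {}
    for label, value in rows:
        totals[label] = totals.get(label, 0) + value
    return totals

Note that UP006 leaves Optional and Union untouched, which is why the hunks below still import them from typing; rewriting those to the `X | Y` syntax is a separate rule (UP007) that requires Python 3.10 or a `__future__` import.
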
@@ -2,7 +2,7 @@ import logging
 import time
 from collections import OrderedDict, defaultdict
 from datetime import datetime, timedelta
-from typing import Callable, Dict, List, Optional, Sequence, Tuple, Type, Union
+from typing import Callable, Optional, Sequence, Union

 from django.conf import settings
 from django.db import connection, models
@@ -82,7 +82,7 @@ class CountStat:


 class LoggingCountStat(CountStat):
-    def __init__(self, property: str, output_table: Type[BaseCount], frequency: str) -> None:
+    def __init__(self, property: str, output_table: type[BaseCount], frequency: str) -> None:
         CountStat.__init__(self, property, DataCollector(output_table, None), frequency)


@@ -102,7 +102,7 @@ class DependentCountStat(CountStat):
 class DataCollector:
     def __init__(
         self,
-        output_table: Type[BaseCount],
+        output_table: type[BaseCount],
         pull_function: Optional[Callable[[str, datetime, datetime, Optional[Realm]], int]],
     ) -> None:
         self.output_table = output_table
@@ -311,8 +311,8 @@ def do_increment_logging_stat(
         return

     table = stat.data_collector.output_table
-    id_args: Dict[str, Union[int, None]] = {}
-    conflict_args: List[str] = []
+    id_args: dict[str, Union[int, None]] = {}
+    conflict_args: list[str] = []
     if table == RealmCount:
         assert isinstance(model_object_for_bucket, Realm)
         id_args = {"realm_id": model_object_for_bucket.id}
@@ -425,7 +425,7 @@ def do_drop_single_stat(property: str) -> None:

 ## DataCollector-level operations ##

-QueryFn: TypeAlias = Callable[[Dict[str, Composable]], Composable]
+QueryFn: TypeAlias = Callable[[dict[str, Composable]], Composable]


 def do_pull_by_sql_query(
@@ -433,7 +433,7 @@ def do_pull_by_sql_query(
     start_time: datetime,
     end_time: datetime,
     query: QueryFn,
-    group_by: Optional[Tuple[Type[models.Model], str]],
+    group_by: Optional[tuple[type[models.Model], str]],
 ) -> int:
     if group_by is None:
         subgroup: Composable = SQL("NULL")
@@ -467,9 +467,9 @@ def do_pull_by_sql_query(


 def sql_data_collector(
-    output_table: Type[BaseCount],
+    output_table: type[BaseCount],
     query: QueryFn,
-    group_by: Optional[Tuple[Type[models.Model], str]],
+    group_by: Optional[tuple[type[models.Model], str]],
 ) -> DataCollector:
     def pull_function(
         property: str, start_time: datetime, end_time: datetime, realm: Optional[Realm] = None
@@ -533,7 +533,7 @@ def do_pull_minutes_active(
         .values_list("user_profile_id", "user_profile__realm_id", "start", "end")
     )

-    seconds_active: Dict[Tuple[int, int], float] = defaultdict(float)
+    seconds_active: dict[tuple[int, int], float] = defaultdict(float)
     for user_id, realm_id, interval_start, interval_end in user_activity_intervals:
         if realm is None or realm.id == realm_id:
             start = max(start_time, interval_start)
@@ -817,7 +817,7 @@ count_stream_by_realm_query = lambda kwargs: SQL(
 ).format(**kwargs)


-def get_count_stats(realm: Optional[Realm] = None) -> Dict[str, CountStat]:
+def get_count_stats(realm: Optional[Realm] = None) -> dict[str, CountStat]:
     ## CountStat declarations ##

     count_stats_ = [
@@ -1,6 +1,5 @@
 from math import sqrt
 from random import Random
-from typing import List

 from analytics.lib.counts import CountStat

@@ -16,7 +15,7 @@ def generate_time_series_data(
     frequency: str = CountStat.DAY,
     partial_sum: bool = False,
     random_seed: int = 26,
-) -> List[int]:
+) -> list[int]:
     """
     Generate semi-realistic looking time series data for testing analytics graphs.

@@ -1,5 +1,5 @@
 from datetime import datetime, timedelta
-from typing import List, Optional
+from typing import Optional

 from analytics.lib.counts import CountStat
 from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
@@ -11,7 +11,7 @@ from zerver.lib.timestamp import floor_to_day, floor_to_hour, verify_UTC
 # and time_range(Sep 20, Sep 22, day, 5) returns [Sep 18, Sep 19, Sep 20, Sep 21, Sep 22]
 def time_range(
     start: datetime, end: datetime, frequency: str, min_length: Optional[int]
-) -> List[datetime]:
+) -> list[datetime]:
     verify_UTC(start)
     verify_UTC(end)
     if frequency == CountStat.HOUR:
@@ -1,5 +1,5 @@
 from datetime import timedelta
-from typing import Any, Dict, List, Mapping, Type, Union
+from typing import Any, Mapping, Union

 from django.core.files.uploadedfile import UploadedFile
 from django.utils.timezone import now as timezone_now
@@ -53,7 +53,7 @@ class Command(ZulipBaseCommand):
         spikiness: float,
         holiday_rate: float = 0,
         partial_sum: bool = False,
-    ) -> List[int]:
+    ) -> list[int]:
         self.random_seed += 1
         return generate_time_series_data(
             days=self.DAYS_OF_DATA,
@@ -147,18 +147,18 @@ class Command(ZulipBaseCommand):
         with open(IMAGE_FILE_PATH, "rb") as fp:
             upload_message_attachment_from_request(UploadedFile(fp), shylock)

-        FixtureData: TypeAlias = Mapping[Union[str, int, None], List[int]]
+        FixtureData: TypeAlias = Mapping[Union[str, int, None], list[int]]

         def insert_fixture_data(
             stat: CountStat,
             fixture_data: FixtureData,
-            table: Type[BaseCount],
+            table: type[BaseCount],
         ) -> None:
             end_times = time_range(
                 last_end_time, last_end_time, stat.frequency, len(next(iter(fixture_data.values())))
             )
             if table == InstallationCount:
-                id_args: Dict[str, Any] = {}
+                id_args: dict[str, Any] = {}
             if table == RealmCount:
                 id_args = {"realm": realm}
             if table == UserCount:
@@ -330,7 +330,7 @@ class Command(ZulipBaseCommand):
             "true": self.generate_fixture_data(stat, 20, 2, 3, 0.2, 3),
         }
         insert_fixture_data(stat, realm_data, RealmCount)
-        stream_data: Mapping[Union[int, str, None], List[int]] = {
+        stream_data: Mapping[Union[int, str, None], list[int]] = {
             "false": self.generate_fixture_data(stat, 10, 7, 5, 0.6, 4),
             "true": self.generate_fixture_data(stat, 5, 3, 2, 0.4, 2),
         }
@@ -2,7 +2,7 @@ import hashlib
 import time
 from argparse import ArgumentParser
 from datetime import timezone
-from typing import Any, Dict
+from typing import Any

 from django.conf import settings
 from django.utils.dateparse import parse_datetime
@@ -43,7 +43,7 @@ class Command(ZulipBaseCommand):
     def handle(self, *args: Any, **options: Any) -> None:
         self.run_update_analytics_counts(options)

-    def run_update_analytics_counts(self, options: Dict[str, Any]) -> None:
+    def run_update_analytics_counts(self, options: dict[str, Any]) -> None:
         # installation_epoch relies on there being at least one realm; we
         # shouldn't run the analytics code if that condition isn't satisfied
         if not Realm.objects.exists():
@@ -1,6 +1,6 @@
 from contextlib import AbstractContextManager, ExitStack, contextmanager
 from datetime import datetime, timedelta, timezone
-from typing import Any, Dict, Iterator, List, Optional, Tuple, Type
+from typing import Any, Iterator, Optional
 from unittest import mock

 import time_machine
@@ -132,7 +132,7 @@ class AnalyticsTestCase(ZulipTestCase):
             kwargs[key] = kwargs.get(key, value)
         kwargs["delivery_email"] = kwargs["email"]
         with time_machine.travel(kwargs["date_joined"], tick=False):
-            pass_kwargs: Dict[str, Any] = {}
+            pass_kwargs: dict[str, Any] = {}
             if kwargs["is_bot"]:
                 pass_kwargs["bot_type"] = UserProfile.DEFAULT_BOT
                 pass_kwargs["bot_owner"] = None
@@ -158,7 +158,7 @@ class AnalyticsTestCase(ZulipTestCase):
         )
         return user

-    def create_stream_with_recipient(self, **kwargs: Any) -> Tuple[Stream, Recipient]:
+    def create_stream_with_recipient(self, **kwargs: Any) -> tuple[Stream, Recipient]:
         self.name_counter += 1
         defaults = {
             "name": f"stream name {self.name_counter}",
@@ -174,7 +174,7 @@ class AnalyticsTestCase(ZulipTestCase):
         stream.save(update_fields=["recipient"])
         return stream, recipient

-    def create_huddle_with_recipient(self, **kwargs: Any) -> Tuple[DirectMessageGroup, Recipient]:
+    def create_huddle_with_recipient(self, **kwargs: Any) -> tuple[DirectMessageGroup, Recipient]:
         self.name_counter += 1
         defaults = {"huddle_hash": f"hash{self.name_counter}"}
         for key, value in defaults.items():
@@ -224,7 +224,7 @@ class AnalyticsTestCase(ZulipTestCase):
     # kwargs should only ever be a UserProfile or Stream.
     def assert_table_count(
         self,
-        table: Type[BaseCount],
+        table: type[BaseCount],
         value: int,
         property: Optional[str] = None,
         subgroup: Optional[str] = None,
@@ -246,7 +246,7 @@ class AnalyticsTestCase(ZulipTestCase):
         self.assertEqual(queryset.values_list("value", flat=True)[0], value)

     def assertTableState(
-        self, table: Type[BaseCount], arg_keys: List[str], arg_values: List[List[object]]
+        self, table: type[BaseCount], arg_keys: list[str], arg_values: list[list[object]]
     ) -> None:
         """Assert that the state of a *Count table is what it should be.

@@ -276,7 +276,7 @@ class AnalyticsTestCase(ZulipTestCase):
             "value": 1,
         }
         for values in arg_values:
-            kwargs: Dict[str, Any] = {}
+            kwargs: dict[str, Any] = {}
             for i in range(len(values)):
                 kwargs[arg_keys[i]] = values[i]
             for key, value in defaults.items():
@@ -1619,7 +1619,7 @@ class TestLoggingCountStats(AnalyticsTestCase):
         def invite_context(
             too_many_recent_realm_invites: bool = False, failure: bool = False
         ) -> Iterator[None]:
-            managers: List[AbstractContextManager[Any]] = [
+            managers: list[AbstractContextManager[Any]] = [
                 mock.patch(
                     "zerver.actions.invites.too_many_recent_realm_invites", return_value=False
                 ),
@@ -1,5 +1,5 @@
 from datetime import datetime, timedelta, timezone
-from typing import List, Optional
+from typing import Optional

 from django.utils.timezone import now as timezone_now
 from typing_extensions import override
@@ -84,11 +84,11 @@ class TestGetChartData(ZulipTestCase):
             ceiling_to_day(self.realm.date_created) + timedelta(days=i) for i in range(4)
         ]

-    def data(self, i: int) -> List[int]:
+    def data(self, i: int) -> list[int]:
         return [0, 0, i, 0]

     def insert_data(
-        self, stat: CountStat, realm_subgroups: List[Optional[str]], user_subgroups: List[str]
+        self, stat: CountStat, realm_subgroups: list[Optional[str]], user_subgroups: list[str]
     ) -> None:
         if stat.frequency == CountStat.HOUR:
             insert_time = self.end_times_hour[2]
@@ -605,7 +605,7 @@ class TestGetChartData(ZulipTestCase):

 class TestGetChartDataHelpers(ZulipTestCase):
     def test_sort_by_totals(self) -> None:
-        empty: List[int] = []
+        empty: list[int] = []
         value_arrays = {"c": [0, 1], "a": [9], "b": [1, 1, 1], "d": empty}
         self.assertEqual(sort_by_totals(value_arrays), ["a", "b", "c", "d"])

@@ -1,4 +1,4 @@
-from typing import List, Union
+from typing import Union

 from django.conf import settings
 from django.conf.urls import include
@@ -16,7 +16,7 @@ from analytics.views.stats import (
 )
 from zerver.lib.rest import rest_path

-i18n_urlpatterns: List[Union[URLPattern, URLResolver]] = [
+i18n_urlpatterns: list[Union[URLPattern, URLResolver]] = [
     # Server admin (user_profile.is_staff) visible stats pages
     path("stats/realm/<realm_str>/", stats_for_realm),
     path("stats/installation", stats_for_installation),
@@ -1,7 +1,7 @@
 import logging
 from collections import defaultdict
 from datetime import datetime, timedelta, timezone
-from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar, Union, cast
+from typing import Any, Optional, TypeVar, Union, cast

 from django.conf import settings
 from django.db.models import QuerySet
@@ -260,10 +260,10 @@ def get_chart_data(
     stream: Optional[Stream] = None,
 ) -> HttpResponse:
     TableType: TypeAlias = Union[
-        Type["RemoteInstallationCount"],
-        Type[InstallationCount],
-        Type["RemoteRealmCount"],
-        Type[RealmCount],
+        type["RemoteInstallationCount"],
+        type[InstallationCount],
+        type["RemoteRealmCount"],
+        type[RealmCount],
     ]
     if for_installation:
         if remote:
@@ -282,7 +282,7 @@ def get_chart_data(
         aggregate_table = RealmCount

     tables: Union[
-        Tuple[TableType], Tuple[TableType, Type[UserCount]], Tuple[TableType, Type[StreamCount]]
+        tuple[TableType], tuple[TableType, type[UserCount]], tuple[TableType, type[StreamCount]]
     ]

     if chart_name == "number_of_humans":
@@ -292,7 +292,7 @@ def get_chart_data(
             COUNT_STATS["active_users_audit:is_bot:day"],
         ]
         tables = (aggregate_table,)
-        subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = {
+        subgroup_to_label: dict[CountStat, dict[Optional[str], str]] = {
             stats[0]: {None: "_1day"},
             stats[1]: {None: "_15day"},
             stats[2]: {"false": "all_time"},
@@ -372,7 +372,7 @@ def get_chart_data(
         assert server is not None
         assert aggregate_table is RemoteInstallationCount or aggregate_table is RemoteRealmCount
         aggregate_table_remote = cast(
-            Union[Type[RemoteInstallationCount], Type[RemoteRealmCount]], aggregate_table
+            Union[type[RemoteInstallationCount], type[RemoteRealmCount]], aggregate_table
        )  # https://stackoverflow.com/questions/68540528/mypy-assertions-on-the-types-of-types
         if not aggregate_table_remote.objects.filter(server=server).exists():
             raise JsonableError(
@@ -418,7 +418,7 @@ def get_chart_data(

     assert len({stat.frequency for stat in stats}) == 1
     end_times = time_range(start, end, stats[0].frequency, min_length)
-    data: Dict[str, Any] = {
+    data: dict[str, Any] = {
         "end_times": [int(end_time.timestamp()) for end_time in end_times],
         "frequency": stats[0].frequency,
     }
@@ -471,7 +471,7 @@ def get_chart_data(
     return json_success(request, data=data)


-def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
+def sort_by_totals(value_arrays: dict[str, list[int]]) -> list[str]:
     totals = sorted(((sum(values), label) for label, values in value_arrays.items()), reverse=True)
     return [label for total, label in totals]

@@ -482,10 +482,10 @@ def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]:
 # understanding the realm's traffic and the user's traffic. This function
 # tries to rank the clients so that taking the first N elements of the
 # sorted list has a reasonable chance of doing so.
-def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
+def sort_client_labels(data: dict[str, dict[str, list[int]]]) -> list[str]:
     realm_order = sort_by_totals(data["everyone"])
     user_order = sort_by_totals(data["user"])
-    label_sort_values: Dict[str, float] = {label: i for i, label in enumerate(realm_order)}
+    label_sort_values: dict[str, float] = {label: i for i, label in enumerate(realm_order)}
     for i, label in enumerate(user_order):
         label_sort_values[label] = min(i - 0.1, label_sort_values.get(label, i))
     return [label for label, sort_value in sorted(label_sort_values.items(), key=lambda x: x[1])]
@@ -494,7 +494,7 @@ def sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]:
 CountT = TypeVar("CountT", bound=BaseCount)


-def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
+def table_filtered_to_id(table: type[CountT], key_id: int) -> QuerySet[CountT]:
     if table == RealmCount:
         return table._default_manager.filter(realm_id=key_id)
     elif table == UserCount:
@@ -535,8 +535,8 @@ def client_label_map(name: str) -> str:
     return name


-def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]:
-    mapped_arrays: Dict[str, List[int]] = {}
+def rewrite_client_arrays(value_arrays: dict[str, list[int]]) -> dict[str, list[int]]:
+    mapped_arrays: dict[str, list[int]] = {}
     for label, array in value_arrays.items():
         mapped_label = client_label_map(label)
         if mapped_label in mapped_arrays:
@@ -549,18 +549,18 @@ def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[

 def get_time_series_by_subgroup(
     stat: CountStat,
-    table: Type[BaseCount],
+    table: type[BaseCount],
     key_id: int,
-    end_times: List[datetime],
-    subgroup_to_label: Dict[Optional[str], str],
+    end_times: list[datetime],
+    subgroup_to_label: dict[Optional[str], str],
     include_empty_subgroups: bool,
-) -> Dict[str, List[int]]:
+) -> dict[str, list[int]]:
     queryset = (
         table_filtered_to_id(table, key_id)
         .filter(property=stat.property)
         .values_list("subgroup", "end_time", "value")
     )
-    value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
+    value_dicts: dict[Optional[str], dict[datetime, int]] = defaultdict(lambda: defaultdict(int))
     for subgroup, end_time, value in queryset:
         value_dicts[subgroup][end_time] = value
     value_arrays = {}
@@ -4,7 +4,7 @@ __revision__ = "$Id: models.py 28 2009-10-22 15:03:02Z jarek.zgoda $"
 import secrets
 from base64 import b32encode
 from datetime import timedelta
-from typing import List, Mapping, Optional, Union, cast
+from typing import Mapping, Optional, Union, cast
 from urllib.parse import urljoin

 from django.conf import settings
@@ -80,7 +80,7 @@ ConfirmationObjT: TypeAlias = Union[NoZilencerConfirmationObjT, ZilencerConfirma


 def get_object_from_key(
-    confirmation_key: str, confirmation_types: List[int], *, mark_object_used: bool
+    confirmation_key: str, confirmation_types: list[int], *, mark_object_used: bool
 ) -> ConfirmationObjT:
     """Access a confirmation object from one of the provided confirmation
     types with the provided key.

@@ -2,7 +2,7 @@ from collections import defaultdict
 from dataclasses import dataclass
 from datetime import datetime
 from decimal import Decimal
-from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union
+from typing import Any, Callable, Optional, Sequence, Union
 from urllib.parse import urlencode

 from django.conf import settings
@@ -52,7 +52,7 @@ def make_table(
 ) -> str:
     if not has_row_class:

-        def fix_row(row: Any) -> Dict[str, Any]:
+        def fix_row(row: Any) -> dict[str, Any]:
             return dict(cells=row, row_class=None)

         rows = list(map(fix_row, rows))
@@ -68,7 +68,7 @@ def make_table(


 def fix_rows(
-    rows: List[List[Any]],
+    rows: list[list[Any]],
     i: int,
     fixup_func: Union[Callable[[str], Markup], Callable[[datetime], str], Callable[[int], int]],
 ) -> None:
@@ -76,7 +76,7 @@ def fix_rows(
         row[i] = fixup_func(row[i])


-def get_query_data(query: Composable) -> List[List[Any]]:
+def get_query_data(query: Composable) -> list[list[Any]]:
     cursor = connection.cursor()
     cursor.execute(query)
     rows = cursor.fetchall()
@@ -85,7 +85,7 @@ def get_query_data(query: Composable) -> List[List[Any]]:
     return rows


-def dictfetchall(cursor: CursorWrapper) -> List[Dict[str, Any]]:
+def dictfetchall(cursor: CursorWrapper) -> list[dict[str, Any]]:
     """Returns all rows from a cursor as a dict"""
     desc = cursor.description
     return [dict(zip((col[0] for col in desc), row)) for row in cursor.fetchall()]
@@ -208,7 +208,7 @@ def get_remote_activity_plan_data(
     )


-def get_estimated_arr_and_rate_by_realm() -> Tuple[Dict[str, int], Dict[str, str]]:  # nocoverage
+def get_estimated_arr_and_rate_by_realm() -> tuple[dict[str, int], dict[str, str]]:  # nocoverage
     # NOTE: Customers without a plan might still have a discount attached to them which
     # are not included in `plan_rate`.
     annual_revenue = {}
@@ -241,8 +241,8 @@ def get_estimated_arr_and_rate_by_realm() -> Tuple[Dict[str, int], Dict[str, str
     return annual_revenue, plan_rate


-def get_plan_data_by_remote_server() -> Dict[int, RemoteActivityPlanData]:  # nocoverage
-    remote_server_plan_data: Dict[int, RemoteActivityPlanData] = {}
+def get_plan_data_by_remote_server() -> dict[int, RemoteActivityPlanData]:  # nocoverage
+    remote_server_plan_data: dict[int, RemoteActivityPlanData] = {}
     plans = (
         CustomerPlan.objects.filter(
             status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD,
@@ -290,8 +290,8 @@ def get_plan_data_by_remote_server() -> Dict[int, RemoteActivityPlanData]:  # no
     return remote_server_plan_data


-def get_plan_data_by_remote_realm() -> Dict[int, Dict[int, RemoteActivityPlanData]]:  # nocoverage
-    remote_server_plan_data_by_realm: Dict[int, Dict[int, RemoteActivityPlanData]] = {}
+def get_plan_data_by_remote_realm() -> dict[int, dict[int, RemoteActivityPlanData]]:  # nocoverage
+    remote_server_plan_data_by_realm: dict[int, dict[int, RemoteActivityPlanData]] = {}
     plans = (
         CustomerPlan.objects.filter(
             status__lt=CustomerPlan.LIVE_STATUS_THRESHOLD,
@@ -351,8 +351,8 @@ def get_plan_data_by_remote_realm() -> Dict[int, Dict[int, RemoteActivityPlanDat

 def get_remote_realm_user_counts(
     event_time: datetime = timezone_now(),
-) -> Dict[int, RemoteCustomerUserCount]:  # nocoverage
-    user_counts_by_realm: Dict[int, RemoteCustomerUserCount] = {}
+) -> dict[int, RemoteCustomerUserCount]:  # nocoverage
+    user_counts_by_realm: dict[int, RemoteCustomerUserCount] = {}
     for log in (
         RemoteRealmAuditLog.objects.filter(
             event_type__in=RemoteRealmAuditLog.SYNCED_BILLING_EVENTS,
@@ -378,8 +378,8 @@ def get_remote_realm_user_counts(

 def get_remote_server_audit_logs(
     event_time: datetime = timezone_now(),
-) -> Dict[int, List[RemoteRealmAuditLog]]:
-    logs_per_server: Dict[int, List[RemoteRealmAuditLog]] = defaultdict(list)
+) -> dict[int, list[RemoteRealmAuditLog]]:
+    logs_per_server: dict[int, list[RemoteRealmAuditLog]] = defaultdict(list)
     for log in (
         RemoteRealmAuditLog.objects.filter(
             event_type__in=RemoteRealmAuditLog.SYNCED_BILLING_EVENTS,
@@ -1,5 +1,5 @@
 import logging
-from typing import Literal, Optional, Tuple, TypedDict, Union, cast
+from typing import Literal, Optional, TypedDict, Union, cast

 from django.http import HttpRequest
 from django.utils.timezone import now as timezone_now
@@ -102,7 +102,7 @@ def get_identity_dict_from_session(
 def get_remote_realm_and_user_from_session(
     request: HttpRequest,
     realm_uuid: Optional[str],
-) -> Tuple[RemoteRealm, RemoteRealmBillingUser]:
+) -> tuple[RemoteRealm, RemoteRealmBillingUser]:
     # Cannot use isinstance with TypeDicts, to make mypy know
     # which of the TypedDicts in the Union this is - so just cast it.
     identity_dict = cast(
@@ -151,7 +151,7 @@ def get_remote_realm_and_user_from_session(
 def get_remote_server_and_user_from_session(
     request: HttpRequest,
     server_uuid: str,
-) -> Tuple[RemoteZulipServer, Optional[RemoteServerBillingUser]]:
+) -> tuple[RemoteZulipServer, Optional[RemoteServerBillingUser]]:
     identity_dict: Optional[LegacyServerIdentityDict] = get_identity_dict_from_session(
         request, realm_uuid=None, server_uuid=server_uuid
     )
@@ -8,18 +8,7 @@ from datetime import datetime, timedelta, timezone
 from decimal import Decimal
 from enum import Enum
 from functools import wraps
-from typing import (
-    Any,
-    Callable,
-    Dict,
-    Generator,
-    Literal,
-    Optional,
-    Tuple,
-    TypedDict,
-    TypeVar,
-    Union,
-)
+from typing import Any, Callable, Generator, Literal, Optional, TypedDict, TypeVar, Union
 from urllib.parse import urlencode, urljoin

 import stripe
@@ -187,7 +176,7 @@ def get_seat_count(
     return max(non_guests, math.ceil(guests / 5))


-def sign_string(string: str) -> Tuple[str, str]:
+def sign_string(string: str) -> tuple[str, str]:
     salt = secrets.token_hex(32)
     signer = Signer(salt=salt)
     return signer.sign(string), salt
@@ -541,7 +530,7 @@ class PriceArgs(TypedDict, total=False):
 class StripeCustomerData:
     description: str
     email: str
-    metadata: Dict[str, Any]
+    metadata: dict[str, Any]


 @dataclass
@@ -754,7 +743,7 @@ class BillingSession(ABC):
         event_time: datetime,
         *,
         background_update: bool = False,
-        extra_data: Optional[Dict[str, Any]] = None,
+        extra_data: Optional[dict[str, Any]] = None,
     ) -> None:
         pass

@@ -764,8 +753,8 @@ class BillingSession(ABC):

     @abstractmethod
     def update_data_for_checkout_session_and_invoice_payment(
-        self, metadata: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        self, metadata: dict[str, Any]
+    ) -> dict[str, Any]:
         pass

     @abstractmethod
@@ -956,7 +945,7 @@ class BillingSession(ABC):

     @abstractmethod
     def update_or_create_customer(
-        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[Dict[str, Any]] = None
+        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[dict[str, Any]] = None
     ) -> Customer:
         pass

@@ -1013,11 +1002,11 @@ class BillingSession(ABC):
         pass

     @abstractmethod
-    def add_sponsorship_info_to_context(self, context: Dict[str, Any]) -> None:
+    def add_sponsorship_info_to_context(self, context: dict[str, Any]) -> None:
         pass

     @abstractmethod
-    def get_metadata_for_stripe_update_card(self) -> Dict[str, str]:
+    def get_metadata_for_stripe_update_card(self) -> dict[str, str]:
         pass

     @abstractmethod
@@ -1142,7 +1131,7 @@ class BillingSession(ABC):

     def create_stripe_invoice_and_charge(
         self,
-        metadata: Dict[str, Any],
+        metadata: dict[str, Any],
     ) -> str:
         """
         Charge customer based on `billing_modality`. If `billing_modality` is `charge_automatically`,
@@ -1217,7 +1206,7 @@ class BillingSession(ABC):
         self,
         manual_license_management: bool,
         tier: int,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         metadata = self.get_metadata_for_stripe_update_card()
         customer = self.update_or_create_stripe_customer()
         assert customer.stripe_customer_id is not None
@@ -1252,7 +1241,7 @@ class BillingSession(ABC):
             "stripe_session_id": stripe_session.id,
         }

-    def create_card_update_session(self) -> Dict[str, Any]:
+    def create_card_update_session(self) -> dict[str, Any]:
         metadata = self.get_metadata_for_stripe_update_card()
         customer = self.get_customer()
         assert customer is not None and customer.stripe_customer_id is not None
@@ -1429,7 +1418,7 @@ class BillingSession(ABC):
         required_plan_tier_name = CustomerPlan.name_from_tier(customer.required_plan_tier)

         fixed_price_cents = fixed_price * 100
-        fixed_price_plan_params: Dict[str, Any] = {
+        fixed_price_plan_params: dict[str, Any] = {
             "fixed_price": fixed_price_cents,
             "tier": customer.required_plan_tier,
         }
@@ -1591,7 +1580,7 @@ class BillingSession(ABC):
             ]
         )

-        def write_to_audit_log_plan_property_changed(extra_data: Dict[str, Any]) -> None:
+        def write_to_audit_log_plan_property_changed(extra_data: dict[str, Any]) -> None:
             extra_data["plan_id"] = plan.id
             self.write_to_audit_log(
                 event_type=AuditLogEventType.CUSTOMER_PLAN_PROPERTY_CHANGED,
@@ -1942,7 +1931,7 @@ class BillingSession(ABC):
             current_plan_id=plan.id,
         )

-    def do_upgrade(self, upgrade_request: UpgradeRequest) -> Dict[str, Any]:
+    def do_upgrade(self, upgrade_request: UpgradeRequest) -> dict[str, Any]:
         customer = self.get_customer()
         if customer is not None:
             self.ensure_current_plan_is_upgradable(customer, upgrade_request.tier)
@@ -1977,7 +1966,7 @@ class BillingSession(ABC):
             "annual": CustomerPlan.BILLING_SCHEDULE_ANNUAL,
             "monthly": CustomerPlan.BILLING_SCHEDULE_MONTHLY,
         }[schedule]
-        data: Dict[str, Any] = {}
+        data: dict[str, Any] = {}

         is_self_hosted_billing = not isinstance(self, RealmBillingSession)
         free_trial = is_free_trial_offer_enabled(is_self_hosted_billing, upgrade_request.tier)
@@ -2120,7 +2109,7 @@ class BillingSession(ABC):
     @transaction.atomic
     def make_end_of_cycle_updates_if_needed(
         self, plan: CustomerPlan, event_time: datetime
-    ) -> Tuple[Optional[CustomerPlan], Optional[LicenseLedger]]:
+    ) -> tuple[Optional[CustomerPlan], Optional[LicenseLedger]]:
         last_ledger_entry = (
             LicenseLedger.objects.filter(plan=plan, event_time__lte=event_time)
             .order_by("-id")
@@ -2338,7 +2327,7 @@ class BillingSession(ABC):
         plan: CustomerPlan,
         last_ledger_entry: LicenseLedger,
         now: datetime,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         is_self_hosted_billing = not isinstance(self, RealmBillingSession)
         downgrade_at_end_of_cycle = plan.status == CustomerPlan.DOWNGRADE_AT_END_OF_CYCLE
         downgrade_at_end_of_free_trial = plan.status == CustomerPlan.DOWNGRADE_AT_END_OF_FREE_TRIAL
@@ -2483,7 +2472,7 @@ class BillingSession(ABC):
         }
         return context

-    def get_billing_page_context(self) -> Dict[str, Any]:
+    def get_billing_page_context(self) -> dict[str, Any]:
         now = timezone_now()

         customer = self.get_customer()
@@ -2524,7 +2513,7 @@ class BillingSession(ABC):
             context[key] = next_plan_context[key]
         return context

-    def get_flat_discount_info(self, customer: Optional[Customer] = None) -> Tuple[int, int]:
+    def get_flat_discount_info(self, customer: Optional[Customer] = None) -> tuple[int, int]:
         is_self_hosted_billing = not isinstance(self, RealmBillingSession)
         flat_discount = 0
         flat_discounted_months = 0
@@ -2542,7 +2531,7 @@ class BillingSession(ABC):

     def get_initial_upgrade_context(
         self, initial_upgrade_request: InitialUpgradeRequest
-    ) -> Tuple[Optional[str], Optional[UpgradePageContext]]:
+    ) -> tuple[Optional[str], Optional[UpgradePageContext]]:
         customer = self.get_customer()

         # Allow users to upgrade to business regardless of current sponsorship status.
@@ -3200,7 +3189,7 @@ class BillingSession(ABC):
             assert type_of_tier_change == PlanTierChangeType.DOWNGRADE  # nocoverage
             return ""  # nocoverage

-    def get_event_status(self, event_status_request: EventStatusRequest) -> Dict[str, Any]:
+    def get_event_status(self, event_status_request: EventStatusRequest) -> dict[str, Any]:
         customer = self.get_customer()

         if customer is None:
@@ -3261,7 +3250,7 @@ class BillingSession(ABC):

         return sponsored_plan_name

-    def get_sponsorship_request_context(self) -> Optional[Dict[str, Any]]:
+    def get_sponsorship_request_context(self) -> Optional[dict[str, Any]]:
         customer = self.get_customer()
         is_remotely_hosted = isinstance(
             self, (RemoteRealmBillingSession, RemoteServerBillingSession)
@@ -3271,7 +3260,7 @@ class BillingSession(ABC):
         if is_remotely_hosted:
             plan_name = "Free"

-        context: Dict[str, Any] = {
+        context: dict[str, Any] = {
             "billing_base_url": self.billing_base_url,
             "is_remotely_hosted": is_remotely_hosted,
             "sponsorship_plan_name": self.get_sponsorship_plan_name(customer, is_remotely_hosted),
@@ -3837,7 +3826,7 @@ class RealmBillingSession(BillingSession):
         event_time: datetime,
         *,
         background_update: bool = False,
-        extra_data: Optional[Dict[str, Any]] = None,
+        extra_data: Optional[dict[str, Any]] = None,
     ) -> None:
         audit_log_event = self.get_audit_log_event(event_type)
         audit_log_data = {
@@ -3859,7 +3848,7 @@ class RealmBillingSession(BillingSession):
         # Support requests do not set any stripe billing information.
         assert self.support_session is False
         assert self.user is not None
-        metadata: Dict[str, Any] = {}
+        metadata: dict[str, Any] = {}
         metadata["realm_id"] = self.realm.id
         metadata["realm_str"] = self.realm.string_id
         realm_stripe_customer_data = StripeCustomerData(
@@ -3871,8 +3860,8 @@ class RealmBillingSession(BillingSession):

     @override
     def update_data_for_checkout_session_and_invoice_payment(
-        self, metadata: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        self, metadata: dict[str, Any]
+    ) -> dict[str, Any]:
         assert self.user is not None
         updated_metadata = dict(
             user_email=self.get_email(),
@@ -3885,7 +3874,7 @@ class RealmBillingSession(BillingSession):

     @override
     def update_or_create_customer(
-        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[Dict[str, Any]] = None
+        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[dict[str, Any]] = None
     ) -> Customer:
         if stripe_customer_id is not None:
             # Support requests do not set any stripe billing information.
@@ -3986,7 +3975,7 @@ class RealmBillingSession(BillingSession):
         return self.realm.plan_type == self.realm.PLAN_TYPE_STANDARD_FREE

     @override
-    def get_metadata_for_stripe_update_card(self) -> Dict[str, str]:
+    def get_metadata_for_stripe_update_card(self) -> dict[str, str]:
         assert self.user is not None
         return {
             "type": "card_update",
@@ -4051,7 +4040,7 @@ class RealmBillingSession(BillingSession):
         return self.realm.name

     @override
-    def add_sponsorship_info_to_context(self, context: Dict[str, Any]) -> None:
+    def add_sponsorship_info_to_context(self, context: dict[str, Any]) -> None:
         context.update(
             realm_org_type=self.realm.org_type,
             sorted_org_types=sorted(
@@ -4166,7 +4155,7 @@ class RemoteRealmBillingSession(BillingSession):
             # possible, in that the self-hosted server will have uploaded
             # current audit log data as needed as part of logging the user
             # in.
-            missing_data_context: Dict[str, Any] = {
+            missing_data_context: dict[str, Any] = {
                 "remote_realm_session": True,
                 "supports_remote_realms": self.remote_realm.server.last_api_feature_level is not None,
             }
@@ -4216,7 +4205,7 @@ class RemoteRealmBillingSession(BillingSession):
         event_time: datetime,
         *,
         background_update: bool = False,
-        extra_data: Optional[Dict[str, Any]] = None,
+        extra_data: Optional[dict[str, Any]] = None,
     ) -> None:
         # These audit logs don't use all the fields of `RemoteRealmAuditLog`:
         #
@@ -4250,7 +4239,7 @@ class RemoteRealmBillingSession(BillingSession):
     def get_data_for_stripe_customer(self) -> StripeCustomerData:
         # Support requests do not set any stripe billing information.
         assert self.support_session is False
-        metadata: Dict[str, Any] = {}
+        metadata: dict[str, Any] = {}
         metadata["remote_realm_uuid"] = self.remote_realm.uuid
         metadata["remote_realm_host"] = str(self.remote_realm.host)
         realm_stripe_customer_data = StripeCustomerData(
@@ -4262,8 +4251,8 @@ class RemoteRealmBillingSession(BillingSession):

     @override
     def update_data_for_checkout_session_and_invoice_payment(
-        self, metadata: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        self, metadata: dict[str, Any]
+    ) -> dict[str, Any]:
         assert self.remote_billing_user is not None
         updated_metadata = dict(
             remote_realm_user_id=self.remote_billing_user.id,
@@ -4275,7 +4264,7 @@ class RemoteRealmBillingSession(BillingSession):

     @override
     def update_or_create_customer(
-        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[Dict[str, Any]] = None
+        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[dict[str, Any]] = None
     ) -> Customer:
         if stripe_customer_id is not None:
             # Support requests do not set any stripe billing information.
@@ -4381,7 +4370,7 @@ class RemoteRealmBillingSession(BillingSession):
         return self.remote_realm.plan_type == self.remote_realm.PLAN_TYPE_COMMUNITY

     @override
-    def get_metadata_for_stripe_update_card(self) -> Dict[str, str]:  # nocoverage
+    def get_metadata_for_stripe_update_card(self) -> dict[str, str]:  # nocoverage
         assert self.remote_billing_user is not None
         return {"type": "card_update", "remote_realm_user_id": str(self.remote_billing_user.id)}

@@ -4487,7 +4476,7 @@ class RemoteRealmBillingSession(BillingSession):
         return self.remote_realm.host

     @override
-    def add_sponsorship_info_to_context(self, context: Dict[str, Any]) -> None:
+    def add_sponsorship_info_to_context(self, context: dict[str, Any]) -> None:
         context.update(
             realm_org_type=self.remote_realm.org_type,
             sorted_org_types=sorted(
@@ -4659,7 +4648,7 @@ class RemoteServerBillingSession(BillingSession):
         event_time: datetime,
         *,
         background_update: bool = False,
-        extra_data: Optional[Dict[str, Any]] = None,
+        extra_data: Optional[dict[str, Any]] = None,
     ) -> None:
         audit_log_event = self.get_audit_log_event(event_type)
         log_data = {
@@ -4687,7 +4676,7 @@ class RemoteServerBillingSession(BillingSession):
     def get_data_for_stripe_customer(self) -> StripeCustomerData:
         # Support requests do not set any stripe billing information.
         assert self.support_session is False
-        metadata: Dict[str, Any] = {}
+        metadata: dict[str, Any] = {}
         metadata["remote_server_uuid"] = self.remote_server.uuid
         metadata["remote_server_str"] = str(self.remote_server)
         realm_stripe_customer_data = StripeCustomerData(
@@ -4699,8 +4688,8 @@ class RemoteServerBillingSession(BillingSession):

     @override
     def update_data_for_checkout_session_and_invoice_payment(
-        self, metadata: Dict[str, Any]
-    ) -> Dict[str, Any]:
+        self, metadata: dict[str, Any]
+    ) -> dict[str, Any]:
         assert self.remote_billing_user is not None
         updated_metadata = dict(
             remote_server_user_id=self.remote_billing_user.id,
@@ -4712,7 +4701,7 @@ class RemoteServerBillingSession(BillingSession):

     @override
     def update_or_create_customer(
-        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[Dict[str, Any]] = None
+        self, stripe_customer_id: Optional[str] = None, *, defaults: Optional[dict[str, Any]] = None
     ) -> Customer:
         if stripe_customer_id is not None:
             # Support requests do not set any stripe billing information.
@@ -4848,7 +4837,7 @@ class RemoteServerBillingSession(BillingSession):
         return self.remote_server.plan_type == self.remote_server.PLAN_TYPE_COMMUNITY

     @override
-    def get_metadata_for_stripe_update_card(self) -> Dict[str, str]:  # nocoverage
+    def get_metadata_for_stripe_update_card(self) -> dict[str, str]:  # nocoverage
         assert self.remote_billing_user is not None
         return {"type": "card_update", "remote_server_user_id": str(self.remote_billing_user.id)}

@@ -4936,7 +4925,7 @@ class RemoteServerBillingSession(BillingSession):
         return self.remote_server.hostname

     @override
-    def add_sponsorship_info_to_context(self, context: Dict[str, Any]) -> None:  # nocoverage
+    def add_sponsorship_info_to_context(self, context: dict[str, Any]) -> None:  # nocoverage
         context.update(
             realm_org_type=self.remote_server.org_type,
             sorted_org_types=sorted(
@@ -5025,7 +5014,7 @@ def get_price_per_license(
             # We already have a set discounted price for the current tier.
             return price_per_license

-    price_map: Dict[int, Dict[str, int]] = {
+    price_map: dict[int, dict[str, int]] = {
         CustomerPlan.TIER_CLOUD_STANDARD: {"Annual": 8000, "Monthly": 800},
         CustomerPlan.TIER_CLOUD_PLUS: {"Annual": 12000, "Monthly": 1200},
         CustomerPlan.TIER_SELF_HOSTED_BASIC: {"Annual": 4200, "Monthly": 350},
@@ -5047,7 +5036,7 @@ def get_price_per_license(

 def get_price_per_license_and_discount(
     tier: int, billing_schedule: int, customer: Optional[Customer]
-) -> Tuple[int, Union[str, None]]:
+) -> tuple[int, Union[str, None]]:
     original_price_per_license = get_price_per_license(tier, billing_schedule)
     if customer is None:
         return original_price_per_license, None
@@ -5070,7 +5059,7 @@ def compute_plan_parameters(
     billing_cycle_anchor: Optional[datetime] = None,
     is_self_hosted_billing: bool = False,
     should_schedule_upgrade_for_legacy_remote_server: bool = False,
-) -> Tuple[datetime, datetime, datetime, int]:
+) -> tuple[datetime, datetime, datetime, int]:
     # Everything in Stripe is stored as timestamps with 1 second resolution,
     # so standardize on 1 second resolution.
     # TODO talk about leap seconds?
@@ -5354,7 +5343,7 @@ def downgrade_small_realms_behind_on_payments_as_needed() -> None:
         billing_session = RealmBillingSession(user=None, realm=realm)
         billing_session.downgrade_now_without_creating_additional_invoices()
         billing_session.void_all_open_invoices()
-        context: Dict[str, Union[str, Realm]] = {
+        context: dict[str, Union[str, Realm]] = {
             "upgrade_url": f"{realm.url}{reverse('upgrade_page')}",
             "realm": realm,
         }
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Any, Dict, Optional, Union
+from typing import Any, Optional, Union

 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.contrib.contenttypes.models import ContentType
@@ -109,7 +109,7 @@ class Event(models.Model):

     handler_error = models.JSONField(default=None, null=True)

-    def get_event_handler_details_as_dict(self) -> Dict[str, Any]:
+    def get_event_handler_details_as_dict(self) -> dict[str, Any]:
         details_dict = {}
         details_dict["status"] = {
             Event.RECEIVED: "not_started",
@@ -158,8 +158,8 @@ class Session(models.Model):
             Session.CARD_UPDATE_FROM_UPGRADE_PAGE: "card_update_from_upgrade_page",
         }[self.type]

-    def to_dict(self) -> Dict[str, Any]:
-        session_dict: Dict[str, Any] = {}
+    def to_dict(self) -> dict[str, Any]:
+        session_dict: dict[str, Any] = {}

         session_dict["status"] = self.get_status_as_string()
         session_dict["type"] = self.get_type_as_string()
@@ -216,8 +216,8 @@ class PaymentIntent(models.Model):  # nocoverage
             return None  # nocoverage
         return get_last_associated_event_by_type(self, event_type)

-    def to_dict(self) -> Dict[str, Any]:
-        payment_intent_dict: Dict[str, Any] = {}
+    def to_dict(self) -> dict[str, Any]:
+        payment_intent_dict: dict[str, Any] = {}
         payment_intent_dict["status"] = self.get_status_as_string()
         event = self.get_last_associated_event()
         if event is not None:
@@ -251,8 +251,8 @@ class Invoice(models.Model):
             return None  # nocoverage
         return get_last_associated_event_by_type(self, event_type)

-    def to_dict(self) -> Dict[str, Any]:
-        stripe_invoice_dict: Dict[str, Any] = {}
+    def to_dict(self) -> dict[str, Any]:
+        stripe_invoice_dict: dict[str, Any] = {}
         stripe_invoice_dict["status"] = self.get_status_as_string()
         event = self.get_last_associated_event()
         if event is not None:
@@ -14,14 +14,10 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
-    Dict,
-    List,
     Literal,
     Mapping,
     Optional,
     Sequence,
-    Tuple,
-    Type,
     TypeVar,
     Union,
     cast,
@@ -139,7 +135,7 @@ def stripe_fixture_path(
     return f"{STRIPE_FIXTURES_DIR}/{decorated_function_name}--{mocked_function_name[7:]}.{call_count}.json"


-def fixture_files_for_function(decorated_function: CallableT) -> List[str]:  # nocoverage
+def fixture_files_for_function(decorated_function: CallableT) -> list[str]:  # nocoverage
     decorated_function_name = decorated_function.__name__
     if decorated_function_name[:5] == "test_":
         decorated_function_name = decorated_function_name[5:]
@@ -269,7 +265,7 @@ def normalize_fixture_data(
             f'"{timestamp_field}": 1{i + 1:02}%07d'
         )

-    normalized_values: Dict[str, Dict[str, str]] = {pattern: {} for pattern in pattern_translations}
+    normalized_values: dict[str, dict[str, str]] = {pattern: {} for pattern in pattern_translations}
     for fixture_file in fixture_files_for_function(decorated_function):
         with open(fixture_file) as f:
             file_content = f.read()
@@ -470,7 +466,7 @@ class StripeTestCase(ZulipTestCase):
         return "tok_visa_chargeDeclined"

     def assert_details_of_valid_session_from_event_status_endpoint(
-        self, stripe_session_id: str, expected_details: Dict[str, Any]
+        self, stripe_session_id: str, expected_details: dict[str, Any]
     ) -> None:
         json_response = self.client_billing_get(
             "/billing/event/status",
@@ -484,7 +480,7 @@ class StripeTestCase(ZulipTestCase):
     def assert_details_of_valid_invoice_payment_from_event_status_endpoint(
         self,
         stripe_invoice_id: str,
-        expected_details: Dict[str, Any],
+        expected_details: dict[str, Any],
     ) -> None:
         json_response = self.client_billing_get(
             "/billing/event/status",
@@ -626,7 +622,7 @@ class StripeTestCase(ZulipTestCase):
             upgrade_page_response = self.client_get(upgrade_url, {}, subdomain="selfhosting")
         else:
             upgrade_page_response = self.client_get(upgrade_url, {})
-        params: Dict[str, Any] = {
+        params: dict[str, Any] = {
             "schedule": "annual",
             "signed_seat_count": self.get_signed_seat_count_from_response(upgrade_page_response),
             "salt": self.get_salt_from_response(upgrade_page_response),
@@ -4468,7 +4464,7 @@ class StripeTest(StripeTestCase):
         for invoice in invoices:
             self.assertEqual(invoice.status, "void")

-    def create_invoices(self, customer: Customer, num_invoices: int) -> List[stripe.Invoice]:
+    def create_invoices(self, customer: Customer, num_invoices: int) -> list[stripe.Invoice]:
         invoices = []
         assert customer.stripe_customer_id is not None
         for _ in range(num_invoices):
@@ -4499,7 +4495,7 @@ class StripeTest(StripeTestCase):
             create_stripe_customer: bool,
             create_plan: bool,
             num_invoices: Optional[int] = None,
-        ) -> Tuple[Realm, Optional[CustomerPlan], List[stripe.Invoice]]:
+        ) -> tuple[Realm, Optional[CustomerPlan], list[stripe.Invoice]]:
             nonlocal test_realm_count
             test_realm_count += 1
             realm_string_id = "test-realm-" + str(test_realm_count)
@@ -4541,7 +4537,7 @@ class StripeTest(StripeTestCase):
             expected_invoice_count: int
             email_expected_to_be_sent: bool

-        rows: List[Row] = []
+        rows: list[Row] = []

         # no stripe customer ID (excluded from query)
         realm, _, _ = create_realm(
@@ -4970,11 +4966,11 @@ class RequiresBillingAccessTest(StripeTestCase):
         tested_endpoints = set()

         def check_users_cant_access(
-            users: List[UserProfile],
+            users: list[UserProfile],
             error_message: str,
             url: str,
             method: str,
-            data: Dict[str, Any],
+            data: dict[str, Any],
         ) -> None:
             tested_endpoints.add(url)
             for user in users:
@@ -6507,7 +6503,7 @@ class TestRemoteBillingWriteAuditLog(StripeTestCase):
         # Necessary cast or mypy doesn't understand that we can use Django's
        # model .objects. style queries on this.
         audit_log_model = cast(
-            Union[Type[RemoteRealmAuditLog], Type[RemoteZulipServerAuditLog]], audit_log_class
+            Union[type[RemoteRealmAuditLog], type[RemoteZulipServerAuditLog]], audit_log_class
         )
         assert isinstance(remote_user, (RemoteRealmBillingUser, RemoteServerBillingUser))
         # No acting user:
@@ -1,5 +1,5 @@
 import logging
-from typing import Any, Dict, Literal, Optional
+from typing import Any, Literal, Optional

 from django.http import HttpRequest, HttpResponse, HttpResponseNotAllowed, HttpResponseRedirect
 from django.shortcuts import render
@@ -55,7 +55,7 @@ def billing_page(

     billing_session = RealmBillingSession(user=user, realm=user.realm)

-    context: Dict[str, Any] = {
+    context: dict[str, Any] = {
         "admin_access": user.has_billing_access,
         "has_active_plan": False,
         "org_name": billing_session.org_name(),
@@ -101,7 +101,7 @@ def remote_realm_billing_page(
     success_message: str = "",
 ) -> HttpResponse:
     realm_uuid = billing_session.remote_realm.uuid
-    context: Dict[str, Any] = {
+    context: dict[str, Any] = {
         # We wouldn't be here if user didn't have access.
         "admin_access": billing_session.has_billing_access(),
         "has_active_plan": False,
@@ -161,7 +161,7 @@ def remote_server_billing_page(
     *,
     success_message: str = "",
 ) -> HttpResponse:
-    context: Dict[str, Any] = {
+    context: dict[str, Any] = {
         # We wouldn't be here if user didn't have access.
         "admin_access": billing_session.has_billing_access(),
         "has_active_plan": False,
@@ -1,5 +1,5 @@
 from collections import defaultdict
-from typing import Dict, Optional
+from typing import Optional

 from django.conf import settings
 from django.db import connection
@@ -33,7 +33,7 @@ from zerver.models.realm_audit_logs import RealmAuditLog
 from zerver.models.realms import get_org_type_display_name


-def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]:
+def get_realm_day_counts() -> dict[str, dict[str, Markup]]:
     # To align with UTC days, we subtract an hour from end_time to
     # get the start_time, since the hour that starts at midnight was
     # on the previous day.
@@ -61,7 +61,7 @@ def get_realm_day_counts() -> Dict[str, Dict[str, Markup]]:
     rows = dictfetchall(cursor)
     cursor.close()

-    counts: Dict[str, Dict[int, int]] = defaultdict(dict)
+    counts: dict[str, dict[int, int]] = defaultdict(dict)
     for row in rows:
         counts[row["string_id"]][row["age"]] = row["cnt"]

@@ -2,7 +2,7 @@ import itertools
 import re
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Any, Collection, Dict, Optional, Set
+from typing import Any, Collection, Optional

 from django.db.models import QuerySet
 from django.http import HttpRequest, HttpResponse, HttpResponseNotFound
@@ -97,9 +97,9 @@ def get_user_activity_summary(records: Collection[UserActivity]) -> UserActivity


 def realm_user_summary_table(
-    all_records: QuerySet[UserActivity], admin_emails: Set[str], title: str, stats_link: Markup
+    all_records: QuerySet[UserActivity], admin_emails: set[str], title: str, stats_link: Markup
 ) -> str:
-    user_records: Dict[str, UserActivitySummary] = {}
+    user_records: dict[str, UserActivitySummary] = {}

     def by_email(record: UserActivity) -> str:
         return record.user_profile.delivery_email
@@ -141,7 +141,7 @@ def realm_user_summary_table(
         row = dict(cells=cells, row_class=row_class)
         rows.append(row)

-    def by_last_heard_from(row: Dict[str, Any]) -> str:
+    def by_last_heard_from(row: dict[str, Any]) -> str:
         return row["cells"][4]

     rows = sorted(rows, key=by_last_heard_from, reverse=True)
@@ -1,5 +1,5 @@
 import logging
-from typing import Any, Dict, Literal, Optional, Union, cast
+from typing import Any, Literal, Optional, Union, cast
 from urllib.parse import urlsplit, urlunsplit

 from django.conf import settings
@@ -526,7 +526,7 @@ def remote_billing_legacy_server_login(
     zulip_org_key: Optional[str] = None,
     next_page: VALID_NEXT_PAGES_TYPE = None,
 ) -> HttpResponse:
-    context: Dict[str, Any] = {"next_page": next_page}
+    context: dict[str, Any] = {"next_page": next_page}
     if zulip_org_id is None or zulip_org_key is None:
         context.update({"error_message": False})
         return render(request, "corporate/billing/legacy_server_login.html", context)
@@ -3,7 +3,7 @@ from contextlib import suppress
 from dataclasses import dataclass
 from datetime import timedelta
 from operator import attrgetter
-from typing import Any, Dict, Iterable, List, Optional, Union
+from typing import Any, Iterable, Optional, Union
 from urllib.parse import urlencode, urlsplit

 from django import forms
@@ -215,8 +215,8 @@ def get_plan_type_string(plan_type: int) -> str:


 def get_confirmations(
-types: List[int], object_ids: Iterable[int], hostname: Optional[str] = None
-) -> List[Dict[str, Any]]:
+types: list[int], object_ids: Iterable[int], hostname: Optional[str] = None
+) -> list[dict[str, Any]]:
 lowest_datetime = timezone_now() - timedelta(days=30)
 confirmations = Confirmation.objects.filter(
 type__in=types, object_id__in=object_ids, date_sent__gte=lowest_datetime
@@ -265,7 +265,7 @@ class SupportSelectOption:
 value: int


-def get_remote_plan_tier_options() -> List[SupportSelectOption]:
+def get_remote_plan_tier_options() -> list[SupportSelectOption]:
 remote_plan_tiers = [
 SupportSelectOption("None", 0),
 SupportSelectOption(
@@ -280,7 +280,7 @@ def get_remote_plan_tier_options() -> List[SupportSelectOption]:
 return remote_plan_tiers


-def get_realm_plan_type_options() -> List[SupportSelectOption]:
+def get_realm_plan_type_options() -> list[SupportSelectOption]:
 plan_types = [
 SupportSelectOption(
 get_plan_type_string(Realm.PLAN_TYPE_SELF_HOSTED), Realm.PLAN_TYPE_SELF_HOSTED
@@ -297,7 +297,7 @@ def get_realm_plan_type_options() -> List[SupportSelectOption]:
 return plan_types


-def get_realm_plan_type_options_for_discount() -> List[SupportSelectOption]:
+def get_realm_plan_type_options_for_discount() -> list[SupportSelectOption]:
 plan_types = [
 SupportSelectOption("None", 0),
 SupportSelectOption(
@@ -349,7 +349,7 @@ def support(
 query: Annotated[Optional[str], ApiParamConfig("q")] = None,
 org_type: Optional[Json[NonNegativeInt]] = None,
 ) -> HttpResponse:
-context: Dict[str, Any] = {}
+context: dict[str, Any] = {}

 if "success_message" in request.session:
 context["success_message"] = request.session["success_message"]
@@ -499,7 +499,7 @@ def support(
 context["users"] = users
 context["realms"] = realms

-confirmations: List[Dict[str, Any]] = []
+confirmations: list[dict[str, Any]] = []

 preregistration_user_ids = [
 user.id for user in PreregistrationUser.objects.filter(email__in=key_words)
@@ -544,7 +544,7 @@ def support(
 ]
 + [user.realm for user in users]
 )
-realm_support_data: Dict[int, CloudSupportData] = {}
+realm_support_data: dict[int, CloudSupportData] = {}
 for realm in all_realms:
 billing_session = RealmBillingSession(user=None, realm=realm)
 realm_data = get_data_for_cloud_support_view(billing_session)
@@ -581,7 +581,7 @@ def support(

 def get_remote_servers_for_support(
 email_to_search: Optional[str], uuid_to_search: Optional[str], hostname_to_search: Optional[str]
-) -> List["RemoteZulipServer"]:
+) -> list["RemoteZulipServer"]:
 remote_servers_query = RemoteZulipServer.objects.order_by("id")

 if email_to_search:
@@ -645,7 +645,7 @@ def remote_servers_support(
 delete_fixed_price_next_plan: Json[bool] = False,
 remote_server_status: Optional[VALID_STATUS_VALUES] = None,
 ) -> HttpResponse:
-context: Dict[str, Any] = {}
+context: dict[str, Any] = {}

 if "success_message" in request.session:
 context["success_message"] = request.session["success_message"]
@@ -778,10 +778,10 @@ def remote_servers_support(
 uuid_to_search=uuid_to_search,
 hostname_to_search=hostname_to_search,
 )
-remote_server_to_max_monthly_messages: Dict[int, Union[int, str]] = dict()
-server_support_data: Dict[int, RemoteSupportData] = {}
-realm_support_data: Dict[int, RemoteSupportData] = {}
-remote_realms: Dict[int, List[RemoteRealm]] = {}
+remote_server_to_max_monthly_messages: dict[int, Union[int, str]] = dict()
+server_support_data: dict[int, RemoteSupportData] = {}
+realm_support_data: dict[int, RemoteSupportData] = {}
+remote_realms: dict[int, list[RemoteRealm]] = {}
 for remote_server in remote_servers:
 # Get remote realms attached to remote server
 remote_realms_for_server = list(

@@ -1,4 +1,4 @@
-from typing import Any, List
+from typing import Any

 from django.db.models import QuerySet
 from django.http import HttpRequest, HttpResponse
@@ -43,7 +43,7 @@ def get_user_activity(request: HttpRequest, user_profile_id: int) -> HttpRespons
 "Last visit (UTC)",
 ]

-def row(record: UserActivity) -> List[Any]:
+def row(record: UserActivity) -> list[Any]:
 return [
 record.query,
 record.client.name,

@@ -3,7 +3,7 @@ import configparser
 import os
 import sys
 from collections import defaultdict
-from typing import Dict, List, Optional
+from typing import Optional

 BASE_DIR = os.path.dirname(os.path.abspath(__file__))
 sys.path.append(BASE_DIR)
@@ -18,7 +18,7 @@ from typing_extensions import override
 from scripts.lib.zulip_tools import assert_not_running_as_root


-def get_filtered_commands() -> Dict[str, str]:
+def get_filtered_commands() -> dict[str, str]:
 """Because Zulip uses management commands in production, `manage.py
 help` is a form of documentation for users. Here we exclude from
 that documentation built-in commands that are not constructive for
@@ -110,7 +110,7 @@ class FilteredManagementUtility(ManagementUtility):
 return "\n".join(usage)


-def execute_from_command_line(argv: Optional[List[str]] = None) -> None:
+def execute_from_command_line(argv: Optional[list[str]] = None) -> None:
 """Run a FilteredManagementUtility."""
 utility = FilteredManagementUtility(argv)
 utility.execute()

@@ -3,7 +3,7 @@
 import contextlib
 import sys
 import time
-from typing import Any, Dict, Iterable, Optional, Sequence, Union
+from typing import Any, Iterable, Optional, Sequence, Union

 sys.path.append("/home/zulip/deployments/current")
 from scripts.lib.setup_path import setup_path
@@ -47,7 +47,7 @@ class MemcachedCollector(Collector):
 name: str,
 doc: str,
 value: Union[bytes, float],
-labels: Optional[Dict[str, str]] = None,
+labels: Optional[dict[str, str]] = None,
 ) -> CounterMetricFamily:
 if labels is None:
 labels = {}
@@ -63,7 +63,7 @@ class MemcachedCollector(Collector):
 )
 return metric

-cache: Dict[str, Any] = settings.CACHES["default"]
+cache: dict[str, Any] = settings.CACHES["default"]
 client = None
 with contextlib.suppress(Exception):
 client = bmemcached.Client((cache["LOCATION"],), **cache["OPTIONS"])
@@ -73,7 +73,7 @@ class MemcachedCollector(Collector):
 return

 raw_stats = client.stats()
-stats: Dict[str, bytes] = next(iter(raw_stats.values()))
+stats: dict[str, bytes] = next(iter(raw_stats.values()))

 version_gauge = gauge(
 "version", "The version of this memcached server.", labels=["version"]

@@ -12,11 +12,11 @@ mirrors when they receive the messages sent every minute by
 import os
 import sys
 import time
-from typing import Dict, NoReturn
+from typing import NoReturn

 RESULTS_DIR: str = "/home/zulip/mirror_status"

-states: Dict[str, int] = {
+states: dict[str, int] = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,

@@ -13,11 +13,11 @@ See puppet/kandra/files/cron.d/zephyr-mirror for the crontab details.
 import os
 import sys
 import time
-from typing import Dict, NoReturn
+from typing import NoReturn

 RESULTS_FILE = "/var/lib/nagios_state/check-mirroring-results"

-states: Dict[str, int] = {
+states: dict[str, int] = {
 "OK": 0,
 "WARNING": 1,
 "CRITICAL": 2,

@@ -6,10 +6,9 @@ file output by the cron job is correct.

 import sys
 import time
-from typing import Tuple


-def nagios_from_file(results_file: str, max_time_diff: int = 60 * 2) -> Tuple[int, str]:
+def nagios_from_file(results_file: str, max_time_diff: int = 60 * 2) -> tuple[int, str]:
 """Returns a nagios-appropriate string and return code obtained by
 parsing the desired file on disk. The file on disk should be of format

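The hunk above also shrinks by one line (`-6,10 +6,9`): `Tuple` was the only typing name the module used, so the import is deleted outright rather than trimmed. The same pattern, sketched with invented names:

    # Before:
    #     from typing import Tuple
    #     def nagios_result(state: str) -> Tuple[int, str]: ...
    # After, with no typing import left in the module at all:
    def nagios_result(state: str) -> tuple[int, str]:
        codes = {"OK": 0, "WARNING": 1, "CRITICAL": 2, "UNKNOWN": 3}
        return (codes.get(state, 3), state)
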
@@ -13,7 +13,7 @@ import random
 import sys
 import time
 import traceback
-from typing import Any, Dict, List, Literal, NoReturn, Optional
+from typing import Any, Literal, NoReturn, Optional

 sys.path.append(".")
 sys.path.append("/home/zulip/deployments/current")
@@ -58,13 +58,13 @@ def report(
 sys.exit(atomic_nagios_write("check_send_receive_state", state, msg))


-def send_zulip(sender: zulip.Client, message: Dict[str, Any]) -> None:
+def send_zulip(sender: zulip.Client, message: dict[str, Any]) -> None:
 result = sender.send_message(message)
 if result["result"] != "success":
 report("critical", msg=f"Error sending Zulip, args were: {message}, {result}")


-def get_zulips() -> List[Dict[str, Any]]:
+def get_zulips() -> list[dict[str, Any]]:
 global last_event_id
 res = zulip_recipient.get_events(queue_id=queue_id, last_event_id=last_event_id)
 if "error" in res.get("result", {}):
@@ -128,7 +128,7 @@ send_zulip(
 },
 )

-msg_content: List[str] = []
+msg_content: list[str] = []

 while msg_to_send not in msg_content:
 messages = get_zulips()

@@ -11,7 +11,6 @@ import configparser
 import re
 import subprocess
 import sys
-from typing import Dict, List


 def get_config(
@@ -44,7 +43,7 @@ def report(state: str, msg: str) -> None:
 MAXSTATE = max(MAXSTATE, states[state])


-def run_sql_query(query: str) -> List[List[str]]:
+def run_sql_query(query: str) -> list[list[str]]:
 command = [
 "psql",
 "-t", # Omit header line
@@ -130,7 +129,7 @@ else:
 report("CRITICAL", f"replica {client_addr} is in state {state}, not streaming")

 sent_offset = loc_to_abs_offset(sent_lsn)
-lag: Dict[str, int] = {}
+lag: dict[str, int] = {}
 lag["write"] = sent_offset - loc_to_abs_offset(write_lsn)
 lag["flush"] = sent_offset - loc_to_abs_offset(flush_lsn)
 lag["replay"] = sent_offset - loc_to_abs_offset(replay_lsn)

@@ -9,7 +9,7 @@ import sys
 from collections import defaultdict
 from datetime import datetime, timedelta, timezone
 from http.server import BaseHTTPRequestHandler, HTTPServer
-from typing import Dict, List, Mapping, Optional, Protocol
+from typing import Mapping, Optional, Protocol
 from urllib.parse import parse_qs, urlsplit


@@ -21,8 +21,8 @@ class GaugeMetric(Protocol):
 class WalGPrometheusServer(BaseHTTPRequestHandler):
 METRIC_PREFIX = "wal_g_backup_"

-metrics: Dict[str, List[str]] = {}
-metric_values: Dict[str, Dict[str, str]] = defaultdict(dict)
+metrics: dict[str, list[str]] = {}
+metric_values: dict[str, dict[str, str]] = defaultdict(dict)

 server_version = "wal-g-prometheus-server/1.0"

@@ -124,7 +124,7 @@ class WalGPrometheusServer(BaseHTTPRequestHandler):
 backup_latest_compressed_size_bytes(latest["compressed_size"], labels)
 backup_latest_uncompressed_size_bytes(latest["uncompressed_size"], labels)

-def t(key: str, e: Dict[str, str] = latest) -> datetime:
+def t(key: str, e: dict[str, str] = latest) -> datetime:
 return datetime.strptime(e[key], e["date_fmt"]).replace(tzinfo=timezone.utc)

 backup_earliest_age_seconds(

@@ -5,7 +5,7 @@ import subprocess
 import sys
 import time
 from collections import defaultdict
-from typing import Any, DefaultDict, Dict, List
+from typing import Any

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

@@ -43,14 +43,14 @@ states = {
 3: "UNKNOWN",
 }

-MAX_SECONDS_TO_CLEAR: DefaultDict[str, int] = defaultdict(
+MAX_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
 lambda: 30,
 deferred_work=600,
 digest_emails=1200,
 missedmessage_mobile_notifications=120,
 embed_links=60,
 )
-CRITICAL_SECONDS_TO_CLEAR: DefaultDict[str, int] = defaultdict(
+CRITICAL_SECONDS_TO_CLEAR: defaultdict[str, int] = defaultdict(
 lambda: 60,
 deferred_work=900,
 missedmessage_mobile_notifications=180,
@@ -60,8 +60,8 @@ CRITICAL_SECONDS_TO_CLEAR: DefaultDict[str, int] = defaultdict(


 def analyze_queue_stats(
-queue_name: str, stats: Dict[str, Any], queue_count_rabbitmqctl: int
-) -> Dict[str, Any]:
+queue_name: str, stats: dict[str, Any], queue_count_rabbitmqctl: int
+) -> dict[str, Any]:
 now = int(time.time())
 if stats == {}:
 return dict(status=UNKNOWN, name=queue_name, message="invalid or no stats data")
@@ -117,7 +117,7 @@ WARN_COUNT_THRESHOLD_DEFAULT = 10
 CRITICAL_COUNT_THRESHOLD_DEFAULT = 50


-def check_other_queues(queue_counts_dict: Dict[str, int]) -> List[Dict[str, Any]]:
+def check_other_queues(queue_counts_dict: dict[str, int]) -> list[dict[str, Any]]:
 """Do a simple queue size check for queues whose workers don't publish stats files."""

 results = []
@@ -161,7 +161,7 @@ def check_rabbitmq_queues() -> None:
 [os.path.join(ZULIP_PATH, "scripts/get-django-setting"), "QUEUE_STATS_DIR"],
 text=True,
 ).strip()
-queue_stats: Dict[str, Dict[str, Any]] = {}
+queue_stats: dict[str, dict[str, Any]] = {}
 check_queues = normal_queues
 if mobile_notification_shards > 1:
 check_queues += [

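`DefaultDict` above gets a slightly different replacement than the other aliases: not a bare builtin, but `collections.defaultdict` itself, which has been subscriptable since Python 3.9 and so doubles as the annotation. A small sketch (the threshold values are illustrative):

    from collections import defaultdict

    # One name now serves as both the constructor and the annotation;
    # typing.DefaultDict is redundant.
    seconds_to_clear: defaultdict[str, int] = defaultdict(lambda: 30, deferred_work=600)

    assert seconds_to_clear["deferred_work"] == 600
    assert seconds_to_clear["missing_key"] == 30  # factory supplies the default
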
@@ -2,7 +2,6 @@
 import argparse
 import os
 import sys
-from typing import Set

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(ZULIP_PATH)
@@ -17,7 +16,7 @@ ENV = get_environment()
 EMOJI_CACHE_PATH = "/srv/zulip-emoji-cache"


-def get_caches_in_use(threshold_days: int) -> Set[str]:
+def get_caches_in_use(threshold_days: int) -> set[str]:
 setups_to_check = {ZULIP_PATH}
 caches_in_use = set()

@@ -7,7 +7,6 @@
 import argparse
 import os
 import sys
-from typing import Set

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(ZULIP_PATH)
@@ -22,7 +21,7 @@ ENV = get_environment()
 NODE_MODULES_CACHE_PATH = "/srv/zulip-npm-cache"


-def get_caches_in_use(threshold_days: int) -> Set[str]:
+def get_caches_in_use(threshold_days: int) -> set[str]:
 setups_to_check = {ZULIP_PATH}
 caches_in_use = set()

@@ -3,7 +3,6 @@ import argparse
 import glob
 import os
 import sys
-from typing import Set

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(ZULIP_PATH)
@@ -19,7 +18,7 @@ ENV = get_environment()
 VENV_CACHE_DIR = "/srv/zulip-venv-cache"


-def get_caches_in_use(threshold_days: int) -> Set[str]:
+def get_caches_in_use(threshold_days: int) -> set[str]:
 setups_to_check = {ZULIP_PATH}
 caches_in_use = set()

@@ -4,11 +4,11 @@ import hashlib
 import os
 import subprocess
 import sys
-from typing import Iterable, List
+from typing import Iterable


-def expand_reqs_helper(fpath: str) -> List[str]:
-result: List[str] = []
+def expand_reqs_helper(fpath: str) -> list[str]:
+result: list[str] = []

 with open(fpath) as f:
 for line in f:
@@ -20,7 +20,7 @@ def expand_reqs_helper(fpath: str) -> List[str]:
 return result


-def expand_reqs(fpath: str) -> List[str]:
+def expand_reqs(fpath: str) -> list[str]:
 """
 Returns a sorted list of unique dependencies specified by the requirements file `fpath`.
 Removes comments from the output and recursively visits files specified inside `fpath`.

@@ -2,7 +2,7 @@ import logging
 import os
 import shutil
 import subprocess
-from typing import List, Optional, Set, Tuple
+from typing import Optional

 from scripts.lib.hash_reqs import expand_reqs, python_version
 from scripts.lib.zulip_tools import ENDC, WARNING, os_families, run, run_as_root
@@ -62,7 +62,7 @@ FEDORA_VENV_DEPENDENCIES = [
 ]


-def get_venv_dependencies(vendor: str, os_version: str) -> List[str]:
+def get_venv_dependencies(vendor: str, os_version: str) -> list[str]:
 if "debian" in os_families():
 return VENV_DEPENDENCIES
 elif "rhel" in os_families():
@@ -93,7 +93,7 @@ def get_index_filename(venv_path: str) -> str:
 return os.path.join(venv_path, "package_index")


-def get_package_names(requirements_file: str) -> List[str]:
+def get_package_names(requirements_file: str) -> list[str]:
 packages = expand_reqs(requirements_file)
 cleaned = []
 operators = ["~=", "==", "!=", "<", ">"]
@@ -132,7 +132,7 @@ def create_requirements_index_file(venv_path: str, requirements_file: str) -> st
 return index_filename


-def get_venv_packages(venv_path: str) -> Set[str]:
+def get_venv_packages(venv_path: str) -> set[str]:
 """
 Returns the packages installed in the virtual environment using the
 package index file.
@@ -141,7 +141,7 @@ def get_venv_packages(venv_path: str) -> Set[str]:
 return {p.strip() for p in reader.read().split("\n") if p.strip()}


-def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
+def try_to_copy_venv(venv_path: str, new_packages: set[str]) -> bool:
 """
 Tries to copy packages from an old virtual environment in the cache
 to the new virtual environment. The algorithm works as follows:
@@ -159,8 +159,8 @@ def try_to_copy_venv(venv_path: str, new_packages: Set[str]) -> bool:
 desired_python_version = python_version()
 venv_name = os.path.basename(venv_path)

-overlaps: List[Tuple[int, str, Set[str]]] = []
-old_packages: Set[str] = set()
+overlaps: list[tuple[int, str, set[str]]] = []
+old_packages: set[str] = set()
 for sha1sum in os.listdir(VENV_CACHE_PATH):
 curr_venv_path = os.path.join(VENV_CACHE_PATH, sha1sum, venv_name)
 if curr_venv_path == venv_path or not os.path.exists(get_index_filename(curr_venv_path)):
@@ -230,8 +230,8 @@ def get_logfile_name(venv_path: str) -> str:
 def create_log_entry(
 target_log: str,
 parent: str,
-copied_packages: Set[str],
-new_packages: Set[str],
+copied_packages: set[str],
+new_packages: set[str],
 ) -> None:
 venv_path = os.path.dirname(target_log)
 with open(target_log, "a") as writer:

@@ -5,7 +5,7 @@ import json
 import os
 import subprocess
 import sys
-from typing import Dict, List, Tuple, Union
+from typing import Union

 BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(BASE_DIR)
@@ -46,8 +46,8 @@ def write_updated_configs() -> None:

 nginx_sharding_conf_f.write("map $host $tornado_server {\n")
 nginx_sharding_conf_f.write(" default http://tornado9800;\n")
-shard_map: Dict[str, Union[int, List[int]]] = {}
-shard_regexes: List[Tuple[str, Union[int, List[int]]]] = []
+shard_map: dict[str, Union[int, list[int]]] = {}
+shard_regexes: list[tuple[str, Union[int, list[int]]]] = []
 external_host = subprocess.check_output(
 [os.path.join(BASE_DIR, "scripts/get-django-setting"), "EXTERNAL_HOST"],
 text=True,

@@ -1,7 +1,7 @@
 import socket
 import time
 from http.client import HTTPConnection
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Optional, Union
 from xmlrpc import client

 from typing_extensions import override
@@ -33,7 +33,7 @@ class UnixStreamTransport(client.Transport):

 @override
 def make_connection(
-self, host: Union[Tuple[str, Dict[str, str]], str]
+self, host: Union[tuple[str, dict[str, str]], str]
 ) -> UnixStreamHTTPConnection:
 return UnixStreamHTTPConnection(self.socket_path)

@@ -45,8 +45,8 @@ def rpc() -> client.ServerProxy:


 def list_supervisor_processes(
-filter_names: Optional[List[str]] = None, *, only_running: Optional[bool] = None
-) -> List[str]:
+filter_names: Optional[list[str]] = None, *, only_running: Optional[bool] = None
+) -> list[str]:
 results = []
 processes = rpc().supervisor.getAllProcessInfo()
 assert isinstance(processes, list)

@@ -16,7 +16,7 @@ import sys
 import time
 import uuid
 from datetime import datetime, timedelta
-from typing import IO, Any, Dict, List, Literal, Optional, Sequence, Set, Union, overload
+from typing import IO, Any, Literal, Optional, Sequence, Union, overload
 from urllib.parse import SplitResult

 import zoneinfo
@@ -300,7 +300,7 @@ def get_environment() -> str:
 return "dev"


-def get_recent_deployments(threshold_days: int) -> Set[str]:
+def get_recent_deployments(threshold_days: int) -> set[str]:
 # Returns a list of deployments not older than threshold days
 # including `/root/zulip` directory if it exists.
 recent = set()
@@ -337,8 +337,8 @@ def get_threshold_timestamp(threshold_days: int) -> int:


 def get_caches_to_be_purged(
-caches_dir: str, caches_in_use: Set[str], threshold_days: int
-) -> Set[str]:
+caches_dir: str, caches_in_use: set[str], threshold_days: int
+) -> set[str]:
 # Given a directory containing caches, a list of caches in use
 # and threshold days, this function return a list of caches
 # which can be purged. Remove the cache only if it is:
@@ -360,7 +360,7 @@ def get_caches_to_be_purged(

 def purge_unused_caches(
 caches_dir: str,
-caches_in_use: Set[str],
+caches_in_use: set[str],
 cache_type: str,
 args: argparse.Namespace,
 ) -> None:
@@ -405,8 +405,8 @@ def generate_sha1sum_emoji(zulip_path: str) -> str:


 def maybe_perform_purging(
-dirs_to_purge: Set[str],
-dirs_to_keep: Set[str],
+dirs_to_purge: set[str],
+dirs_to_keep: set[str],
 dir_type: str,
 dry_run: bool,
 verbose: bool,
@@ -429,7 +429,7 @@ def maybe_perform_purging(


 @functools.lru_cache(None)
-def parse_os_release() -> Dict[str, str]:
+def parse_os_release() -> dict[str, str]:
 """
 Example of the useful subset of the data:
 {
@@ -444,7 +444,7 @@ def parse_os_release() -> Dict[str, str]:
 developers, but we avoid using it, as it is not available on
 RHEL-based platforms.
 """
-distro_info: Dict[str, str] = {}
+distro_info: dict[str, str] = {}
 with open("/etc/os-release") as fp:
 for line in fp:
 line = line.strip()
@@ -458,7 +458,7 @@ def parse_os_release() -> Dict[str, str]:


 @functools.lru_cache(None)
-def os_families() -> Set[str]:
+def os_families() -> set[str]:
 """
 Known families:
 debian (includes: debian, ubuntu)
@@ -548,7 +548,7 @@ def is_root() -> bool:
 return False


-def run_as_root(args: List[str], **kwargs: Any) -> None:
+def run_as_root(args: list[str], **kwargs: Any) -> None:
 sudo_args = kwargs.pop("sudo_args", [])
 if not is_root():
 args = ["sudo", *sudo_args, "--", *args]
@@ -619,7 +619,7 @@ def get_config_file() -> configparser.RawConfigParser:
 return config_file


-def get_deploy_options(config_file: configparser.RawConfigParser) -> List[str]:
+def get_deploy_options(config_file: configparser.RawConfigParser) -> list[str]:
 return shlex.split(get_config(config_file, "deployment", "deploy_options", ""))


@@ -632,7 +632,7 @@ def run_psql_as_postgres(
 subprocess.check_call(["su", "postgres", "-c", subcmd])


-def get_tornado_ports(config_file: configparser.RawConfigParser) -> List[int]:
+def get_tornado_ports(config_file: configparser.RawConfigParser) -> list[int]:
 ports = []
 if config_file.has_section("tornado_sharding"):
 ports = sorted(
@@ -705,7 +705,7 @@ def start_arg_parser(action: str, add_help: bool = False) -> argparse.ArgumentPa
 return parser


-def listening_publicly(port: int) -> List[str]:
+def listening_publicly(port: int) -> list[str]:
 filter = f"sport = :{port} and not src 127.0.0.1:{port} and not src [::1]:{port}"
 # Parse lines that look like this:
 # tcp LISTEN 0 128 0.0.0.0:25672 0.0.0.0:*

@@ -10,7 +10,7 @@ import signal
 import sys
 from datetime import date, datetime, timedelta, timezone
 from enum import Enum, auto
-from typing import List, Match, Optional, Set, TextIO, Tuple
+from typing import Match, Optional, TextIO

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.append(ZULIP_PATH)
@@ -242,7 +242,7 @@ def main() -> None:
 sys.exit(signal.SIGINT + 128)


-def parse_logfile_names(args: argparse.Namespace) -> List[str]:
+def parse_logfile_names(args: argparse.Namespace) -> list[str]:
 if args.nginx:
 base_path = "/var/log/nginx/access.log"
 else:
@@ -311,7 +311,7 @@ def convert_to_nginx_date(date: str) -> str:

 def parse_filters(
 args: argparse.Namespace,
-) -> Tuple[Set[FilterType], List[FilterFunc], List[str]]:
+) -> tuple[set[FilterType], list[FilterFunc], list[str]]:
 # The heuristics below are not intended to be precise -- they
 # certainly count things as "IPv4" or "IPv6" addresses that are
 # invalid. However, we expect the input here to already be
@@ -397,7 +397,7 @@ def parse_filters(


 def passes_filters(
-string_filters: List[FilterFunc],
+string_filters: list[FilterFunc],
 match: Match[str],
 args: argparse.Namespace,
 ) -> bool:
@@ -434,7 +434,7 @@ last_match_end: Optional[datetime] = None
 def print_line(
 match: Match[str],
 args: argparse.Namespace,
-filter_types: Set[FilterType],
+filter_types: set[FilterType],
 use_color: bool,
 ) -> None:
 global last_match_end

@@ -6,7 +6,6 @@ import subprocess
 import sys
 import time
 from collections import defaultdict
-from typing import Dict

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(ZULIP_PATH)
@@ -30,7 +29,7 @@ TORNADO_PROCESSES = len(get_tornado_ports(config_file))

 output = subprocess.check_output(["/usr/sbin/rabbitmqctl", "list_consumers"], text=True)

-consumers: Dict[str, int] = defaultdict(int)
+consumers: dict[str, int] = defaultdict(int)

 queues = {
 *normal_queues,

@@ -3,7 +3,6 @@ import argparse
 import os
 import subprocess
 import sys
-from typing import Set

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.append(ZULIP_PATH)
@@ -56,7 +55,7 @@ def parse_args() -> argparse.Namespace:
 return args


-def get_deployments_to_be_purged(recent_deployments: Set[str]) -> Set[str]:
+def get_deployments_to_be_purged(recent_deployments: set[str]) -> set[str]:
 all_deployments = {
 os.path.join(DEPLOYMENTS_DIR, deployment) for deployment in os.listdir(DEPLOYMENTS_DIR)
 }

@@ -3,7 +3,6 @@
 import os
 import sys
 from contextlib import suppress
-from typing import Dict, List

 BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 sys.path.append(BASE_DIR)
@@ -62,7 +61,7 @@ def generate_django_secretkey() -> str:
 return get_random_string(50, chars)


-def get_old_conf(output_filename: str) -> Dict[str, str]:
+def get_old_conf(output_filename: str) -> dict[str, str]:
 if not os.path.exists(output_filename) or os.path.getsize(output_filename) == 0:
 return {}

@@ -79,7 +78,7 @@ def generate_secrets(development: bool = False) -> None:
 OUTPUT_SETTINGS_FILENAME = "/etc/zulip/zulip-secrets.conf"
 current_conf = get_old_conf(OUTPUT_SETTINGS_FILENAME)

-lines: List[str] = []
+lines: list[str] = []
 if len(current_conf) == 0:
 lines = ["[secrets]\n"]

@@ -6,7 +6,6 @@ import re
 import subprocess
 import sys
 import tempfile
-from typing import List

 import yaml

@@ -64,7 +63,7 @@ puppet_env["FACTER_zulip_conf_path"] = args.config
 puppet_env["FACTER_zulip_scripts_path"] = scripts_path


-def noop_would_change(puppet_cmd: List[str]) -> bool:
+def noop_would_change(puppet_cmd: list[str]) -> bool:
 # --noop does not work with --detailed-exitcodes; see
 # https://tickets.puppetlabs.com/browse/PUP-686
 try:

@@ -4,7 +4,6 @@ import json
 import os
 import subprocess
 import sys
-from typing import List

 sys.path.append(os.path.join(os.path.dirname(__file__), ".."))

@@ -16,7 +15,7 @@ sanity_check.check_venv(__file__)
 from scripts.lib.zulip_tools import ENDC, FAIL, WARNING


-def find_handlebars(translatable_strings: List[str]) -> List[str]:
+def find_handlebars(translatable_strings: list[str]) -> list[str]:
 return [string for string in translatable_strings if "{{" in string]

@@ -4,7 +4,7 @@ import configparser
 import os
 import re
 import sys
-from typing import Any, Dict, List, Optional
+from typing import Any, Optional

 import requests

@@ -27,7 +27,7 @@ def get_config() -> configparser.ConfigParser:
 return config


-def area_labeled(issue: Dict[str, Any]) -> bool:
+def area_labeled(issue: dict[str, Any]) -> bool:
 for label in issue["labels"]:
 label_name = str(label["name"])
 if "area:" in label_name:
@@ -35,7 +35,7 @@ def area_labeled(issue: Dict[str, Any]) -> bool:
 return False


-def is_issue(item: Dict[str, Any]) -> bool:
+def is_issue(item: dict[str, Any]) -> bool:
 return "issues" in item["html_url"]


@@ -64,7 +64,7 @@ def check_issue_labels() -> None:
 sys.exit(1)

 next_page_url: Optional[str] = "https://api.github.com/repos/zulip/zulip/issues"
-unlabeled_issue_urls: List[str] = []
+unlabeled_issue_urls: list[str] = []
 while next_page_url:
 try:
 if args.force:

@@ -14,7 +14,7 @@ import difflib
 import os
 import subprocess
 import sys
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Callable, Optional

 import orjson

@@ -87,7 +87,7 @@ DEPRECATED_EVENTS = [
 ]


-def get_event_checker(event: Dict[str, Any]) -> Optional[Callable[[str, Dict[str, Any]], None]]:
+def get_event_checker(event: dict[str, Any]) -> Optional[Callable[[str, dict[str, Any]], None]]:
 name = event["type"]
 if "op" in event:
 name += "_" + event["op"]
@@ -99,7 +99,7 @@ def get_event_checker(event: Dict[str, Any]) -> Optional[Callable[[str, Dict[str
 return None


-def check_event(name: str, event: Dict[str, Any]) -> None:
+def check_event(name: str, event: dict[str, Any]) -> None:
 event["id"] = 1
 checker = get_event_checker(event)
 if checker is not None:
@@ -112,7 +112,7 @@ def check_event(name: str, event: Dict[str, Any]) -> None:
 print(f"WARNING - NEED SCHEMA: {name}")


-def read_fixtures() -> Dict[str, Any]:
+def read_fixtures() -> dict[str, Any]:
 cmd = [
 "node",
 os.path.join(TOOLS_DIR, "node_lib/dump_fixtures.js"),
@@ -121,7 +121,7 @@ def read_fixtures() -> Dict[str, Any]:
 return orjson.loads(schema)


-def verify_fixtures_are_sorted(names: List[str]) -> None:
+def verify_fixtures_are_sorted(names: list[str]) -> None:
 for i in range(1, len(names)):
 if names[i] < names[i - 1]:
 raise Exception(
@@ -135,7 +135,7 @@ def verify_fixtures_are_sorted(names: List[str]) -> None:
 )


-def from_openapi(node: Dict[str, Any]) -> Any:
+def from_openapi(node: dict[str, Any]) -> Any:
 """Converts the OpenAPI data into event_schema.py style type
 definitions for convenient comparison with the types used for backend
 tests declared there."""

@@ -11,7 +11,7 @@ from tools.lib import sanity_check

 sanity_check.check_venv(__file__)

-from typing import Dict, Iterable, List
+from typing import Iterable

 from zulint import lister

@@ -31,7 +31,7 @@ EXCLUDED_FILES = [
 ]


-def check_our_files(modified_only: bool, all_dups: bool, fix: bool, targets: List[str]) -> None:
+def check_our_files(modified_only: bool, all_dups: bool, fix: bool, targets: list[str]) -> None:
 by_lang = lister.list_files(
 targets=targets,
 modified_only=modified_only,
@@ -53,7 +53,7 @@ def check_html_templates(templates: Iterable[str], all_dups: bool, fix: bool) ->
 if "templates/corporate/team.html" in templates:
 templates.remove("templates/corporate/team.html")

-def check_for_duplicate_ids(templates: List[str]) -> Dict[str, List[str]]:
+def check_for_duplicate_ids(templates: list[str]) -> dict[str, list[str]]:
 template_id_dict = build_id_dict(templates)
 # TODO: Clean up these cases of duplicate ids in the code
 IGNORE_IDS = [

@@ -4,7 +4,7 @@ import platform
 import shlex
 import subprocess
 import sys
-from typing import Callable, List
+from typing import Callable

 TOOLS_DIR = os.path.dirname(__file__)
 ROOT_DIR = os.path.dirname(TOOLS_DIR)
@@ -24,7 +24,7 @@ def run(check_func: Callable[[], bool]) -> None:
 sys.exit(1)


-def run_command(args: List[str]) -> None:
+def run_command(args: list[str]) -> None:
 print(shlex.join(args))
 subprocess.check_call(args)

@@ -1,12 +1,12 @@
 import optparse
-from typing import List, Union
+from typing import Union

 from scrapy.commands import crawl
 from scrapy.crawler import Crawler


 class Command(crawl.Command):
-def run(self, args: List[str], opts: optparse.Values) -> None:
+def run(self, args: list[str], opts: optparse.Values) -> None:
 crawlers = []
 real_create_crawler = self.crawler_process.create_crawler

@@ -1,11 +1,10 @@
 import os
 import pathlib
-from typing import List

 from .common.spiders import BaseDocumentationSpider


-def get_start_url() -> List[str]:
+def get_start_url() -> list[str]:
 # Get index.html file as start URL and convert it to file URI
 dir_path = os.path.dirname(os.path.realpath(__file__))
 start_file = os.path.join(

@@ -1,6 +1,6 @@
 import os
 from posixpath import basename
-from typing import Any, List, Set
+from typing import Any
 from urllib.parse import urlsplit

 from typing_extensions import override
@@ -20,7 +20,7 @@ class UnusedImagesLinterSpider(BaseDocumentationSpider):

 def __init__(self, *args: Any, **kwargs: Any) -> None:
 super().__init__(*args, **kwargs)
-self.static_images: Set[str] = set()
+self.static_images: set[str] = set()
 self.images_static_dir: str = get_images_dir(self.images_path)

 @override
@@ -45,7 +45,7 @@ class UnusedImagesLinterSpider(BaseDocumentationSpider):
 class HelpDocumentationSpider(UnusedImagesLinterSpider):
 name = "help_documentation_crawler"
 start_urls = ["http://localhost:9981/help/"]
-deny_domains: List[str] = []
+deny_domains: list[str] = []
 deny = ["/policies/privacy"]
 images_path = "static/images/help"

@@ -53,7 +53,7 @@ class HelpDocumentationSpider(UnusedImagesLinterSpider):
 class APIDocumentationSpider(UnusedImagesLinterSpider):
 name = "api_documentation_crawler"
 start_urls = ["http://localhost:9981/api"]
-deny_domains: List[str] = []
+deny_domains: list[str] = []
 images_path = "static/images/api"


@@ -84,4 +84,4 @@ class PorticoDocumentationSpider(BaseDocumentationSpider):
 "http://localhost:9981/for/research/",
 "http://localhost:9981/security/",
 ]
-deny_domains: List[str] = []
+deny_domains: list[str] = []

@@ -1,7 +1,7 @@
 import json
 import os
 import re
-from typing import Callable, Iterator, List, Optional, Union
+from typing import Callable, Iterator, Optional, Union
 from urllib.parse import urlsplit

 import scrapy
@@ -60,10 +60,10 @@ ZULIP_SERVER_GITHUB_DIRECTORY_PATH_PREFIX = "/zulip/zulip/tree/main"
 class BaseDocumentationSpider(scrapy.Spider):
 name: Optional[str] = None
 # Exclude domain address.
-deny_domains: List[str] = []
-start_urls: List[str] = []
-deny: List[str] = []
-file_extensions: List[str] = ["." + ext for ext in IGNORED_EXTENSIONS]
+deny_domains: list[str] = []
+start_urls: list[str] = []
+deny: list[str] = []
+file_extensions: list[str] = ["." + ext for ext in IGNORED_EXTENSIONS]
 tags = ("a", "area", "img")
 attrs = ("href", "src")

@@ -14,7 +14,6 @@ import re
 import socket
 import sys
 from argparse import ArgumentParser
-from typing import List

 import requests

@@ -30,7 +29,7 @@ append_key = """\
 """


-def get_mentor_keys(username: str) -> List[str]:
+def get_mentor_keys(username: str) -> list[str]:
 url = f"https://api.github.com/users/{username}/keys"

 r = requests.get(url)

@@ -20,7 +20,7 @@ import sys
 import time
 import urllib.error
 import urllib.request
-from typing import Any, Dict, List, Tuple
+from typing import Any

 import digitalocean
 import requests
@@ -56,7 +56,7 @@ def assert_github_user_exists(github_username: str) -> bool:
 sys.exit(1)


-def get_ssh_public_keys_from_github(github_username: str) -> List[Dict[str, Any]]:
+def get_ssh_public_keys_from_github(github_username: str) -> list[dict[str, Any]]:
 print("Checking to see that GitHub user has available public keys...")
 apiurl_keys = f"https://api.github.com/users/{github_username}/keys"
 try:
@@ -108,12 +108,12 @@ def assert_droplet_does_not_exist(my_token: str, droplet_name: str, recreate: bo
 print("...No droplet found...proceeding.")


-def get_ssh_keys_string_from_github_ssh_key_dicts(userkey_dicts: List[Dict[str, Any]]) -> str:
+def get_ssh_keys_string_from_github_ssh_key_dicts(userkey_dicts: list[dict[str, Any]]) -> str:
 return "\n".join(userkey_dict["key"] for userkey_dict in userkey_dicts)


 def generate_dev_droplet_user_data(
-username: str, subdomain: str, userkey_dicts: List[Dict[str, Any]]
+username: str, subdomain: str, userkey_dicts: list[dict[str, Any]]
 ) -> str:
 ssh_keys_string = get_ssh_keys_string_from_github_ssh_key_dicts(userkey_dicts)
 setup_root_ssh_keys = f"printf '{ssh_keys_string}' > /root/.ssh/authorized_keys"
@@ -159,7 +159,7 @@ su -c 'git config --global pull.rebase true' zulipdev
 return cloudconf


-def generate_prod_droplet_user_data(username: str, userkey_dicts: List[Dict[str, Any]]) -> str:
+def generate_prod_droplet_user_data(username: str, userkey_dicts: list[dict[str, Any]]) -> str:
 ssh_keys_string = get_ssh_keys_string_from_github_ssh_key_dicts(userkey_dicts)
 setup_root_ssh_keys = f"printf '{ssh_keys_string}' > /root/.ssh/authorized_keys"

@@ -179,10 +179,10 @@ def create_droplet(
 my_token: str,
 template_id: str,
 name: str,
-tags: List[str],
+tags: list[str],
 user_data: str,
 region: str = "nyc3",
-) -> Tuple[str, str]:
+) -> tuple[str, str]:
 droplet = digitalocean.Droplet(
 token=my_token,
 name=name,
@@ -215,7 +215,7 @@ def create_droplet(
 return (droplet.ip_address, droplet.ip_v6_address)


-def delete_existing_records(records: List[digitalocean.Record], record_name: str) -> None:
+def delete_existing_records(records: list[digitalocean.Record], record_name: str) -> None:
 count = 0
 for record in records:
 if (

@@ -11,7 +11,7 @@ import os
 import sys
 import unicodedata
 from datetime import datetime, timezone
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union

 sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
 from scripts.lib.setup_path import setup_path
@@ -42,7 +42,7 @@ args = parser.parse_args()

 class ContributorsJSON(TypedDict):
 date: str
-contributors: List[Dict[str, Union[int, str]]]
+contributors: list[dict[str, Union[int, str]]]


 class Contributor(TypedDict):
@@ -56,15 +56,15 @@ class Contributor(TypedDict):
 logger = logging.getLogger("zulip.fetch_contributors_json")


-def fetch_contributors(repo_name: str, max_retries: int) -> List[Contributor]:
-contributors: List[Contributor] = []
+def fetch_contributors(repo_name: str, max_retries: int) -> list[Contributor]:
+contributors: list[Contributor] = []
 page_index = 1

 api_link = f"https://api.github.com/repos/zulip/{repo_name}/contributors"
 api_data = {"anon": "1"}
 certificates = os.environ.get("CUSTOM_CA_CERTIFICATES")

-headers: Dict[str, str] = {}
+headers: dict[str, str] = {}
 personal_access_token = get_secret("github_personal_access_token")
 if personal_access_token is not None:
 headers = {"Authorization": f"token {personal_access_token}"}
@@ -134,7 +134,7 @@ def update_contributor_data_file() -> None:
 ]

 data: ContributorsJSON = dict(date=str(datetime.now(tz=timezone.utc).date()), contributors=[])
-contributor_username_to_data: Dict[str, Dict[str, Union[str, int]]] = {}
+contributor_username_to_data: dict[str, dict[str, Union[str, int]]] = {}

 for repo_name in repo_names:
 contributors = fetch_contributors(repo_name, args.max_retries)

@@ -3,14 +3,13 @@ import json
 import os
 import re
 from subprocess import check_output
-from typing import Dict, List


 def get_json_filename(locale: str) -> str:
 return f"locale/{locale}/mobile.json"


-def get_locales() -> List[str]:
+def get_locales() -> list[str]:
 output = check_output(["git", "ls-files", "locale"], text=True)
 tracked_files = output.split()
 regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
@@ -23,7 +22,7 @@ def get_locales() -> List[str]:
 return locales


-def get_translation_stats(resource_path: str) -> Dict[str, int]:
+def get_translation_stats(resource_path: str) -> dict[str, int]:
 with open(resource_path) as raw_resource_file:
 raw_info = json.load(raw_resource_file)

@@ -32,7 +31,7 @@ def get_translation_stats(resource_path: str) -> Dict[str, int]:
 return {"total": total, "not_translated": not_translated}


-translation_stats: Dict[str, Dict[str, int]] = {}
+translation_stats: dict[str, dict[str, int]] = {}
 locale_paths = [] # List[str]
 for locale in get_locales():
 path = get_json_filename(locale)

@@ -4,7 +4,7 @@ import argparse
 import html
 import json
 import sys
-from typing import Dict, NamedTuple
+from typing import NamedTuple


 class CLIArgs(NamedTuple):
@@ -36,7 +36,7 @@ if __name__ == "__main__":
 args = parse_args()
 print(f"unescaping file {args.filename}", file=sys.stderr)

-json_data: Dict[str, str] = {}
+json_data: dict[str, str] = {}

 with open(args.filename) as source:
 json_data = json.load(source)

@@ -3,7 +3,6 @@ import json
 import os
 import re
 from subprocess import check_output
-from typing import Dict, List

 LEGACY_STRINGS_MAP = {
 "<p>You are searching for messages that belong to more than one channel, which is not possible.</p>": "<p>You are searching for messages that belong to more than one stream, which is not possible.</p>",
@@ -187,7 +186,7 @@ def get_legacy_filename(locale: str) -> str:
 return f"locale/{locale}/legacy_stream_translations.json"


-def get_locales() -> List[str]:
+def get_locales() -> list[str]:
 output = check_output(["git", "ls-files", "locale"], text=True)
 tracked_files = output.split()
 regex = re.compile(r"locale/(\w+)/LC_MESSAGES/django.po")
@@ -200,7 +199,7 @@ def get_locales() -> List[str]:
 return locales


-def get_translations(path: str) -> Dict[str, str]:
+def get_translations(path: str) -> dict[str, str]:
 with open(path) as raw_resource_file:
 translations = json.load(raw_resource_file)

@@ -208,10 +207,10 @@ def get_translations(path: str) -> Dict[str, str]:


 def update_for_legacy_stream_translations(
-current: Dict[str, str], legacy: Dict[str, str], path: str
+current: dict[str, str], legacy: dict[str, str], path: str
 ) -> None:
 number_of_updates = 0
-updated_translations: Dict[str, str] = {}
+updated_translations: dict[str, str] = {}
 for line in current:
 # If the string has a legacy string mapped and see if it's
 # not currently translated (e.g. an empty string), then use

@@ -1,5 +1,5 @@
 import re
-from typing import List, Match, Tuple
+from typing import Match

 from bs4 import BeautifulSoup

@@ -245,7 +245,7 @@ def is_capitalized(safe_text: str) -> bool:
 return not any(DISALLOWED_REGEX.search(sentence.strip()) for sentence in sentences)


-def check_banned_words(text: str) -> List[str]:
+def check_banned_words(text: str) -> list[str]:
 lower_cased_text = text.lower()
 errors = []
 for word, reason in BANNED_WORDS.items():
@@ -266,7 +266,7 @@ def check_banned_words(text: str) -> List[str]:
 return errors


-def check_capitalization(strings: List[str]) -> Tuple[List[str], List[str], List[str]]:
+def check_capitalization(strings: list[str]) -> tuple[list[str], list[str], list[str]]:
 errors = []
 ignored = []
 banned_word_errors = []

@@ -1,5 +1,3 @@
-from typing import List
-
 from gitlint.git import GitCommit
 from gitlint.rules import CommitMessageTitle, LineRule, RuleViolation

@@ -87,7 +85,7 @@ class ImperativeMood(LineRule):
 '("{word}" -> "{imperative}"): "{title}"'
 )

-def validate(self, line: str, commit: GitCommit) -> List[RuleViolation]:
+def validate(self, line: str, commit: GitCommit) -> list[RuleViolation]:
 violations = []

 # Ignore the section tag (ie `<section tag>: <message body>.`)

@@ -1,12 +1,11 @@
 import re
 from collections import defaultdict
-from typing import Dict, List

 from .template_parser import FormattedError, Token, tokenize


 class TagInfo:
-def __init__(self, tag: str, classes: List[str], ids: List[str], token: Token) -> None:
+def __init__(self, tag: str, classes: list[str], ids: list[str], token: Token) -> None:
 self.tag = tag
 self.classes = classes
 self.ids = ids
@@ -29,8 +28,8 @@ class TagInfo:
 def get_tag_info(token: Token) -> TagInfo:
 s = token.s
 tag = token.tag
-classes: List[str] = []
-ids: List[str] = []
+classes: list[str] = []
+ids: list[str] = []

 searches = [
 (classes, ' class="(.*?)"'),
@@ -48,7 +47,7 @@ def get_tag_info(token: Token) -> TagInfo:
 return TagInfo(tag=tag, classes=classes, ids=ids, token=token)


-def split_for_id_and_class(element: str) -> List[str]:
+def split_for_id_and_class(element: str) -> list[str]:
 # Here we split a given string which is expected to contain id or class
 # attributes from HTML tags. This also takes care of template variables
 # in string during splitting process. For eg. 'red black {{ a|b|c }}'
@@ -74,8 +73,8 @@ def split_for_id_and_class(element: str) -> List[str]:
 return lst


-def build_id_dict(templates: List[str]) -> Dict[str, List[str]]:
-template_id_dict: Dict[str, List[str]] = defaultdict(list)
+def build_id_dict(templates: list[str]) -> dict[str, list[str]]:
+template_id_dict: dict[str, list[str]] = defaultdict(list)

 for fn in templates:
 with open(fn) as f:

@@ -1,12 +1,12 @@
 import subprocess
-from typing import List, Optional
+from typing import Optional

 from zulint.printer import BOLDRED, CYAN, ENDC, GREEN

 from .template_parser import Token


-def shift_indents_to_the_next_tokens(tokens: List[Token]) -> None:
+def shift_indents_to_the_next_tokens(tokens: list[Token]) -> None:
 """
 During the parsing/validation phase, it's useful to have separate
 tokens for "indent" chunks, but during pretty printing, we like
@@ -39,7 +39,7 @@ def token_allows_children_to_skip_indents(token: Token) -> bool:
 return token.kind in ("django_start", "handlebars_start") or token.tag == "a"


-def adjust_block_indentation(tokens: List[Token], fn: str) -> None:
+def adjust_block_indentation(tokens: list[Token], fn: str) -> None:
 start_token: Optional[Token] = None

 for token in tokens:
@@ -106,7 +106,7 @@ def adjust_block_indentation(tokens: List[Token], fn: str) -> None:
 token.indent = start_token.child_indent


-def fix_indents_for_multi_line_tags(tokens: List[Token]) -> None:
+def fix_indents_for_multi_line_tags(tokens: list[Token]) -> None:
 def fix(frag: str) -> str:
 frag = frag.strip()
 return continue_indent + frag if frag else ""
@@ -128,13 +128,13 @@ def fix_indents_for_multi_line_tags(tokens: List[Token]) -> None:
 token.new_s = frags[0] + "\n" + "\n".join(fix(frag) for frag in frags[1:])


-def apply_token_indents(tokens: List[Token]) -> None:
+def apply_token_indents(tokens: list[Token]) -> None:
 for token in tokens:
 if token.indent:
 token.new_s = token.indent + token.new_s


-def pretty_print_html(tokens: List[Token], fn: str) -> str:
+def pretty_print_html(tokens: list[Token], fn: str) -> str:
 for token in tokens:
 token.new_s = token.s

@@ -150,7 +150,7 @@ def numbered_lines(s: str) -> str:
 return "".join(f"{i + 1: >5} {line}\n" for i, line in enumerate(s.split("\n")))


-def validate_indent_html(fn: str, tokens: List[Token], fix: bool) -> bool:
+def validate_indent_html(fn: str, tokens: list[Token], fix: bool) -> bool:
 with open(fn) as f:
 html = f.read()
 phtml = pretty_print_html(tokens, fn)

@@ -6,7 +6,7 @@ import os
 import platform
 import subprocess
 import sys
-from typing import List, NoReturn
+from typing import NoReturn

 os.environ["PYTHONUNBUFFERED"] = "y"

@@ -232,7 +232,7 @@ def install_system_deps() -> None:
 run_as_root(["./scripts/lib/build-pgroonga"])


-def install_apt_deps(deps_to_install: List[str]) -> None:
+def install_apt_deps(deps_to_install: list[str]) -> None:
 # setup-apt-repo does an `apt-get update` if the sources.list files changed.
 run_as_root(["./scripts/lib/setup-apt-repo"])

@@ -255,7 +255,7 @@ def install_apt_deps(deps_to_install: List[str]) -> None:
 )


-def install_yum_deps(deps_to_install: List[str]) -> None:
+def install_yum_deps(deps_to_install: list[str]) -> None:
 print(WARNING + "RedHat support is still experimental." + ENDC)
 run_as_root(["./scripts/lib/setup-yum-repo"])

@@ -265,7 +265,7 @@ def install_yum_deps(deps_to_install: List[str]) -> None:
 #
 # Error: Package: moreutils-0.49-2.el7.x86_64 (epel)
 # Requires: perl(IPC::Run)
-yum_extra_flags: List[str] = []
+yum_extra_flags: list[str] = []
 if vendor == "rhel":
 proc = subprocess.run(
 ["sudo", "subscription-manager", "status"],

@@ -11,7 +11,6 @@ import glob
 import os
 import shutil
 import sys
-from typing import List

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

@@ -57,7 +56,7 @@ def create_var_directories() -> None:
 os.makedirs(path, exist_ok=True)


-def build_pygments_data_paths() -> List[str]:
+def build_pygments_data_paths() -> list[str]:
 paths = [
 "tools/setup/build_pygments_data",
 "tools/setup/lang.json",
@@ -65,27 +64,27 @@ def build_pygments_data_paths() -> List[str]:
 return paths


-def build_timezones_data_paths() -> List[str]:
+def build_timezones_data_paths() -> list[str]:
 paths = [
 "tools/setup/build_timezone_values",
 ]
 return paths


-def build_landing_page_images_paths() -> List[str]:
+def build_landing_page_images_paths() -> list[str]:
 paths = ["tools/setup/generate_landing_page_images.py"]
 paths += glob.glob("static/images/landing-page/hello/original/*")
 return paths


-def compilemessages_paths() -> List[str]:
+def compilemessages_paths() -> list[str]:
 paths = ["zerver/management/commands/compilemessages.py"]
 paths += glob.glob("locale/*/LC_MESSAGES/*.po")
 paths += glob.glob("locale/*/translations.json")
 return paths


-def configure_rabbitmq_paths() -> List[str]:
+def configure_rabbitmq_paths() -> list[str]:
 paths = [
 "scripts/setup/configure-rabbitmq",
 ]
@@ -194,7 +193,7 @@ def need_to_run_compilemessages() -> bool:
 )


-def need_to_run_configure_rabbitmq(settings_list: List[str]) -> bool:
+def need_to_run_configure_rabbitmq(settings_list: list[str]) -> bool:
 obsolete = is_digest_obsolete(
 "last_configure_rabbitmq_hash",
 configure_rabbitmq_paths(),

@@ -1,4 +1,4 @@
-from typing import Callable, List, Optional
+from typing import Callable, Optional

 from typing_extensions import override

@@ -51,7 +51,7 @@ class Token:
         self.parent_token: Optional[Token] = None


-def tokenize(text: str, template_format: Optional[str] = None) -> List[Token]:
+def tokenize(text: str, template_format: Optional[str] = None) -> list[Token]:
     in_code_block = False

     def advance(n: int) -> None:
@@ -124,7 +124,7 @@ def tokenize(text: str, template_format: Optional[str] = None) -> List[Token]:
         return looking_at("\n") or looking_at(" ")

     state = TokenizerState()
-    tokens: List[Token] = []
+    tokens: list[Token] = []

     while state.i < len(text):
         try:
@@ -355,7 +355,7 @@ def validate(
     fn: Optional[str] = None,
     text: Optional[str] = None,
     template_format: Optional[str] = None,
-) -> List[Token]:
+) -> list[Token]:
     assert fn or text

     if fn is None:
@@ -500,7 +500,7 @@ def validate(
     return tokens


-def ensure_matching_indentation(fn: str, tokens: List[Token], lines: List[str]) -> None:
+def ensure_matching_indentation(fn: str, tokens: list[Token], lines: list[str]) -> None:
     def has_bad_indentation() -> bool:
         is_inline_tag = start_tag in HTML_INLINE_TAGS and start_token.kind == "html_start"

@@ -545,7 +545,7 @@ def ensure_matching_indentation(fn: str, tokens: List[Token], lines: List[str]) -> None:
     )


-def prevent_extra_newlines(fn: str, tokens: List[Token]) -> None:
+def prevent_extra_newlines(fn: str, tokens: list[Token]) -> None:
     count = 0

     for token in tokens:
@@ -560,7 +560,7 @@ def prevent_extra_newlines(fn: str, tokens: List[Token]) -> None:
     )


-def prevent_whitespace_violations(fn: str, tokens: List[Token]) -> None:
+def prevent_whitespace_violations(fn: str, tokens: list[Token]) -> None:
     if tokens[0].kind in ("indent", "whitespace"):
         raise TemplateParserError(f" Please remove the whitespace at the beginning of {fn}.")

@@ -3,7 +3,7 @@ import os
 import subprocess
 import sys
 from argparse import ArgumentParser
-from typing import Iterable, List, Optional, Tuple
+from typing import Iterable, Optional

 from scripts.lib.zulip_tools import get_dev_uuid_var_path
 from version import PROVISION_VERSION
@@ -25,7 +25,7 @@ properly.
 """


-def preamble(version: Tuple[int, ...]) -> str:
+def preamble(version: tuple[int, ...]) -> str:
     text = PREAMBLE.format(version, PROVISION_VERSION)
     text += "\n"
     return text
@@ -49,7 +49,7 @@ Do this: `./tools/provision`
 """


-def get_provisioning_status() -> Tuple[bool, Optional[str]]:
+def get_provisioning_status() -> tuple[bool, Optional[str]]:
     version_file = get_version_file()
     if not os.path.exists(version_file):
         # If the developer doesn't have a version_file written by
@@ -93,7 +93,7 @@ def add_provision_check_override_param(parser: ArgumentParser) -> None:
     )


-def find_js_test_files(test_dir: str, files: Iterable[str]) -> List[str]:
+def find_js_test_files(test_dir: str, files: Iterable[str]) -> list[str]:
     test_files = []
     for file in files:
         file = min(

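The `preamble` and `get_provisioning_status` hunks above show the two tuple forms the rewrite touches. A small hypothetical sketch of what each shape means:

    # Hypothetical sketch: tuple annotations after the rewrite.
    def version_string(version: tuple[int, ...]) -> str:
        # tuple[int, ...] is variadic: any number of ints, e.g. (8, 0, 1).
        return ".".join(str(part) for part in version)


    def provisioning_status() -> tuple[bool, str]:
        # tuple[bool, str] is a fixed two-element shape.
        return True, "ok"


    print(version_string((8, 0, 1)))  # -> "8.0.1"
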
@@ -1,5 +1,3 @@
-from typing import List
-
 from zulint.custom_rules import Rule, RuleList

 # Rule help:
@@ -39,7 +37,7 @@ FILES_WITH_LEGACY_SUBJECT = {
     "zerver/tests/test_message_fetch.py",
 }

-shebang_rules: List["Rule"] = [
+shebang_rules: list["Rule"] = [
     {
         "pattern": r"\A#!",
         "description": "zerver library code shouldn't have a shebang line.",
@@ -59,7 +57,7 @@ shebang_rules: List["Rule"] = [
     },
 ]

-base_whitespace_rules: List["Rule"] = [
+base_whitespace_rules: list["Rule"] = [
     {
         "pattern": r"[\t ]+$",
         "exclude": {"tools/ci/success-http-headers.template.txt"},
@@ -70,7 +68,7 @@ base_whitespace_rules: List["Rule"] = [
         "description": "Missing newline at end of file",
     },
 ]
-whitespace_rules: List["Rule"] = [
+whitespace_rules: list["Rule"] = [
     *base_whitespace_rules,
     {
         "pattern": "http://zulip.readthedocs.io",
@@ -85,7 +83,7 @@ whitespace_rules: List["Rule"] = [
         "description": "Web app should be two words",
     },
 ]
-comma_whitespace_rule: List["Rule"] = [
+comma_whitespace_rule: list["Rule"] = [
     {
         "pattern": ", {2,}[^#/ ]",
         "exclude": {"zerver/tests", "web/tests", "corporate/tests"},
@@ -94,7 +92,7 @@ comma_whitespace_rule: List["Rule"] = [
         "bad_lines": ["foo(1,  2,  3)", "foo(1,    2,    3)"],
     },
 ]
-markdown_whitespace_rules: List["Rule"] = [
+markdown_whitespace_rules: list["Rule"] = [
     *(rule for rule in whitespace_rules if rule["pattern"] != r"[\t ]+$"),
     # Two spaces trailing a line with other content is okay--it's a Markdown line break.
     # This rule finds one space trailing a non-space, three or more trailing spaces, and
@@ -508,7 +506,7 @@ css_rules = RuleList(
     ],
 )

-prose_style_rules: List["Rule"] = [
+prose_style_rules: list["Rule"] = [
     {
         "pattern": r'^[\t ]*[^\n{].*?[^\n\/\#\-"]([jJ]avascript)',  # exclude usage in hrefs/divs/custom-markdown
         "exclude": {"docs/documentation/api.md", "templates/corporate/policies/privacy.md"},
@@ -531,7 +529,7 @@ prose_style_rules: List["Rule"] = [
     },
     *comma_whitespace_rule,
 ]
-html_rules: List["Rule"] = [
+html_rules: list["Rule"] = [
     *whitespace_rules,
     *prose_style_rules,
     {

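The rule lists above are annotated as `list["Rule"]`: the builtin generics accept quoted forward references just as the `typing` aliases did, which matters when the element type is only meaningful to the type checker. A hypothetical sketch with a stand-in `Rule` TypedDict:

    # Hypothetical sketch: a quoted element type inside a builtin generic.
    from typing import TypedDict


    class Rule(TypedDict, total=False):
        pattern: str
        description: str


    # list["Rule"] works at runtime: the subscript is stored unevaluated
    # in the GenericAlias, and type checkers resolve the string lazily.
    shebang_rules: list["Rule"] = [
        {"pattern": r"\A#!", "description": "library code shouldn't have a shebang line."},
    ]
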
@@ -2,7 +2,6 @@ import os
 import subprocess
 import time
 from pathlib import Path
-from typing import List

 import digitalocean
 import zulip
@@ -63,7 +62,7 @@ def set_api_request_retry_limits(api_object: digitalocean.baseapi.BaseAPI) -> None:


 def create_droplet(
-    name: str, ssh_keys: List[str], image: str = "ubuntu-22-04-x64"
+    name: str, ssh_keys: list[str], image: str = "ubuntu-22-04-x64"
 ) -> digitalocean.Droplet:
     droplet = digitalocean.Droplet(
         token=manager.token,

@@ -4,10 +4,9 @@ import glob
 import os
 import re
 import sys
-from typing import List


-def validate_order(order: List[int], length: int) -> None:
+def validate_order(order: list[int], length: int) -> None:
     if len(order) != length:
         print("Please enter the sequence of all the conflicting files at once")
         sys.exit(1)
@@ -18,8 +17,8 @@ def validate_order(order: List[int], length: int) -> None:
         sys.exit(1)


-def renumber_migration(conflicts: List[str], order: List[int], last_correct_migration: str) -> None:
-    stack: List[str] = []
+def renumber_migration(conflicts: list[str], order: list[int], last_correct_migration: str) -> None:
+    stack: list[str] = []
     for i in order:
         if conflicts[i - 1][0:4] not in stack:
             stack.append(conflicts[i - 1][0:4])
@@ -38,7 +37,7 @@ def renumber_migration(conflicts: List[str], order: List[int], last_correct_migration: str) -> None:
         last_correct_migration = new_name.replace(".py", "")


-def resolve_conflicts(conflicts: List[str], files_list: List[str]) -> None:
+def resolve_conflicts(conflicts: list[str], files_list: list[str]) -> None:
     print("Conflicting migrations:")
     for i in range(len(conflicts)):
         print(str(i + 1) + ". " + conflicts[i])
@@ -56,8 +55,8 @@ def resolve_conflicts(conflicts: List[str], files_list: List[str]) -> None:
 if __name__ == "__main__":
     MIGRATIONS_TO_SKIP = {"0209", "0261", "0501"}
     while True:
-        conflicts: List[str] = []
-        stack: List[str] = []
+        conflicts: list[str] = []
+        stack: list[str] = []
         files_list = [os.path.basename(path) for path in glob.glob("zerver/migrations/????_*.py")]
         file_index = [file[0:4] for file in files_list]

@@ -2,7 +2,6 @@
 import shlex
 import subprocess
 import sys
-from typing import List


 def exit(message: str) -> None:
@@ -11,12 +10,12 @@ def exit(message: str) -> None:
     sys.exit(1)


-def run(command: List[str]) -> None:
+def run(command: list[str]) -> None:
     print(f"\n>>> {shlex.join(command)}")
     subprocess.check_call(command)


-def check_output(command: List[str]) -> str:
+def check_output(command: list[str]) -> str:
     return subprocess.check_output(command, text=True)


@@ -8,7 +8,6 @@ import pwd
 import signal
 import subprocess
 import sys
-from typing import List

 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
 sys.path.insert(0, os.path.dirname(TOOLS_DIR))
@@ -81,7 +80,7 @@ if options.interface is None:
 elif options.interface == "":
     options.interface = None

-runserver_args: List[str] = []
+runserver_args: list[str] = []
 base_port = 9991
 if options.test:
     base_port = 9981
@@ -139,7 +138,7 @@ with open(pid_file_path, "w+") as f:
     f.write(str(os.getpgrp()) + "\n")


-def server_processes() -> List[List[str]]:
+def server_processes() -> list[list[str]]:
     main_cmds = [
         [
             "./manage.py",
@@ -330,7 +329,7 @@ def https_log_filter(record: logging.LogRecord) -> bool:
 logging.getLogger("aiohttp.server").addFilter(https_log_filter)

 runner: web.AppRunner
-children: List["subprocess.Popen[bytes]"] = []
+children: list["subprocess.Popen[bytes]"] = []


 async def serve() -> None:

@@ -3,7 +3,6 @@ import argparse
 import os
 import subprocess
 import sys
-from typing import List

 from zulint import lister

@@ -72,7 +71,7 @@ if not python_files and not pyi_files:
     print("There are no files to run mypy on.")
     sys.exit(0)

-mypy_args: List[str] = []
+mypy_args: list[str] = []
 # --no-error-summary is a mypy flag that comes after all dmypy options
 if args.use_daemon:
     mypy_args += ["run", "--"]

@@ -6,7 +6,7 @@ import base64
 import os
 import subprocess
 import sys
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Optional
 from urllib.parse import parse_qsl, urlencode

 SCREENSHOTS_DIR = os.path.abspath(os.path.dirname(__file__))
@@ -94,7 +94,7 @@ def create_integration_stream(integration: Integration, bot: UserProfile) -> None:
     bulk_add_subscriptions(realm, [stream], [bot, bot.bot_owner], acting_user=bot)


-def get_fixture_info(fixture_path: str) -> Tuple[Any, bool, str]:
+def get_fixture_info(fixture_path: str) -> tuple[Any, bool, str]:
     json_fixture = fixture_path.endswith(".json")
     _, fixture_name = split_fixture_path(fixture_path)

@@ -116,7 +116,7 @@ def get_integration(integration_name: str) -> Integration:
     return integration


-def get_requests_headers(integration_name: str, fixture_name: str) -> Dict[str, Any]:
+def get_requests_headers(integration_name: str, fixture_name: str) -> dict[str, Any]:
     headers = get_fixture_http_headers(integration_name, fixture_name)

     def fix_name(header: str) -> str:
@@ -126,7 +126,7 @@ def get_requests_headers(integration_name: str, fixture_name: str) -> Dict[str, Any]:
     return {fix_name(k): v for k, v in headers.items()}


-def custom_headers(headers_json: str) -> Dict[str, str]:
+def custom_headers(headers_json: str) -> dict[str, str]:
     if not headers_json:
         return {}
     try:

@@ -6,7 +6,7 @@ import os
 import subprocess
 import sys
 from datetime import datetime, timezone
-from typing import Dict, List, Optional
+from typing import Optional

 from django.conf import settings
 from pydantic import BaseModel, ConfigDict
@@ -59,7 +59,7 @@ realm.save()

 DEFAULT_USER = get_user_by_delivery_email("iago@zulip.com", realm)
 NOTIFICATION_BOT = get_system_bot(settings.NOTIFICATION_BOT, realm.id)
-message_thread_ids: List[int] = []
+message_thread_ids: list[int] = []

 USER_AVATARS_MAP = {
     "Ariella Drake": "tools/screenshots/user_avatars/AriellaDrake.png",
@@ -82,8 +82,8 @@ class MessageThread(BaseModel):
     content: str
     starred: bool
     edited: bool
-    reactions: Dict[str, List[str]]
-    date: Dict[str, int]
+    reactions: dict[str, list[str]]
+    date: dict[str, int]


 def create_user(full_name: str, avatar_filename: Optional[str]) -> None:
@@ -104,7 +104,7 @@ def set_avatar(user: UserProfile, filename: str) -> None:


 def create_and_subscribe_stream(
-    stream_name: str, users: List[str], color: Optional[str] = None, invite_only: bool = False
+    stream_name: str, users: list[str], color: Optional[str] = None, invite_only: bool = False
 ) -> None:
     stream = ensure_stream(realm, stream_name, invite_only=invite_only, acting_user=DEFAULT_USER)
     bulk_add_subscriptions(
@@ -123,22 +123,22 @@ def create_and_subscribe_stream(


 def send_stream_messages(
-    stream_name: str, topic: str, staged_messages_data: List[MessageThread]
-) -> List[int]:
+    stream_name: str, topic: str, staged_messages_data: list[MessageThread]
+) -> list[int]:
     staged_messages = [dict(staged_message) for staged_message in staged_messages_data]
     stream = ensure_stream(realm, stream_name, acting_user=DEFAULT_USER)
     subscribers_query = get_active_subscriptions_for_stream_id(
         stream.id, include_deactivated_users=False
     ).values_list("user_profile", flat=True)

-    subscribers: Dict[str, UserProfile] = {}
+    subscribers: dict[str, UserProfile] = {}
     for subscriber_id in subscribers_query:
         subscriber = UserProfile.objects.get(realm=realm, id=subscriber_id)
         subscribers[subscriber.full_name] = subscriber

     subscribers["Notification Bot"] = NOTIFICATION_BOT

-    messages: List[Optional[SendMessageRequest]] = []
+    messages: list[Optional[SendMessageRequest]] = []

     for message in staged_messages:
         date_sent = message["date"]
@@ -186,7 +186,7 @@ def send_stream_messages(
     return message_ids


-def add_message_reactions(message_id: int, emoji: str, users: List[UserProfile]) -> None:
+def add_message_reactions(message_id: int, emoji: str, users: list[UserProfile]) -> None:
     preview_message = access_message(user_profile=DEFAULT_USER, message_id=message_id)
     emoji_data = get_emoji_data(realm.id, emoji)
     for user in users:
@@ -195,7 +195,7 @@ def add_message_reactions(message_id: int, emoji: str, users: List[UserProfile]) -> None:
         )


-def create_user_group(group_name: str, members: List[str]) -> None:
+def create_user_group(group_name: str, members: list[str]) -> None:
     member_profiles = [
         UserProfile.objects.get(realm=realm, full_name=member_name) for member_name in members
     ]

@@ -5,7 +5,7 @@
 import os
 import shutil
 import sys
-from typing import Any, Dict, Iterator, List, Optional, Sequence
+from typing import Any, Iterator, Optional, Sequence

 import orjson

@@ -127,13 +127,13 @@ def percent(f: float) -> str:
     return f"{f * 100:0.3f}%"


-def get_square_size(emoji_data: Sequence[Dict[str, Any]]) -> int:
+def get_square_size(emoji_data: Sequence[dict[str, Any]]) -> int:
     """
     Spritesheets are usually NxN squares, and we have to
     infer N from the sheet_x/sheet_y values of emojis.
     """

-    def get_offsets(emoji_data: Sequence[Dict[str, Any]]) -> Iterator[int]:
+    def get_offsets(emoji_data: Sequence[dict[str, Any]]) -> Iterator[int]:
         for emoji_dict in emoji_data:
             yield emoji_dict["sheet_x"]
             yield emoji_dict["sheet_y"]
@@ -148,10 +148,10 @@ def get_square_size(emoji_data: Sequence[Dict[str, Any]]) -> int:

 def generate_sprite_css_files(
     cache_path: str,
-    emoji_data: List[Dict[str, Any]],
+    emoji_data: list[dict[str, Any]],
     emojiset: str,
     alt_name: str,
-    fallback_emoji_data: Sequence[Dict[str, Any]],
+    fallback_emoji_data: Sequence[dict[str, Any]],
 ) -> None:
     """
     Spritesheets are usually NxN squares.
@@ -272,9 +272,9 @@ def generate_sprite_css_files(
         f.write(extra_emoji_positions)


-def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None:
+def setup_emoji_farms(cache_path: str, emoji_data: list[dict[str, Any]]) -> None:
     def ensure_emoji_image(
-        emoji_dict: Dict[str, Any], src_emoji_farm: str, target_emoji_farm: str
+        emoji_dict: dict[str, Any], src_emoji_farm: str, target_emoji_farm: str
     ) -> None:
         # We use individual images from emoji farm for rendering emojis
         # in notification messages. We have a custom emoji formatter in
@@ -289,9 +289,9 @@ def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None:

     def setup_emoji_farm(
         emojiset: str,
-        emoji_data: List[Dict[str, Any]],
+        emoji_data: list[dict[str, Any]],
         alt_name: Optional[str] = None,
-        fallback_emoji_data: Sequence[Dict[str, Any]] = [],
+        fallback_emoji_data: Sequence[dict[str, Any]] = [],
     ) -> None:
         # `alt_name` is an optional parameter that we use to avoid duplicating below
         # code. It is only used while setting up google-blob emoji set as it is just
@@ -342,7 +342,7 @@ def setup_emoji_farms(cache_path: str, emoji_data: List[Dict[str, Any]]) -> None:


 def setup_old_emoji_farm(
-    cache_path: str, emoji_map: Dict[str, str], emoji_data: List[Dict[str, Any]]
+    cache_path: str, emoji_map: dict[str, str], emoji_data: list[dict[str, Any]]
 ) -> None:
     # Code for setting up old emoji farm.
     os.chdir(cache_path)
@@ -374,7 +374,7 @@ def setup_old_emoji_farm(
             pass


-def generate_map_files(cache_path: str, emoji_catalog: Dict[str, List[str]]) -> None:
+def generate_map_files(cache_path: str, emoji_catalog: dict[str, list[str]]) -> None:
     # This function generates the main data files about emoji that are
     # consumed by the web app, mobile apps, Markdown processor, etc.
     names = emoji_names_for_picker(EMOJI_NAME_MAPS)

@@ -1,6 +1,6 @@
-from typing import Any, Dict
+from typing import Any

-CUSTOM_EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
+CUSTOM_EMOJI_NAME_MAPS: dict[str, dict[str, Any]] = {
     # seems like best emoji for happy
     "1f600": {"canonical_name": "grinning", "aliases": ["happy"]},
     "1f603": {"canonical_name": "smiley", "aliases": []},

@@ -1,8 +1,8 @@
-from typing import Any, Dict
+from typing import Any

 # Generated with `generate_emoji_names`.

-EMOJI_NAME_MAPS: Dict[str, Dict[str, Any]] = {
+EMOJI_NAME_MAPS: dict[str, dict[str, Any]] = {
     "0023-20e3": {"canonical_name": "hash", "aliases": []},
     "002a-20e3": {"canonical_name": "asterisk", "aliases": []},
     "0030-20e3": {"canonical_name": "zero", "aliases": []},

@@ -1,7 +1,7 @@
 # This file contains various helper functions used by `build_emoji` tool.
 # See docs/subsystems/emoji.md for details on how this system works.
 from collections import defaultdict
-from typing import Any, Dict, List
+from typing import Any

 from zerver.lib.emoji_utils import emoji_to_hex_codepoint, hex_codepoint_to_emoji, unqualify_emoji

@@ -50,8 +50,8 @@ EMOTICON_CONVERSIONS = {
 }


-def emoji_names_for_picker(emoji_name_maps: Dict[str, Dict[str, Any]]) -> List[str]:
-    emoji_names: List[str] = []
+def emoji_names_for_picker(emoji_name_maps: dict[str, dict[str, Any]]) -> list[str]:
+    emoji_names: list[str] = []
     for name_info in emoji_name_maps.values():
         emoji_names.append(name_info["canonical_name"])
         emoji_names.extend(name_info["aliases"])
@@ -59,7 +59,7 @@ def emoji_names_for_picker(emoji_name_maps: Dict[str, Dict[str, Any]]) -> List[str]:
     return sorted(emoji_names)


-def get_emoji_code(emoji_dict: Dict[str, Any]) -> str:
+def get_emoji_code(emoji_dict: dict[str, Any]) -> str:
     # There is a `non_qualified` field on `emoji_dict` but it's
     # inconsistently present, so we'll always use the unqualified
     # emoji by unqualifying it ourselves. This gives us more consistent
@@ -72,10 +72,10 @@ def get_emoji_code(emoji_dict: Dict[str, Any]) -> str:
 # codepoints are sorted according to the `sort_order` as defined in
 # `emoji_data`.
 def generate_emoji_catalog(
-    emoji_data: List[Dict[str, Any]], emoji_name_maps: Dict[str, Dict[str, Any]]
-) -> Dict[str, List[str]]:
-    sort_order: Dict[str, int] = {}
-    emoji_catalog: Dict[str, List[str]] = defaultdict(list)
+    emoji_data: list[dict[str, Any]], emoji_name_maps: dict[str, dict[str, Any]]
+) -> dict[str, list[str]]:
+    sort_order: dict[str, int] = {}
+    emoji_catalog: dict[str, list[str]] = defaultdict(list)

     for emoji_dict in emoji_data:
         emoji_code = get_emoji_code(emoji_dict)
@@ -93,8 +93,8 @@ def generate_emoji_catalog(
     return dict(emoji_catalog)


-def generate_codepoint_to_name_map(emoji_name_maps: Dict[str, Dict[str, Any]]) -> Dict[str, str]:
-    codepoint_to_name: Dict[str, str] = {}
+def generate_codepoint_to_name_map(emoji_name_maps: dict[str, dict[str, Any]]) -> dict[str, str]:
+    codepoint_to_name: dict[str, str] = {}
     for emoji_code, name_info in emoji_name_maps.items():
         codepoint_to_name[emoji_code] = name_info["canonical_name"]
     return codepoint_to_name
@@ -102,13 +102,13 @@ def generate_codepoint_to_name_map(emoji_name_maps: Dict[str, Dict[str, Any]]) -> Dict[str, str]:

 # We support Google Modern, and fall back to Google Modern when emoji
 # aren't supported by the other styles we use.
-def emoji_is_supported(emoji_dict: Dict[str, Any]) -> bool:
+def emoji_is_supported(emoji_dict: dict[str, Any]) -> bool:
     return emoji_dict["has_img_google"]


 def generate_codepoint_to_names_map(
-    emoji_name_maps: Dict[str, Dict[str, Any]],
-) -> Dict[str, List[str]]:
+    emoji_name_maps: dict[str, dict[str, Any]],
+) -> dict[str, list[str]]:
     # The first element of the names list is always the canonical name.
     return {
         emoji_code: [name_info["canonical_name"], *name_info["aliases"]]
@@ -116,7 +116,7 @@ def generate_codepoint_to_names_map(
     }


-def generate_name_to_codepoint_map(emoji_name_maps: Dict[str, Dict[str, Any]]) -> Dict[str, str]:
+def generate_name_to_codepoint_map(emoji_name_maps: dict[str, dict[str, Any]]) -> dict[str, str]:
     name_to_codepoint = {}
     for emoji_code, name_info in emoji_name_maps.items():
         canonical_name = name_info["canonical_name"]

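`generate_emoji_catalog` above annotates a `defaultdict` as `dict[str, list[str]]`; since `defaultdict` is a `dict` subclass, the wider builtin annotation is valid. A hypothetical sketch:

    # Hypothetical sketch: nested builtin generics over a defaultdict.
    from collections import defaultdict

    emoji_catalog: dict[str, list[str]] = defaultdict(list)
    emoji_catalog["Smileys & Emotion"].append("1f600")
    emoji_catalog["Smileys & Emotion"].append("1f603")
    # A plain dict comes back out at the API boundary, as in the diff above.
    assert dict(emoji_catalog) == {"Smileys & Emotion": ["1f600", "1f603"]}
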
@@ -7,7 +7,7 @@
 # codepoint.
 import os
 import sys
-from typing import Any, Dict, List
+from typing import Any

 import orjson

@@ -124,15 +124,15 @@ SORTED_CATEGORIES = [
     "Skin Tones",
 ]

-emoji_code_to_zulip_names: Dict[str, str] = {}
-emoji_code_to_iamcal_names: Dict[str, str] = {}
-emoji_code_to_gemoji_names: Dict[str, str] = {}
-emoji_collection: Dict[str, List[Dict[str, Any]]] = {category: [] for category in SORTED_CATEGORIES}
+emoji_code_to_zulip_names: dict[str, str] = {}
+emoji_code_to_iamcal_names: dict[str, str] = {}
+emoji_code_to_gemoji_names: dict[str, str] = {}
+emoji_collection: dict[str, list[dict[str, Any]]] = {category: [] for category in SORTED_CATEGORIES}


 def generate_emoji_code_to_emoji_names_maps() -> None:
     # Prepare gemoji names map.
-    reverse_unified_reactions_map: Dict[str, List[str]] = {}
+    reverse_unified_reactions_map: dict[str, list[str]] = {}
     for name in UNIFIED_REACTIONS_MAP:
         emoji_code = UNIFIED_REACTIONS_MAP[name]
         if emoji_code in reverse_unified_reactions_map:

@@ -3,7 +3,6 @@ import glob
 import os
 import shutil
 import sys
-from typing import List

 ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 if ZULIP_PATH not in sys.path:
@@ -25,7 +24,7 @@ def generate_zulip_bots_static_files() -> None:

     package_bots_dir = get_bots_directory_path()

-    def copy_bots_data(bot_names: List[str]) -> None:
+    def copy_bots_data(bot_names: list[str]) -> None:
         for name in bot_names:
             src_dir = os.path.join(package_bots_dir, name)
             dst_dir = os.path.join(bots_dir, name)

@@ -13,7 +13,7 @@ os.environ["DJANGO_SETTINGS_MODULE"] = "zproject.settings"
 import argparse
 import secrets
 from contextlib import contextmanager
-from typing import Iterator, List, Tuple, TypedDict
+from typing import Iterator, TypedDict

 import boto3.session
 import orjson
@@ -86,7 +86,7 @@ def get_ses_arn(session: boto3.session.Session, args: argparse.Namespace) -> str:


 @contextmanager
-def our_sqs_queue(session: boto3.session.Session, ses_topic_arn: str) -> Iterator[Tuple[str, str]]:
+def our_sqs_queue(session: boto3.session.Session, ses_topic_arn: str) -> Iterator[tuple[str, str]]:
     (_, _, _, region, account_id, topic_name) = ses_topic_arn.split(":")

     sqs: SQSClient = session.client("sqs")
@@ -153,7 +153,7 @@ def print_messages(session: boto3.session.Session, queue_url: str) -> None:
         MessageAttributeNames=["All"],
     )
     messages = resp.get("Messages", [])
-    delete_list: List[DeleteMessageBatchRequestEntryTypeDef] = []
+    delete_list: list[DeleteMessageBatchRequestEntryTypeDef] = []
     for m in messages:
         body = orjson.loads(m["Body"])
         body_message = orjson.loads(body["Message"])

@@ -8,7 +8,7 @@ import shlex
 import subprocess
 import sys
 import tempfile
-from typing import TYPE_CHECKING, Iterator, List, Type, cast
+from typing import TYPE_CHECKING, Iterator, cast
 from unittest import mock

 if TYPE_CHECKING:
@@ -154,7 +154,7 @@ enforce_fully_covered = sorted(
 FAILED_TEST_PATH = "var/last_test_failure.json"


-def get_failed_tests() -> List[str]:
+def get_failed_tests() -> list[str]:
     try:
         with open(FAILED_TEST_PATH, "rb") as f:
             return orjson.loads(f.read())
@@ -418,7 +418,7 @@ def main() -> None:

     # isinstance check cannot be used with types. This can potentially improved by supporting
     # dynamic resolution of the test runner type with the django-stubs mypy plugin.
-    TestRunner = cast("Type[Runner]", get_runner(settings))
+    TestRunner = cast("type[Runner]", get_runner(settings))

     if options.processes:
         parallel = options.processes

|
|||
import pwd
|
||||
import subprocess
|
||||
import sys
|
||||
from typing import Any, Dict, List, Set
|
||||
from typing import Any
|
||||
|
||||
TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||
sys.path.insert(0, os.path.dirname(TOOLS_DIR))
|
||||
|
@ -34,7 +34,7 @@ USAGE = """
|
|||
"""
|
||||
|
||||
|
||||
def make_set(files: List[str]) -> Set[str]:
|
||||
def make_set(files: list[str]) -> set[str]:
|
||||
for i in range(1, len(files)):
|
||||
if files[i - 1] > files[i]:
|
||||
raise Exception(f"Please move {files[i]} so that names are sorted.")
|
||||
|
@ -376,7 +376,7 @@ def clean_file(orig_fn: str) -> str:
|
|||
return fn
|
||||
|
||||
|
||||
def clean_files(fns: List[str]) -> List[str]:
|
||||
def clean_files(fns: list[str]) -> list[str]:
|
||||
cleaned_files = [clean_file(fn) for fn in fns]
|
||||
return cleaned_files
|
||||
|
||||
|
@ -442,7 +442,7 @@ def run_tests_via_node_js() -> int:
|
|||
|
||||
|
||||
def check_line_coverage(
|
||||
fn: str, line_coverage: Dict[Any, Any], line_mapping: Dict[Any, Any], log: bool = True
|
||||
fn: str, line_coverage: dict[Any, Any], line_mapping: dict[Any, Any], log: bool = True
|
||||
) -> bool:
|
||||
missing_lines = []
|
||||
for line in line_coverage:
|
||||
|
|
|
@@ -39,7 +39,7 @@ from tools.lib import sanity_check

 sanity_check.check_venv(__file__)

-from typing import Iterable, Tuple
+from typing import Iterable

 from tools.lib.test_script import (
     add_provision_check_override_param,
@@ -94,7 +94,7 @@ def run_tests(files: Iterable[str], external_host: str, loop: int = 1) -> None:
     test_files = find_js_test_files(test_dir, files)
     total_tests = len(test_files)

-    def run_tests(test_number: int = 0) -> Tuple[int, int]:
+    def run_tests(test_number: int = 0) -> tuple[int, int]:
         current_test_num = test_number
         for test_file in test_files[test_number:]:
             return_code = run_single_test(test_file, current_test_num + 1, total_tests)

@@ -8,7 +8,6 @@ import shutil
 import subprocess
 import sys
 import tempfile
-from typing import List

 import orjson

@@ -74,13 +73,13 @@ def maybe_set_up_cache() -> None:
         fp.write(orjson.dumps([]))


-def load_cache() -> List[str]:
+def load_cache() -> list[str]:
     with open(CACHE_FILE, "rb") as fp:
         hash_list = orjson.loads(fp.read())
     return hash_list


-def update_cache(hash_list: List[str]) -> None:
+def update_cache(hash_list: list[str]) -> None:
     # We store last 100 hash entries. Aggressive caching is
     # not a problem as it is cheap to do.
     if len(hash_list) > 100:

@@ -5,7 +5,6 @@ import subprocess
 import sys
 import tempfile
 import time
-from typing import Tuple

 sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))

@@ -16,7 +15,7 @@ sanity_check.check_venv(__file__)
 TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))


-def start_server(logfile_name: str) -> Tuple[bool, str]:
+def start_server(logfile_name: str) -> tuple[bool, str]:
     failure = True
     key = "Quit the server with CONTROL-C."
     datalog = []

@@ -5,7 +5,6 @@ import subprocess
 import sys
 from collections import defaultdict
 from pathlib import Path
-from typing import Dict, List

 bot_commits = 0

@@ -13,7 +12,7 @@ ZULIP_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 os.chdir(ZULIP_PATH)


-def add_log(committer_dict: Dict[str, int], input: List[str]) -> None:
+def add_log(committer_dict: dict[str, int], input: list[str]) -> None:
     for dataset in input:
         committer_name = dataset.split("\t")[1]
         commit_count = int(dataset.split("\t")[0])
@@ -27,7 +26,7 @@ def add_log(committer_dict: Dict[str, int], input: List[str]) -> None:
         committer_dict[committer_name] += commit_count


-def retrieve_log(repo: str, revisions: str) -> List[str]:
+def retrieve_log(repo: str, revisions: str) -> list[str]:
     return subprocess.check_output(
         ["git", "shortlog", "-s", revisions],
         cwd=find_path(repo),
@@ -41,7 +40,7 @@ def find_path(repository: str) -> str:

 def process_repo(
     *,
-    out_dict: Dict[str, int],
+    out_dict: dict[str, int],
     repo_short: str,
     repo_full: str,
     lower_version: str,
@@ -164,8 +163,8 @@ print(
     f"Commit range {lower_zulip_version}..{upper_zulip_version} corresponds to {lower_time} to {upper_time}"
 )

-repository_dict: Dict[str, int] = defaultdict(int)
-out_dict: Dict[str, int] = defaultdict(int)
+repository_dict: dict[str, int] = defaultdict(int)
+out_dict: dict[str, int] = defaultdict(int)
 subprocess.check_call(["git", "fetch"], cwd=find_path("zulip"))
 process_repo(
     out_dict=out_dict,

@@ -5,7 +5,7 @@ import os
 import re
 import sys
 import tempfile
-from typing import IO, Dict, List, cast
+from typing import IO, cast

 sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
 from scripts.lib.setup_path import setup_path
@@ -41,7 +41,7 @@ session = boto3.session.Session()
 client: S3Client = session.client("s3")
 bucket = session.resource("s3", region_name="us-east-1").Bucket("zulip-download")

-file_hashes: Dict[str, str] = {}
+file_hashes: dict[str, str] = {}
 with open(args.filename, "rb") as new_file:
     print(f"Hashing {new_basename}..")
     file_hashes[new_basename] = sha256_contents(new_file)
@@ -75,7 +75,7 @@ for obj_summary in bucket.objects.filter(Prefix="server/zulip-server-"):

     file_hashes[filename] = metadata["sha256sum"]

-ordered_filenames = cast(List[str], natsorted(file_hashes.keys(), reverse=True))
+ordered_filenames = cast(list[str], natsorted(file_hashes.keys(), reverse=True))
 assert ordered_filenames[0] == "zulip-server-latest.tar.gz"

 print(f"Uploading {new_basename}..")

@@ -1,6 +1,6 @@
 import logging
 from datetime import datetime, timedelta
-from typing import Any, Dict, Optional
+from typing import Any, Optional

 from django.conf import settings
 from django.db import transaction
@@ -136,7 +136,7 @@ def set_realm_permissions_based_on_org_type(realm: Realm) -> None:

 @transaction.atomic(savepoint=False)
 def set_default_for_realm_permission_group_settings(
-    realm: Realm, group_settings_defaults_for_org_types: Optional[Dict[str, Dict[int, str]]] = None
+    realm: Realm, group_settings_defaults_for_org_types: Optional[dict[str, dict[int, str]]] = None
 ) -> None:
     system_groups_dict = get_role_based_system_groups_dict(realm)

@@ -206,7 +206,7 @@ def do_create_realm(
         logging.info("Server not yet initialized. Creating the internal realm first.")
         create_internal_realm()

-    kwargs: Dict[str, Any] = {}
+    kwargs: dict[str, Any] = {}
     if emails_restricted_to_domains is not None:
         kwargs["emails_restricted_to_domains"] = emails_restricted_to_domains
     if description is not None:

@@ -1,6 +1,6 @@
 from collections import defaultdict
 from datetime import timedelta
-from typing import Any, Dict, Iterable, List, Optional, Sequence, Set
+from typing import Any, Iterable, Optional, Sequence

 from django.conf import settings
 from django.db import transaction
@@ -133,7 +133,7 @@ def set_up_streams_for_new_human_user(
     realm = user_profile.realm

     if prereg_user is not None:
-        streams: List[Stream] = list(prereg_user.streams.all())
+        streams: list[Stream] = list(prereg_user.streams.all())
         acting_user: Optional[UserProfile] = prereg_user.referred_by

         # A PregistrationUser should not be used for another UserProfile
@@ -351,10 +351,10 @@ def process_new_human_user(
     OnboardingStep.objects.create(user=user_profile, onboarding_step="visibility_policy_banner")


-def notify_created_user(user_profile: UserProfile, notify_user_ids: List[int]) -> None:
+def notify_created_user(user_profile: UserProfile, notify_user_ids: list[int]) -> None:
     user_row = user_profile_to_user_row(user_profile)

-    format_user_row_kwargs: Dict[str, Any] = {
+    format_user_row_kwargs: dict[str, Any] = {
         "realm_id": user_profile.realm_id,
         "row": user_row,
         # Since we don't know what the client
@@ -370,8 +370,8 @@ def notify_created_user(user_profile: UserProfile, notify_user_ids: List[int]) -> None:
         "custom_profile_field_data": {},
     }

-    user_ids_without_access_to_created_user: List[int] = []
-    users_with_access_to_created_users: List[UserProfile] = []
+    user_ids_without_access_to_created_user: list[int] = []
+    users_with_access_to_created_users: list[UserProfile] = []

     if notify_user_ids:
         # This is currently used to send creation event when a guest
@@ -427,7 +427,7 @@ def notify_created_user(user_profile: UserProfile, notify_user_ids: List[int]) -> None:

     if user_ids_with_real_email_access:
         assert person_for_real_email_access_users is not None
-        event: Dict[str, Any] = dict(
+        event: dict[str, Any] = dict(
             type="realm_user", op="add", person=person_for_real_email_access_users
         )
         send_event_on_commit(user_profile.realm, event, user_ids_with_real_email_access)
@@ -447,7 +447,7 @@ def notify_created_user(user_profile: UserProfile, notify_user_ids: List[int]) -> None:
     send_event_on_commit(user_profile.realm, event, user_ids_without_access_to_created_user)


-def created_bot_event(user_profile: UserProfile) -> Dict[str, Any]:
+def created_bot_event(user_profile: UserProfile) -> dict[str, Any]:
     def stream_name(stream: Optional[Stream]) -> Optional[str]:
         if not stream:
             return None
@@ -749,7 +749,7 @@ def do_reactivate_user(user_profile: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
         streams=subscribed_streams,
     )

-    altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
+    altered_user_dict: dict[int, set[int]] = defaultdict(set)
     for stream in subscribed_streams:
         altered_user_dict[stream.id] = {user_profile.id}

@@ -1,4 +1,4 @@
-from typing import Dict, Iterable, List, Optional, Union
+from typing import Iterable, Optional, Union

 import orjson
 from django.db import transaction
@@ -146,7 +146,7 @@ def try_reorder_realm_custom_profile_fields(realm: Realm, order: Iterable[int]) -> None:


 def notify_user_update_custom_profile_data(
-    user_profile: UserProfile, field: Dict[str, Union[int, str, List[int], None]]
+    user_profile: UserProfile, field: dict[str, Union[int, str, list[int], None]]
 ) -> None:
     data = dict(id=field["id"], value=field["value"])

@@ -159,7 +159,7 @@ def notify_user_update_custom_profile_data(

 def do_update_user_custom_profile_data_if_changed(
     user_profile: UserProfile,
-    data: List[ProfileDataElementUpdateDict],
+    data: list[ProfileDataElementUpdateDict],
 ) -> None:
     with transaction.atomic():
         for custom_profile_field in data:

@@ -1,4 +1,4 @@
-from typing import Any, Dict, Iterable, List
+from typing import Any, Iterable

 from django.db import transaction
 from django.utils.translation import gettext as _
@@ -37,8 +37,8 @@ def check_default_stream_group_name(group_name: str) -> None:


 def lookup_default_stream_groups(
-    default_stream_group_names: List[str], realm: Realm
-) -> List[DefaultStreamGroup]:
+    default_stream_group_names: list[str], realm: Realm
+) -> list[DefaultStreamGroup]:
     default_stream_groups = []
     for group_name in default_stream_group_names:
         try:
@@ -86,7 +86,7 @@ def do_remove_default_stream(stream: Stream) -> None:


 def do_create_default_stream_group(
-    realm: Realm, group_name: str, description: str, streams: List[Stream]
+    realm: Realm, group_name: str, description: str, streams: list[Stream]
 ) -> None:
     default_stream_ids = get_default_stream_ids_for_realm(realm.id)
     for stream in streams:
@@ -113,7 +113,7 @@ def do_create_default_stream_group(


 def do_add_streams_to_default_stream_group(
-    realm: Realm, group: DefaultStreamGroup, streams: List[Stream]
+    realm: Realm, group: DefaultStreamGroup, streams: list[Stream]
 ) -> None:
     default_stream_ids = get_default_stream_ids_for_realm(realm.id)
     for stream in streams:
@@ -136,7 +136,7 @@ def do_add_streams_to_default_stream_group(


 def do_remove_streams_from_default_stream_group(
-    realm: Realm, group: DefaultStreamGroup, streams: List[Stream]
+    realm: Realm, group: DefaultStreamGroup, streams: list[Stream]
 ) -> None:
     group_stream_ids = {stream.id for stream in group.streams.all()}
     for stream in streams:
@@ -190,5 +190,5 @@ def do_remove_default_stream_group(realm: Realm, group: DefaultStreamGroup) -> None:

 def default_stream_groups_to_dicts_sorted(
     groups: Iterable[DefaultStreamGroup],
-) -> List[Dict[str, Any]]:
+) -> list[dict[str, Any]]:
     return sorted((group.to_dict() for group in groups), key=lambda elt: elt["name"])

@@ -1,6 +1,6 @@
 import logging
 from datetime import datetime, timedelta
-from typing import Any, Collection, Dict, List, Optional, Sequence, Set, Tuple
+from typing import Any, Collection, Optional, Sequence

 from django.conf import settings
 from django.contrib.contenttypes.models import ContentType
@@ -179,7 +179,7 @@ def do_invite_users(
     invite_expires_in_minutes: Optional[int],
     include_realm_default_subscriptions: bool,
     invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
-) -> List[Tuple[str, str, bool]]:
+) -> list[tuple[str, str, bool]]:
     num_invites = len(invitee_emails)

     # Lock the realm, since we need to not race with other invitations
@@ -211,8 +211,8 @@ def do_invite_users(
             sent_invitations=False,
         )

-    good_emails: Set[str] = set()
-    errors: List[Tuple[str, str, bool]] = []
+    good_emails: set[str] = set()
+    errors: list[tuple[str, str, bool]] = []
     validate_email_allowed_in_realm = get_realm_email_validator(realm)
     for email in invitee_emails:
         if email == "":
@@ -234,7 +234,7 @@ def do_invite_users(
     """
     error_dict = get_existing_user_errors(realm, good_emails)

-    skipped: List[Tuple[str, str, bool]] = []
+    skipped: list[tuple[str, str, bool]] = []
     for email in error_dict:
         msg, deactivated = error_dict[email]
         skipped.append((email, msg, deactivated))
@@ -292,7 +292,7 @@ def get_invitation_expiry_date(confirmation_obj: Confirmation) -> Optional[int]:
     return datetime_to_timestamp(expiry_date)


-def do_get_invites_controlled_by_user(user_profile: UserProfile) -> List[Dict[str, Any]]:
+def do_get_invites_controlled_by_user(user_profile: UserProfile) -> list[dict[str, Any]]:
     """
     Returns a list of dicts representing invitations that can be controlled by user_profile.
     This isn't necessarily the same as all the invitations generated by the user, as administrators

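Note what the rewrite leaves alone, here and throughout: `Optional` and `Union` have no builtin replacement, so they survive. A hedged sketch; the PEP 604 spelling shown is what a separate rule such as ruff's UP007 would produce when targeting Python 3.10+:

    # Hypothetical sketch: the container aliases are rewritten, but
    # Optional/Union are kept ...
    from typing import Optional, Union


    def skipped_count(errors: list[tuple[str, str, bool]], realm: Optional[str]) -> Union[int, None]:
        return len(errors) if realm is not None else None


    # ... while PEP 604 (Python 3.10+) would go one step further; quoted
    # here so the module also runs on older interpreters.
    def skipped_count_604(errors: list[tuple[str, str, bool]], realm: "str | None") -> "int | None":
        return len(errors) if realm is not None else None
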
@@ -1,4 +1,4 @@
-from typing import Iterable, List, TypedDict
+from typing import Iterable, TypedDict

 from zerver.lib import retention
 from zerver.lib.retention import move_messages_to_archive
@@ -9,14 +9,14 @@ from zerver.tornado.django_api import send_event_on_commit

 class DeleteMessagesEvent(TypedDict, total=False):
     type: str
-    message_ids: List[int]
+    message_ids: list[int]
     message_type: str
     topic: str
     stream_id: int


 def check_update_first_message_id(
-    realm: Realm, stream: Stream, message_ids: List[int], users_to_notify: Iterable[int]
+    realm: Realm, stream: Stream, message_ids: list[int], users_to_notify: Iterable[int]
 ) -> None:
     # This will not update the `first_message_id` of streams where the
     # first message was deleted prior to the implementation of this function.

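`DeleteMessagesEvent` above is a `TypedDict`, and its fields take the lowercase generics like any other annotation. A hypothetical sketch of how such an event dict is built and checked:

    # Hypothetical sketch: a TypedDict event with builtin-generic fields.
    from typing import TypedDict


    class DeleteEvent(TypedDict, total=False):
        type: str
        message_ids: list[int]
        stream_id: int


    # total=False makes every key optional, so partial events type-check.
    event: DeleteEvent = {"type": "delete_message", "message_ids": [101, 102]}
    event["stream_id"] = 7
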
@@ -1,7 +1,7 @@
 import itertools
 from collections import defaultdict
 from datetime import timedelta
-from typing import AbstractSet, Any, Dict, Iterable, List, Optional, Set, Tuple
+from typing import AbstractSet, Any, Iterable, Optional

 from django.conf import settings
 from django.db import transaction
@@ -84,7 +84,7 @@ from zerver.models.users import get_system_bot
 from zerver.tornado.django_api import send_event_on_commit


-def subscriber_info(user_id: int) -> Dict[str, Any]:
+def subscriber_info(user_id: int) -> dict[str, Any]:
     return {"id": user_id, "flags": ["read"]}


@@ -148,7 +148,7 @@ def maybe_send_resolve_topic_notifications(
     new_topic_name: str,
     changed_messages: QuerySet[Message],
     pre_truncation_new_topic_name: str,
-) -> Tuple[Optional[int], bool]:
+) -> tuple[Optional[int], bool]:
     """Returns resolved_topic_message_id if resolve topic notifications were in fact sent."""
     # Note that topics will have already been stripped in check_update_message.
     resolved_prefix_len = len(RESOLVED_TOPIC_PREFIX)
@@ -299,7 +299,7 @@ def send_message_moved_breadcrumbs(
     )


-def get_mentions_for_message_updates(message_id: int) -> Set[int]:
+def get_mentions_for_message_updates(message_id: int) -> set[int]:
     # We exclude UserMessage.flags.historical rows since those
     # users did not receive the message originally, and thus
     # probably are not relevant for reprocessed alert_words,
@@ -330,7 +330,7 @@ def update_user_message_flags(
 ) -> None:
     mentioned_ids = rendering_result.mentions_user_ids
     ids_with_alert_words = rendering_result.user_ids_with_alert_words
-    changed_ums: Set[UserMessage] = set()
+    changed_ums: set[UserMessage] = set()

     def update_flag(um: UserMessage, should_set: bool, flag: int) -> None:
         if should_set:
@@ -366,7 +366,7 @@ def do_update_embedded_data(
     rendering_result: MessageRenderingResult,
 ) -> None:
     timestamp = timezone_now()
-    event: Dict[str, Any] = {
+    event: dict[str, Any] = {
         "type": "update_message",
         "user_id": None,
         "edit_timestamp": datetime_to_timestamp(timestamp),
@@ -390,7 +390,7 @@ def do_update_embedded_data(

     event["message_ids"] = update_message_cache(changed_messages)

-    def user_info(um: UserMessage) -> Dict[str, Any]:
+    def user_info(um: UserMessage) -> dict[str, Any]:
         return {
             "id": um.user_profile_id,
             "flags": um.flags_list(),
@@ -433,7 +433,7 @@ def do_update_message(
     send_notification_to_new_thread: bool,
     content: Optional[str],
     rendering_result: Optional[MessageRenderingResult],
-    prior_mention_user_ids: Set[int],
+    prior_mention_user_ids: set[int],
     mention_data: Optional[MentionData] = None,
 ) -> int:
     """
@@ -452,7 +452,7 @@ def do_update_message(
     timestamp = timezone_now()
     target_message.last_edit_time = timestamp

-    event: Dict[str, Any] = {
+    event: dict[str, Any] = {
         "type": "update_message",
         "user_id": user_profile.id,
         "edit_timestamp": datetime_to_timestamp(timestamp),
@@ -586,7 +586,7 @@ def do_update_message(
     event["propagate_mode"] = propagate_mode

     users_losing_access = UserProfile.objects.none()
-    user_ids_gaining_usermessages: List[int] = []
+    user_ids_gaining_usermessages: list[int] = []
     if new_stream is not None:
         assert content is None
         assert target_message.is_stream_message()
@@ -798,7 +798,7 @@ def do_update_message(

     event["message_ids"] = update_message_cache(changed_messages, realm_id)

-    def user_info(um: UserMessage) -> Dict[str, Any]:
+    def user_info(um: UserMessage) -> dict[str, Any]:
         return {
             "id": um.user_profile_id,
             "flags": um.flags_list(),
@@ -908,9 +908,9 @@ def do_update_message(
         assert target_stream is not None
         assert target_topic_name is not None

-        stream_inaccessible_to_user_profiles: List[UserProfile] = []
-        orig_topic_user_profile_to_visibility_policy: Dict[UserProfile, int] = {}
-        target_topic_user_profile_to_visibility_policy: Dict[UserProfile, int] = {}
+        stream_inaccessible_to_user_profiles: list[UserProfile] = []
+        orig_topic_user_profile_to_visibility_policy: dict[UserProfile, int] = {}
+        target_topic_user_profile_to_visibility_policy: dict[UserProfile, int] = {}
         user_ids_losing_access = {user.id for user in users_losing_access}
         for user_topic in get_users_with_user_topic_visibility_policy(
             stream_being_edited.id, orig_topic_name
@@ -930,14 +930,14 @@ def do_update_message(
             )

         # User profiles having any of the visibility policies set for either the original or target topic.
-        user_profiles_having_visibility_policy: Set[UserProfile] = set(
+        user_profiles_having_visibility_policy: set[UserProfile] = set(
             itertools.chain(
                 orig_topic_user_profile_to_visibility_policy.keys(),
                 target_topic_user_profile_to_visibility_policy.keys(),
             )
         )

-        user_profiles_for_visibility_policy_pair: Dict[Tuple[int, int], List[UserProfile]] = (
+        user_profiles_for_visibility_policy_pair: dict[tuple[int, int], list[UserProfile]] = (
             defaultdict(list)
         )
         for user_profile_with_policy in user_profiles_having_visibility_policy:
@@ -1197,7 +1197,7 @@ def check_time_limit_for_change_all_propagate_mode(
         message__recipient_id=message.recipient_id,
         message__subject__iexact=message.topic_name(),
     ).values_list("message_id", flat=True)
-    messages_allowed_to_move: List[int] = list(
+    messages_allowed_to_move: list[int] = list(
         Message.objects.filter(
             # Uses index: zerver_message_pkey
             id__in=accessible_messages_in_topic,
@@ -1291,8 +1291,8 @@ def check_update_message(
         raise JsonableError(_("The time limit for editing this message's topic has passed."))

     rendering_result = None
-    links_for_embed: Set[str] = set()
-    prior_mention_user_ids: Set[int] = set()
+    links_for_embed: set[str] = set()
+    prior_mention_user_ids: set[int] = set()
     mention_data: Optional[MentionData] = None
     if content is not None:
         if content.rstrip() == "":

@@ -1,7 +1,7 @@
 import time
 from collections import defaultdict
 from dataclasses import asdict, dataclass, field
-from typing import List, Optional, Set
+from typing import Optional

 from django.conf import settings
 from django.db import transaction
@@ -26,7 +26,7 @@ from zerver.tornado.django_api import send_event

 @dataclass
 class ReadMessagesEvent:
-    messages: List[int]
+    messages: list[int]
     all: bool
     type: str = field(default="update_message_flags", init=False)
     op: str = field(default="add", init=False)
@@ -203,9 +203,9 @@ def do_mark_muted_user_messages_as_read(

 def do_update_mobile_push_notification(
     message: Message,
-    prior_mention_user_ids: Set[int],
-    mentions_user_ids: Set[int],
-    stream_push_user_ids: Set[int],
+    prior_mention_user_ids: set[int],
+    mentions_user_ids: set[int],
+    stream_push_user_ids: set[int],
 ) -> None:
     # Called during the message edit code path to remove mobile push
     # notifications for users who are no longer mentioned following
@@ -223,7 +223,7 @@ def do_update_mobile_push_notification(


 def do_clear_mobile_push_notifications_for_ids(
-    user_profile_ids: List[int], message_ids: List[int]
+    user_profile_ids: list[int], message_ids: list[int]
 ) -> None:
     if len(message_ids) == 0:
         return
@@ -261,7 +261,7 @@ def do_clear_mobile_push_notifications_for_ids(


 def do_update_message_flags(
-    user_profile: UserProfile, operation: str, flag: str, messages: List[int]
+    user_profile: UserProfile, operation: str, flag: str, messages: list[int]
 ) -> int:
     valid_flags = [item for item in UserMessage.flags if item not in UserMessage.NON_API_FLAGS]
     if flag not in valid_flags:

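`ReadMessagesEvent` above pairs the builtin generics with dataclass `field` defaults. A hypothetical sketch of the same shape:

    # Hypothetical sketch: dataclass fields with builtin generics;
    # field(init=False) pins the constant event metadata.
    from dataclasses import asdict, dataclass, field


    @dataclass
    class ReadEvent:
        messages: list[int]
        all: bool
        type: str = field(default="update_message_flags", init=False)
        op: str = field(default="add", init=False)


    print(asdict(ReadEvent(messages=[7, 8], all=False)))
    # {'messages': [7, 8], 'all': False, 'type': 'update_message_flags', 'op': 'add'}
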
@ -3,20 +3,7 @@ from collections import defaultdict
|
|||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
from email.headerregistry import Address
|
||||
from typing import (
|
||||
AbstractSet,
|
||||
Any,
|
||||
Callable,
|
||||
Collection,
|
||||
Dict,
|
||||
List,
|
||||
Optional,
|
||||
Sequence,
|
||||
Set,
|
||||
Tuple,
|
||||
TypedDict,
|
||||
Union,
|
||||
)
|
||||
from typing import AbstractSet, Any, Callable, Collection, Optional, Sequence, TypedDict, Union
|
||||
|
||||
import orjson
|
||||
from django.conf import settings
|
||||
|
@ -157,7 +144,7 @@ def render_incoming_message(
|
|||
content: str,
|
||||
realm: Realm,
|
||||
mention_data: Optional[MentionData] = None,
|
||||
url_embed_data: Optional[Dict[str, Optional[UrlEmbedData]]] = None,
|
||||
url_embed_data: Optional[dict[str, Optional[UrlEmbedData]]] = None,
|
||||
email_gateway: bool = False,
|
||||
) -> MessageRenderingResult:
|
||||
realm_alert_words_automaton = get_alert_word_automaton(realm)
|
||||
|
@@ -178,25 +165,25 @@ def render_incoming_message(

 @dataclass
 class RecipientInfoResult:
-    active_user_ids: Set[int]
-    online_push_user_ids: Set[int]
-    dm_mention_email_disabled_user_ids: Set[int]
-    dm_mention_push_disabled_user_ids: Set[int]
-    stream_email_user_ids: Set[int]
-    stream_push_user_ids: Set[int]
-    topic_wildcard_mention_user_ids: Set[int]
-    stream_wildcard_mention_user_ids: Set[int]
-    followed_topic_email_user_ids: Set[int]
-    followed_topic_push_user_ids: Set[int]
-    topic_wildcard_mention_in_followed_topic_user_ids: Set[int]
-    stream_wildcard_mention_in_followed_topic_user_ids: Set[int]
-    muted_sender_user_ids: Set[int]
-    um_eligible_user_ids: Set[int]
-    long_term_idle_user_ids: Set[int]
-    default_bot_user_ids: Set[int]
-    service_bot_tuples: List[Tuple[int, int]]
-    all_bot_user_ids: Set[int]
-    topic_participant_user_ids: Set[int]
+    active_user_ids: set[int]
+    online_push_user_ids: set[int]
+    dm_mention_email_disabled_user_ids: set[int]
+    dm_mention_push_disabled_user_ids: set[int]
+    stream_email_user_ids: set[int]
+    stream_push_user_ids: set[int]
+    topic_wildcard_mention_user_ids: set[int]
+    stream_wildcard_mention_user_ids: set[int]
+    followed_topic_email_user_ids: set[int]
+    followed_topic_push_user_ids: set[int]
+    topic_wildcard_mention_in_followed_topic_user_ids: set[int]
+    stream_wildcard_mention_in_followed_topic_user_ids: set[int]
+    muted_sender_user_ids: set[int]
+    um_eligible_user_ids: set[int]
+    long_term_idle_user_ids: set[int]
+    default_bot_user_ids: set[int]
+    service_bot_tuples: list[tuple[int, int]]
+    all_bot_user_ids: set[int]
+    topic_participant_user_ids: set[int]
     sender_muted_stream: Optional[bool]

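Dataclass field annotations are evaluated when the class body runs, so this hunk relies on the runtime support PEP 585 added in Python 3.9. A cut-down stand-in (invented fields, not the real `RecipientInfoResult`) showing the same shapes:

```python
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class RecipientSketch:
    active_user_ids: set[int] = field(default_factory=set)
    service_bot_tuples: list[tuple[int, int]] = field(default_factory=list)
    sender_muted_stream: Optional[bool] = None


info = RecipientSketch()
info.active_user_ids.add(42)
info.service_bot_tuples.append((42, 7))
print(info)
```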
@@ -226,16 +213,16 @@ def get_recipient_info(
     possible_topic_wildcard_mention: bool = True,
     possible_stream_wildcard_mention: bool = True,
 ) -> RecipientInfoResult:
-    stream_push_user_ids: Set[int] = set()
-    stream_email_user_ids: Set[int] = set()
-    topic_wildcard_mention_user_ids: Set[int] = set()
-    stream_wildcard_mention_user_ids: Set[int] = set()
-    followed_topic_push_user_ids: Set[int] = set()
-    followed_topic_email_user_ids: Set[int] = set()
-    topic_wildcard_mention_in_followed_topic_user_ids: Set[int] = set()
-    stream_wildcard_mention_in_followed_topic_user_ids: Set[int] = set()
-    muted_sender_user_ids: Set[int] = get_muting_users(sender_id)
-    topic_participant_user_ids: Set[int] = set()
+    stream_push_user_ids: set[int] = set()
+    stream_email_user_ids: set[int] = set()
+    topic_wildcard_mention_user_ids: set[int] = set()
+    stream_wildcard_mention_user_ids: set[int] = set()
+    followed_topic_push_user_ids: set[int] = set()
+    followed_topic_email_user_ids: set[int] = set()
+    topic_wildcard_mention_in_followed_topic_user_ids: set[int] = set()
+    stream_wildcard_mention_in_followed_topic_user_ids: set[int] = set()
+    muted_sender_user_ids: set[int] = get_muting_users(sender_id)
+    topic_participant_user_ids: set[int] = set()
     sender_muted_stream: Optional[bool] = None

     if recipient.type == Recipient.PERSONAL:
@@ -315,7 +302,7 @@ def get_recipient_info(

     user_id_to_visibility_policy = stream_topic.user_id_to_visibility_policy_dict()

-    def notification_recipients(setting: str) -> Set[int]:
+    def notification_recipients(setting: str) -> set[int]:
         return {
             row["user_profile_id"]
             for row in subscription_rows
@@ -332,7 +319,7 @@ def get_recipient_info(
     stream_push_user_ids = notification_recipients("push_notifications")
     stream_email_user_ids = notification_recipients("email_notifications")

-    def followed_topic_notification_recipients(setting: str) -> Set[int]:
+    def followed_topic_notification_recipients(setting: str) -> set[int]:
         return {
             row["user_profile_id"]
             for row in subscription_rows
@@ -427,7 +414,7 @@ def get_recipient_info(
         # to-do.
         rows = []

-    def get_ids_for(f: Callable[[ActiveUserDict], bool]) -> Set[int]:
+    def get_ids_for(f: Callable[[ActiveUserDict], bool]) -> set[int]:
         """Only includes users on the explicit message to line"""
         return {row["id"] for row in rows if f(row)} & message_to_user_id_set

@@ -506,12 +493,12 @@ def get_recipient_info(

 def get_service_bot_events(
     sender: UserProfile,
-    service_bot_tuples: List[Tuple[int, int]],
-    mentioned_user_ids: Set[int],
-    active_user_ids: Set[int],
+    service_bot_tuples: list[tuple[int, int]],
+    mentioned_user_ids: set[int],
+    active_user_ids: set[int],
     recipient_type: int,
-) -> Dict[str, List[Dict[str, Any]]]:
-    event_dict: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
+) -> dict[str, list[dict[str, Any]]]:
+    event_dict: dict[str, list[dict[str, Any]]] = defaultdict(list)

     # Avoid infinite loops by preventing messages sent by bots from generating
     # Service events.
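The nested generic in this signature, `dict[str, list[dict[str, Any]]]`, works the same way as its `typing` spelling, including when the value is actually a `defaultdict` (a `dict` subclass, as in the hunk). A runnable sketch with an invented grouping function and event shape:

```python
from collections import defaultdict
from typing import Any


def group_events(
    events: list[tuple[str, dict[str, Any]]],
) -> dict[str, list[dict[str, Any]]]:
    # Group (queue_name, event) pairs into lists keyed by queue name.
    event_dict: dict[str, list[dict[str, Any]]] = defaultdict(list)
    for queue_name, event in events:
        event_dict[queue_name].append(event)
    return event_dict


print(group_events([("outgoing_webhooks", {"trigger": "mention", "user_profile_id": 1})]))
```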
@@ -576,12 +563,12 @@ def build_message_send_dict(
     stream: Optional[Stream] = None,
     local_id: Optional[str] = None,
     sender_queue_id: Optional[str] = None,
-    widget_content_dict: Optional[Dict[str, Any]] = None,
+    widget_content_dict: Optional[dict[str, Any]] = None,
     email_gateway: bool = False,
     mention_backend: Optional[MentionBackend] = None,
-    limit_unread_user_ids: Optional[Set[int]] = None,
+    limit_unread_user_ids: Optional[set[int]] = None,
     disable_external_notifications: bool = False,
-    recipients_for_user_creation_events: Optional[Dict[UserProfile, Set[int]]] = None,
+    recipients_for_user_creation_events: Optional[dict[UserProfile, set[int]]] = None,
 ) -> SendMessageRequest:
     """Returns a dictionary that can be passed into do_send_messages. In
     production, this is always called by check_message, but some
@@ -726,10 +713,10 @@ def create_user_messages(
     mentioned_user_ids: AbstractSet[int],
     followed_topic_push_user_ids: AbstractSet[int],
     followed_topic_email_user_ids: AbstractSet[int],
-    mark_as_read_user_ids: Set[int],
-    limit_unread_user_ids: Optional[Set[int]],
-    topic_participant_user_ids: Set[int],
-) -> List[UserMessageLite]:
+    mark_as_read_user_ids: set[int],
+    limit_unread_user_ids: Optional[set[int]],
+    topic_participant_user_ids: set[int],
+) -> list[UserMessageLite]:
     # These properties on the Message are set via
     # render_message_markdown by code in the Markdown inline patterns
     ids_with_alert_words = rendering_result.user_ids_with_alert_words
@@ -798,7 +785,7 @@ def create_user_messages(
     return user_messages


-def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
+def filter_presence_idle_user_ids(user_ids: set[int]) -> list[int]:
     # Given a set of user IDs (the recipients of a message), accesses
     # the UserPresence table to determine which of these users are
     # currently idle and should potentially get email notifications
@@ -821,8 +808,8 @@ def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
 def get_active_presence_idle_user_ids(
     realm: Realm,
     sender_id: int,
-    user_notifications_data_list: List[UserMessageNotificationsData],
-) -> List[int]:
+    user_notifications_data_list: list[UserMessageNotificationsData],
+) -> list[int]:
     """
     Given a list of active_user_ids, we build up a subset
     of those users who fit these criteria:
@@ -851,7 +838,7 @@ def do_send_messages(
     send_message_requests_maybe_none: Sequence[Optional[SendMessageRequest]],
     *,
     mark_as_read: Sequence[int] = [],
-) -> List[SentMessageResult]:
+) -> list[SentMessageResult]:
     """See
     https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
     for high-level documentation on this subsystem.
@@ -865,7 +852,7 @@ def do_send_messages(
     ]

     # Save the message receipts in the database
-    user_message_flags: Dict[int, Dict[int, List[str]]] = defaultdict(dict)
+    user_message_flags: dict[int, dict[int, list[str]]] = defaultdict(dict)

     Message.objects.bulk_create(send_request.message for send_request in send_message_requests)

@@ -877,7 +864,7 @@ def do_send_messages(
             send_request.message.has_attachment = True
             send_request.message.save(update_fields=["has_attachment"])

-    ums: List[UserMessageLite] = []
+    ums: list[UserMessageLite] = []
     for send_request in send_message_requests:
         # Service bots (outgoing webhook bots and embedded bots) don't store UserMessage rows;
         # they will be processed later.
@@ -978,7 +965,7 @@ def do_send_messages(
         human_user_personal_mentions = send_request.rendering_result.mentions_user_ids & (
             send_request.active_user_ids - send_request.all_bot_user_ids
         )
-        expect_follow_user_profiles: Set[UserProfile] = set()
+        expect_follow_user_profiles: set[UserProfile] = set()

         if len(human_user_personal_mentions) > 0:
             expect_follow_user_profiles = set(
@@ -1045,10 +1032,10 @@ def do_send_messages(

         class UserData(TypedDict):
             id: int
-            flags: List[str]
+            flags: list[str]
             mentioned_user_group_id: Optional[int]

-        users: List[UserData] = []
+        users: list[UserData] = []
         for user_id in user_list:
             flags = user_flags.get(user_id, [])
             # TODO/compatibility: The `wildcard_mentioned` flag was deprecated in favor of
@@ -1278,7 +1265,7 @@ def extract_stream_indicator(s: str) -> Union[str, int]:
    raise JsonableError(_("Invalid data type for channel"))


-def extract_private_recipients(s: str) -> Union[List[str], List[int]]:
+def extract_private_recipients(s: str) -> Union[list[str], list[int]]:
    # We try to accept multiple incoming formats for recipients.
    # See test_extract_recipients() for examples of what we allow.

@@ -1306,7 +1293,7 @@ def extract_private_recipients(s: str) -> Union[List[str], List[int]]:
     return get_validated_user_ids(data)


-def get_validated_user_ids(user_ids: Collection[int]) -> List[int]:
+def get_validated_user_ids(user_ids: Collection[int]) -> list[int]:
     for user_id in user_ids:
         if not isinstance(user_id, int):
             raise JsonableError(_("Recipient lists may contain emails or user IDs, but not both."))
@@ -1314,7 +1301,7 @@ def get_validated_user_ids(user_ids: Collection[int]) -> List[int]:
     return list(set(user_ids))


-def get_validated_emails(emails: Collection[str]) -> List[str]:
+def get_validated_emails(emails: Collection[str]) -> list[str]:
     for email in emails:
         if not isinstance(email, str):
             raise JsonableError(_("Recipient lists may contain emails or user IDs, but not both."))
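These two hunks also illustrate that parameters keep the abstract `Collection[int]` type while only the concrete return type moves to the builtin `list`. A standalone sketch of the validate-then-dedupe pattern, with Zulip's `JsonableError` swapped for `ValueError` so it runs outside the project:

```python
from typing import Collection


def get_validated_user_ids(user_ids: Collection[int]) -> list[int]:
    for user_id in user_ids:
        if not isinstance(user_id, int):
            raise ValueError("Recipient lists may contain emails or user IDs, but not both.")
    # Deduplicate; note that set() does not preserve input order.
    return list(set(user_ids))


print(get_validated_user_ids([3, 1, 3, 2]))  # e.g. [1, 2, 3] (order unspecified)
```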
@@ -1457,7 +1444,7 @@ def send_pm_if_empty_stream(

    if sender.bot_owner is not None:
        with override_language(sender.bot_owner.default_language):
-            arg_dict: Dict[str, Any] = {
+            arg_dict: dict[str, Any] = {
                "bot_identity": f"`{sender.delivery_email}`",
            }
            if stream is None:
@@ -1596,14 +1583,14 @@ def check_sender_can_access_recipients(

 def get_recipients_for_user_creation_events(
     realm: Realm, sender: UserProfile, user_profiles: Sequence[UserProfile]
-) -> Dict[UserProfile, Set[int]]:
+) -> dict[UserProfile, set[int]]:
     """
     This function returns a dictionary with data about which users would
     receive stream creation events due to gaining access to a user.
     The key of the dictionary is a user object and the value is a set of
     user_ids that would gain access to that user.
     """
-    recipients_for_user_creation_events: Dict[UserProfile, Set[int]] = defaultdict(set)
+    recipients_for_user_creation_events: dict[UserProfile, set[int]] = defaultdict(set)

     # If none of the users in the direct message conversation are
     # guests, then there is no possible can_access_all_users_group
@@ -1666,7 +1653,7 @@ def check_message(
     skip_stream_access_check: bool = False,
     message_type: int = Message.MessageType.NORMAL,
     mention_backend: Optional[MentionBackend] = None,
-    limit_unread_user_ids: Optional[Set[int]] = None,
+    limit_unread_user_ids: Optional[set[int]] = None,
     disable_external_notifications: bool = False,
 ) -> SendMessageRequest:
     """See
@@ -1841,7 +1828,7 @@ def _internal_prep_message(
     email_gateway: bool = False,
     message_type: int = Message.MessageType.NORMAL,
     mention_backend: Optional[MentionBackend] = None,
-    limit_unread_user_ids: Optional[Set[int]] = None,
+    limit_unread_user_ids: Optional[set[int]] = None,
     disable_external_notifications: bool = False,
     forged: bool = False,
     forged_timestamp: Optional[float] = None,
@@ -1896,7 +1883,7 @@ def internal_prep_stream_message(
     *,
     email_gateway: bool = False,
     message_type: int = Message.MessageType.NORMAL,
-    limit_unread_user_ids: Optional[Set[int]] = None,
+    limit_unread_user_ids: Optional[set[int]] = None,
     forged: bool = False,
     forged_timestamp: Optional[float] = None,
 ) -> Optional[SendMessageRequest]:
@@ -1993,7 +1980,7 @@ def internal_send_stream_message(
     *,
     email_gateway: bool = False,
     message_type: int = Message.MessageType.NORMAL,
-    limit_unread_user_ids: Optional[Set[int]] = None,
+    limit_unread_user_ids: Optional[set[int]] = None,
 ) -> Optional[int]:
     message = internal_prep_stream_message(
         sender,
@@ -2038,8 +2025,8 @@ def internal_prep_group_direct_message(
     sender: UserProfile,
     content: str,
     *,
-    emails: Optional[List[str]] = None,
-    recipient_users: Optional[List[UserProfile]] = None,
+    emails: Optional[list[str]] = None,
+    recipient_users: Optional[list[UserProfile]] = None,
 ) -> Optional[SendMessageRequest]:
     if recipient_users is not None:
         addressee = Addressee.for_user_profiles(recipient_users)
@@ -2060,8 +2047,8 @@ def internal_send_group_direct_message(
     sender: UserProfile,
     content: str,
     *,
-    emails: Optional[List[str]] = None,
-    recipient_users: Optional[List[UserProfile]] = None,
+    emails: Optional[list[str]] = None,
+    recipient_users: Optional[list[UserProfile]] = None,
 ) -> Optional[int]:
     message = internal_prep_group_direct_message(
         realm, sender, content, emails=emails, recipient_users=recipient_users

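Notably, every `Optional[...]` and `Union[...]` in these hunks survives the rewrite: PEP 585 builtin generics work at runtime from Python 3.9, while PEP 604's `X | Y` union spelling needs 3.10 (or deferred annotation evaluation) and is handled by a separate ruff rule, UP007. A sketch contrasting the two spellings, assuming Python ≥ 3.9:

```python
from __future__ import annotations  # makes `X | Y` legal in annotations pre-3.10

from typing import Optional


def f(emails: Optional[list[str]] = None) -> list[str]:  # spelling kept in this diff
    return emails or []


def g(emails: list[str] | None = None) -> list[str]:  # PEP 604 spelling (ruff UP007)
    return emails or []


print(f(), g(["a@example.com"]))
```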
@@ -1,4 +1,4 @@
-from typing import Any, Dict, Optional
+from typing import Any, Optional

 from zerver.actions.user_topics import do_set_user_topic_visibility_policy
 from zerver.lib.emoji import check_emoji_request, get_emoji_data
@@ -26,7 +26,7 @@ def notify_reaction_update(
         "full_name": user_profile.full_name,
     }

-    event: Dict[str, Any] = {
+    event: dict[str, Any] = {
         "type": "reaction",
         "op": op,
         "user_id": user_profile.id,

@@ -1,4 +1,4 @@
-from typing import IO, Dict, Optional
+from typing import IO, Optional

 from django.core.exceptions import ValidationError
 from django.db import transaction
@@ -17,7 +17,7 @@ from zerver.models.users import active_user_ids
 from zerver.tornado.django_api import send_event_on_commit


-def notify_realm_emoji(realm: Realm, realm_emoji: Dict[str, EmojiInfo]) -> None:
+def notify_realm_emoji(realm: Realm, realm_emoji: dict[str, EmojiInfo]) -> None:
     event = dict(type="realm_emoji", op="update", realm_emoji=realm_emoji)
     send_event_on_commit(realm, event, active_user_ids(realm.id))

@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import Optional

 from django.db import transaction
 from django.db.models import Max
@@ -13,8 +13,8 @@ from zerver.models.users import active_user_ids
 from zerver.tornado.django_api import send_event_on_commit


-def notify_linkifiers(realm: Realm, realm_linkifiers: List[LinkifierDict]) -> None:
-    event: Dict[str, object] = dict(type="realm_linkifiers", realm_linkifiers=realm_linkifiers)
+def notify_linkifiers(realm: Realm, realm_linkifiers: list[LinkifierDict]) -> None:
+    event: dict[str, object] = dict(type="realm_linkifiers", realm_linkifiers=realm_linkifiers)
     send_event_on_commit(realm, event, active_user_ids(realm.id))

@@ -137,7 +137,7 @@ def do_update_linkifier(

 @transaction.atomic(durable=True)
 def check_reorder_linkifiers(
-    realm: Realm, ordered_linkifier_ids: List[int], *, acting_user: Optional[UserProfile]
+    realm: Realm, ordered_linkifier_ids: list[int], *, acting_user: Optional[UserProfile]
 ) -> None:
     """ordered_linkifier_ids should contain ids of all existing linkifiers.
     In the rare situation when any of the linkifier gets deleted that more ids
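A quick way to convince oneself the whole diff is behavior-preserving: on Python ≥ 3.9, subscripting a builtin yields a `types.GenericAlias` whose origin and arguments match those of the `typing` spelling it replaces. An illustrative check (not from the repo):

```python
import types
import typing

new_style = list[int]
old_style = typing.List[int]

# Subscripting a builtin produces a types.GenericAlias object.
assert isinstance(new_style, types.GenericAlias)
# Both spellings decompose to the same origin and arguments.
assert typing.get_origin(new_style) == typing.get_origin(old_style) == list
assert typing.get_args(new_style) == typing.get_args(old_style) == (int,)
print(new_style, old_style)  # list[int] typing.List[int]
```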
Some files were not shown because too many files have changed in this diff.