diff --git a/analytics/lib/counts.py b/analytics/lib/counts.py index c4387035b1..072326c86d 100644 --- a/analytics/lib/counts.py +++ b/analytics/lib/counts.py @@ -303,7 +303,7 @@ def do_pull_minutes_active(property: str, start_time: datetime, end_time: dateti ).values_list( 'user_profile_id', 'user_profile__realm_id', 'start', 'end') - seconds_active = defaultdict(float) # type: Dict[Tuple[int, int], float] + seconds_active: Dict[Tuple[int, int], float] = defaultdict(float) for user_id, realm_id, interval_start, interval_end in user_activity_intervals: if realm is None or realm.id == realm_id: start = max(start_time, interval_start) diff --git a/analytics/management/commands/analyze_mit.py b/analytics/management/commands/analyze_mit.py index 63b207b890..dcef41567c 100644 --- a/analytics/management/commands/analyze_mit.py +++ b/analytics/management/commands/analyze_mit.py @@ -27,15 +27,15 @@ def compute_stats(log_level: int) -> None: "bitcoin@mit.edu", "lp@mit.edu", "clocks@mit.edu", "root@mit.edu", "nagios@mit.edu", "www-data|local-realm@mit.edu"]) - user_counts = {} # type: Dict[str, Dict[str, int]] + user_counts: Dict[str, Dict[str, int]] = {} for m in mit_query.select_related("sending_client", "sender"): email = m.sender.email user_counts.setdefault(email, {}) user_counts[email].setdefault(m.sending_client.name, 0) user_counts[email][m.sending_client.name] += 1 - total_counts = {} # type: Dict[str, int] - total_user_counts = {} # type: Dict[str, int] + total_counts: Dict[str, int] = {} + total_user_counts: Dict[str, int] = {} for email, counts in user_counts.items(): total_user_counts.setdefault(email, 0) for client_name, count in counts.items(): @@ -44,7 +44,7 @@ def compute_stats(log_level: int) -> None: total_user_counts[email] += count logging.debug("%40s | %10s | %s" % ("User", "Messages", "Percentage Zulip")) - top_percents = {} # type: Dict[int, float] + top_percents: Dict[int, float] = {} for size in [10, 25, 50, 100, 200, len(total_user_counts.keys())]: top_percents[size] = 0.0 for i, email in enumerate(sorted(total_user_counts.keys(), diff --git a/analytics/management/commands/populate_analytics_db.py b/analytics/management/commands/populate_analytics_db.py index 8e81e1ff57..fa206b45ce 100644 --- a/analytics/management/commands/populate_analytics_db.py +++ b/analytics/management/commands/populate_analytics_db.py @@ -81,7 +81,7 @@ class Command(BaseCommand): end_times = time_range(last_end_time, last_end_time, stat.frequency, len(list(fixture_data.values())[0])) if table == InstallationCount: - id_args = {} # type: Dict[str, Any] + id_args: Dict[str, Any] = {} if table == RealmCount: id_args = {'realm': realm} if table == UserCount: @@ -96,13 +96,13 @@ class Command(BaseCommand): for end_time, value in zip(end_times, values) if value != 0]) stat = COUNT_STATS['1day_actives::day'] - realm_data = { + realm_data: Mapping[Optional[str], List[int]] = { None: self.generate_fixture_data(stat, .08, .02, 3, .3, 6, partial_sum=True), - } # type: Mapping[Optional[str], List[int]] + } insert_fixture_data(stat, realm_data, RealmCount) - installation_data = { + installation_data: Mapping[Optional[str], List[int]] = { None: self.generate_fixture_data(stat, .8, .2, 4, .3, 6, partial_sum=True), - } # type: Mapping[Optional[str], List[int]] + } insert_fixture_data(stat, installation_data, InstallationCount) FillState.objects.create(property=stat.property, end_time=last_end_time, state=FillState.DONE) @@ -132,8 +132,9 @@ class Command(BaseCommand): state=FillState.DONE) stat = 
COUNT_STATS['messages_sent:is_bot:hour'] - user_data = {'false': self.generate_fixture_data( - stat, 2, 1, 1.5, .6, 8, holiday_rate=.1)} # type: Mapping[Optional[str], List[int]] + user_data: Mapping[Optional[str], List[int]] = { + 'false': self.generate_fixture_data(stat, 2, 1, 1.5, .6, 8, holiday_rate=.1), + } insert_fixture_data(stat, user_data, UserCount) realm_data = {'false': self.generate_fixture_data(stat, 35, 15, 6, .6, 4), 'true': self.generate_fixture_data(stat, 15, 15, 3, .4, 2)} @@ -209,8 +210,10 @@ class Command(BaseCommand): realm_data = {'false': self.generate_fixture_data(stat, 30, 5, 6, .6, 4), 'true': self.generate_fixture_data(stat, 20, 2, 3, .2, 3)} insert_fixture_data(stat, realm_data, RealmCount) - stream_data = {'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4), - 'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2)} # type: Mapping[Optional[str], List[int]] + stream_data: Mapping[Optional[str], List[int]] = { + 'false': self.generate_fixture_data(stat, 10, 7, 5, .6, 4), + 'true': self.generate_fixture_data(stat, 5, 3, 2, .4, 2), + } insert_fixture_data(stat, stream_data, StreamCount) FillState.objects.create(property=stat.property, end_time=last_end_time, state=FillState.DONE) diff --git a/analytics/models.py b/analytics/models.py index 7d053db805..4d6ebd11de 100644 --- a/analytics/models.py +++ b/analytics/models.py @@ -8,13 +8,13 @@ from zerver.lib.timestamp import floor_to_day from zerver.models import Realm, Stream, UserProfile class FillState(models.Model): - property = models.CharField(max_length=40, unique=True) # type: str - end_time = models.DateTimeField() # type: datetime.datetime + property: str = models.CharField(max_length=40, unique=True) + end_time: datetime.datetime = models.DateTimeField() # Valid states are {DONE, STARTED} DONE = 1 STARTED = 2 - state = models.PositiveSmallIntegerField() # type: int + state: int = models.PositiveSmallIntegerField() def __str__(self) -> str: return "" % (self.property, self.end_time, self.state) @@ -37,10 +37,10 @@ class BaseCount(models.Model): # Note: When inheriting from BaseCount, you may want to rearrange # the order of the columns in the migration to make sure they # match how you'd like the table to be arranged. 
- property = models.CharField(max_length=32) # type: str - subgroup = models.CharField(max_length=16, null=True) # type: Optional[str] - end_time = models.DateTimeField() # type: datetime.datetime - value = models.BigIntegerField() # type: int + property: str = models.CharField(max_length=32) + subgroup: Optional[str] = models.CharField(max_length=16, null=True) + end_time: datetime.datetime = models.DateTimeField() + value: int = models.BigIntegerField() class Meta: abstract = True diff --git a/analytics/tests/test_counts.py b/analytics/tests/test_counts.py index 7d5ce9796f..cddcedda50 100644 --- a/analytics/tests/test_counts.py +++ b/analytics/tests/test_counts.py @@ -43,7 +43,7 @@ class AnalyticsTestCase(TestCase): # used to generate unique names in self.create_* self.name_counter = 100 # used as defaults in self.assertCountEquals - self.current_property = None # type: Optional[str] + self.current_property: Optional[str] = None # Lightweight creation of users, streams, and messages def create_user(self, **kwargs: Any) -> UserProfile: @@ -60,7 +60,7 @@ class AnalyticsTestCase(TestCase): kwargs[key] = kwargs.get(key, value) kwargs['delivery_email'] = kwargs['email'] with mock.patch("zerver.lib.create_user.timezone_now", return_value=kwargs['date_joined']): - pass_kwargs = {} # type: Dict[str, Any] + pass_kwargs: Dict[str, Any] = {} if kwargs['is_bot']: pass_kwargs['bot_type'] = UserProfile.DEFAULT_BOT pass_kwargs['bot_owner'] = None @@ -147,7 +147,7 @@ class AnalyticsTestCase(TestCase): 'end_time': self.TIME_ZERO, 'value': 1} for values in arg_values: - kwargs = {} # type: Dict[str, Any] + kwargs: Dict[str, Any] = {} for i in range(len(values)): kwargs[arg_keys[i]] = values[i] for key, value in defaults.items(): diff --git a/analytics/tests/test_views.py b/analytics/tests/test_views.py index f8149229bb..af8238329b 100644 --- a/analytics/tests/test_views.py +++ b/analytics/tests/test_views.py @@ -617,7 +617,7 @@ class TestGetChartDataHelpers(ZulipTestCase): self.assertEqual(last_successful_fill('property'), one_hour_before) def test_sort_by_totals(self) -> None: - empty = [] # type: List[int] + empty: List[int] = [] value_arrays = {'c': [0, 1], 'a': [9], 'b': [1, 1, 1], 'd': empty} self.assertEqual(sort_by_totals(value_arrays), ['a', 'b', 'c', 'd']) diff --git a/analytics/views.py b/analytics/views.py index 1eba8f23e4..c240993ccd 100644 --- a/analytics/views.py +++ b/analytics/views.py @@ -184,10 +184,10 @@ def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: COUNT_STATS['realm_active_humans::day'], COUNT_STATS['active_users_audit:is_bot:day']] tables = [aggregate_table] - subgroup_to_label = { + subgroup_to_label: Dict[CountStat, Dict[Optional[str], str]] = { stats[0]: {None: '_1day'}, stats[1]: {None: '_15day'}, - stats[2]: {'false': 'all_time'}} # type: Dict[CountStat, Dict[Optional[str], str]] + stats[2]: {'false': 'all_time'}} labels_sort_function = None include_empty_subgroups = True elif chart_name == 'messages_sent_over_time': @@ -263,7 +263,7 @@ def get_chart_data(request: HttpRequest, user_profile: UserProfile, chart_name: assert len({stat.frequency for stat in stats}) == 1 end_times = time_range(start, end, stats[0].frequency, min_length) - data = {'end_times': end_times, 'frequency': stats[0].frequency} # type: Dict[str, Any] + data: Dict[str, Any] = {'end_times': end_times, 'frequency': stats[0].frequency} aggregation_level = { InstallationCount: 'everyone', @@ -308,7 +308,7 @@ def sort_by_totals(value_arrays: Dict[str, List[int]]) -> List[str]: def 
sort_client_labels(data: Dict[str, Dict[str, List[int]]]) -> List[str]: realm_order = sort_by_totals(data['everyone']) user_order = sort_by_totals(data['user']) - label_sort_values = {} # type: Dict[str, float] + label_sort_values: Dict[str, float] = {} for i, label in enumerate(realm_order): label_sort_values[label] = i for i, label in enumerate(user_order): @@ -352,7 +352,7 @@ def client_label_map(name: str) -> str: return name def rewrite_client_arrays(value_arrays: Dict[str, List[int]]) -> Dict[str, List[int]]: - mapped_arrays = {} # type: Dict[str, List[int]] + mapped_arrays: Dict[str, List[int]] = {} for label, array in value_arrays.items(): mapped_label = client_label_map(label) if mapped_label in mapped_arrays: @@ -370,7 +370,7 @@ def get_time_series_by_subgroup(stat: CountStat, include_empty_subgroups: bool) -> Dict[str, List[int]]: queryset = table_filtered_to_id(table, key_id).filter(property=stat.property) \ .values_list('subgroup', 'end_time', 'value') - value_dicts = defaultdict(lambda: defaultdict(int)) # type: Dict[Optional[str], Dict[datetime, int]] + value_dicts: Dict[Optional[str], Dict[datetime, int]] = defaultdict(lambda: defaultdict(int)) for subgroup, end_time, value in queryset: value_dicts[subgroup][end_time] = value value_arrays = {} @@ -441,7 +441,7 @@ def get_realm_day_counts() -> Dict[str, Dict[str, str]]: rows = dictfetchall(cursor) cursor.close() - counts = defaultdict(dict) # type: Dict[str, Dict[int, int]] + counts: Dict[str, Dict[int, int]] = defaultdict(dict) for row in rows: counts[row['string_id']][row['age']] = row['cnt'] @@ -585,7 +585,7 @@ def realm_summary_table(realm_minutes: Dict[str, float]) -> str: cursor.close() # Fetch all the realm administrator users - realm_admins = defaultdict(list) # type: Dict[str, List[str]] + realm_admins: Dict[str, List[str]] = defaultdict(list) for up in UserProfile.objects.select_related("realm").filter( role=UserProfile.ROLE_REALM_ADMINISTRATOR, is_active=True @@ -1024,7 +1024,7 @@ def ad_hoc_queries() -> List[Dict[str, str]]: @has_request_variables def get_activity(request: HttpRequest) -> HttpResponse: duration_content, realm_minutes = user_activity_intervals() # type: Tuple[mark_safe, Dict[str, float]] - counts_content = realm_summary_table(realm_minutes) # type: str + counts_content: str = realm_summary_table(realm_minutes) data = [ ('Counts', counts_content), ('Durations', duration_content), @@ -1082,7 +1082,7 @@ def get_confirmations(types: List[int], object_ids: List[int], @require_server_admin def support(request: HttpRequest) -> HttpResponse: - context = {} # type: Dict[str, Any] + context: Dict[str, Any] = {} if settings.BILLING_ENABLED and request.method == "POST": realm_id = request.POST.get("realm_id", None) realm = Realm.objects.get(id=realm_id) @@ -1145,7 +1145,7 @@ def support(request: HttpRequest) -> HttpResponse: context["realms"] = realms - confirmations = [] # type: List[Dict[str, Any]] + confirmations: List[Dict[str, Any]] = [] preregistration_users = PreregistrationUser.objects.filter(email__in=key_words) confirmations += get_confirmations([Confirmation.USER_REGISTRATION, Confirmation.INVITATION, @@ -1229,7 +1229,7 @@ def get_user_activity_summary(records: List[QuerySet]) -> Dict[str, Dict[str, An #: We could use something like: # `Union[Dict[str, Dict[str, int]], Dict[str, Dict[str, datetime]]]` #: but that would require this long `Union` to carry on throughout inner functions. 
- summary = {} # type: Dict[str, Dict[str, Any]] + summary: Dict[str, Dict[str, Any]] = {} def update(action: str, record: QuerySet) -> None: if action not in summary: @@ -1440,8 +1440,8 @@ def realm_user_summary_table(all_records: List[QuerySet], @require_server_admin def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse: - data = [] # type: List[Tuple[str, str]] - all_user_records = {} # type: Dict[str, Any] + data: List[Tuple[str, str]] = [] + all_user_records: Dict[str, Any] = {} try: admins = Realm.objects.get(string_id=realm_str).get_human_admin_users() @@ -1477,7 +1477,7 @@ def get_realm_activity(request: HttpRequest, realm_str: str) -> HttpResponse: def get_user_activity(request: HttpRequest, email: str) -> HttpResponse: records = get_user_activity_records_for_email(email) - data = [] # type: List[Tuple[str, str]] + data: List[Tuple[str, str]] = [] user_summary = get_user_activity_summary(records) content = user_activity_summary_table(user_summary) diff --git a/confirmation/models.py b/confirmation/models.py index d597d9e14a..ba2587a988 100644 --- a/confirmation/models.py +++ b/confirmation/models.py @@ -88,11 +88,11 @@ def confirmation_url(confirmation_key: str, host: str, class Confirmation(models.Model): content_type = models.ForeignKey(ContentType, on_delete=CASCADE) - object_id = models.PositiveIntegerField(db_index=True) # type: int + object_id: int = models.PositiveIntegerField(db_index=True) content_object = GenericForeignKey('content_type', 'object_id') - date_sent = models.DateTimeField(db_index=True) # type: datetime.datetime - confirmation_key = models.CharField(max_length=40, db_index=True) # type: str - realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm] + date_sent: datetime.datetime = models.DateTimeField(db_index=True) + confirmation_key: str = models.CharField(max_length=40, db_index=True) + realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # The following list is the set of valid types USER_REGISTRATION = 1 @@ -103,7 +103,7 @@ class Confirmation(models.Model): MULTIUSE_INVITE = 6 REALM_CREATION = 7 REALM_REACTIVATION = 8 - type = models.PositiveSmallIntegerField() # type: int + type: int = models.PositiveSmallIntegerField() def __str__(self) -> str: return '' % (self.content_object,) @@ -177,7 +177,7 @@ class RealmCreationKey(models.Model): # True just if we should presume the email address the user enters # is theirs, and skip sending mail to it to confirm that. 
- presume_email_valid = models.BooleanField(default=False) # type: bool + presume_email_valid: bool = models.BooleanField(default=False) class Invalid(Exception): pass diff --git a/corporate/lib/stripe.py b/corporate/lib/stripe.py index fad152d328..61029e23cd 100644 --- a/corporate/lib/stripe.py +++ b/corporate/lib/stripe.py @@ -398,7 +398,7 @@ def invoice_plan(plan: CustomerPlan, event_time: datetime) -> None: invoice_item_created = False for ledger_entry in LicenseLedger.objects.filter(plan=plan, id__gt=plan.invoiced_through.id, event_time__lte=event_time).order_by('id'): - price_args = {} # type: Dict[str, int] + price_args: Dict[str, int] = {} if ledger_entry.is_renewal: if plan.fixed_price is not None: price_args = {'amount': plan.fixed_price} @@ -423,7 +423,7 @@ def invoice_plan(plan: CustomerPlan, event_time: datetime) -> None: plan.invoiced_through = ledger_entry plan.invoicing_status = CustomerPlan.STARTED plan.save(update_fields=['invoicing_status', 'invoiced_through']) - idempotency_key = 'ledger_entry:{}'.format(ledger_entry.id) # type: Optional[str] + idempotency_key: Optional[str] = 'ledger_entry:{}'.format(ledger_entry.id) if settings.TEST_SUITE: idempotency_key = None stripe.InvoiceItem.create( diff --git a/corporate/models.py b/corporate/models.py index 44866d411b..c517121f75 100644 --- a/corporate/models.py +++ b/corporate/models.py @@ -8,10 +8,10 @@ from django.db.models import CASCADE from zerver.models import Realm class Customer(models.Model): - realm = models.OneToOneField(Realm, on_delete=CASCADE) # type: Realm - stripe_customer_id = models.CharField(max_length=255, null=True, unique=True) # type: str + realm: Realm = models.OneToOneField(Realm, on_delete=CASCADE) + stripe_customer_id: str = models.CharField(max_length=255, null=True, unique=True) # A percentage, like 85. - default_discount = models.DecimalField(decimal_places=4, max_digits=7, null=True) # type: Optional[Decimal] + default_discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=7, null=True) def __str__(self) -> str: return "" % (self.realm, self.stripe_customer_id) @@ -20,35 +20,35 @@ def get_customer_by_realm(realm: Realm) -> Optional[Customer]: return Customer.objects.filter(realm=realm).first() class CustomerPlan(models.Model): - customer = models.ForeignKey(Customer, on_delete=CASCADE) # type: Customer - automanage_licenses = models.BooleanField(default=False) # type: bool - charge_automatically = models.BooleanField(default=False) # type: bool + customer: Customer = models.ForeignKey(Customer, on_delete=CASCADE) + automanage_licenses: bool = models.BooleanField(default=False) + charge_automatically: bool = models.BooleanField(default=False) # Both of these are in cents. Exactly one of price_per_license or # fixed_price should be set. fixed_price is only for manual deals, and # can't be set via the self-serve billing system. - price_per_license = models.IntegerField(null=True) # type: Optional[int] - fixed_price = models.IntegerField(null=True) # type: Optional[int] + price_per_license: Optional[int] = models.IntegerField(null=True) + fixed_price: Optional[int] = models.IntegerField(null=True) # Discount that was applied. For display purposes only. 
- discount = models.DecimalField(decimal_places=4, max_digits=6, null=True) # type: Optional[Decimal] + discount: Optional[Decimal] = models.DecimalField(decimal_places=4, max_digits=6, null=True) - billing_cycle_anchor = models.DateTimeField() # type: datetime.datetime + billing_cycle_anchor: datetime.datetime = models.DateTimeField() ANNUAL = 1 MONTHLY = 2 - billing_schedule = models.SmallIntegerField() # type: int + billing_schedule: int = models.SmallIntegerField() - next_invoice_date = models.DateTimeField(db_index=True, null=True) # type: Optional[datetime.datetime] - invoiced_through = models.ForeignKey( - 'LicenseLedger', null=True, on_delete=CASCADE, related_name='+') # type: Optional[LicenseLedger] + next_invoice_date: Optional[datetime.datetime] = models.DateTimeField(db_index=True, null=True) + invoiced_through: Optional["LicenseLedger"] = models.ForeignKey( + 'LicenseLedger', null=True, on_delete=CASCADE, related_name='+') DONE = 1 STARTED = 2 - invoicing_status = models.SmallIntegerField(default=DONE) # type: int + invoicing_status: int = models.SmallIntegerField(default=DONE) STANDARD = 1 PLUS = 2 # not available through self-serve signup ENTERPRISE = 10 - tier = models.SmallIntegerField() # type: int + tier: int = models.SmallIntegerField() ACTIVE = 1 DOWNGRADE_AT_END_OF_CYCLE = 2 @@ -57,7 +57,7 @@ class CustomerPlan(models.Model): LIVE_STATUS_THRESHOLD = 10 ENDED = 11 NEVER_STARTED = 12 - status = models.SmallIntegerField(default=ACTIVE) # type: int + status: int = models.SmallIntegerField(default=ACTIVE) # TODO maybe override setattr to ensure billing_cycle_anchor, etc are immutable @@ -72,11 +72,11 @@ def get_current_plan_by_realm(realm: Realm) -> Optional[CustomerPlan]: return get_current_plan_by_customer(customer) class LicenseLedger(models.Model): - plan = models.ForeignKey(CustomerPlan, on_delete=CASCADE) # type: CustomerPlan + plan: CustomerPlan = models.ForeignKey(CustomerPlan, on_delete=CASCADE) # Also True for the initial upgrade. - is_renewal = models.BooleanField(default=False) # type: bool - event_time = models.DateTimeField() # type: datetime.datetime - licenses = models.IntegerField() # type: int + is_renewal: bool = models.BooleanField(default=False) + event_time: datetime.datetime = models.DateTimeField() + licenses: int = models.IntegerField() # None means the plan does not automatically renew. # This cannot be None if plan.automanage_licenses. 
- licenses_at_next_renewal = models.IntegerField(null=True) # type: Optional[int] + licenses_at_next_renewal: Optional[int] = models.IntegerField(null=True) diff --git a/corporate/tests/test_stripe.py b/corporate/tests/test_stripe.py index 16f3472132..0da2062b65 100644 --- a/corporate/tests/test_stripe.py +++ b/corporate/tests/test_stripe.py @@ -149,8 +149,9 @@ def normalize_fixture_data(decorated_function: CallableT, '"%s": 1[5-9][0-9]{8}(?![0-9-])' % (timestamp_field,) ] = '"%s": 1%02d%%07d' % (timestamp_field, i+1) - normalized_values = {pattern: {} - for pattern in pattern_translations.keys()} # type: Dict[str, Dict[str, str]] + normalized_values: Dict[str, Dict[str, str]] = { + pattern: {} for pattern in pattern_translations.keys() + } for fixture_file in fixture_files_for_function(decorated_function): with open(fixture_file) as f: file_content = f.read() @@ -258,10 +259,10 @@ class StripeTestCase(ZulipTestCase): if realm is not None: # nocoverage: TODO host_args['HTTP_HOST'] = realm.host response = self.client_get("/upgrade/", **host_args) - params = { + params: Dict[str, Any] = { 'schedule': 'annual', 'signed_seat_count': self.get_signed_seat_count_from_response(response), - 'salt': self.get_salt_from_response(response)} # type: Dict[str, Any] + 'salt': self.get_salt_from_response(response)} if invoice: # send_invoice params.update({ 'billing_modality': 'send_invoice', @@ -1110,10 +1111,10 @@ class RequiresBillingAccessTest(ZulipTestCase): self.assert_json_error_contains(response, "Must be a billing administrator or an organization") def test_non_admins_blocked_from_json_endpoints(self) -> None: - params = [ + params: List[Tuple[str, Dict[str, Any]]] = [ ("/json/billing/sources/change", {'stripe_token': ujson.dumps('token')}), ("/json/billing/plan/change", {'status': ujson.dumps(1)}), - ] # type: List[Tuple[str, Dict[str, Any]]] + ] for (url, data) in params: self.verify_non_admins_blocked_from_endpoint(url, data) diff --git a/corporate/urls.py b/corporate/urls.py index 0d02abed5b..27b7869da6 100644 --- a/corporate/urls.py +++ b/corporate/urls.py @@ -6,7 +6,7 @@ from django.conf.urls import include, url import corporate.views from zerver.lib.rest import rest_dispatch -i18n_urlpatterns = [ +i18n_urlpatterns: Any = [ # Zephyr/MIT url(r'^zephyr/$', TemplateView.as_view(template_name='corporate/zephyr.html')), url(r'^zephyr-mirror/$', TemplateView.as_view(template_name='corporate/zephyr-mirror.html')), @@ -16,7 +16,7 @@ i18n_urlpatterns = [ # Billing url(r'^billing/$', corporate.views.billing_home, name='corporate.views.billing_home'), url(r'^upgrade/$', corporate.views.initial_upgrade, name='corporate.views.initial_upgrade'), -] # type: Any +] v1_api_and_json_patterns = [ url(r'^billing/upgrade$', rest_dispatch, diff --git a/corporate/views.py b/corporate/views.py index 7890011f27..8c4e7957eb 100644 --- a/corporate/views.py +++ b/corporate/views.py @@ -57,7 +57,7 @@ def check_upgrade_parameters( # Should only be called if the customer is being charged automatically def payment_method_string(stripe_customer: stripe.Customer) -> str: - stripe_source = stripe_customer.default_source # type: Optional[Union[stripe.Card, stripe.Source]] + stripe_source: Optional[Union[stripe.Card, stripe.Source]] = stripe_customer.default_source # In case of e.g. 
an expired card if stripe_source is None: # nocoverage return _("No payment method on file") @@ -128,7 +128,7 @@ def initial_upgrade(request: HttpRequest) -> HttpResponse: seat_count = get_latest_seat_count(user.realm) signed_seat_count, salt = sign_string(str(seat_count)) - context = { + context: Dict[str, Any] = { 'publishable_key': STRIPE_PUBLISHABLE_KEY, 'email': user.delivery_email, 'seat_count': seat_count, @@ -143,7 +143,7 @@ def initial_upgrade(request: HttpRequest) -> HttpResponse: 'monthly_price': 800, 'percent_off': float(percent_off), }, - } # type: Dict[str, Any] + } response = render(request, 'corporate/upgrade.html', context=context) return response @@ -157,7 +157,7 @@ def billing_home(request: HttpRequest) -> HttpResponse: return HttpResponseRedirect(reverse('corporate.views.initial_upgrade')) if not user.is_realm_admin and not user.is_billing_admin: - context = {'admin_access': False} # type: Dict[str, Any] + context: Dict[str, Any] = {'admin_access': False} return render(request, 'corporate/billing.html', context=context) context = { diff --git a/docs/conf.py b/docs/conf.py index 931510f2e7..15423e42b5 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -32,8 +32,8 @@ from version import ZULIP_VERSION # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ -] # type: List[str] +extensions: List[str] = [ +] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] @@ -63,7 +63,7 @@ release = ZULIP_VERSION # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None # type: Optional[str] +language: Optional[str] = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: @@ -217,7 +217,7 @@ htmlhelp_basename = 'zulip-contributor-docsdoc' # -- Options for LaTeX output --------------------------------------------- -latex_elements = { +latex_elements: Dict[str, str] = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', @@ -229,7 +229,7 @@ latex_elements = { # Latex figure (float) alignment #'figure_align': 'htbp', -} # type: Dict[str, str] +} # Grouping the document tree into LaTeX files. 
List of tuples # (source start file, target name, title, diff --git a/frontend_tests/run-casper b/frontend_tests/run-casper index 8229e7b984..df98d3da6a 100755 --- a/frontend_tests/run-casper +++ b/frontend_tests/run-casper @@ -104,15 +104,15 @@ def run_tests(files: Iterable[str], external_host: str) -> None: else: loop_cnt = None - remote_debug = [] # type: List[str] + remote_debug: List[str] = [] if options.remote_debug: remote_debug = ["--remote-debugger-port=7777", "--remote-debugger-autorun=yes"] - verbose = [] # type: List[str] + verbose: List[str] = [] if options.verbose: verbose = ["--verbose", "--log-level=debug"] - xunit_export = [] # type: List[str] + xunit_export: List[str] = [] if options.xunit_export: xunit_export = ["--xunit=var/xunit-test-results/casper/result.xml"] diff --git a/zerver/apps.py b/zerver/apps.py index 3dabc73806..d1b95a3827 100644 --- a/zerver/apps.py +++ b/zerver/apps.py @@ -12,7 +12,7 @@ def flush_cache(sender: AppConfig, **kwargs: Any) -> None: class ZerverConfig(AppConfig): - name = "zerver" # type: str + name: str = "zerver" def ready(self) -> None: # We import zerver.signals here for the side effect of diff --git a/zerver/context_processors.py b/zerver/context_processors.py index 0b63c5dd68..1195f52bb9 100644 --- a/zerver/context_processors.py +++ b/zerver/context_processors.py @@ -152,14 +152,14 @@ def login_context(request: HttpRequest) -> Dict[str, Any]: realm_description = get_realm_rendered_description(realm) realm_invite_required = realm.invite_required - context = { + context: Dict[str, Any] = { 'realm_invite_required': realm_invite_required, 'realm_description': realm_description, 'require_email_format_usernames': require_email_format_usernames(realm), 'password_auth_enabled': password_auth_enabled(realm), 'any_social_backend_enabled': any_social_backend_enabled(realm), 'two_factor_authentication_enabled': settings.TWO_FACTOR_AUTHENTICATION_ENABLED, - } # type: Dict[str, Any] + } if realm is not None and realm.description: context['OPEN_GRAPH_TITLE'] = realm.name diff --git a/zerver/data_import/gitter.py b/zerver/data_import/gitter.py index c31eead604..d8f5e546ef 100644 --- a/zerver/data_import/gitter.py +++ b/zerver/data_import/gitter.py @@ -32,7 +32,7 @@ def gitter_workspace_to_realm(domain_name: str, gitter_data: GitterDataT, 3. 
user_map, which is a dictionary to map from gitter user id to zulip user id """ NOW = float(timezone_now().timestamp()) - zerver_realm = build_zerver_realm(realm_id, realm_subdomain, NOW, 'Gitter') # type: List[ZerverFieldsT] + zerver_realm: List[ZerverFieldsT] = build_zerver_realm(realm_id, realm_subdomain, NOW, 'Gitter') realm = build_realm(zerver_realm, realm_id, domain_name) zerver_userprofile, avatars, user_map = build_userprofile(int(NOW), domain_name, gitter_data) @@ -60,8 +60,8 @@ def build_userprofile(timestamp: Any, domain_name: str, """ logging.info('######### IMPORTING USERS STARTED #########\n') zerver_userprofile = [] - avatar_list = [] # type: List[ZerverFieldsT] - user_map = {} # type: Dict[str, int] + avatar_list: List[ZerverFieldsT] = [] + user_map: Dict[str, int] = {} user_id = 0 for data in gitter_data: @@ -169,7 +169,7 @@ def convert_gitter_workspace_messages(gitter_data: GitterDataT, output_dir: str, while True: message_json = {} zerver_message = [] - zerver_usermessage = [] # type: List[ZerverFieldsT] + zerver_usermessage: List[ZerverFieldsT] = [] message_data = gitter_data[low_index: upper_index] if len(message_data) == 0: break @@ -262,7 +262,7 @@ def do_convert_data(gitter_data_file: str, output_dir: str, threads: int=6) -> N os.makedirs(avatar_realm_folder, exist_ok=True) avatar_records = process_avatars(avatar_list, avatar_folder, realm_id, threads) - attachment = {"zerver_attachment": []} # type: Dict[str, List[Any]] + attachment: Dict[str, List[Any]] = {"zerver_attachment": []} # IO realm.json create_converted_data_files(realm, output_dir, '/realm.json') diff --git a/zerver/data_import/hipchat.py b/zerver/data_import/hipchat.py index ff46e1cca1..9fd95500e8 100755 --- a/zerver/data_import/hipchat.py +++ b/zerver/data_import/hipchat.py @@ -244,11 +244,11 @@ def convert_room_data(raw_data: List[ZerverFieldsT], ) if invite_only: - users = { + users: Set[int] = { user_id_mapper.get(key) for key in in_dict['members'] if user_id_mapper.has(key) - } # type: Set[int] + } if user_id_mapper.has(in_dict['owner']): owner = user_id_mapper.get(in_dict['owner']) @@ -671,7 +671,7 @@ def process_raw_message_batch(realm_id: int, content = content.replace('@here', '@**all**') return content - mention_map = dict() # type: Dict[int, Set[int]] + mention_map: Dict[int, Set[int]] = dict() zerver_message = [] @@ -807,7 +807,7 @@ def do_convert_data(input_tar_file: str, if api_token is None: if slim_mode: - public_stream_subscriptions = [] # type: List[ZerverFieldsT] + public_stream_subscriptions: List[ZerverFieldsT] = [] else: public_stream_subscriptions = build_public_stream_subscriptions( zerver_userprofile=normal_users, diff --git a/zerver/data_import/hipchat_attachment.py b/zerver/data_import/hipchat_attachment.py index b8985830cf..d56a4a9c11 100644 --- a/zerver/data_import/hipchat_attachment.py +++ b/zerver/data_import/hipchat_attachment.py @@ -11,7 +11,7 @@ from typing import Any, Dict, List, Optional class AttachmentHandler: def __init__(self) -> None: - self.info_dict = dict() # type: Dict[str, Dict[str, Any]] + self.info_dict: Dict[str, Dict[str, Any]] = dict() def handle_message_data(self, realm_id: int, @@ -77,8 +77,8 @@ class AttachmentHandler: return content def write_info(self, output_dir: str, realm_id: int) -> None: - attachments = [] # type: List[Dict[str, Any]] - uploads_records = [] # type: List[Dict[str, Any]] + attachments: List[Dict[str, Any]] = [] + uploads_records: List[Dict[str, Any]] = [] def add_attachment(info: Dict[str, Any]) -> None: build_attachment( diff 
--git a/zerver/data_import/hipchat_user.py b/zerver/data_import/hipchat_user.py index 8f94c22fdc..92196a0f88 100644 --- a/zerver/data_import/hipchat_user.py +++ b/zerver/data_import/hipchat_user.py @@ -23,8 +23,8 @@ class UserHandler: ''' def __init__(self) -> None: - self.id_to_user_map = dict() # type: Dict[int, Dict[str, Any]] - self.name_to_mirror_user_map = dict() # type: Dict[str, Dict[str, Any]] + self.id_to_user_map: Dict[int, Dict[str, Any]] = dict() + self.name_to_mirror_user_map: Dict[str, Dict[str, Any]] = dict() self.mirror_user_id = 1 def add_user(self, user: Dict[str, Any]) -> None: diff --git a/zerver/data_import/import_util.py b/zerver/data_import/import_util.py index 9f73a44d37..51ebf6041b 100644 --- a/zerver/data_import/import_util.py +++ b/zerver/data_import/import_util.py @@ -21,8 +21,8 @@ ZerverFieldsT = Dict[str, Any] class SubscriberHandler: def __init__(self) -> None: - self.stream_info = dict() # type: Dict[int, Set[int]] - self.huddle_info = dict() # type: Dict[int, Set[int]] + self.stream_info: Dict[int, Set[int]] = dict() + self.huddle_info: Dict[int, Set[int]] = dict() def set_info(self, users: Set[int], @@ -105,7 +105,7 @@ def make_subscriber_map(zerver_subscription: List[ZerverFieldsT]) -> Dict[int, S This can be convenient for building up UserMessage rows. ''' - subscriber_map = dict() # type: Dict[int, Set[int]] + subscriber_map: Dict[int, Set[int]] = dict() for sub in zerver_subscription: user_id = sub['user_profile'] recipient_id = sub['recipient'] @@ -163,7 +163,7 @@ def build_public_stream_subscriptions( users to every public stream. This returns a list of Subscription dicts. ''' - subscriptions = [] # type: List[ZerverFieldsT] + subscriptions: List[ZerverFieldsT] = [] public_stream_ids = { stream['id'] @@ -199,7 +199,7 @@ def build_stream_subscriptions( zerver_recipient: List[ZerverFieldsT], zerver_stream: List[ZerverFieldsT]) -> List[ZerverFieldsT]: - subscriptions = [] # type: List[ZerverFieldsT] + subscriptions: List[ZerverFieldsT] = [] stream_ids = {stream['id'] for stream in zerver_stream} @@ -227,7 +227,7 @@ def build_huddle_subscriptions( zerver_recipient: List[ZerverFieldsT], zerver_huddle: List[ZerverFieldsT]) -> List[ZerverFieldsT]: - subscriptions = [] # type: List[ZerverFieldsT] + subscriptions: List[ZerverFieldsT] = [] huddle_ids = {huddle['id'] for huddle in zerver_huddle} @@ -252,7 +252,7 @@ def build_huddle_subscriptions( def build_personal_subscriptions(zerver_recipient: List[ZerverFieldsT]) -> List[ZerverFieldsT]: - subscriptions = [] # type: List[ZerverFieldsT] + subscriptions: List[ZerverFieldsT] = [] personal_recipients = [ recipient @@ -586,7 +586,7 @@ def run_parallel_wrapper(f: Callable[[ListJobData], None], full_items: List[List if count % 1000 == 0: logging.info("A download thread finished %s items" % (count,)) return 0 - job_lists = [full_items[i::threads] for i in range(threads)] # type: List[List[ListJobData]] + job_lists: List[List[ListJobData]] = [full_items[i::threads] for i in range(threads)] return run_parallel(wrapping_function, job_lists, threads=threads) def process_uploads(upload_list: List[ZerverFieldsT], upload_dir: str, diff --git a/zerver/data_import/mattermost.py b/zerver/data_import/mattermost.py index ab2851e760..4af9b3f0bf 100644 --- a/zerver/data_import/mattermost.py +++ b/zerver/data_import/mattermost.py @@ -124,8 +124,8 @@ def convert_channel_data(channel_data: List[ZerverFieldsT], if d['team'] == team_name ] - channel_members_map = {} # type: Dict[str, List[str]] - channel_admins_map = {} # type: 
Dict[str, List[str]] + channel_members_map: Dict[str, List[str]] = {} + channel_admins_map: Dict[str, List[str]] = {} def initialize_stream_membership_dicts() -> None: for channel in channel_data: @@ -310,7 +310,7 @@ def process_raw_message_batch(realm_id: int, content = content.replace('@here', '@**all**') return content - mention_map = dict() # type: Dict[int, Set[int]] + mention_map: Dict[int, Set[int]] = dict() zerver_message = [] import html2text @@ -672,7 +672,7 @@ def reset_mirror_dummy_users(username_to_user: Dict[str, Dict[str, Any]]) -> Non user["is_mirror_dummy"] = False def mattermost_data_file_to_dict(mattermost_data_file: str) -> Dict[str, Any]: - mattermost_data = {} # type: Dict[str, Any] + mattermost_data: Dict[str, Any] = {} mattermost_data["version"] = [] mattermost_data["team"] = [] mattermost_data["channel"] = [] @@ -694,7 +694,7 @@ def mattermost_data_file_to_dict(mattermost_data_file: str) -> Dict[str, Any]: return mattermost_data def do_convert_data(mattermost_data_dir: str, output_dir: str, masking_content: bool) -> None: - username_to_user = {} # type: Dict[str, Dict[str, Any]] + username_to_user: Dict[str, Dict[str, Any]] = {} os.makedirs(output_dir, exist_ok=True) if os.listdir(output_dir): # nocoverage @@ -741,7 +741,7 @@ def do_convert_data(mattermost_data_dir: str, output_dir: str, masking_content: ) realm['zerver_stream'] = zerver_stream - zerver_huddle = [] # type: List[ZerverFieldsT] + zerver_huddle: List[ZerverFieldsT] = [] if len(mattermost_data["team"]) == 1: zerver_huddle = convert_huddle_data( huddle_data=mattermost_data["direct_channel"], @@ -796,7 +796,7 @@ def do_convert_data(mattermost_data_dir: str, output_dir: str, masking_content: zerver_subscription=zerver_subscription, ) - total_reactions = [] # type: List[Dict[str, Any]] + total_reactions: List[Dict[str, Any]] = [] write_message_data( num_teams=len(mattermost_data["team"]), team_name=team_name, @@ -825,7 +825,7 @@ def do_convert_data(mattermost_data_dir: str, output_dir: str, masking_content: create_converted_data_files([], realm_output_dir, '/uploads/records.json') # Mattermost currently doesn't support exporting attachments - attachment = {"zerver_attachment": []} # type: Dict[str, List[Any]] + attachment: Dict[str, List[Any]] = {"zerver_attachment": []} create_converted_data_files(attachment, realm_output_dir, '/attachment.json') logging.info('Start making tarball') diff --git a/zerver/data_import/mattermost_user.py b/zerver/data_import/mattermost_user.py index e947738d25..c8585584a1 100644 --- a/zerver/data_import/mattermost_user.py +++ b/zerver/data_import/mattermost_user.py @@ -11,7 +11,7 @@ class UserHandler: ''' def __init__(self) -> None: - self.id_to_user_map = dict() # type: Dict[int, Dict[str, Any]] + self.id_to_user_map: Dict[int, Dict[str, Any]] = dict() def add_user(self, user: Dict[str, Any]) -> None: user_id = user['id'] diff --git a/zerver/data_import/sequencer.py b/zerver/data_import/sequencer.py index ab38c77df0..c526c82250 100644 --- a/zerver/data_import/sequencer.py +++ b/zerver/data_import/sequencer.py @@ -28,7 +28,7 @@ def sequencer() -> Callable[[str], int]: NEXT_ID = sequencer() message_id = NEXT_ID('message') ''' - seq_dict = dict() # type: Dict[str, Callable[[], int]] + seq_dict: Dict[str, Callable[[], int]] = dict() def next_one(name: str) -> int: if name not in seq_dict: @@ -59,7 +59,7 @@ def is_int(key: Any) -> bool: class IdMapper: def __init__(self) -> None: - self.map = dict() # type: Dict[Any, int] + self.map: Dict[Any, int] = dict() self.cnt = 0 def 
has(self, their_id: Any) -> bool: diff --git a/zerver/data_import/slack.py b/zerver/data_import/slack.py index da7f28fa85..14a05510a2 100755 --- a/zerver/data_import/slack.py +++ b/zerver/data_import/slack.py @@ -62,7 +62,7 @@ def slack_workspace_to_realm(domain_name: str, realm_id: int, user_list: List[Ze """ NOW = float(timezone_now().timestamp()) - zerver_realm = build_zerver_realm(realm_id, realm_subdomain, NOW, 'Slack') # type: List[ZerverFieldsT] + zerver_realm: List[ZerverFieldsT] = build_zerver_realm(realm_id, realm_subdomain, NOW, 'Slack') realm = build_realm(zerver_realm, realm_id, domain_name) zerver_userprofile, avatars, slack_user_id_to_zulip_user_id, zerver_customprofilefield, \ @@ -127,17 +127,17 @@ def users_to_zerver_userprofile(slack_data_dir: str, users: List[ZerverFieldsT], """ logging.info('######### IMPORTING USERS STARTED #########\n') zerver_userprofile = [] - zerver_customprofilefield = [] # type: List[ZerverFieldsT] - zerver_customprofilefield_values = [] # type: List[ZerverFieldsT] - avatar_list = [] # type: List[ZerverFieldsT] + zerver_customprofilefield: List[ZerverFieldsT] = [] + zerver_customprofilefield_values: List[ZerverFieldsT] = [] + avatar_list: List[ZerverFieldsT] = [] slack_user_id_to_zulip_user_id = {} # The user data we get from the slack api does not contain custom profile data # Hence we get it from the slack zip file slack_data_file_user_list = get_data_file(slack_data_dir + '/users.json') - slack_user_id_to_custom_profile_fields = {} # type: ZerverFieldsT - slack_custom_field_name_to_zulip_custom_field_id = {} # type: ZerverFieldsT + slack_user_id_to_custom_profile_fields: ZerverFieldsT = {} + slack_custom_field_name_to_zulip_custom_field_id: ZerverFieldsT = {} for user in slack_data_file_user_list: process_slack_custom_fields(user, slack_user_id_to_custom_profile_fields) @@ -498,8 +498,8 @@ def process_long_term_idle_users(slack_data_dir: str, users: List[ZerverFieldsT] """ all_messages = get_messages_iterator(slack_data_dir, added_channels, added_mpims, dm_members) - sender_counts = defaultdict(int) # type: Dict[str, int] - recent_senders = set() # type: Set[str] + sender_counts: Dict[str, int] = defaultdict(int) + recent_senders: Set[str] = set() NOW = float(timezone_now().timestamp()) for message in all_messages: timestamp = float(message['ts']) @@ -563,9 +563,9 @@ def convert_slack_workspace_messages(slack_data_dir: str, users: List[ZerverFiel all_messages = get_messages_iterator(slack_data_dir, added_channels, added_mpims, dm_members) logging.info('######### IMPORTING MESSAGES STARTED #########\n') - total_reactions = [] # type: List[ZerverFieldsT] - total_attachments = [] # type: List[ZerverFieldsT] - total_uploads = [] # type: List[ZerverFieldsT] + total_reactions: List[ZerverFieldsT] = [] + total_attachments: List[ZerverFieldsT] = [] + total_uploads: List[ZerverFieldsT] = [] dump_file_id = 1 @@ -615,7 +615,7 @@ def get_messages_iterator(slack_data_dir: str, added_channels: Dict[str, Any], large imports that can OOM kill.""" dir_names = list(added_channels.keys()) + list(added_mpims.keys()) + list(dm_members.keys()) - all_json_names = defaultdict(list) # type: Dict[str, List[str]] + all_json_names: Dict[str, List[str]] = defaultdict(list) for dir_name in dir_names: dir_path = os.path.join(slack_data_dir, dir_name) json_names = os.listdir(dir_path) @@ -624,7 +624,7 @@ def get_messages_iterator(slack_data_dir: str, added_channels: Dict[str, Any], # Sort json_name by date for json_name in sorted(all_json_names.keys()): - 
messages_for_one_day = [] # type: List[ZerverFieldsT] + messages_for_one_day: List[ZerverFieldsT] = [] for dir_path in all_json_names[json_name]: message_dir = os.path.join(dir_path, json_name) dir_name = os.path.basename(dir_path) @@ -675,10 +675,10 @@ def channel_message_to_zerver_message(realm_id: int, 5. reaction_list, which is a list of all user reactions """ zerver_message = [] - zerver_usermessage = [] # type: List[ZerverFieldsT] - uploads_list = [] # type: List[ZerverFieldsT] - zerver_attachment = [] # type: List[ZerverFieldsT] - reaction_list = [] # type: List[ZerverFieldsT] + zerver_usermessage: List[ZerverFieldsT] = [] + uploads_list: List[ZerverFieldsT] = [] + zerver_attachment: List[ZerverFieldsT] = [] + reaction_list: List[ZerverFieldsT] = [] total_user_messages = 0 total_skipped_user_messages = 0 @@ -947,7 +947,7 @@ def fetch_shared_channel_users(user_list: List[ZerverFieldsT], slack_data_dir: s normal_user_ids = set() mirror_dummy_user_ids = set() added_channels = {} - team_id_to_domain = {} # type: Dict[str, str] + team_id_to_domain: Dict[str, str] = {} for user in user_list: user["is_mirror_dummy"] = False normal_user_ids.add(user["id"]) diff --git a/zerver/decorator.py b/zerver/decorator.py index 5a45987a78..ea72e257ed 100644 --- a/zerver/decorator.py +++ b/zerver/decorator.py @@ -61,7 +61,7 @@ log_to_file(webhook_unexpected_events_logger, settings.WEBHOOK_UNEXPECTED_EVENTS_LOG_PATH) def cachify(method: Callable[..., ReturnT]) -> Callable[..., ReturnT]: - dct = {} # type: Dict[Tuple[Any, ...], ReturnT] + dct: Dict[Tuple[Any, ...], ReturnT] = {} def cache_wrapper(*args: Any) -> ReturnT: tup = tuple(args) @@ -131,7 +131,7 @@ def get_client_name(request: HttpRequest) -> str: if 'client' in request.POST: return request.POST['client'] if "HTTP_USER_AGENT" in request.META: - user_agent = parse_user_agent(request.META["HTTP_USER_AGENT"]) # type: Optional[Dict[str, str]] + user_agent: Optional[Dict[str, str]] = parse_user_agent(request.META["HTTP_USER_AGENT"]) else: user_agent = None if user_agent is not None: @@ -167,7 +167,7 @@ class InvalidZulipServerError(JsonableError): data_fields = ['role'] def __init__(self, role: str) -> None: - self.role = role # type: str + self.role: str = role @staticmethod def msg_format() -> str: diff --git a/zerver/forms.py b/zerver/forms.py index d351cb9144..31c6b15f0a 100644 --- a/zerver/forms.py +++ b/zerver/forms.py @@ -264,7 +264,7 @@ class ZulipPasswordResetForm(PasswordResetForm): logging.info("Too many password reset attempts for email %s" % (email,)) return - user = None # type: Optional[UserProfile] + user: Optional[UserProfile] = None try: user = get_user_by_delivery_email(email, realm) except UserProfile.DoesNotExist: @@ -333,7 +333,7 @@ class OurAuthenticationForm(AuthenticationForm): (username, subdomain)) raise ValidationError("Realm does not exist") - return_data = {} # type: Dict[str, Any] + return_data: Dict[str, Any] = {} try: self.user_cache = authenticate(request=self.request, username=username, password=password, realm=realm, return_data=return_data) diff --git a/zerver/lib/actions.py b/zerver/lib/actions.py index ce6eb3a708..477ca57e3b 100644 --- a/zerver/lib/actions.py +++ b/zerver/lib/actions.py @@ -363,7 +363,7 @@ def process_new_human_user(user_profile: UserProfile, mit_beta_user = user_profile.realm.is_zephyr_mirror_realm if prereg_user is not None: streams = prereg_user.streams.all() - acting_user = prereg_user.referred_by # type: Optional[UserProfile] + acting_user: Optional[UserProfile] = prereg_user.referred_by 
else: streams = [] acting_user = None @@ -447,7 +447,7 @@ def notify_created_user(user_profile: UserProfile) -> None: # values are expected to be added in a # later event. custom_profile_field_data={}) - event = dict(type="realm_user", op="add", person=person) # type: Dict[str, Any] + event: Dict[str, Any] = dict(type="realm_user", op="add", person=person) send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id)) def created_bot_event(user_profile: UserProfile) -> Dict[str, Any]: @@ -992,9 +992,9 @@ def get_recipient_info(recipient: Recipient, stream_topic: Optional[StreamTopicTarget], possibly_mentioned_user_ids: Optional[Set[int]]=None, possible_wildcard_mention: bool=True) -> RecipientInfoResult: - stream_push_user_ids = set() # type: Set[int] - stream_email_user_ids = set() # type: Set[int] - wildcard_mention_user_ids = set() # type: Set[int] + stream_push_user_ids: Set[int] = set() + stream_email_user_ids: Set[int] = set() + wildcard_mention_user_ids: Set[int] = set() if recipient.type == Recipient.PERSONAL: # The sender and recipient may be the same id, so @@ -1174,7 +1174,7 @@ def get_recipient_info(recipient: Recipient, if is_service_bot(row) ] - info = dict( + info: RecipientInfoResult = dict( active_user_ids=active_user_ids, push_notify_user_ids=push_notify_user_ids, stream_push_user_ids=stream_push_user_ids, @@ -1184,14 +1184,14 @@ def get_recipient_info(recipient: Recipient, long_term_idle_user_ids=long_term_idle_user_ids, default_bot_user_ids=default_bot_user_ids, service_bot_tuples=service_bot_tuples - ) # type: RecipientInfoResult + ) return info def get_service_bot_events(sender: UserProfile, service_bot_tuples: List[Tuple[int, int]], mentioned_user_ids: Set[int], active_user_ids: Set[int], recipient_type: int) -> Dict[str, List[Dict[str, Any]]]: - event_dict = defaultdict(list) # type: Dict[str, List[Dict[str, Any]]] + event_dict: Dict[str, List[Dict[str, Any]]] = defaultdict(list) # Avoid infinite loops by preventing messages sent by bots from generating # Service events. 
@@ -1247,7 +1247,7 @@ def get_service_bot_events(sender: UserProfile, service_bot_tuples: List[Tuple[i return event_dict def do_schedule_messages(messages: Sequence[Mapping[str, Any]]) -> List[int]: - scheduled_messages = [] # type: List[ScheduledMessage] + scheduled_messages: List[ScheduledMessage] = [] for message in messages: scheduled_message = ScheduledMessage() @@ -1283,8 +1283,8 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, messages = [message for message in messages_maybe_none if message is not None] # Filter out zephyr mirror anomalies where the message was already sent - already_sent_ids = [] # type: List[int] - new_messages = [] # type: List[MutableMapping[str, Any]] + already_sent_ids: List[int] = [] + new_messages: List[MutableMapping[str, Any]] = [] for message in messages: if isinstance(message['message'], int): already_sent_ids.append(message['message']) @@ -1292,7 +1292,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, new_messages.append(message) messages = new_messages - links_for_embed = set() # type: Set[str] + links_for_embed: Set[str] = set() # For consistency, changes to the default values for these gets should also be applied # to the default args in do_send_message for message in messages: @@ -1310,10 +1310,10 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, if message['message'].is_stream_message(): stream_id = message['message'].recipient.type_id - stream_topic = StreamTopicTarget( + stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget( stream_id=stream_id, topic_name=message['message'].topic_name() - ) # type: Optional[StreamTopicTarget] + ) else: stream_topic = None @@ -1375,7 +1375,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, message['um_eligible_user_ids'] |= mentioned_bot_user_ids # Save the message receipts in the database - user_message_flags = defaultdict(dict) # type: Dict[int, Dict[int, List[str]]] + user_message_flags: Dict[int, Dict[int, List[str]]] = defaultdict(dict) with transaction.atomic(): Message.objects.bulk_create([message['message'] for message in messages]) @@ -1386,7 +1386,7 @@ def do_send_messages(messages_maybe_none: Sequence[Optional[MutableMapping[str, message['message'].has_attachment = True message['message'].save(update_fields=['has_attachment']) - ums = [] # type: List[UserMessageLite] + ums: List[UserMessageLite] = [] for message in messages: # Service bots (outgoing webhook bots and embedded bots) don't store UserMessage rows; # they will be processed later. @@ -1661,13 +1661,15 @@ def notify_reaction_update(user_profile: UserProfile, message: Message, 'email': user_profile.email, 'full_name': user_profile.full_name} - event = {'type': 'reaction', - 'op': op, - 'user': user_dict, - 'message_id': message.id, - 'emoji_name': reaction.emoji_name, - 'emoji_code': reaction.emoji_code, - 'reaction_type': reaction.reaction_type} # type: Dict[str, Any] + event: Dict[str, Any] = { + 'type': 'reaction', + 'op': op, + 'user': user_dict, + 'message_id': message.id, + 'emoji_name': reaction.emoji_name, + 'emoji_code': reaction.emoji_code, + 'reaction_type': reaction.reaction_type, + } # Update the cached message since new reaction is added. update_to_dict_cache([message]) @@ -1860,13 +1862,13 @@ def get_recipient_from_user_profiles(recipient_profiles: Sequence[UserProfile], # Otherwise, we need a huddle. 
Make sure the sender is included in huddle messages recipient_profiles_map[sender.id] = sender - user_ids = {user_id for user_id in recipient_profiles_map} # type: Set[int] + user_ids: Set[int] = {user_id for user_id in recipient_profiles_map} return get_huddle_recipient(user_ids) def validate_recipient_user_profiles(user_profiles: Sequence[UserProfile], sender: UserProfile, allow_deactivated: bool=False) -> Sequence[UserProfile]: - recipient_profiles_map = {} # type: Dict[int, UserProfile] + recipient_profiles_map: Dict[int, UserProfile] = {} # We exempt cross-realm bots from the check that all the recipients # are in the same realm. @@ -1896,7 +1898,7 @@ def user_ids_for_emails( we still have to support mobile sending emails in typing notifications. ''' - user_ids = [] # type: List[int] + user_ids: List[int] = [] for email in emails: try: user_profile = get_user_including_cross_realm(email, realm) @@ -2537,7 +2539,7 @@ def bulk_get_subscriber_user_ids(stream_dicts: Iterable[Mapping[str, Any]], for stream_id in stream_ids ]) - result = {stream["id"]: [] for stream in stream_dicts} # type: Dict[int, List[int]] + result: Dict[int, List[int]] = {stream["id"]: [] for stream in stream_dicts} if not recipient_ids: return result @@ -2687,7 +2689,7 @@ def get_user_ids_for_streams(streams: Iterable[Stream]) -> Dict[int, List[int]]: get_stream_id = itemgetter('recipient__type_id') - all_subscribers_by_stream = defaultdict(list) # type: Dict[int, List[int]] + all_subscribers_by_stream: Dict[int, List[int]] = defaultdict(list) for stream_id, rows in itertools.groupby(all_subs, get_stream_id): user_ids = [row['user_profile_id'] for row in rows] all_subscribers_by_stream[stream_id] = user_ids @@ -2714,25 +2716,25 @@ def bulk_add_subscriptions(streams: Iterable[Stream], acting_user: Optional[UserProfile]=None) -> SubT: users = list(users) - recipients_map = {stream.id: stream.recipient_id for stream in streams} # type: Dict[int, int] - recipient_ids = [recipient_id for recipient_id in recipients_map.values()] # type: List[int] + recipients_map: Dict[int, int] = {stream.id: stream.recipient_id for stream in streams} + recipient_ids: List[int] = [recipient_id for recipient_id in recipients_map.values()] - stream_map = {} # type: Dict[int, Stream] + stream_map: Dict[int, Stream] = {} for stream in streams: stream_map[recipients_map[stream.id]] = stream - subs_by_user = defaultdict(list) # type: Dict[int, List[Subscription]] + subs_by_user: Dict[int, List[Subscription]] = defaultdict(list) all_subs_query = get_stream_subscriptions_for_users(users).select_related('user_profile') for sub in all_subs_query: subs_by_user[sub.user_profile_id].append(sub) realm = users[0].realm - already_subscribed = [] # type: List[Tuple[UserProfile, Stream]] - subs_to_activate = [] # type: List[Tuple[Subscription, Stream]] - new_subs = [] # type: List[Tuple[UserProfile, int, Stream]] + already_subscribed: List[Tuple[UserProfile, Stream]] = [] + subs_to_activate: List[Tuple[Subscription, Stream]] = [] + new_subs: List[Tuple[UserProfile, int, Stream]] = [] for user_profile in users: - needs_new_sub = set(recipient_ids) # type: Set[int] + needs_new_sub: Set[int] = set(recipient_ids) for sub in subs_by_user[user_profile.id]: if sub.recipient_id in needs_new_sub: needs_new_sub.remove(sub.recipient_id) @@ -2747,7 +2749,7 @@ def bulk_add_subscriptions(streams: Iterable[Stream], for recipient_id in needs_new_sub: new_subs.append((user_profile, recipient_id, stream_map[recipient_id])) - subs_to_add = [] # type: List[Tuple[Subscription, 
Stream]] + subs_to_add: List[Tuple[Subscription, Stream]] = [] for (user_profile, recipient_id, stream) in new_subs: if color_map is not None and stream.name in color_map: color = color_map[stream.name] @@ -2772,7 +2774,7 @@ def bulk_add_subscriptions(streams: Iterable[Stream], event_time = timezone_now() event_last_message_id = get_last_message_id() - all_subscription_logs = [] # type: (List[RealmAuditLog]) + all_subscription_logs: (List[RealmAuditLog]) = [] for (sub, stream) in subs_to_add: all_subscription_logs.append(RealmAuditLog(realm=realm, acting_user=acting_user, @@ -2814,8 +2816,8 @@ def bulk_add_subscriptions(streams: Iterable[Stream], user_ids = all_subscribers_by_stream[stream.id] return user_ids - sub_tuples_by_user = defaultdict(list) # type: Dict[int, List[Tuple[Subscription, Stream]]] - new_streams = set() # type: Set[Tuple[int, int]] + sub_tuples_by_user: Dict[int, List[Tuple[Subscription, Stream]]] = defaultdict(list) + new_streams: Set[Tuple[int, int]] = set() for (sub, stream) in subs_to_add + subs_to_activate: sub_tuples_by_user[sub.user_profile.id].append((sub, stream)) new_streams.add((sub.user_profile.id, stream.id)) @@ -2918,7 +2920,7 @@ def bulk_remove_subscriptions(users: Iterable[UserProfile], def get_non_subscribed_tups() -> List[Tuple[UserProfile, Stream]]: stream_ids = {stream.id for stream in streams} - not_subscribed = [] # type: List[Tuple[UserProfile, Stream]] + not_subscribed: List[Tuple[UserProfile, Stream]] = [] for user_profile in users: user_sub_stream_info = existing_subs_by_user[user_profile.id] @@ -2937,8 +2939,8 @@ def bulk_remove_subscriptions(users: Iterable[UserProfile], not_subscribed = get_non_subscribed_tups() - subs_to_deactivate = [] # type: List[Tuple[Subscription, Stream]] - sub_ids_to_deactivate = [] # type: List[int] + subs_to_deactivate: List[Tuple[Subscription, Stream]] = [] + sub_ids_to_deactivate: List[int] = [] # This loop just flattens out our data into big lists for # bulk operations. 
@@ -2961,7 +2963,7 @@ def bulk_remove_subscriptions(users: Iterable[UserProfile], # Log Subscription Activities in RealmAuditLog event_time = timezone_now() event_last_message_id = get_last_message_id() - all_subscription_logs = [] # type: (List[RealmAuditLog]) + all_subscription_logs: (List[RealmAuditLog]) = [] for (sub, stream) in subs_to_deactivate: all_subscription_logs.append(RealmAuditLog(realm=sub.user_profile.realm, modified_user=sub.user_profile, @@ -2972,8 +2974,8 @@ def bulk_remove_subscriptions(users: Iterable[UserProfile], # Now since we have all log objects generated we can do a bulk insert RealmAuditLog.objects.bulk_create(all_subscription_logs) - altered_user_dict = defaultdict(list) # type: Dict[int, List[UserProfile]] - streams_by_user = defaultdict(list) # type: Dict[int, List[Stream]] + altered_user_dict: Dict[int, List[UserProfile]] = defaultdict(list) + streams_by_user: Dict[int, List[Stream]] = defaultdict(list) for (sub, stream) in subs_to_deactivate: streams_by_user[sub.user_profile_id].append(stream) altered_user_dict[stream.id].append(sub.user_profile) @@ -3179,14 +3181,14 @@ def do_change_bot_owner(user_profile: UserProfile, bot_owner: UserProfile, # Since `bot_owner_id` is included in the user profile dict we need # to update the users dict with the new bot owner id - event = dict( + event: Dict[str, Any] = dict( type="realm_user", op="update", person=dict( user_id=user_profile.id, bot_owner_id=user_profile.bot_owner.id, ), - ) # type: Dict[str, Any] + ) send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id)) def do_change_tos_version(user_profile: UserProfile, tos_version: str) -> None: @@ -3354,7 +3356,7 @@ def do_change_default_sending_stream(user_profile: UserProfile, stream: Optional 'stream': str(stream)}) if user_profile.is_bot: if stream: - stream_name = stream.name # type: Optional[str] + stream_name: Optional[str] = stream.name else: stream_name = None send_event(user_profile.realm, @@ -3377,7 +3379,7 @@ def do_change_default_events_register_stream(user_profile: UserProfile, 'stream': str(stream)}) if user_profile.is_bot: if stream: - stream_name = stream.name # type: Optional[str] + stream_name: Optional[str] = stream.name else: stream_name = None send_event(user_profile.realm, @@ -3611,7 +3613,7 @@ def do_create_realm(string_id: str, name: str, logging.info("Server not yet initialized. 
Creating the internal realm first.") create_internal_realm() - kwargs = {} # type: Dict[str, Any] + kwargs: Dict[str, Any] = {} if emails_restricted_to_domains is not None: kwargs['emails_restricted_to_domains'] = emails_restricted_to_domains realm = Realm(string_id=string_id, name=name, **kwargs) @@ -4267,7 +4269,7 @@ def update_user_message_flags(message: Message, ums: Iterable[UserMessage]) -> N wildcard = message.mentions_wildcard mentioned_ids = message.mentions_user_ids ids_with_alert_words = message.user_ids_with_alert_words - changed_ums = set() # type: Set[UserMessage] + changed_ums: Set[UserMessage] = set() def update_flag(um: UserMessage, should_set: bool, flag: int) -> None: if should_set: @@ -4311,10 +4313,10 @@ def do_update_embedded_data(user_profile: UserProfile, message: Message, content: Optional[str], rendered_content: Optional[str]) -> None: - event = { + event: Dict[str, Any] = { 'type': 'update_message', 'sender': user_profile.email, - 'message_id': message.id} # type: Dict[str, Any] + 'message_id': message.id} changed_messages = [message] ums = UserMessage.objects.filter(message=message.id) @@ -4359,15 +4361,17 @@ def do_update_message(user_profile: UserProfile, message: Message, timestamp = timezone_now() message.last_edit_time = timestamp - event = {'type': 'update_message', - 'user_id': user_profile.id, - 'edit_timestamp': datetime_to_timestamp(timestamp), - 'message_id': message.id} # type: Dict[str, Any] + event: Dict[str, Any] = { + 'type': 'update_message', + 'user_id': user_profile.id, + 'edit_timestamp': datetime_to_timestamp(timestamp), + 'message_id': message.id, + } - edit_history_event = { + edit_history_event: Dict[str, Any] = { 'user_id': user_profile.id, 'timestamp': event['edit_timestamp'], - } # type: Dict[str, Any] + } changed_messages = [message] @@ -4417,10 +4421,10 @@ def do_update_message(user_profile: UserProfile, message: Message, else: new_topic_name = message.topic_name() - stream_topic = StreamTopicTarget( + stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget( stream_id=stream_id, topic_name=new_topic_name, - ) # type: Optional[StreamTopicTarget] + ) else: stream_topic = None @@ -4545,7 +4549,7 @@ def do_delete_messages(realm: Realm, messages: Iterable[Message]) -> None: return usermessages = UserMessage.objects.filter(message_id__in=message_ids) - message_id_to_notifiable_users = {} # type: Dict[int, List[int]] + message_id_to_notifiable_users: Dict[int, List[int]] = {} for um in usermessages: if um.message_id not in message_id_to_notifiable_users: message_id_to_notifiable_users[um.message_id] = [] @@ -4557,12 +4561,13 @@ def do_delete_messages(realm: Realm, messages: Iterable[Message]) -> None: if not message.is_stream_message(): message_type = "private" - event = { + event: Dict[str, Any] = { 'type': 'delete_message', 'sender': message.sender.email, 'sender_id': message.sender_id, 'message_id': message.id, - 'message_type': message_type, } # type: Dict[str, Any] + 'message_type': message_type, + } if message_type == "stream": event['stream_id'] = message.recipient.type_id event['topic'] = message.topic_name() @@ -4685,7 +4690,7 @@ def gather_subscriptions_helper(user_profile: UserProfile, stream_recipient = StreamRecipientMap() stream_recipient.populate_for_recipient_ids(sub_recipient_ids) - stream_ids = set() # type: Set[int] + stream_ids: Set[int] = set() for sub in sub_dicts: sub['stream_id'] = stream_recipient.stream_id_for(sub['recipient_id']) stream_ids.add(sub['stream_id']) @@ -4724,12 +4729,12 @@ def 
gather_subscriptions_helper(user_profile: UserProfile, streams_subscribed_map.update({stream['id']: False for stream in all_streams if stream not in streams}) if include_subscribers: - subscriber_map = bulk_get_subscriber_user_ids( + subscriber_map: Mapping[int, Optional[List[int]]] = bulk_get_subscriber_user_ids( all_streams, user_profile, streams_subscribed_map, stream_recipient - ) # type: Mapping[int, Optional[List[int]]] + ) else: # If we're not including subscribers, always return None, # which the below code needs to check for anyway. @@ -4774,7 +4779,7 @@ def gather_subscriptions_helper(user_profile: UserProfile, stream["name"], stream["email_token"], show_sender=True) # Construct and add subscribers data - subscribers = subscriber_map[stream["id"]] # type: Optional[List[int]] + subscribers: Optional[List[int]] = subscriber_map[stream["id"]] # Important: don't show the subscribers if the stream is invite only # and this user isn't on it anymore (or a realm administrator). if stream["invite_only"] and not (sub["active"] or user_profile.is_realm_admin): @@ -4873,7 +4878,7 @@ def get_active_presence_idle_user_ids(realm: Realm, user_ids = set() for user_id in active_user_ids: - flags = user_flags.get(user_id, []) # type: Iterable[str] + flags: Iterable[str] = user_flags.get(user_id, []) mentioned = 'mentioned' in flags or 'wildcard_mentioned' in flags private_message = is_pm and user_id != sender_id alerted = 'has_alert_word' in flags @@ -4941,9 +4946,9 @@ class InvitationError(JsonableError): def __init__(self, msg: str, errors: List[Tuple[str, str, bool]], sent_invitations: bool) -> None: - self._msg = msg # type: str - self.errors = errors # type: List[Tuple[str, str, bool]] - self.sent_invitations = sent_invitations # type: bool + self._msg: str = msg + self.errors: List[Tuple[str, str, bool]] = errors + self.sent_invitations: bool = sent_invitations def estimate_recent_invites(realms: Iterable[Realm], *, days: int) -> int: '''An upper bound on the number of invites sent in the last `days` days''' @@ -5009,8 +5014,8 @@ def do_invite_users(user_profile: UserProfile, "Ask an organization admin, or a more experienced user."), [], sent_invitations=False) - good_emails = set() # type: Set[str] - errors = [] # type: List[Tuple[str, str, bool]] + good_emails: Set[str] = set() + errors: List[Tuple[str, str, bool]] = [] validate_email_allowed_in_realm = get_realm_email_validator(user_profile.realm) for email in invitee_emails: if email == '': @@ -5032,7 +5037,7 @@ def do_invite_users(user_profile: UserProfile, ''' error_dict = get_existing_user_errors(user_profile.realm, good_emails) - skipped = [] # type: List[Tuple[str, str, bool]] + skipped: List[Tuple[str, str, bool]] = [] for email in error_dict: msg, deactivated = error_dict[email] skipped.append((email, msg, deactivated)) @@ -5324,7 +5329,7 @@ def do_get_streams( else: # We construct a query as the or (|) of the various sources # this user requested streams from. 
- query_filter = None # type: Optional[Q] + query_filter: Optional[Q] = None def add_filter_option(option: Q) -> None: nonlocal query_filter @@ -5631,7 +5636,7 @@ def do_update_bot_config_data(bot_profile: UserProfile, def get_service_dicts_for_bot(user_profile_id: str) -> List[Dict[str, Any]]: user_profile = get_user_profile_by_id(user_profile_id) services = get_bot_services(user_profile_id) - service_dicts = [] # type: List[Dict[str, Any]] + service_dicts: List[Dict[str, Any]] = [] if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT: service_dicts = [{'base_url': service.base_url, 'interface': service.interface, @@ -5651,7 +5656,7 @@ def get_service_dicts_for_bot(user_profile_id: str) -> List[Dict[str, Any]]: def get_service_dicts_for_bots(bot_dicts: List[Dict[str, Any]], realm: Realm) -> Dict[int, List[Dict[str, Any]]]: bot_profile_ids = [bot_dict['id'] for bot_dict in bot_dicts] - bot_services_by_uid = defaultdict(list) # type: Dict[int, List[Service]] + bot_services_by_uid: Dict[int, List[Service]] = defaultdict(list) for service in Service.objects.filter(user_profile_id__in=bot_profile_ids): bot_services_by_uid[service.user_profile_id].append(service) @@ -5659,12 +5664,12 @@ def get_service_dicts_for_bots(bot_dicts: List[Dict[str, Any]], if bot_dict['bot_type'] == UserProfile.EMBEDDED_BOT] embedded_bot_configs = get_bot_configs(embedded_bot_ids) - service_dicts_by_uid = {} # type: Dict[int, List[Dict[str, Any]]] + service_dicts_by_uid: Dict[int, List[Dict[str, Any]]] = {} for bot_dict in bot_dicts: bot_profile_id = bot_dict["id"] bot_type = bot_dict["bot_type"] services = bot_services_by_uid[bot_profile_id] - service_dicts = [] # type: List[Dict[str, Any]] + service_dicts: List[Dict[str, Any]] = [] if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT: service_dicts = [{'base_url': service.base_url, 'interface': service.interface, diff --git a/zerver/lib/addressee.py b/zerver/lib/addressee.py index d969311e7b..c22c90e223 100644 --- a/zerver/lib/addressee.py +++ b/zerver/lib/addressee.py @@ -30,7 +30,7 @@ def raw_pm_with_emails_by_ids(user_ids: Iterable[int], my_email: str, return emails def get_user_profiles(emails: Iterable[str], realm: Realm) -> List[UserProfile]: - user_profiles = [] # type: List[UserProfile] + user_profiles: List[UserProfile] = [] for email in emails: try: user_profile = get_user_including_cross_realm(email, realm) @@ -40,7 +40,7 @@ def get_user_profiles(emails: Iterable[str], realm: Realm) -> List[UserProfile]: return user_profiles def get_user_profiles_by_ids(user_ids: Iterable[int], realm: Realm) -> List[UserProfile]: - user_profiles = [] # type: List[UserProfile] + user_profiles: List[UserProfile] = [] for user_id in user_ids: try: user_profile = get_user_by_id_in_realm_including_cross_realm(user_id, realm) diff --git a/zerver/lib/bot_config.py b/zerver/lib/bot_config.py index 50f1793564..847085b866 100644 --- a/zerver/lib/bot_config.py +++ b/zerver/lib/bot_config.py @@ -26,7 +26,7 @@ def get_bot_configs(bot_profile_ids: List[int]) -> Dict[int, Dict[str, str]]: if not bot_profile_ids: return {} entries = BotConfigData.objects.filter(bot_profile_id__in=bot_profile_ids) - entries_by_uid = defaultdict(dict) # type: Dict[int, Dict[str, str]] + entries_by_uid: Dict[int, Dict[str, str]] = defaultdict(dict) for entry in entries: entries_by_uid[entry.bot_profile_id].update({entry.key: entry.value}) return entries_by_uid diff --git a/zerver/lib/bot_lib.py b/zerver/lib/bot_lib.py index b3081884a0..37532e3890 100644 --- a/zerver/lib/bot_lib.py +++ 
b/zerver/lib/bot_lib.py @@ -28,12 +28,12 @@ def get_bot_handler(service_name: str) -> Any: if not configured_service: return None bot_module_name = 'zulip_bots.bots.%s.%s' % (configured_service, configured_service) - bot_module = importlib.import_module(bot_module_name) # type: Any + bot_module: Any = importlib.import_module(bot_module_name) return bot_module.handler_class() class StateHandler: - storage_size_limit = 10000000 # type: int # TODO: Store this in the server configuration model. + storage_size_limit: int = 10000000 # TODO: Store this in the server configuration model. def __init__(self, user_profile: UserProfile) -> None: self.user_profile = user_profile diff --git a/zerver/lib/bugdown/__init__.py b/zerver/lib/bugdown/__init__.py index d9a7ecfd10..d171144cfd 100644 --- a/zerver/lib/bugdown/__init__.py +++ b/zerver/lib/bugdown/__init__.py @@ -129,7 +129,7 @@ STREAM_TOPIC_LINK_REGEX = r""" def get_compiled_stream_topic_link_regex() -> Pattern: return verbose_compile(STREAM_TOPIC_LINK_REGEX) -LINK_REGEX = None # type: Pattern +LINK_REGEX: Pattern = None def get_web_link_regex() -> str: # We create this one time, but not at startup. So the @@ -814,7 +814,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor): Finally we add any remaining text to the last node. """ - to_process = [] # type: List[Dict[str, Any]] + to_process: List[Dict[str, Any]] = [] # Build dicts for URLs for url_data in urls: short_url = url_data["url"] @@ -907,7 +907,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor): res = fetch_tweet_data(tweet_id) if res is None: return None - user = res['user'] # type: Dict[str, Any] + user: Dict[str, Any] = res['user'] tweet = markdown.util.etree.Element("div") tweet.set("class", "twitter-tweet") img_a = markdown.util.etree.SubElement(tweet, 'a') @@ -925,7 +925,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor): text = html.unescape(res['full_text']) urls = res.get('urls', []) user_mentions = res.get('user_mentions', []) - media = res.get('media', []) # type: List[Dict[str, Any]] + media: List[Dict[str, Any]] = res.get('media', []) p = self.twitter_text(text, urls, user_mentions, media) tweet.append(p) @@ -1083,7 +1083,7 @@ class InlineInterestingLinkProcessor(markdown.treeprocessors.Treeprocessor): if len(unique_previewable_urls) > self.INLINE_PREVIEW_LIMIT_PER_MESSAGE: return - processed_urls = set() # type: Set[str] + processed_urls: Set[str] = set() rendered_tweet_count = 0 for found_url in found_urls: @@ -1303,7 +1303,7 @@ class Emoji(markdown.inlinepatterns.Pattern): orig_syntax = match.group("syntax") name = orig_syntax[1:-1] - active_realm_emoji = {} # type: Dict[str, Dict[str, str]] + active_realm_emoji: Dict[str, Dict[str, str]] = {} db_data = self.markdown.zulip_db_data if db_data is not None: active_realm_emoji = db_data['active_realm_emoji'] @@ -1516,8 +1516,8 @@ class BugdownListPreprocessor(markdown.preprocessors.Preprocessor): ]) inserts = 0 - in_code_fence = False # type: bool - open_fences = [] # type: List[Fence] + in_code_fence: bool = False + open_fences: List[Fence] = [] copy = lines[:] for i in range(len(lines) - 1): # Ignore anything that is inside a fenced code block but not quoted. 
@@ -1968,8 +1968,8 @@ class Bugdown(markdown.Markdown): self.preprocessors = get_sub_registry(self.preprocessors, ['custom_text_notifications']) self.parser.blockprocessors = get_sub_registry(self.parser.blockprocessors, ['paragraph']) -md_engines = {} # type: Dict[Tuple[int, bool], markdown.Markdown] -realm_filter_data = {} # type: Dict[int, List[Tuple[str, str, int]]] +md_engines: Dict[Tuple[int, bool], markdown.Markdown] = {} +realm_filter_data: Dict[int, List[Tuple[str, str, int]]] = {} def make_md_engine(realm_filters_key: int, email_gateway: bool) -> None: md_engine_key = (realm_filters_key, email_gateway) @@ -2009,7 +2009,7 @@ basic_link_splitter = re.compile(r'[ !;\?\),\'\"]') # rendered by clients (just as links rendered into message bodies # are validated and escaped inside `url_to_a`). def topic_links(realm_filters_key: int, topic_name: str) -> List[str]: - matches = [] # type: List[str] + matches: List[str] = [] realm_filters = realm_filters_for_realm(realm_filters_key) @@ -2154,7 +2154,7 @@ class MentionData: content: str) -> None: user_group_names = possible_user_group_mentions(content) self.user_group_name_info = get_user_group_name_info(realm_id, user_group_names) - self.user_group_members = defaultdict(list) # type: Dict[int, List[int]] + self.user_group_members: Dict[int, List[int]] = defaultdict(list) group_ids = [group.id for group in self.user_group_name_info.values()] if not group_ids: diff --git a/zerver/lib/bugdown/api_arguments_table_generator.py b/zerver/lib/bugdown/api_arguments_table_generator.py index 9ff95af0b5..0de41b9c72 100644 --- a/zerver/lib/bugdown/api_arguments_table_generator.py +++ b/zerver/lib/bugdown/api_arguments_table_generator.py @@ -55,7 +55,7 @@ class APIArgumentsTablePreprocessor(Preprocessor): if is_openapi_format: endpoint, method = doc_name.rsplit(':', 1) - arguments = [] # type: List[Dict[str, Any]] + arguments: List[Dict[str, Any]] = [] try: arguments = get_openapi_parameters(endpoint, method) diff --git a/zerver/lib/bugdown/api_code_examples.py b/zerver/lib/bugdown/api_code_examples.py index f861fa2dd9..51ce60ffee 100644 --- a/zerver/lib/bugdown/api_code_examples.py +++ b/zerver/lib/bugdown/api_code_examples.py @@ -242,7 +242,7 @@ def render_curl_example(function: str, api_url: str, parts = function.split(":") endpoint = parts[0] method = parts[1] - kwargs = dict() # type: Dict[str, Any] + kwargs: Dict[str, Any] = dict() if len(parts) > 2: kwargs["auth_email"] = parts[2] if len(parts) > 3: @@ -252,7 +252,7 @@ def render_curl_example(function: str, api_url: str, kwargs["include"] = include return generate_curl_example(endpoint, method, **kwargs) -SUPPORTED_LANGUAGES = { +SUPPORTED_LANGUAGES: Dict[str, Any] = { 'python': { 'client_config': PYTHON_CLIENT_CONFIG, 'admin_config': PYTHON_CLIENT_ADMIN_CONFIG, @@ -261,7 +261,7 @@ SUPPORTED_LANGUAGES = { 'curl': { 'render': render_curl_example } -} # type: Dict[str, Any] +} class APICodeExamplesGenerator(Extension): def __init__(self, api_url: Optional[str]) -> None: diff --git a/zerver/lib/bugdown/fenced_code.py b/zerver/lib/bugdown/fenced_code.py index 4f38070a2b..e3cc07f148 100644 --- a/zerver/lib/bugdown/fenced_code.py +++ b/zerver/lib/bugdown/fenced_code.py @@ -203,7 +203,7 @@ class CodeHandler(BaseHandler): self.output = output self.fence = fence self.lang = lang - self.lines = [] # type: List[str] + self.lines: List[str] = [] self.run_content_validators = run_content_validators def handle_line(self, line: str) -> None: @@ -233,7 +233,7 @@ class QuoteHandler(BaseHandler): self.processor = 
processor self.output = output self.fence = fence - self.lines = [] # type: List[str] + self.lines: List[str] = [] def handle_line(self, line: str) -> None: if line.rstrip() == self.fence: @@ -255,7 +255,7 @@ class TexHandler(BaseHandler): self.processor = processor self.output = output self.fence = fence - self.lines = [] # type: List[str] + self.lines: List[str] = [] def handle_line(self, line: str) -> None: if line.rstrip() == self.fence: @@ -280,7 +280,7 @@ class FencedBlockPreprocessor(markdown.preprocessors.Preprocessor): self.checked_for_codehilite = False self.run_content_validators = run_content_validators - self.codehilite_conf = {} # type: Dict[str, List[Any]] + self.codehilite_conf: Dict[str, List[Any]] = {} def push(self, handler: BaseHandler) -> None: self.handlers.append(handler) @@ -291,10 +291,10 @@ class FencedBlockPreprocessor(markdown.preprocessors.Preprocessor): def run(self, lines: Iterable[str]) -> List[str]: """ Match and store Fenced Code Blocks in the HtmlStash. """ - output = [] # type: List[str] + output: List[str] = [] processor = self - self.handlers = [] # type: List[BaseHandler] + self.handlers: List[BaseHandler] = [] handler = OuterHandler(processor, output, self.run_content_validators) self.push(handler) diff --git a/zerver/lib/bugdown/help_relative_links.py b/zerver/lib/bugdown/help_relative_links.py index f13e411c1c..1aa1251e0c 100644 --- a/zerver/lib/bugdown/help_relative_links.py +++ b/zerver/lib/bugdown/help_relative_links.py @@ -71,7 +71,7 @@ class RelativeLinksHelpExtension(markdown.Extension): md.registerExtension(self) md.preprocessors.add('help_relative_links', RelativeLinks(), '_begin') -relative_help_links = None # type: Optional[bool] +relative_help_links: Optional[bool] = None def set_relative_help_links(value: bool) -> None: global relative_help_links diff --git a/zerver/lib/bugdown/help_settings_links.py b/zerver/lib/bugdown/help_settings_links.py index e2bc45145f..b313909a60 100644 --- a/zerver/lib/bugdown/help_settings_links.py +++ b/zerver/lib/bugdown/help_settings_links.py @@ -67,7 +67,7 @@ class SettingHelpExtension(markdown.Extension): md.registerExtension(self) md.preprocessors.add('setting', Setting(), '_begin') -relative_settings_links = None # type: Optional[bool] +relative_settings_links: Optional[bool] = None def set_relative_settings_links(value: bool) -> None: global relative_settings_links diff --git a/zerver/lib/bugdown/nested_code_blocks.py b/zerver/lib/bugdown/nested_code_blocks.py index 8641499256..0b07b7519b 100644 --- a/zerver/lib/bugdown/nested_code_blocks.py +++ b/zerver/lib/bugdown/nested_code_blocks.py @@ -37,8 +37,8 @@ class NestedCodeBlocksRendererTreeProcessor(markdown.treeprocessors.Treeprocesso ) -> List[ResultWithFamily]: nested_code_blocks = [] for code_tag in code_tags: - parent = code_tag.family.parent # type: Any - grandparent = code_tag.family.grandparent # type: Any + parent: Any = code_tag.family.parent + grandparent: Any = code_tag.family.grandparent if parent.tag == "p" and grandparent.tag == "li": # if the parent (
<p>
) has no text, and no children, # that means that the element inside is its diff --git a/zerver/lib/bugdown/tabbed_sections.py b/zerver/lib/bugdown/tabbed_sections.py index fbf2efdf79..1b331f3587 100644 --- a/zerver/lib/bugdown/tabbed_sections.py +++ b/zerver/lib/bugdown/tabbed_sections.py @@ -138,7 +138,7 @@ class TabbedSectionsPreprocessor(Preprocessor): return NAV_BAR_TEMPLATE.format(tabs='\n'.join(li_elements)) def parse_tabs(self, lines: List[str]) -> Optional[Dict[str, Any]]: - block = {} # type: Dict[str, Any] + block: Dict[str, Any] = {} for index, line in enumerate(lines): start_match = START_TABBED_SECTION_REGEX.search(line) if start_match: diff --git a/zerver/lib/bulk_create.py b/zerver/lib/bulk_create.py index cac6c1a83a..baa040a8b1 100644 --- a/zerver/lib/bulk_create.py +++ b/zerver/lib/bulk_create.py @@ -24,7 +24,7 @@ def bulk_create_users(realm: Realm, users = sorted([user_raw for user_raw in users_raw if user_raw[0] not in existing_users]) # Now create user_profiles - profiles_to_create = [] # type: List[UserProfile] + profiles_to_create: List[UserProfile] = [] for (email, full_name, short_name, active) in users: profile = create_user_profile(realm, email, initial_password(email), active, bot_type, @@ -52,7 +52,7 @@ def bulk_create_users(realm: Realm, event_type=RealmAuditLog.USER_CREATED, event_time=profile_.date_joined) for profile_ in profiles_to_create]) - recipients_to_create = [] # type: List[Recipient] + recipients_to_create: List[Recipient] = [] for user_id in user_ids: recipient = Recipient(type_id=user_id, type=Recipient.PERSONAL) recipients_to_create.append(recipient) @@ -61,11 +61,11 @@ def bulk_create_users(realm: Realm, bulk_set_users_or_streams_recipient_fields(UserProfile, profiles_to_create, recipients_to_create) - recipients_by_user_id = {} # type: Dict[int, Recipient] + recipients_by_user_id: Dict[int, Recipient] = {} for recipient in recipients_to_create: recipients_by_user_id[recipient.type_id] = recipient - subscriptions_to_create = [] # type: List[Subscription] + subscriptions_to_create: List[Subscription] = [] for user_id in user_ids: recipient = recipients_by_user_id[user_id] subscription = Subscription(user_profile_id=user_id, recipient=recipient) @@ -106,7 +106,7 @@ def bulk_create_streams(realm: Realm, existing_streams = frozenset([name.lower() for name in Stream.objects.filter(realm=realm) .values_list('name', flat=True)]) - streams_to_create = [] # type: List[Stream] + streams_to_create: List[Stream] = [] for name, options in stream_dict.items(): if 'history_public_to_subscribers' not in options: options['history_public_to_subscribers'] = ( @@ -137,7 +137,7 @@ def bulk_create_streams(realm: Realm, streams_to_create.sort(key=lambda x: x.name) Stream.objects.bulk_create(streams_to_create) - recipients_to_create = [] # type: List[Recipient] + recipients_to_create: List[Recipient] = [] for stream in Stream.objects.filter(realm=realm).values('id', 'name'): if stream['name'].lower() not in existing_streams: recipients_to_create.append(Recipient(type_id=stream['id'], diff --git a/zerver/lib/cache.py b/zerver/lib/cache.py index fa69214b02..bf169b2f55 100644 --- a/zerver/lib/cache.py +++ b/zerver/lib/cache.py @@ -101,7 +101,7 @@ def get_or_create_key_prefix() -> str: return prefix -KEY_PREFIX = get_or_create_key_prefix() # type: str +KEY_PREFIX: str = get_or_create_key_prefix() def bounce_key_prefix_for_testing(test_name: str) -> None: global KEY_PREFIX @@ -377,14 +377,15 @@ def generic_bulk_cached_fetch( # Nothing to fetch. 
return {} - cache_keys = {} # type: Dict[ObjKT, str] + cache_keys: Dict[ObjKT, str] = {} for object_id in object_ids: cache_keys[object_id] = cache_key_function(object_id) - cached_objects_compressed = safe_cache_get_many([cache_keys[object_id] - for object_id in object_ids]) # type: Dict[str, Tuple[CompressedItemT]] + cached_objects_compressed: Dict[str, Tuple[CompressedItemT]] = safe_cache_get_many( + [cache_keys[object_id] for object_id in object_ids] + ) - cached_objects = {} # type: Dict[str, CacheItemT] + cached_objects: Dict[str, CacheItemT] = {} for (key, val) in cached_objects_compressed.items(): cached_objects[key] = extractor(cached_objects_compressed[key][0]) needed_ids = [object_id for object_id in object_ids if @@ -396,7 +397,7 @@ def generic_bulk_cached_fetch( else: db_objects = [] - items_for_remote_cache = {} # type: Dict[str, Tuple[CompressedItemT]] + items_for_remote_cache: Dict[str, Tuple[CompressedItemT]] = {} for obj in db_objects: key = cache_keys[id_fetcher(obj)] item = cache_transformer(obj) @@ -439,13 +440,13 @@ def user_profile_by_id_cache_key(user_profile_id: int) -> str: def user_profile_by_api_key_cache_key(api_key: str) -> str: return "user_profile_by_api_key:%s" % (api_key,) -realm_user_dict_fields = [ +realm_user_dict_fields: List[str] = [ 'id', 'full_name', 'short_name', 'email', 'avatar_source', 'avatar_version', 'is_active', 'role', 'is_bot', 'realm_id', 'timezone', 'date_joined', 'bot_owner_id', 'delivery_email', 'bot_type' -] # type: List[str] +] def realm_user_dicts_cache_key(realm_id: int) -> str: return "realm_user_dicts:%s" % (realm_id,) @@ -459,13 +460,15 @@ def active_user_ids_cache_key(realm_id: int) -> str: def active_non_guest_user_ids_cache_key(realm_id: int) -> str: return "active_non_guest_user_ids:%s" % (realm_id,) -bot_dict_fields = ['id', 'full_name', 'short_name', 'bot_type', 'email', - 'is_active', 'default_sending_stream__name', - 'realm_id', - 'default_events_register_stream__name', - 'default_all_public_streams', 'api_key', - 'bot_owner__email', 'avatar_source', - 'avatar_version'] # type: List[str] +bot_dict_fields: List[str] = [ + 'id', 'full_name', 'short_name', 'bot_type', 'email', + 'is_active', 'default_sending_stream__name', + 'realm_id', + 'default_events_register_stream__name', + 'default_all_public_streams', 'api_key', + 'bot_owner__email', 'avatar_source', + 'avatar_version', +] def bot_dicts_in_realm_cache_key(realm: 'Realm') -> str: return "bot_dicts_in_realm:%s" % (realm.id,) diff --git a/zerver/lib/cache_helpers.py b/zerver/lib/cache_helpers.py index ef3d2e4bad..8bdaa775ff 100644 --- a/zerver/lib/cache_helpers.py +++ b/zerver/lib/cache_helpers.py @@ -108,7 +108,7 @@ def get_users() -> List[UserProfile]: # doing any setup for things we're unlikely to use (without the lambda # wrapper the below adds an extra 3ms or so to startup time for # anything importing this file). 
-cache_fillers = { +cache_fillers: Dict[str, Tuple[Callable[[], List[Any]], Callable[[Dict[str, Any], Any], None], int, int]] = { 'user': (get_users, user_cache_items, 3600*24*7, 10000), 'client': (lambda: Client.objects.select_related().all(), client_cache_items, 3600*24*7, 10000), 'stream': (get_streams, stream_cache_items, 3600*24*7, 10000), @@ -118,12 +118,12 @@ cache_fillers = { # 'message': (message_fetch_objects, message_cache_items, 3600 * 24, 1000), 'huddle': (lambda: Huddle.objects.select_related().all(), huddle_cache_items, 3600*24*7, 10000), 'session': (lambda: Session.objects.all(), session_cache_items, 3600*24*7, 10000), -} # type: Dict[str, Tuple[Callable[[], List[Any]], Callable[[Dict[str, Any], Any], None], int, int]] +} def fill_remote_cache(cache: str) -> None: remote_cache_time_start = get_remote_cache_time() remote_cache_requests_start = get_remote_cache_requests() - items_for_remote_cache = {} # type: Dict[str, Any] + items_for_remote_cache: Dict[str, Any] = {} (objects, items_filler, timeout, batch_size) = cache_fillers[cache] count = 0 for obj in objects(): diff --git a/zerver/lib/db.py b/zerver/lib/db.py index 7b362c8ee5..af17bfef6d 100644 --- a/zerver/lib/db.py +++ b/zerver/lib/db.py @@ -38,7 +38,7 @@ class TimeTrackingConnection(connection): """A psycopg2 connection class that uses TimeTrackingCursors.""" def __init__(self, *args: Any, **kwargs: Any) -> None: - self.queries = [] # type: List[Dict[str, str]] + self.queries: List[Dict[str, str]] = [] super().__init__(*args, **kwargs) def cursor(self, *args: Any, **kwargs: Any) -> TimeTrackingCursor: diff --git a/zerver/lib/debug.py b/zerver/lib/debug.py index 0639a1c9cb..a49c60b674 100644 --- a/zerver/lib/debug.py +++ b/zerver/lib/debug.py @@ -68,7 +68,7 @@ def tracemalloc_listen_sock(sock: socket.socket) -> None: sock.recv(1) tracemalloc_dump() -listener_pid = None # type: Optional[int] +listener_pid: Optional[int] = None def tracemalloc_listen() -> None: global listener_pid diff --git a/zerver/lib/digest.py b/zerver/lib/digest.py index c04fdccf47..58ca1e1901 100644 --- a/zerver/lib/digest.py +++ b/zerver/lib/digest.py @@ -81,9 +81,9 @@ def gather_hot_conversations(user_profile: UserProfile, messages: List[Message]) # Returns a list of dictionaries containing the templating # information for each hot conversation. 
- conversation_length = defaultdict(int) # type: Dict[Tuple[int, str], int] - conversation_messages = defaultdict(list) # type: Dict[Tuple[int, str], List[Message]] - conversation_diversity = defaultdict(set) # type: Dict[Tuple[int, str], Set[str]] + conversation_length: Dict[Tuple[int, str], int] = defaultdict(int) + conversation_messages: Dict[Tuple[int, str], List[Message]] = defaultdict(list) + conversation_diversity: Dict[Tuple[int, str], Set[str]] = defaultdict(set) for message in messages: key = (message.recipient.type_id, message.topic_name()) diff --git a/zerver/lib/display_recipient.py b/zerver/lib/display_recipient.py index 0c968d1164..f5a1fddb13 100644 --- a/zerver/lib/display_recipient.py +++ b/zerver/lib/display_recipient.py @@ -92,13 +92,13 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]] return stream['name'] # ItemT = Stream, CacheItemT = str (name), ObjKT = int (recipient_id) - stream_display_recipients = generic_bulk_cached_fetch( + stream_display_recipients: Dict[int, str] = generic_bulk_cached_fetch( cache_key_function=display_recipient_cache_key, query_function=stream_query_function, object_ids=[recipient[0] for recipient in stream_recipients], id_fetcher=stream_id_fetcher, cache_transformer=stream_cache_transformer, - ) # type: Dict[int, str] + ) # Now we have to create display_recipients for personal and huddle messages. # We do this via generic_bulk_cached_fetch, supplying apprioprate functions to it. @@ -121,8 +121,8 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]] ) for recipient_id in recipient_ids] # Find all user ids whose UserProfiles we will need to fetch: - user_ids_to_fetch = set() # type: Set[int] - huddle_user_ids = {} # type: Dict[int, List[int]] + user_ids_to_fetch: Set[int] = set() + huddle_user_ids: Dict[int, List[int]] = {} huddle_user_ids = bulk_get_huddle_user_ids([recipient for recipient in recipients if recipient.type == Recipient.HUDDLE]) for recipient in recipients: @@ -132,10 +132,10 @@ def bulk_fetch_display_recipients(recipient_tuples: Set[Tuple[int, int, int]] user_ids_to_fetch = user_ids_to_fetch.union(huddle_user_ids[recipient.id]) # Fetch the needed UserProfiles: - user_profiles = bulk_get_user_profile_by_id(list(user_ids_to_fetch)) # type: Dict[int, UserDisplayRecipient] + user_profiles: Dict[int, UserDisplayRecipient] = bulk_get_user_profile_by_id(list(user_ids_to_fetch)) # Build the return value: - result = [] # type: List[Tuple[int, List[UserDisplayRecipient]]] + result: List[Tuple[int, List[UserDisplayRecipient]]] = [] for recipient in recipients: if recipient.type == Recipient.PERSONAL: result.append((recipient.id, [user_profiles[recipient.type_id]])) diff --git a/zerver/lib/email_mirror.py b/zerver/lib/email_mirror.py index e3c748567f..c3f38a9a33 100644 --- a/zerver/lib/email_mirror.py +++ b/zerver/lib/email_mirror.py @@ -393,7 +393,7 @@ def process_missed_message(to: str, message: message.Message) -> None: user_profile.id, recipient_str)) def process_message(message: message.Message, rcpt_to: Optional[str]=None) -> None: - to = None # type: Optional[str] + to: Optional[str] = None try: if rcpt_to is not None: diff --git a/zerver/lib/email_mirror_helpers.py b/zerver/lib/email_mirror_helpers.py index 74f86453cb..dabbcb5503 100644 --- a/zerver/lib/email_mirror_helpers.py +++ b/zerver/lib/email_mirror_helpers.py @@ -86,7 +86,7 @@ def decode_email_address(email: str) -> Tuple[str, Dict[str, bool]]: msg_string = msg_string.replace('.', '+') parts = msg_string.split('+') - 
options = {} # type: Dict[str, bool] + options: Dict[str, bool] = {} for part in parts: if part in optional_address_tokens: optional_address_tokens[part](options) diff --git a/zerver/lib/email_notifications.py b/zerver/lib/email_notifications.py index 5e32b2d880..9848f2df76 100644 --- a/zerver/lib/email_notifications.py +++ b/zerver/lib/email_notifications.py @@ -126,7 +126,7 @@ def build_message_list(user_profile: UserProfile, messages: List[Message]) -> Li The messages are collapsed into per-recipient and per-sender blocks, like our web interface """ - messages_to_render = [] # type: List[Dict[str, Any]] + messages_to_render: List[Dict[str, Any]] = [] def sender_string(message: Message) -> str: if message.recipient.type in (Recipient.STREAM, Recipient.HUDDLE): @@ -417,7 +417,7 @@ def do_send_missedmessage_events_reply_in_zulip(user_profile: UserProfile, }) with override_language(user_profile.default_language): - from_name = _("Zulip missed messages") # type: str + from_name: str = _("Zulip missed messages") from_address = FromAddress.NOREPLY if len(senders) == 1 and settings.SEND_MISSED_MESSAGE_EMAILS_AS_USER: # If this setting is enabled, you can reply to the Zulip @@ -470,7 +470,7 @@ def handle_missedmessage_emails(user_profile_id: int, # We bucket messages by tuples that identify similar messages. # For streams it's recipient_id and topic. # For PMs it's recipient id and sender. - messages_by_bucket = defaultdict(list) # type: Dict[Tuple[int, str], List[Message]] + messages_by_bucket: Dict[Tuple[int, str], List[Message]] = defaultdict(list) for msg in messages: if msg.recipient.type == Recipient.PERSONAL: # For PM's group using (recipient, sender). @@ -491,7 +491,7 @@ def handle_missedmessage_emails(user_profile_id: int, msg_list.extend(filtered_context_messages) # Sort emails by least recently-active discussion. - bucket_tups = [] # type: List[Tuple[Tuple[int, str], int]] + bucket_tups: List[Tuple[Tuple[int, str], int]] = [] for bucket_tup, msg_list in messages_by_bucket.items(): max_message_id = max(msg_list, key=lambda msg: msg.id).id bucket_tups.append((bucket_tup, max_message_id)) diff --git a/zerver/lib/email_validation.py b/zerver/lib/email_validation.py index e7e332d1d9..a50faf2d89 100644 --- a/zerver/lib/email_validation.py +++ b/zerver/lib/email_validation.py @@ -127,7 +127,7 @@ def get_existing_user_errors( to cross-realm bots and mirror dummies too. 
''' - errors = {} # type: Dict[str, Tuple[str, bool]] + errors: Dict[str, Tuple[str, bool]] = {} users = get_users_by_delivery_email(emails, target_realm).only( 'delivery_email', diff --git a/zerver/lib/events.py b/zerver/lib/events.py index 3e0c758790..2978964cf3 100644 --- a/zerver/lib/events.py +++ b/zerver/lib/events.py @@ -88,12 +88,12 @@ def fetch_initial_state_data(user_profile: UserProfile, queue_id: str, client_gravatar: bool, slim_presence: bool = False, include_subscribers: bool = True) -> Dict[str, Any]: - state = {'queue_id': queue_id} # type: Dict[str, Any] + state: Dict[str, Any] = {'queue_id': queue_id} realm = user_profile.realm if event_types is None: # return True always - want = always_want # type: Callable[[str], bool] + want: Callable[[str], bool] = always_want else: want = set(event_types).__contains__ @@ -803,7 +803,7 @@ def do_events_register(user_profile: UserProfile, user_client: Client, raise JsonableError(_("Could not allocate event queue")) if fetch_event_types is not None: - event_types_set = set(fetch_event_types) # type: Optional[Set[str]] + event_types_set: Optional[Set[str]] = set(fetch_event_types) elif event_types is not None: event_types_set = set(event_types) else: diff --git a/zerver/lib/exceptions.py b/zerver/lib/exceptions.py index 8d718e810c..b9c68ef617 100644 --- a/zerver/lib/exceptions.py +++ b/zerver/lib/exceptions.py @@ -83,18 +83,18 @@ class JsonableError(Exception): ''' # Override this in subclasses, as needed. - code = ErrorCode.BAD_REQUEST # type: ErrorCode + code: ErrorCode = ErrorCode.BAD_REQUEST # Override this in subclasses if providing structured data. - data_fields = [] # type: List[str] + data_fields: List[str] = [] # Optionally override this in subclasses to return a different HTTP status, # like 403 or 404. - http_status_code = 400 # type: int + http_status_code: int = 400 def __init__(self, msg: str) -> None: # `_msg` is an implementation detail of `JsonableError` itself. 
- self._msg = msg # type: str + self._msg: str = msg @staticmethod def msg_format() -> str: @@ -190,7 +190,7 @@ class InvalidJSONError(JsonableError): return _("Malformed JSON") class OrganizationAdministratorRequired(JsonableError): - code = ErrorCode.UNAUTHORIZED_PRINCIPAL # type: ErrorCode + code: ErrorCode = ErrorCode.UNAUTHORIZED_PRINCIPAL ADMIN_REQUIRED_MESSAGE = _("Must be an organization administrator") diff --git a/zerver/lib/export.py b/zerver/lib/export.py index 28d8363385..bc1ae4df50 100644 --- a/zerver/lib/export.py +++ b/zerver/lib/export.py @@ -244,7 +244,7 @@ ANALYTICS_TABLES = { # # TODO: This data structure could likely eventually be replaced by # inspecting the corresponding Django models -DATE_FIELDS = { +DATE_FIELDS: Dict[TableName, List[Field]] = { 'zerver_attachment': ['create_time'], 'zerver_message': ['last_edit_time', 'date_sent'], 'zerver_mutedtopic': ['date_muted'], @@ -260,7 +260,7 @@ DATE_FIELDS = { 'analytics_realmcount': ['end_time'], 'analytics_usercount': ['end_time'], 'analytics_streamcount': ['end_time'], -} # type: Dict[TableName, List[Field]] +} def sanity_check_output(data: TableData) -> None: # First, we verify that the export tool has a declared @@ -389,10 +389,10 @@ class Config: self.concat_and_destroy = concat_and_destroy self.id_source = id_source self.source_filter = source_filter - self.children = [] # type: List[Config] + self.children: List[Config] = [] if normal_parent is not None: - self.parent = normal_parent # type: Optional[Config] + self.parent: Optional[Config] = normal_parent else: self.parent = None @@ -471,7 +471,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona # When we concat_and_destroy, we are working with # temporary "tables" that are lists of records that # should already be ready to export. 
- data = [] # type: List[Record] + data: List[Record] = [] for t in config.concat_and_destroy: data += response[t] del response[t] @@ -494,7 +494,7 @@ def export_from_config(response: TableData, config: Config, seed_object: Optiona assert parent.table is not None assert config.parent_key is not None parent_ids = [r['id'] for r in response[parent.table]] - filter_parms = {config.parent_key: parent_ids} # type: Dict[str, Any] + filter_parms: Dict[str, Any] = {config.parent_key: parent_ids} if config.filter_args is not None: filter_parms.update(config.filter_args) assert model is not None @@ -807,8 +807,8 @@ def fetch_user_profile(response: TableData, config: Config, context: Context) -> exclude = ['password', 'api_key'] rows = make_raw(list(query), exclude=exclude) - normal_rows = [] # type: List[Record] - dummy_rows = [] # type: List[Record] + normal_rows: List[Record] = [] + dummy_rows: List[Record] = [] for row in rows: if exportable_user_ids is not None: @@ -1003,7 +1003,7 @@ def export_partial_message_files(realm: Realm, response['zerver_userprofile_mirrordummy'] + response['zerver_userprofile_crossrealm']) - consented_user_ids = set() # type: Set[int] + consented_user_ids: Set[int] = set() if consent_message_id is not None: consented_user_ids = get_consented_user_ids(consent_message_id) @@ -1070,7 +1070,7 @@ def export_partial_message_files(realm: Realm, messages_we_sent_to_them, ] - all_message_ids = set() # type: Set[int] + all_message_ids: Set[int] = set() dump_file_id = 1 for message_query in message_queries: @@ -1109,14 +1109,14 @@ def write_message_partial_for_query(realm: Realm, message_query: Any, dump_file_ logging.info("Fetched Messages for %s" % (message_filename,)) # Clean up our messages. - table_data = {} # type: TableData + table_data: TableData = {} table_data['zerver_message'] = message_chunk floatify_datetime_fields(table_data, 'zerver_message') # Build up our output for the .partial file, which needs # a list of user_profile_ids to search for (as well as # the realm id). - output = {} # type: MessageOutput + output: MessageOutput = {} output['zerver_message'] = table_data['zerver_message'] output['zerver_userprofile_ids'] = list(user_profile_ids) output['realm_id'] = realm.id @@ -1270,7 +1270,7 @@ def export_files_from_s3(realm: Realm, bucket_name: str, output_dir: Path, bucket_list = bucket.list(prefix="%s/" % (realm.id,)) if settings.EMAIL_GATEWAY_BOT is not None: - email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT) # type: Optional[UserProfile] + email_gateway_bot: Optional[UserProfile] = get_system_bot(settings.EMAIL_GATEWAY_BOT) else: email_gateway_bot = None @@ -1451,7 +1451,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int, exportable_user_ids: Optional[Set[int]]=None, public_only: bool=False, consent_message_id: Optional[int]=None) -> str: - response = {} # type: TableData + response: TableData = {} # We need at least one thread running to export # UserMessage rows. 
The management command should @@ -1489,7 +1489,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int, logging.info('%d messages were exported' % (len(message_ids),)) # zerver_reaction - zerver_reaction = {} # type: TableData + zerver_reaction: TableData = {} fetch_reaction_data(response=zerver_reaction, message_ids=message_ids) response.update(zerver_reaction) @@ -1524,7 +1524,7 @@ def do_export_realm(realm: Realm, output_dir: Path, threads: int, return tarball_path def export_attachment_table(realm: Realm, output_dir: Path, message_ids: Set[int]) -> None: - response = {} # type: TableData + response: TableData = {} fetch_attachment_data(response=response, realm_id=realm.id, message_ids=message_ids) output_file = os.path.join(output_dir, "attachment.json") logging.info('Writing attachment table data to %s' % (output_file,)) @@ -1576,7 +1576,7 @@ def launch_user_message_subprocesses(threads: int, output_dir: Path, print('Shard %s finished, status %s' % (shard, status)) def do_export_user(user_profile: UserProfile, output_dir: Path) -> None: - response = {} # type: TableData + response: TableData = {} export_single_user(user_profile, response) export_file = os.path.join(output_dir, "user.json") @@ -1672,14 +1672,14 @@ def export_messages_single_user(user_profile: UserProfile, output_dir: Path, output = {'zerver_message': message_chunk} floatify_datetime_fields(output, 'zerver_message') - message_output = dict(output) # type: MessageOutput + message_output: MessageOutput = dict(output) write_message_export(message_filename, message_output) min_id = max(user_message_ids) dump_file_id += 1 def export_analytics_tables(realm: Realm, output_dir: Path) -> None: - response = {} # type: TableData + response: TableData = {} export_file = os.path.join(output_dir, "analytics.json") logging.info("Writing analytics table data to %s", (export_file)) diff --git a/zerver/lib/generate_test_data.py b/zerver/lib/generate_test_data.py index 802527d8ce..fcdeafd441 100644 --- a/zerver/lib/generate_test_data.py +++ b/zerver/lib/generate_test_data.py @@ -45,7 +45,7 @@ def parse_file(config: Dict[str, Any], gens: Dict[str, Any], corpus_file: str) - # First, load the entire file into a dictionary, # then apply our custom filters to it as needed. - paragraphs = [] # type: List[str] + paragraphs: List[str] = [] with open(corpus_file) as infile: # OUR DATA: we need to separate the person talking and what they say diff --git a/zerver/lib/hotspots.py b/zerver/lib/hotspots.py index 3899552edb..c37491f3d3 100644 --- a/zerver/lib/hotspots.py +++ b/zerver/lib/hotspots.py @@ -7,7 +7,7 @@ from zerver.models import UserProfile, UserHotspot from typing import List, Dict -ALL_HOTSPOTS = { +ALL_HOTSPOTS: Dict[str, Dict[str, str]] = { 'intro_reply': { 'title': _('Reply to a message'), 'description': _('Click anywhere on a message to reply.'), @@ -33,7 +33,7 @@ ALL_HOTSPOTS = { 'description': _('Click here to start a new conversation. 
Pick a topic ' '(2-3 words is best), and give it a go!'), }, -} # type: Dict[str, Dict[str, str]] +} def get_next_hotspots(user: UserProfile) -> List[Dict[str, object]]: # For manual testing, it can be convenient to set diff --git a/zerver/lib/import_realm.py b/zerver/lib/import_realm.py index 740af9c323..55720e00a2 100644 --- a/zerver/lib/import_realm.py +++ b/zerver/lib/import_realm.py @@ -54,7 +54,7 @@ realm_tables = [("zerver_defaultstream", DefaultStream, "defaultstream"), # # Code reviewers: give these tables extra scrutiny, as we need to # make sure to reload related tables AFTER we re-map the ids. -ID_MAP = { +ID_MAP: Dict[str, Dict[int, int]] = { 'client': {}, 'user_profile': {}, 'huddle': {}, @@ -87,15 +87,15 @@ ID_MAP = { 'analytics_realmcount': {}, 'analytics_streamcount': {}, 'analytics_usercount': {}, -} # type: Dict[str, Dict[int, int]] +} -id_map_to_list = { +id_map_to_list: Dict[str, Dict[int, List[int]]] = { 'huddle_to_user_list': {}, -} # type: Dict[str, Dict[int, List[int]]] +} -path_maps = { +path_maps: Dict[str, Dict[str, str]] = { 'attachment_path': {}, -} # type: Dict[str, Dict[str, str]] +} def update_id_map(table: TableName, old_id: int, new_id: int) -> None: if table not in ID_MAP: @@ -296,7 +296,7 @@ def fix_message_rendered_content(realm: Realm, # platforms, since they generally don't have an "alert # words" type feature, and notifications aren't important anyway. realm_alert_words_automaton = None - message_user_ids = set() # type: Set[int] + message_user_ids: Set[int] = set() rendered_content = do_render_markdown( message=cast(Message, message_object), @@ -604,7 +604,7 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av records_filename = os.path.join(import_dir, "records.json") with open(records_filename) as records_file: - records = ujson.load(records_file) # type: List[Dict[str, Any]] + records: List[Dict[str, Any]] = ujson.load(records_file) timestamp = datetime_to_timestamp(timezone_now()) re_map_foreign_keys_internal(records, 'records', 'realm_id', related_table="realm", @@ -696,7 +696,7 @@ def import_uploads(realm: Realm, import_dir: Path, processes: int, processing_av # set; that is OK, because those are never served # directly anyway. 
content_type = 'application/octet-stream' - headers = {'Content-Type': content_type} # type: Dict[str, Any] + headers: Dict[str, Any] = {'Content-Type': content_type} key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers) else: @@ -804,12 +804,12 @@ def do_import_realm(import_dir: Path, subdomain: str, processes: int=1) -> Realm realm = Realm(**data['zerver_realm'][0]) if realm.notifications_stream_id is not None: - notifications_stream_id = int(realm.notifications_stream_id) # type: Optional[int] + notifications_stream_id: Optional[int] = int(realm.notifications_stream_id) else: notifications_stream_id = None realm.notifications_stream_id = None if realm.signup_notifications_stream_id is not None: - signup_notifications_stream_id = int(realm.signup_notifications_stream_id) # type: Optional[int] + signup_notifications_stream_id: Optional[int] = int(realm.signup_notifications_stream_id) else: signup_notifications_stream_id = None realm.signup_notifications_stream_id = None @@ -1138,9 +1138,9 @@ def get_incoming_message_ids(import_dir: Path, ''' if sort_by_date: - tups = list() # type: List[Tuple[int, int]] + tups: List[Tuple[int, int]] = list() else: - message_ids = [] # type: List[int] + message_ids: List[int] = [] dump_file_id = 1 while True: @@ -1263,16 +1263,16 @@ def import_attachments(data: TableData) -> None: # We do this in a slightly convoluted way to anticipate # a future where we may need to call re_map_foreign_keys. - m2m_rows = [] # type: List[Record] + m2m_rows: List[Record] = [] for parent_row in data[parent_db_table_name]: for fk_id in parent_row[child_plural]: - m2m_row = {} # type: Record + m2m_row: Record = {} m2m_row[parent_singular] = parent_row['id'] m2m_row[child_singular] = ID_MAP['message'][fk_id] m2m_rows.append(m2m_row) # Create our table data for insert. - m2m_data = {m2m_table_name: m2m_rows} # type: TableData + m2m_data: TableData = {m2m_table_name: m2m_rows} convert_to_id_fields(m2m_data, m2m_table_name, parent_singular) convert_to_id_fields(m2m_data, m2m_table_name, child_singular) m2m_rows = m2m_data[m2m_table_name] diff --git a/zerver/lib/integrations.py b/zerver/lib/integrations.py index 9262f4a657..f5f515cb0d 100644 --- a/zerver/lib/integrations.py +++ b/zerver/lib/integrations.py @@ -29,7 +29,7 @@ Over time, we expect this registry to grow additional convenience features for writing and configuring integrations efficiently. 
""" -CATEGORIES = { +CATEGORIES: Dict[str, str] = { 'meta-integration': _('Integration frameworks'), 'continuous-integration': _('Continuous integration'), 'customer-support': _('Customer support'), @@ -44,7 +44,7 @@ CATEGORIES = { 'productivity': _('Productivity'), 'version-control': _('Version control'), 'bots': _('Interactive bots'), -} # type: Dict[str, str] +} class Integration: DEFAULT_LOGO_STATIC_PATH_PNG = 'images/integrations/logos/{name}.png' @@ -222,16 +222,16 @@ class EmbeddedBotIntegration(Integration): super().__init__( name, client_name, *args, **kwargs) -EMBEDDED_BOTS = [ +EMBEDDED_BOTS: List[EmbeddedBotIntegration] = [ EmbeddedBotIntegration('converter', []), EmbeddedBotIntegration('encrypt', []), EmbeddedBotIntegration('helloworld', []), EmbeddedBotIntegration('virtual_fs', []), EmbeddedBotIntegration('giphy', []), EmbeddedBotIntegration('followup', []), -] # type: List[EmbeddedBotIntegration] +] -WEBHOOK_INTEGRATIONS = [ +WEBHOOK_INTEGRATIONS: List[WebhookIntegration] = [ WebhookIntegration('airbrake', ['monitoring']), WebhookIntegration( 'alertmanager', @@ -358,9 +358,9 @@ WEBHOOK_INTEGRATIONS = [ WebhookIntegration('zabbix', ['monitoring'], display_name='Zabbix'), WebhookIntegration('gci', ['misc'], display_name='Google Code-in', stream_name='gci'), -] # type: List[WebhookIntegration] +] -INTEGRATIONS = { +INTEGRATIONS: Dict[str, Integration] = { 'asana': Integration('asana', 'asana', ['project-management'], doc='zerver/integrations/asana.md'), 'capistrano': Integration( 'capistrano', @@ -452,16 +452,16 @@ INTEGRATIONS = { # _ needed to get around adblock plus logo='images/integrations/logos/twitte_r.svg', doc='zerver/integrations/twitter.md'), -} # type: Dict[str, Integration] +} -BOT_INTEGRATIONS = [ +BOT_INTEGRATIONS: List[BotIntegration] = [ BotIntegration('github_detail', ['version-control', 'bots'], display_name='GitHub Detail'), BotIntegration('xkcd', ['bots', 'misc'], display_name='xkcd', logo='images/integrations/logos/xkcd.png'), -] # type: List[BotIntegration] +] -HUBOT_INTEGRATIONS = [ +HUBOT_INTEGRATIONS: List[HubotIntegration] = [ HubotIntegration('assembla', ['version-control', 'project-management'], display_name='Assembla', logo_alt='Assembla'), HubotIntegration('bonusly', ['hr']), @@ -480,7 +480,7 @@ HUBOT_INTEGRATIONS = [ HubotIntegration('youtube', ['misc'], display_name='YouTube', # _ needed to get around adblock plus logo='images/integrations/logos/youtub_e.svg'), -] # type: List[HubotIntegration] +] for hubot_integration in HUBOT_INTEGRATIONS: INTEGRATIONS[hubot_integration.name] = hubot_integration diff --git a/zerver/lib/message.py b/zerver/lib/message.py index aaf342f2c6..2d259c4ad9 100644 --- a/zerver/lib/message.py +++ b/zerver/lib/message.py @@ -113,7 +113,7 @@ def messages_for_ids(message_ids: List[int], extractor=extract_message_dict, setter=stringify_message_dict) - message_list = [] # type: List[Dict[str, Any]] + message_list: List[Dict[str, Any]] = [] for message_id in message_ids: msg_dict = message_dicts[message_id] @@ -499,11 +499,13 @@ class MessageDict: if len(display_recipient) == 1: # add the sender in if this isn't a message between # someone and themself, preserving ordering - recip = {'email': sender_email, - 'full_name': sender_full_name, - 'short_name': sender_short_name, - 'id': sender_id, - 'is_mirror_dummy': sender_is_mirror_dummy} # type: UserDisplayRecipient + recip: UserDisplayRecipient = { + 'email': sender_email, + 'full_name': sender_full_name, + 'short_name': sender_short_name, + 'id': sender_id, + 
'is_mirror_dummy': sender_is_mirror_dummy, + } if recip['email'] < display_recipient[0]['email']: display_recipient = [recip, display_recipient[0]] elif recip['email'] > display_recipient[0]['email']: @@ -658,7 +660,7 @@ def render_markdown(message: Message, ''' if user_ids is None: - message_user_ids = set() # type: Set[int] + message_user_ids: Set[int] = set() else: message_user_ids = user_ids @@ -719,21 +721,21 @@ def do_render_markdown(message: Message, return rendered_content def huddle_users(recipient_id: int) -> str: - display_recipient = get_display_recipient_by_id(recipient_id, - Recipient.HUDDLE, - None) # type: DisplayRecipientT + display_recipient: DisplayRecipientT = get_display_recipient_by_id( + recipient_id, Recipient.HUDDLE, None + ) # str is for streams. assert not isinstance(display_recipient, str) - user_ids = [obj['id'] for obj in display_recipient] # type: List[int] + user_ids: List[int] = [obj['id'] for obj in display_recipient] user_ids = sorted(user_ids) return ','.join(str(uid) for uid in user_ids) def aggregate_message_dict(input_dict: Dict[int, Dict[str, Any]], lookup_fields: List[str], collect_senders: bool) -> List[Dict[str, Any]]: - lookup_dict = dict() # type: Dict[Tuple[Any, ...], Dict[str, Any]] + lookup_dict: Dict[Tuple[Any, ...], Dict[str, Any]] = dict() ''' A concrete example might help explain the inputs here: @@ -862,7 +864,7 @@ def get_raw_unread_data(user_profile: UserProfile) -> RawUnreadMessagesResult: return False - huddle_cache = {} # type: Dict[int, str] + huddle_cache: Dict[int, str] = {} def get_huddle_users(recipient_id: int) -> str: if recipient_id in huddle_cache: @@ -975,12 +977,12 @@ def aggregate_unread_data(raw_data: RawUnreadMessagesResult) -> UnreadMessagesRe collect_senders=False, ) - result = dict( + result: UnreadMessagesResult = dict( pms=pm_objects, streams=stream_objects, huddles=huddle_objects, mentions=mentions, - count=count) # type: UnreadMessagesResult + count=count) return result diff --git a/zerver/lib/narrow.py b/zerver/lib/narrow.py index 478fd95439..d3c080a8e5 100644 --- a/zerver/lib/narrow.py +++ b/zerver/lib/narrow.py @@ -9,7 +9,7 @@ from django.utils.translation import ugettext as _ from typing import Any, Callable, Dict, Iterable, List, Mapping, Optional, Sequence -stop_words_list = None # type: Optional[List[str]] +stop_words_list: Optional[List[str]] = None def read_stop_words() -> List[str]: global stop_words_list if stop_words_list is None: diff --git a/zerver/lib/onboarding.py b/zerver/lib/onboarding.py index 5edf3bffa1..c12b4410e0 100644 --- a/zerver/lib/onboarding.py +++ b/zerver/lib/onboarding.py @@ -74,7 +74,7 @@ def send_initial_realm_messages(realm: Realm) -> None: # Make sure each stream created in the realm creation process has at least one message below # Order corresponds to the ordering of the streams on the left sidebar, to make the initial Home # view slightly less overwhelming - welcome_messages = [ + welcome_messages: List[Dict[str, str]] = [ {'stream': Realm.INITIAL_PRIVATE_STREAM_NAME, 'topic': "private streams", 'content': "This is a private stream, as indicated by the " @@ -96,7 +96,7 @@ def send_initial_realm_messages(realm: Realm) -> None: "\n\n[](/static/images/cute/turtle.png)" "\n\n[Start a new topic](/help/start-a-new-topic) any time you're not replying to a " "previous message."}, - ] # type: List[Dict[str, str]] + ] messages = [internal_prep_stream_message_by_name( realm, welcome_bot, message['stream'], message['topic'], message['content'] % { diff --git 
a/zerver/lib/outgoing_webhook.py b/zerver/lib/outgoing_webhook.py index e5f80bf1fd..1b31ebdf71 100644 --- a/zerver/lib/outgoing_webhook.py +++ b/zerver/lib/outgoing_webhook.py @@ -21,9 +21,9 @@ from version import ZULIP_VERSION class OutgoingWebhookServiceInterface: def __init__(self, token: str, user_profile: UserProfile, service_name: str) -> None: - self.token = token # type: str - self.user_profile = user_profile # type: UserProfile - self.service_name = service_name # type: str + self.token: str = token + self.user_profile: UserProfile = user_profile + self.service_name: str = service_name class GenericOutgoingWebhookService(OutgoingWebhookServiceInterface): @@ -118,10 +118,10 @@ class SlackOutgoingWebhookService(OutgoingWebhookServiceInterface): return None -AVAILABLE_OUTGOING_WEBHOOK_INTERFACES = { +AVAILABLE_OUTGOING_WEBHOOK_INTERFACES: Dict[str, Any] = { GENERIC_INTERFACE: GenericOutgoingWebhookService, SLACK_INTERFACE: SlackOutgoingWebhookService, -} # type: Dict[str, Any] +} def get_service_interface_class(interface: str) -> Any: if interface is None or interface not in AVAILABLE_OUTGOING_WEBHOOK_INTERFACES: diff --git a/zerver/lib/parallel.py b/zerver/lib/parallel.py index a0ab91275e..91765874bf 100644 --- a/zerver/lib/parallel.py +++ b/zerver/lib/parallel.py @@ -10,7 +10,7 @@ JobData = TypeVar('JobData') def run_parallel(job: Callable[[JobData], int], data: Iterable[JobData], threads: int=6) -> Iterator[Tuple[int, JobData]]: - pids = {} # type: Dict[int, JobData] + pids: Dict[int, JobData] = {} def wait_for_one() -> Tuple[int, JobData]: while True: diff --git a/zerver/lib/presence.py b/zerver/lib/presence.py index 40d4c1f6dd..63b32272d6 100644 --- a/zerver/lib/presence.py +++ b/zerver/lib/presence.py @@ -40,7 +40,7 @@ def get_status_dicts_for_rows(all_rows: List[Dict[str, Any]], get_user_key = lambda row: row['user_profile__email'] get_user_info = get_legacy_user_info - user_statuses = dict() # type: Dict[str, Dict[str, Any]] + user_statuses: Dict[str, Dict[str, Any]] = dict() for user_key, presence_rows in itertools.groupby(all_rows, get_user_key): info = get_user_info( @@ -137,7 +137,7 @@ def get_presence_for_user(user_profile_id: int, ) presence_rows = list(query) - mobile_user_ids = set() # type: Set[int] + mobile_user_ids: Set[int] = set() if PushDeviceToken.objects.filter(user_id=user_profile_id).exists(): # nocoverage # TODO: Add a test, though this is low priority, since we don't use mobile_user_ids yet. 
mobile_user_ids.add(user_profile_id) diff --git a/zerver/lib/profile.py b/zerver/lib/profile.py index e020f04a3d..200a434e5d 100644 --- a/zerver/lib/profile.py +++ b/zerver/lib/profile.py @@ -26,7 +26,7 @@ def profiled(func: Callable[..., ReturnT]) -> Callable[..., ReturnT]: def wrapped_func(*args: Any, **kwargs: Any) -> ReturnT: fn = func.__name__ + ".profile" prof = cProfile.Profile() - retval = prof.runcall(func, *args, **kwargs) # type: ReturnT + retval: ReturnT = prof.runcall(func, *args, **kwargs) prof.dump_stats(fn) return retval return wrapped_func diff --git a/zerver/lib/push_notifications.py b/zerver/lib/push_notifications.py index 868d4f0066..b246d4bb54 100644 --- a/zerver/lib/push_notifications.py +++ b/zerver/lib/push_notifications.py @@ -52,7 +52,7 @@ def hex_to_b64(data: str) -> str: # Sending to APNs, for iOS # -_apns_client = None # type: Optional[APNsClient] +_apns_client: Optional["APNsClient"] = None _apns_client_initialized = False def get_apns_client() -> 'Optional[APNsClient]': @@ -547,7 +547,7 @@ def truncate_content(content: str) -> Tuple[str, bool]: def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]: '''Common fields for all notification payloads.''' - data = {} # type: Dict[str, Any] + data: Dict[str, Any] = {} # These will let the app support logging into multiple realms and servers. data['server'] = settings.EXTERNAL_HOST diff --git a/zerver/lib/queue.py b/zerver/lib/queue.py index 7dcb838799..15b92fa9d8 100644 --- a/zerver/lib/queue.py +++ b/zerver/lib/queue.py @@ -28,9 +28,9 @@ class SimpleQueueClient: rabbitmq_heartbeat: Optional[int] = 0, ) -> None: self.log = logging.getLogger('zulip.queue') - self.queues = set() # type: Set[str] - self.channel = None # type: Optional[BlockingChannel] - self.consumers = defaultdict(set) # type: Dict[str, Set[Consumer]] + self.queues: Set[str] = set() + self.channel: Optional[BlockingChannel] = None + self.consumers: Dict[str, Set[Consumer]] = defaultdict(set) self.rabbitmq_heartbeat = rabbitmq_heartbeat self._connect() @@ -205,7 +205,7 @@ class TornadoQueueClient(SimpleQueueClient): super().__init__( # TornadoConnection can process heartbeats, so enable them. 
rabbitmq_heartbeat=None) - self._on_open_cbs = [] # type: List[Callable[[], None]] + self._on_open_cbs: List[Callable[[], None]] = [] self._connection_failure_count = 0 def _connect(self) -> None: @@ -308,7 +308,7 @@ class TornadoQueueClient(SimpleQueueClient): lambda: self.channel.basic_consume(queue_name, wrapped_consumer, consumer_tag=self._generate_ctag(queue_name))) -queue_client = None # type: Optional[SimpleQueueClient] +queue_client: Optional[SimpleQueueClient] = None def get_queue_client() -> SimpleQueueClient: global queue_client if queue_client is None: diff --git a/zerver/lib/rate_limiter.py b/zerver/lib/rate_limiter.py index 06da8519bb..4eaad9d7bd 100644 --- a/zerver/lib/rate_limiter.py +++ b/zerver/lib/rate_limiter.py @@ -19,7 +19,7 @@ import time # https://www.domaintools.com/resources/blog/rate-limiting-with-redis client = get_redis_client() -rules = settings.RATE_LIMITING_RULES # type: Dict[str, List[Tuple[int, int]]] +rules: Dict[str, List[Tuple[int, int]]] = settings.RATE_LIMITING_RULES KEY_PREFIX = '' @@ -31,7 +31,7 @@ class RateLimiterLockingException(Exception): class RateLimitedObject(ABC): def __init__(self, backend: Optional['Type[RateLimiterBackend]']=None) -> None: if backend is not None: - self.backend = backend # type: Type[RateLimiterBackend] + self.backend: Type[RateLimiterBackend] = backend elif settings.RUNNING_INSIDE_TORNADO: self.backend = TornadoInMemoryRateLimiterBackend else: @@ -119,7 +119,7 @@ class RateLimitedUser(RateLimitedObject): def rules(self) -> List[Tuple[int, int]]: # user.rate_limits are general limits, applicable to the domain 'api_by_user' if self.user.rate_limits != "" and self.domain == 'api_by_user': - result = [] # type: List[Tuple[int, int]] + result: List[Tuple[int, int]] = [] for limit in self.user.rate_limits.split(','): (seconds, requests) = limit.split(':', 2) result.append((int(seconds), int(requests))) @@ -179,15 +179,15 @@ class RateLimiterBackend(ABC): class TornadoInMemoryRateLimiterBackend(RateLimiterBackend): # reset_times[rule][key] is the time at which the event # request from the rate-limited key will be accepted. - reset_times = {} # type: Dict[Tuple[int, int], Dict[str, float]] + reset_times: Dict[Tuple[int, int], Dict[str, float]] = {} # last_gc_time is the last time when the garbage was # collected from reset_times for rule (time_window, max_count). - last_gc_time = {} # type: Dict[Tuple[int, int], float] + last_gc_time: Dict[Tuple[int, int], float] = {} # timestamps_blocked_until[key] contains the timestamp # up to which the key has been blocked manually. 
- timestamps_blocked_until = {} # type: Dict[str, float] + timestamps_blocked_until: Dict[str, float] = {} @classmethod def _garbage_collect_for_rule(cls, now: float, time_window: int, max_count: int) -> None: @@ -333,8 +333,8 @@ class RedisRateLimiterBackend(RateLimiterBackend): results = pipe.execute() - count = results[0] # type: int - newest_call = results[1] # type: Optional[bytes] + count: int = results[0] + newest_call: Optional[bytes] = results[1] calls_left = max_calls - count if newest_call is not None: @@ -361,7 +361,7 @@ class RedisRateLimiterBackend(RateLimiterBackend): pipe.get(blocking_key) pipe.ttl(blocking_key) - rule_timestamps = pipe.execute() # type: List[Optional[bytes]] + rule_timestamps: List[Optional[bytes]] = pipe.execute() # Check if there is a manual block on this API key blocking_ttl_b = rule_timestamps.pop() diff --git a/zerver/lib/request.py b/zerver/lib/request.py index 6620057f90..d9377bf259 100644 --- a/zerver/lib/request.py +++ b/zerver/lib/request.py @@ -19,8 +19,8 @@ class RequestConfusingParmsError(JsonableError): data_fields = ['var_name1', 'var_name2'] def __init__(self, var_name1: str, var_name2: str) -> None: - self.var_name1 = var_name1 # type: str - self.var_name2 = var_name2 # type: str + self.var_name1: str = var_name1 + self.var_name2: str = var_name2 @staticmethod def msg_format() -> str: @@ -31,7 +31,7 @@ class RequestVariableMissingError(JsonableError): data_fields = ['var_name'] def __init__(self, var_name: str) -> None: - self.var_name = var_name # type: str + self.var_name: str = var_name @staticmethod def msg_format() -> str: @@ -42,7 +42,7 @@ class RequestVariableConversionError(JsonableError): data_fields = ['var_name', 'bad_value'] def __init__(self, var_name: str, bad_value: Any) -> None: - self.var_name = var_name # type: str + self.var_name: str = var_name self.bad_value = bad_value @staticmethod @@ -106,7 +106,7 @@ class _REQ(Generic[ResultT]): """ self.post_var_name = whence - self.func_var_name = None # type: Optional[str] + self.func_var_name: Optional[str] = None self.converter = converter self.validator = validator self.str_validator = str_validator @@ -237,7 +237,7 @@ def REQ( path_only=path_only, )) -arguments_map = defaultdict(list) # type: Dict[str, List[str]] +arguments_map: Dict[str, List[str]] = defaultdict(list) # Extracts variables from the request object and passes them as # named function arguments. The request object must be the first @@ -314,7 +314,7 @@ def has_request_variables(view_func: ViewFuncT) -> ViewFuncT: default_assigned = False - post_var_name = None # type: Optional[str] + post_var_name: Optional[str] = None for req_var in post_var_names: if req_var in request.POST: diff --git a/zerver/lib/rest.py b/zerver/lib/rest.py index d567a51ec1..37b36264ab 100644 --- a/zerver/lib/rest.py +++ b/zerver/lib/rest.py @@ -57,7 +57,7 @@ def rest_dispatch(request: HttpRequest, **kwargs: Any) -> HttpResponse: Never make a urls.py pattern put user input into a variable called GET, POST, etc, as that is where we route HTTP verbs to target functions. 
""" - supported_methods = {} # type: Dict[str, Any] + supported_methods: Dict[str, Any] = {} if hasattr(request, "saved_response"): # For completing long-polled Tornado requests, we skip the diff --git a/zerver/lib/retention.py b/zerver/lib/retention.py index 48952aa915..d2f46cfc58 100644 --- a/zerver/lib/retention.py +++ b/zerver/lib/retention.py @@ -20,7 +20,7 @@ log_to_file(logger, settings.RETENTION_LOG_PATH) MESSAGE_BATCH_SIZE = 1000 -models_with_message_key = [ +models_with_message_key: List[Dict[str, Any]] = [ { 'class': Reaction, 'archive_class': ArchivedReaction, @@ -39,7 +39,7 @@ models_with_message_key = [ 'table_name': 'zerver_usermessage', 'archive_table_name': 'zerver_archivedusermessage' }, -] # type: List[Dict[str, Any]] +] @transaction.atomic(savepoint=False) def move_rows(base_model: Model, raw_query: str, src_db_table: str='', returning_id: bool=False, @@ -280,7 +280,7 @@ def archive_stream_messages(realm: Realm, chunk_size: int=MESSAGE_BATCH_SIZE) -> if not realm.message_retention_days: streams = streams.exclude(message_retention_days__isnull=True) - retention_policy_dict = {} # type: Dict[int, int] + retention_policy_dict: Dict[int, int] = {} for stream in streams: # if stream.message_retention_days is null, use the realm's policy if stream.message_retention_days: diff --git a/zerver/lib/soft_deactivation.py b/zerver/lib/soft_deactivation.py index 0fbb14a1aa..727ea91a82 100644 --- a/zerver/lib/soft_deactivation.py +++ b/zerver/lib/soft_deactivation.py @@ -20,7 +20,7 @@ def filter_by_subscription_history(user_profile: UserProfile, all_stream_messages: DefaultDict[int, List[Message]], all_stream_subscription_logs: DefaultDict[int, List[RealmAuditLog]], ) -> List[UserMessage]: - user_messages_to_insert = [] # type: List[UserMessage] + user_messages_to_insert: List[UserMessage] = [] def store_user_message_to_insert(message: Message) -> None: message = UserMessage(user_profile=user_profile, @@ -149,7 +149,7 @@ def add_missing_messages(user_profile: UserProfile) -> None: modified_stream__id__in=stream_ids, event_type__in=events).order_by('event_last_message_id', 'id')) - all_stream_subscription_logs = defaultdict(list) # type: DefaultDict[int, List[RealmAuditLog]] + all_stream_subscription_logs: DefaultDict[int, List[RealmAuditLog]] = defaultdict(list) for log in subscription_logs: all_stream_subscription_logs[log.modified_stream_id].append(log) @@ -177,7 +177,7 @@ def add_missing_messages(user_profile: UserProfile) -> None: all_stream_msgs = [msg for msg in all_stream_msgs if msg['id'] not in already_created_ums] - stream_messages = defaultdict(list) # type: DefaultDict[int, List[Message]] + stream_messages: DefaultDict[int, List[Message]] = defaultdict(list) for msg in all_stream_msgs: stream_messages[msg['recipient__type_id']].append(msg) @@ -240,7 +240,7 @@ def do_soft_deactivate_users(users: List[UserProfile]) -> List[UserProfile]: return users_soft_deactivated def do_auto_soft_deactivate_users(inactive_for_days: int, realm: Optional[Realm]) -> List[UserProfile]: - filter_kwargs = {} # type: Dict[str, Realm] + filter_kwargs: Dict[str, Realm] = {} if realm is not None: filter_kwargs = dict(user_profile__realm=realm) users_to_deactivate = get_users_for_soft_deactivation(inactive_for_days, filter_kwargs) diff --git a/zerver/lib/sqlalchemy_utils.py b/zerver/lib/sqlalchemy_utils.py index 29cb40dd2f..1770f70c09 100644 --- a/zerver/lib/sqlalchemy_utils.py +++ b/zerver/lib/sqlalchemy_utils.py @@ -23,7 +23,7 @@ class NonClosingPool(sqlalchemy.pool.NullPool): 
logging_name=self._orig_logging_name, _dispatch=self.dispatch) -sqlalchemy_engine = None # type: Optional[Any] +sqlalchemy_engine: Optional[Any] = None def get_sqlalchemy_connection() -> sqlalchemy.engine.base.Connection: global sqlalchemy_engine if sqlalchemy_engine is None: diff --git a/zerver/lib/stream_recipient.py b/zerver/lib/stream_recipient.py index 55142e8e6e..485eb06315 100644 --- a/zerver/lib/stream_recipient.py +++ b/zerver/lib/stream_recipient.py @@ -18,8 +18,8 @@ class StreamRecipientMap: ''' def __init__(self) -> None: - self.recip_to_stream = dict() # type: Dict[int, int] - self.stream_to_recip = dict() # type: Dict[int, int] + self.recip_to_stream: Dict[int, int] = dict() + self.stream_to_recip: Dict[int, int] = dict() def populate_with(self, *, stream_id: int, recipient_id: int) -> None: # We use * to enforce using named arguments when calling this function, diff --git a/zerver/lib/stream_subscription.py b/zerver/lib/stream_subscription.py index dd17823592..a304daec81 100644 --- a/zerver/lib/stream_subscription.py +++ b/zerver/lib/stream_subscription.py @@ -51,10 +51,10 @@ def get_bulk_stream_subscriber_info( stream_ids = stream_dict.keys() - result = { + result: Dict[int, List[Tuple[Subscription, Stream]]] = { user_profile.id: [] for user_profile in user_profiles - } # type: Dict[int, List[Tuple[Subscription, Stream]]] + } subs = Subscription.objects.filter( user_profile__in=user_profiles, diff --git a/zerver/lib/streams.py b/zerver/lib/streams.py index c41537e4e4..a74a8e44b6 100644 --- a/zerver/lib/streams.py +++ b/zerver/lib/streams.py @@ -86,8 +86,8 @@ def create_streams_if_needed(realm: Realm, stream_dicts: List[Mapping[str, Any]]) -> Tuple[List[Stream], List[Stream]]: """Note that stream_dict["name"] is assumed to already be stripped of whitespace""" - added_streams = [] # type: List[Stream] - existing_streams = [] # type: List[Stream] + added_streams: List[Stream] = [] + existing_streams: List[Stream] = [] for stream_dict in stream_dicts: stream, created = create_stream_if_needed( realm, @@ -361,7 +361,7 @@ def can_access_stream_history_by_id(user_profile: UserProfile, stream_id: int) - def filter_stream_authorization(user_profile: UserProfile, streams: Iterable[Stream]) -> Tuple[List[Stream], List[Stream]]: - streams_subscribed = set() # type: Set[int] + streams_subscribed: Set[int] = set() recipient_ids = [stream.recipient_id for stream in streams] subs = Subscription.objects.filter(user_profile=user_profile, recipient_id__in=recipient_ids, @@ -370,7 +370,7 @@ def filter_stream_authorization(user_profile: UserProfile, for sub in subs: streams_subscribed.add(sub.recipient.type_id) - unauthorized_streams = [] # type: List[Stream] + unauthorized_streams: List[Stream] = [] for stream in streams: # The user is authorized for their own streams if stream.id in streams_subscribed: @@ -411,8 +411,8 @@ def list_to_streams(streams_raw: Iterable[Mapping[str, Any]], assert stream_name == stream_name.strip() check_stream_name(stream_name) - existing_streams = [] # type: List[Stream] - missing_stream_dicts = [] # type: List[Mapping[str, Any]] + existing_streams: List[Stream] = [] + missing_stream_dicts: List[Mapping[str, Any]] = [] existing_stream_map = bulk_get_streams(user_profile.realm, stream_set) member_creating_announcement_only_stream = False @@ -438,7 +438,7 @@ def list_to_streams(streams_raw: Iterable[Mapping[str, Any]], if len(missing_stream_dicts) == 0: # This is the happy path for callers who expected all of these # streams to exist already. 
- created_streams = [] # type: List[Stream] + created_streams: List[Stream] = [] else: # autocreate=True path starts here if not user_profile.can_create_streams(): diff --git a/zerver/lib/test_classes.py b/zerver/lib/test_classes.py index 85c2959786..84d5d657d1 100644 --- a/zerver/lib/test_classes.py +++ b/zerver/lib/test_classes.py @@ -96,11 +96,11 @@ class UploadSerializeMixin(SerializeMixin): class ZulipTestCase(TestCase): # Ensure that the test system just shows us diffs - maxDiff = None # type: Optional[int] + maxDiff: Optional[int] = None def setUp(self) -> None: super().setUp() - self.API_KEYS = {} # type: Dict[str, str] + self.API_KEYS: Dict[str, str] = {} def tearDown(self) -> None: super().tearDown() @@ -868,7 +868,7 @@ class ZulipTestCase(TestCase): if binary: with open(attr_value, "rb") as f: # attr_value should be a path to the file with the binary data - data = f.read() # type: Union[str, bytes] + data: Union[str, bytes] = f.read() else: data = attr_value @@ -893,10 +893,10 @@ class WebhookTestCase(ZulipTestCase): If you create your url in uncommon way you can override build_webhook_url method In case that you need modify body or create it without using fixture you can also override get_body method """ - STREAM_NAME = None # type: Optional[str] + STREAM_NAME: Optional[str] = None TEST_USER_EMAIL = 'webhook-bot@zulip.com' - URL_TEMPLATE = None # type: Optional[str] - FIXTURE_DIR_NAME = None # type: Optional[str] + URL_TEMPLATE: Optional[str] = None + FIXTURE_DIR_NAME: Optional[str] = None @property def test_user(self) -> UserProfile: @@ -989,14 +989,14 @@ class MigrationsTestCase(ZulipTestCase): # nocoverage def app(self) -> str: return apps.get_containing_app_config(type(self).__module__).name - migrate_from = None # type: Optional[str] - migrate_to = None # type: Optional[str] + migrate_from: Optional[str] = None + migrate_to: Optional[str] = None def setUp(self) -> None: assert self.migrate_from and self.migrate_to, \ "TestCase '{}' must define migrate_from and migrate_to properties".format(type(self).__name__) - migrate_from = [(self.app, self.migrate_from)] # type: List[Tuple[str, str]] - migrate_to = [(self.app, self.migrate_to)] # type: List[Tuple[str, str]] + migrate_from: List[Tuple[str, str]] = [(self.app, self.migrate_from)] + migrate_to: List[Tuple[str, str]] = [(self.app, self.migrate_to)] executor = MigrationExecutor(connection) old_apps = executor.loader.project_state(migrate_from).apps diff --git a/zerver/lib/test_fixtures.py b/zerver/lib/test_fixtures.py index fa5b2c9f7e..59961af0e0 100644 --- a/zerver/lib/test_fixtures.py +++ b/zerver/lib/test_fixtures.py @@ -321,7 +321,7 @@ def destroy_leaked_test_databases(expiry_time: int = 60 * 60) -> int: while also ensuring we will eventually delete all leaked databases. 
""" files = glob.glob(os.path.join(UUID_VAR_DIR, TEMPLATE_DATABASE_DIR, "*")) - test_databases = set() # type: Set[str] + test_databases: Set[str] = set() try: with connection.cursor() as cursor: cursor.execute("SELECT datname FROM pg_database;") @@ -332,7 +332,7 @@ def destroy_leaked_test_databases(expiry_time: int = 60 * 60) -> int: except ProgrammingError: pass - databases_in_use = set() # type: Set[str] + databases_in_use: Set[str] = set() for file in files: if round(time.time()) - os.path.getmtime(file) < expiry_time: with open(file) as f: diff --git a/zerver/lib/test_helpers.py b/zerver/lib/test_helpers.py index fb2dd65eaf..cc2686d5d8 100644 --- a/zerver/lib/test_helpers.py +++ b/zerver/lib/test_helpers.py @@ -117,7 +117,7 @@ def capture_event(event_info: EventInfo) -> Iterator[None]: @contextmanager def simulated_empty_cache() -> Generator[ List[Tuple[str, Union[str, List[str]], str]], None, None]: - cache_queries = [] # type: List[Tuple[str, Union[str, List[str]], str]] + cache_queries: List[Tuple[str, Union[str, List[str]], str]] = [] def my_cache_get(key: str, cache_name: Optional[str]=None) -> Optional[Dict[str, Any]]: cache_queries.append(('get', key, cache_name)) @@ -143,7 +143,7 @@ def queries_captured(include_savepoints: Optional[bool]=False) -> Generator[ the with statement. ''' - queries = [] # type: List[Dict[str, Union[str, bytes]]] + queries: List[Dict[str, Union[str, bytes]]] = [] def wrapper_execute(self: TimeTrackingCursor, action: Callable[[str, Iterable[Any]], None], @@ -258,19 +258,19 @@ class POSTRequestMock: method = "POST" def __init__(self, post_data: Dict[str, Any], user_profile: Optional[UserProfile]) -> None: - self.GET = {} # type: Dict[str, Any] + self.GET: Dict[str, Any] = {} # Convert any integer parameters passed into strings, even # though of course the HTTP API would do so. Ideally, we'd # get rid of this abstraction entirely and just use the HTTP # API directly, but while it exists, we need this code. - self.POST = {} # type: Dict[str, str] + self.POST: Dict[str, str] = {} for key in post_data: self.POST[key] = str(post_data[key]) self.user = user_profile self._tornado_handler = DummyHandler() - self._log_data = {} # type: Dict[str, Any] + self._log_data: Dict[str, Any] = {} self.META = {'PATH_INFO': 'test'} self.path = '' @@ -280,8 +280,8 @@ class HostRequestMock: def __init__(self, user_profile: UserProfile=None, host: str=settings.EXTERNAL_HOST) -> None: self.host = host - self.GET = {} # type: Dict[str, Any] - self.POST = {} # type: Dict[str, Any] + self.GET: Dict[str, Any] = {} + self.POST: Dict[str, Any] = {} self.META = {'PATH_INFO': 'test'} self.path = '' self.user = user_profile @@ -309,7 +309,7 @@ class MockPythonResponse: INSTRUMENTING = os.environ.get('TEST_INSTRUMENT_URL_COVERAGE', '') == 'TRUE' -INSTRUMENTED_CALLS = [] # type: List[Dict[str, Any]] +INSTRUMENTED_CALLS: List[Dict[str, Any]] = [] UrlFuncT = Callable[..., HttpResponse] # TODO: make more specific @@ -350,7 +350,7 @@ def write_instrumentation_reports(full_suite: bool, include_webhooks: bool) -> N from zproject.urls import urlpatterns, v1_api_and_json_patterns # Find our untested urls. 
- pattern_cnt = collections.defaultdict(int) # type: Dict[str, int] + pattern_cnt: Dict[str, int] = collections.defaultdict(int) def re_strip(r: Any) -> str: return str(r).lstrip('^').rstrip('$') diff --git a/zerver/lib/test_runner.py b/zerver/lib/test_runner.py index 73b0d69662..5db1f70dc1 100644 --- a/zerver/lib/test_runner.py +++ b/zerver/lib/test_runner.py @@ -80,7 +80,7 @@ def get_test_method(test: TestCase) -> Callable[[], None]: return getattr(test, test._testMethodName) # Each tuple is delay, test_name, slowness_reason -TEST_TIMINGS = [] # type: List[Tuple[float, str, str]] +TEST_TIMINGS: List[Tuple[float, str, str]] = [] def report_slow_tests() -> None: @@ -154,7 +154,7 @@ class TextTestResult(runner.TextTestResult): def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) - self.failed_tests = [] # type: List[str] + self.failed_tests: List[str] = [] def addInfo(self, test: TestCase, msg: str) -> None: self.stream.write(msg) # type: ignore[attr-defined] # https://github.com/python/typeshed/issues/3139 @@ -416,10 +416,10 @@ class Runner(DiscoverRunner): # `templates_rendered` holds templates which were rendered # in proper logical tests. - self.templates_rendered = set() # type: Set[str] + self.templates_rendered: Set[str] = set() # `shallow_tested_templates` holds templates which were rendered # in `zerver.tests.test_templates`. - self.shallow_tested_templates = set() # type: Set[str] + self.shallow_tested_templates: Set[str] = set() template_rendered.connect(self.on_template_rendered) def get_resultclass(self) -> Type[TestResult]: diff --git a/zerver/lib/timeout.py b/zerver/lib/timeout.py index 9410c6ae42..84418df695 100644 --- a/zerver/lib/timeout.py +++ b/zerver/lib/timeout.py @@ -35,8 +35,12 @@ def timeout(timeout: float, func: Callable[..., ResultT], *args: Any, **kwargs: class TimeoutThread(threading.Thread): def __init__(self) -> None: threading.Thread.__init__(self) - self.result = None # type: Optional[ResultT] - self.exc_info = (None, None, None) # type: Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] + self.result: Optional[ResultT] = None + self.exc_info: Tuple[ + Optional[Type[BaseException]], + Optional[BaseException], + Optional[TracebackType], + ] = (None, None, None) # Don't block the whole program from exiting # if this is the only thread left. 
diff --git a/zerver/lib/topic.py b/zerver/lib/topic.py index ae68da3064..caf594e3d1 100644 --- a/zerver/lib/topic.py +++ b/zerver/lib/topic.py @@ -161,7 +161,7 @@ def update_messages_for_topic_edit(message: Message, return messages_list def generate_topic_history_from_db_rows(rows: List[Tuple[str, int]]) -> List[Dict[str, Any]]: - canonical_topic_names = {} # type: Dict[str, Tuple[int, str]] + canonical_topic_names: Dict[str, Tuple[int, str]] = {} # Sort rows by max_message_id so that if a topic # has many different casings, we use the most diff --git a/zerver/lib/unminify.py b/zerver/lib/unminify.py index 1a99cd56b9..28b0b6b680 100644 --- a/zerver/lib/unminify.py +++ b/zerver/lib/unminify.py @@ -10,7 +10,7 @@ class SourceMap: def __init__(self, sourcemap_dirs: List[str]) -> None: self._dirs = sourcemap_dirs - self._indices = {} # type: Dict[str, sourcemap.SourceMapDecoder] + self._indices: Dict[str, sourcemap.SourceMapDecoder] = {} def _index_for(self, minified_src: str) -> sourcemap.SourceMapDecoder: '''Return the source map index for minified_src, loading it if not @@ -26,7 +26,7 @@ class SourceMap: return self._indices[minified_src] def annotate_stacktrace(self, stacktrace: str) -> str: - out = '' # type: str + out: str = '' for ln in stacktrace.splitlines(): out += ln + '\n' match = re.search(r'/static/webpack-bundles/([^:]+):(\d+):(\d+)', ln) diff --git a/zerver/lib/upload.py b/zerver/lib/upload.py index 3c12f6cc1d..8c6982ad70 100644 --- a/zerver/lib/upload.py +++ b/zerver/lib/upload.py @@ -337,7 +337,7 @@ def get_signed_upload_url(path: str) -> str: def get_realm_for_filename(path: str) -> Optional[int]: conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY) - key = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path) # type: Optional[Key] + key: Optional[Key] = get_bucket(conn, settings.S3_AUTH_UPLOADS_BUCKET).get_key(path) if key is None: # This happens if the key does not exist. return None @@ -351,7 +351,7 @@ class S3UploadBackend(ZulipUploadBackend): bucket = get_bucket(self.connection, bucket_name) # check if file exists - key = bucket.get_key(path_id) # type: Optional[Key] + key: Optional[Key] = bucket.get_key(path_id) if key is not None: bucket.delete_key(key) return True @@ -860,7 +860,7 @@ class LocalUploadBackend(ZulipUploadBackend): # Common and wrappers if settings.LOCAL_UPLOADS_DIR is not None: - upload_backend = LocalUploadBackend() # type: ZulipUploadBackend + upload_backend: ZulipUploadBackend = LocalUploadBackend() else: upload_backend = S3UploadBackend() # nocoverage diff --git a/zerver/lib/user_groups.py b/zerver/lib/user_groups.py index 816b954b7f..9aec110b3c 100644 --- a/zerver/lib/user_groups.py +++ b/zerver/lib/user_groups.py @@ -26,7 +26,7 @@ def user_groups_in_realm_serialized(realm: Realm) -> List[Dict[str, Any]]: UserGroup and UserGroupMembership that we need. 
""" realm_groups = UserGroup.objects.filter(realm=realm) - group_dicts = {} # type: Dict[str, Any] + group_dicts: Dict[str, Any] = {} for user_group in realm_groups: group_dicts[user_group.id] = dict( id=user_group.id, diff --git a/zerver/lib/user_status.py b/zerver/lib/user_status.py index 3c44acbca7..eefcec3b16 100644 --- a/zerver/lib/user_status.py +++ b/zerver/lib/user_status.py @@ -20,7 +20,7 @@ def get_user_info_dict(realm_id: int) -> Dict[int, Dict[str, Any]]: 'status_text', ) - user_dict = dict() # type: Dict[int, Dict[str, Any]] + user_dict: Dict[int, Dict[str, Any]] = dict() for row in rows: away = row['status'] == UserStatus.AWAY status_text = row['status_text'] diff --git a/zerver/lib/users.py b/zerver/lib/users.py index d32125d591..dd7ea43e35 100644 --- a/zerver/lib/users.py +++ b/zerver/lib/users.py @@ -169,11 +169,11 @@ def user_ids_to_users(user_ids: List[int], realm: Realm) -> List[UserProfile]: def fetch_users_by_id(user_ids: List[int]) -> List[UserProfile]: return list(UserProfile.objects.filter(id__in=user_ids).select_related()) - user_profiles_by_id = generic_bulk_cached_fetch( + user_profiles_by_id: Dict[int, UserProfile] = generic_bulk_cached_fetch( cache_key_function=user_profile_by_id_cache_key, query_function=fetch_users_by_id, object_ids=user_ids - ) # type: Dict[int, UserProfile] + ) found_user_ids = user_profiles_by_id.keys() missed_user_ids = [user_id for user_id in user_ids if user_id not in found_user_ids] @@ -380,7 +380,7 @@ def get_cross_realm_dicts() -> List[Dict[str, Any]]: def get_custom_profile_field_values(custom_profile_field_values: List[CustomProfileFieldValue]) -> Dict[int, Dict[str, Any]]: - profiles_by_user_id = defaultdict(dict) # type: Dict[int, Dict[str, Any]] + profiles_by_user_id: Dict[int, Dict[str, Any]] = defaultdict(dict) for profile_field in custom_profile_field_values: user_id = profile_field.user_profile_id if profile_field.field.is_renderable(): diff --git a/zerver/lib/validator.py b/zerver/lib/validator.py index 69a791ab09..aa53f1eb3e 100644 --- a/zerver/lib/validator.py +++ b/zerver/lib/validator.py @@ -210,7 +210,7 @@ def check_dict(required_keys: Iterable[Tuple[str, Validator]]=[], optional_keys: Iterable[Tuple[str, Validator]]=[], value_validator: Optional[Validator]=None, _allow_only_listed_keys: bool=False) -> Validator: - type_structure = {} # type: Dict[str, Any] + type_structure: Dict[str, Any] = {} @set_type_structure(type_structure) def f(var_name: str, val: object) -> Optional[str]: diff --git a/zerver/lib/webhooks/git.py b/zerver/lib/webhooks/git.py index d4f6662485..fe4aeaa735 100644 --- a/zerver/lib/webhooks/git.py +++ b/zerver/lib/webhooks/git.py @@ -77,7 +77,7 @@ def get_push_commits_event_message(user_name: str, compare_url: Optional[str], number_of_commits=len(commits_data), commit_or_commits=COMMIT_OR_COMMITS.format('s' if len(commits_data) > 1 else '')) - committers_items = get_all_committers(commits_data) # type: List[Tuple[str, int]] + committers_items: List[Tuple[str, int]] = get_all_committers(commits_data) if len(committers_items) == 1 and user_name == committers_items[0][0]: return PUSH_COMMITS_MESSAGE_TEMPLATE_WITHOUT_COMMITTERS.format( user_name=user_name, @@ -279,15 +279,16 @@ def get_short_sha(sha: str) -> str: return sha[:7] def get_all_committers(commits_data: List[Dict[str, Any]]) -> List[Tuple[str, int]]: - committers = defaultdict(int) # type: Dict[str, int] + committers: Dict[str, int] = defaultdict(int) for commit in commits_data: committers[commit['name']] += 1 # Sort by commit count, breaking 
ties alphabetically. - committers_items = sorted(list(committers.items()), - key=lambda item: (-item[1], item[0])) # type: List[Tuple[str, int]] - committers_values = [c_i[1] for c_i in committers_items] # type: List[int] + committers_items: List[Tuple[str, int]] = sorted( + list(committers.items()), key=lambda item: (-item[1], item[0]) + ) + committers_values: List[int] = [c_i[1] for c_i in committers_items] if len(committers) > PUSH_COMMITTERS_LIMIT_INFO: others_number_of_commits = sum(committers_values[PUSH_COMMITTERS_LIMIT_INFO:]) diff --git a/zerver/logging_handlers.py b/zerver/logging_handlers.py index 95c92fb8b2..938c9ecbc7 100644 --- a/zerver/logging_handlers.py +++ b/zerver/logging_handlers.py @@ -86,7 +86,7 @@ class AdminNotifyHandler(logging.Handler): logging.Handler.__init__(self) def emit(self, record: logging.LogRecord) -> None: - report = {} # type: Dict[str, Any] + report: Dict[str, Any] = {} # This parameter determines whether Zulip should attempt to # send Zulip messages containing the error report. If there's diff --git a/zerver/management/commands/check_redis.py b/zerver/management/commands/check_redis.py index f23d6108b8..8edebf08e3 100644 --- a/zerver/management/commands/check_redis.py +++ b/zerver/management/commands/check_redis.py @@ -50,7 +50,9 @@ than max_api_calls! (trying to trim) %s %s" % (key, count)) wildcard_list = "ratelimit:*:*:list" wildcard_zset = "ratelimit:*:*:zset" - trim_func = lambda key, max_calls: client.ltrim(key, 0, max_calls - 1) # type: Optional[Callable[[str, int], None]] + trim_func: Optional[ + Callable[[str, int], None] + ] = lambda key, max_calls: client.ltrim(key, 0, max_calls - 1) if not options['trim']: trim_func = None diff --git a/zerver/management/commands/compilemessages.py b/zerver/management/commands/compilemessages.py index e64eb2badf..178491b297 100644 --- a/zerver/management/commands/compilemessages.py +++ b/zerver/management/commands/compilemessages.py @@ -88,7 +88,7 @@ class Command(compilemessages.Command): locale_path = "{}/locale".format(settings.DEPLOY_ROOT) output_path = "{}/language_options.json".format(locale_path) - data = {'languages': []} # type: Dict[str, List[Dict[str, Any]]] + data: Dict[str, List[Dict[str, Any]]] = {'languages': []} try: locales = self.get_locales() @@ -114,7 +114,7 @@ class Command(compilemessages.Command): # Not a locale. continue - info = {} # type: Dict[str, Any] + info: Dict[str, Any] = {} code = to_language(locale) percentage = self.get_translation_percentage(locale_path, locale) try: diff --git a/zerver/management/commands/enqueue_file.py b/zerver/management/commands/enqueue_file.py index 6ed136fa71..24bd1913b5 100644 --- a/zerver/management/commands/enqueue_file.py +++ b/zerver/management/commands/enqueue_file.py @@ -33,7 +33,7 @@ You can use "-" to represent stdin. 
file_name = options['file_name'] if file_name == '-': - f = sys.stdin # type: IO[str] + f: IO[str] = sys.stdin else: f = open(file_name) diff --git a/zerver/management/commands/generate_multiuse_invite_link.py b/zerver/management/commands/generate_multiuse_invite_link.py index 44dbd17b36..3107aae443 100644 --- a/zerver/management/commands/generate_multiuse_invite_link.py +++ b/zerver/management/commands/generate_multiuse_invite_link.py @@ -30,7 +30,7 @@ class Command(ZulipBaseCommand): realm = self.get_realm(options) assert realm is not None # Should be ensured by parser - streams = [] # type: List[Stream] + streams: List[Stream] = [] if options["streams"]: stream_names = {stream.strip() for stream in options["streams"].split(",")} for stream_name in set(stream_names): diff --git a/zerver/management/commands/makemessages.py b/zerver/management/commands/makemessages.py index 35bacc2eba..9e8140edec 100644 --- a/zerver/management/commands/makemessages.py +++ b/zerver/management/commands/makemessages.py @@ -140,7 +140,7 @@ class Command(makemessages.Command): template.constant_re = old_constant_re def extract_strings(self, data: str) -> List[str]: - translation_strings = [] # type: List[str] + translation_strings: List[str] = [] for regex in frontend_compiled_regexes: for match in regex.findall(data): match = match.strip() @@ -158,7 +158,7 @@ class Command(makemessages.Command): return data def get_translation_strings(self) -> List[str]: - translation_strings = [] # type: List[str] + translation_strings: List[str] = [] dirname = self.get_template_dir() for dirpath, dirnames, filenames in os.walk(dirname): diff --git a/zerver/management/commands/send_to_email_mirror.py b/zerver/management/commands/send_to_email_mirror.py index 282ed0d6b0..b2af8b2e90 100644 --- a/zerver/management/commands/send_to_email_mirror.py +++ b/zerver/management/commands/send_to_email_mirror.py @@ -70,7 +70,7 @@ Example: message = self._parse_email_fixture(full_fixture_path) self._prepare_message(message, realm, stream) - data = {} # type: Dict[str, str] + data: Dict[str, str] = {} data['recipient'] = str(message['To']) # Need str() here to avoid mypy throwing an error data['msg_text'] = message.as_string() mirror_email_message(data) diff --git a/zerver/management/commands/soft_deactivate_users.py b/zerver/management/commands/soft_deactivate_users.py index fc4619ee48..f379cd9edd 100644 --- a/zerver/management/commands/soft_deactivate_users.py +++ b/zerver/management/commands/soft_deactivate_users.py @@ -57,7 +57,7 @@ class Command(ZulipBaseCommand): activate = options['activate'] deactivate = options['deactivate'] - filter_kwargs = {} # type: Dict[str, Realm] + filter_kwargs: Dict[str, Realm] = {} if realm is not None: filter_kwargs = dict(realm=realm) diff --git a/zerver/middleware.py b/zerver/middleware.py index 1e09002d44..2ba1aafb64 100644 --- a/zerver/middleware.py +++ b/zerver/middleware.py @@ -332,7 +332,7 @@ class CsrfFailureError(JsonableError): data_fields = ['reason'] def __init__(self, reason: str) -> None: - self.reason = reason # type: str + self.reason: str = reason @staticmethod def msg_format() -> str: diff --git a/zerver/migrations/0064_sync_uploads_filesize_with_db.py b/zerver/migrations/0064_sync_uploads_filesize_with_db.py index a68a950fda..e32747fa10 100644 --- a/zerver/migrations/0064_sync_uploads_filesize_with_db.py +++ b/zerver/migrations/0064_sync_uploads_filesize_with_db.py @@ -38,7 +38,7 @@ def sync_filesizes(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None bucket = 
conn.get_bucket(bucket_name, validate=False) for attachment in attachments.objects.all(): if attachment.size is None: - file_key = bucket.get_key(attachment.path_id) # type: Optional[Key] + file_key: Optional[Key] = bucket.get_key(attachment.path_id) if file_key is None: new_size = 0 else: diff --git a/zerver/migrations/0077_add_file_name_field_to_realm_emoji.py b/zerver/migrations/0077_add_file_name_field_to_realm_emoji.py index a19e593a2f..76d8af1967 100644 --- a/zerver/migrations/0077_add_file_name_field_to_realm_emoji.py +++ b/zerver/migrations/0077_add_file_name_field_to_realm_emoji.py @@ -111,7 +111,7 @@ class S3Uploader(Uploader): def upload_files(self, response: Response, resized_image: bytes, dst_path_id: str) -> None: - headers = None # type: Optional[Dict[str, str]] + headers: Optional[Dict[str, str]] = None content_type = response.headers.get("Content-Type") or guess_type(dst_path_id)[0] if content_type: headers = {'Content-Type': content_type} @@ -129,7 +129,7 @@ def get_uploader() -> Uploader: def upload_emoji_to_storage(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: realm_emoji_model = apps.get_model('zerver', 'RealmEmoji') - uploader = get_uploader() # type: Uploader + uploader: Uploader = get_uploader() for emoji in realm_emoji_model.objects.all(): file_name = uploader.upload_emoji(emoji.realm_id, emoji.img_url, emoji.name) if file_name is None: diff --git a/zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py b/zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py index 08ded23b10..716fc741d7 100644 --- a/zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py +++ b/zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py @@ -9,9 +9,9 @@ class Migration(migrations.Migration): ('zerver', '0126_prereg_remove_users_without_realm'), ] - operations = [ + operations: List[Any] = [ # There was a migration here, which wasn't ready for wide deployment # and was backed out. This placeholder is left behind to avoid # confusing the migration engine on any installs that applied the # migration. (Fortunately no reverse migration is needed.) - ] # type: List[Any] + ] diff --git a/zerver/migrations/0145_reactions_realm_emoji_name_to_id.py b/zerver/migrations/0145_reactions_realm_emoji_name_to_id.py index 76d84cfaec..5bc2f609bf 100644 --- a/zerver/migrations/0145_reactions_realm_emoji_name_to_id.py +++ b/zerver/migrations/0145_reactions_realm_emoji_name_to_id.py @@ -9,7 +9,7 @@ from django.db.migrations.state import StateApps def realm_emoji_name_to_id(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None: Reaction = apps.get_model('zerver', 'Reaction') RealmEmoji = apps.get_model('zerver', 'RealmEmoji') - realm_emoji_by_realm_id = defaultdict(dict) # type: Dict[int, Dict[str, Any]] + realm_emoji_by_realm_id: Dict[int, Dict[str, Any]] = defaultdict(dict) for realm_emoji in RealmEmoji.objects.all(): realm_emoji_by_realm_id[realm_emoji.realm_id][realm_emoji.name] = { 'id': str(realm_emoji.id), diff --git a/zerver/migrations/0209_user_profile_no_empty_password.py b/zerver/migrations/0209_user_profile_no_empty_password.py index 7b8b791fb2..4406fe2873 100644 --- a/zerver/migrations/0209_user_profile_no_empty_password.py +++ b/zerver/migrations/0209_user_profile_no_empty_password.py @@ -66,8 +66,8 @@ def ensure_no_empty_passwords(apps: StateApps, schema_editor: DatabaseSchemaEdit # searching for the relevant events in that log. 
event_type_class = RealmAuditLog._meta.get_field('event_type').get_internal_type() if event_type_class == 'CharField': - USER_PASSWORD_CHANGED = 'user_password_changed' # type: Union[int, str] - USER_API_KEY_CHANGED = 'user_api_key_changed' # type: Union[int, str] + USER_PASSWORD_CHANGED: Union[int, str] = 'user_password_changed' + USER_API_KEY_CHANGED: Union[int, str] = 'user_api_key_changed' else: USER_PASSWORD_CHANGED = 122 USER_API_KEY_CHANGED = 127 @@ -84,8 +84,8 @@ def ensure_no_empty_passwords(apps: StateApps, schema_editor: DatabaseSchemaEdit # password_change_user_ids_no_reset_needed. password_change_user_ids = set(RealmAuditLog.objects.filter( event_type=USER_PASSWORD_CHANGED).values_list("modified_user_id", flat=True)) - password_change_user_ids_api_key_reset_needed = set() # type: Set[int] - password_change_user_ids_no_reset_needed = set() # type: Set[int] + password_change_user_ids_api_key_reset_needed: Set[int] = set() + password_change_user_ids_no_reset_needed: Set[int] = set() for user_id in password_change_user_ids: # Here, we check the timing for users who have changed diff --git a/zerver/migrations/0254_merge_0209_0253.py b/zerver/migrations/0254_merge_0209_0253.py index 1be48a946e..23a583e21e 100644 --- a/zerver/migrations/0254_merge_0209_0253.py +++ b/zerver/migrations/0254_merge_0209_0253.py @@ -12,5 +12,5 @@ class Migration(migrations.Migration): ('zerver', '0209_user_profile_no_empty_password'), ] - operations = [ - ] # type: List[Any] + operations: List[Any] = [ + ] diff --git a/zerver/models.py b/zerver/models.py index 99a15c1c28..76bab65ace 100644 --- a/zerver/models.py +++ b/zerver/models.py @@ -46,7 +46,7 @@ import datetime MAX_TOPIC_NAME_LENGTH = 60 MAX_MESSAGE_LENGTH = 10000 -MAX_LANGUAGE_ID_LENGTH = 50 # type: int +MAX_LANGUAGE_ID_LENGTH: int = 50 STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[str], AbstractSet[str]) @@ -76,7 +76,7 @@ def query_for_ids(query: QuerySet, user_ids: List[int], field: str) -> QuerySet: # could be replaced with smarter bulk-fetching logic that deduplicates # queries for the same recipient; this is just a convenient way to # write that code. -per_request_display_recipient_cache = {} # type: Dict[int, DisplayRecipientT] +per_request_display_recipient_cache: Dict[int, DisplayRecipientT] = {} def get_display_recipient_by_id(recipient_id: int, recipient_type: int, recipient_type_id: Optional[int]) -> DisplayRecipientT: """ @@ -110,7 +110,7 @@ def get_active_realm_emoji_cache_key(realm: 'Realm') -> str: # these values cannot change in a running production system, but do # regularly change within unit tests; we address the latter by calling # clear_supported_auth_backends_cache in our standard tearDown code. -supported_backends = None # type: Optional[Set[type]] +supported_backends: Optional[Set[type]] = None def supported_auth_backends() -> Set[type]: global supported_backends # Caching temporarily disabled for debugging @@ -133,44 +133,45 @@ class Realm(models.Model): SUBDOMAIN_FOR_ROOT_DOMAIN = '' # User-visible display name and description used on e.g. the organization homepage - name = models.CharField(max_length=MAX_REALM_NAME_LENGTH, null=True) # type: Optional[str] - description = models.TextField(default="") # type: str + name: Optional[str] = models.CharField(max_length=MAX_REALM_NAME_LENGTH, null=True) + description: str = models.TextField(default="") # A short, identifier-like name for the organization. Used in subdomains; # e.g. on a server at example.com, an org with string_id `foo` is reached # at `foo.example.com`. 
- string_id = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True) # type: str + string_id: str = models.CharField(max_length=MAX_REALM_SUBDOMAIN_LENGTH, unique=True) - date_created = models.DateTimeField(default=timezone_now) # type: datetime.datetime - deactivated = models.BooleanField(default=False) # type: bool + date_created: datetime.datetime = models.DateTimeField(default=timezone_now) + deactivated: bool = models.BooleanField(default=False) # See RealmDomain for the domains that apply for a given organization. - emails_restricted_to_domains = models.BooleanField(default=False) # type: bool + emails_restricted_to_domains: bool = models.BooleanField(default=False) - invite_required = models.BooleanField(default=True) # type: bool - invite_by_admins_only = models.BooleanField(default=False) # type: bool - _max_invites = models.IntegerField(null=True, db_column='max_invites') # type: Optional[int] - disallow_disposable_email_addresses = models.BooleanField(default=True) # type: bool - authentication_methods = BitField(flags=AUTHENTICATION_FLAGS, - default=2**31 - 1) # type: BitHandler + invite_required: bool = models.BooleanField(default=True) + invite_by_admins_only: bool = models.BooleanField(default=False) + _max_invites: Optional[int] = models.IntegerField(null=True, db_column='max_invites') + disallow_disposable_email_addresses: bool = models.BooleanField(default=True) + authentication_methods: BitHandler = BitField( + flags=AUTHENTICATION_FLAGS, default=2**31 - 1 + ) # Whether the organization has enabled inline image and URL previews. - inline_image_preview = models.BooleanField(default=True) # type: bool - inline_url_embed_preview = models.BooleanField(default=False) # type: bool + inline_image_preview: bool = models.BooleanField(default=True) + inline_url_embed_preview: bool = models.BooleanField(default=False) # Whether digest emails are enabled for the organization. - digest_emails_enabled = models.BooleanField(default=False) # type: bool + digest_emails_enabled: bool = models.BooleanField(default=False) # Day of the week on which the digest is sent (default: Tuesday). - digest_weekday = models.SmallIntegerField(default=1) # type: int + digest_weekday: int = models.SmallIntegerField(default=1) - send_welcome_emails = models.BooleanField(default=True) # type: bool - message_content_allowed_in_email_notifications = models.BooleanField(default=True) # type: bool + send_welcome_emails: bool = models.BooleanField(default=True) + message_content_allowed_in_email_notifications: bool = models.BooleanField(default=True) - mandatory_topics = models.BooleanField(default=False) # type: bool - add_emoji_by_admins_only = models.BooleanField(default=False) # type: bool - name_changes_disabled = models.BooleanField(default=False) # type: bool - email_changes_disabled = models.BooleanField(default=False) # type: bool - avatar_changes_disabled = models.BooleanField(default=False) # type: bool + mandatory_topics: bool = models.BooleanField(default=False) + add_emoji_by_admins_only: bool = models.BooleanField(default=False) + name_changes_disabled: bool = models.BooleanField(default=False) + email_changes_disabled: bool = models.BooleanField(default=False) + avatar_changes_disabled: bool = models.BooleanField(default=False) POLICY_MEMBERS_ONLY = 1 POLICY_ADMINS_ONLY = 2 @@ -183,17 +184,17 @@ class Realm(models.Model): ] # Who in the organization is allowed to create streams. 
- create_stream_policy = models.PositiveSmallIntegerField( - default=POLICY_MEMBERS_ONLY) # type: int + create_stream_policy: int = models.PositiveSmallIntegerField( + default=POLICY_MEMBERS_ONLY) # Who in the organization is allowed to invite other users to streams. - invite_to_stream_policy = models.PositiveSmallIntegerField( - default=POLICY_MEMBERS_ONLY) # type: int + invite_to_stream_policy: int = models.PositiveSmallIntegerField( + default=POLICY_MEMBERS_ONLY) USER_GROUP_EDIT_POLICY_MEMBERS = 1 USER_GROUP_EDIT_POLICY_ADMINS = 2 - user_group_edit_policy = models.PositiveSmallIntegerField( - default=USER_GROUP_EDIT_POLICY_MEMBERS) # type: int + user_group_edit_policy: int = models.PositiveSmallIntegerField( + default=USER_GROUP_EDIT_POLICY_MEMBERS) USER_GROUP_EDIT_POLICY_TYPES = [ USER_GROUP_EDIT_POLICY_MEMBERS, USER_GROUP_EDIT_POLICY_ADMINS, @@ -201,8 +202,8 @@ class Realm(models.Model): PRIVATE_MESSAGE_POLICY_UNLIMITED = 1 PRIVATE_MESSAGE_POLICY_DISABLED = 2 - private_message_policy = models.PositiveSmallIntegerField( - default=PRIVATE_MESSAGE_POLICY_UNLIMITED) # type: int + private_message_policy: int = models.PositiveSmallIntegerField( + default=PRIVATE_MESSAGE_POLICY_UNLIMITED) PRIVATE_MESSAGE_POLICY_TYPES = [ PRIVATE_MESSAGE_POLICY_UNLIMITED, PRIVATE_MESSAGE_POLICY_DISABLED, @@ -215,7 +216,9 @@ class Realm(models.Model): EMAIL_ADDRESS_VISIBILITY_MEMBERS = 2 EMAIL_ADDRESS_VISIBILITY_ADMINS = 3 EMAIL_ADDRESS_VISIBILITY_NOBODY = 4 - email_address_visibility = models.PositiveSmallIntegerField(default=EMAIL_ADDRESS_VISIBILITY_EVERYONE) # type: int + email_address_visibility: int = models.PositiveSmallIntegerField( + default=EMAIL_ADDRESS_VISIBILITY_EVERYONE + ) EMAIL_ADDRESS_VISIBILITY_TYPES = [ EMAIL_ADDRESS_VISIBILITY_EVERYONE, # The MEMBERS level is not yet implemented on the backend. @@ -226,46 +229,54 @@ class Realm(models.Model): # Threshold in days for new users to create streams, and potentially take # some other actions. 
- waiting_period_threshold = models.PositiveIntegerField(default=0) # type: int + waiting_period_threshold: int = models.PositiveIntegerField(default=0) - allow_message_deleting = models.BooleanField(default=False) # type: bool + allow_message_deleting: bool = models.BooleanField(default=False) DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS = 600 # if changed, also change in admin.js, setting_org.js - message_content_delete_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS) # type: int + message_content_delete_limit_seconds: int = models.IntegerField( + default=DEFAULT_MESSAGE_CONTENT_DELETE_LIMIT_SECONDS + ) - allow_message_editing = models.BooleanField(default=True) # type: bool + allow_message_editing: bool = models.BooleanField(default=True) DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600 # if changed, also change in admin.js, setting_org.js - message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS) # type: int + message_content_edit_limit_seconds: int = models.IntegerField( + default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS + ) # Whether users have access to message edit history - allow_edit_history = models.BooleanField(default=True) # type: bool + allow_edit_history: bool = models.BooleanField(default=True) DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS = 86400 - allow_community_topic_editing = models.BooleanField(default=True) # type: bool + allow_community_topic_editing: bool = models.BooleanField(default=True) # Defaults for new users - default_twenty_four_hour_time = models.BooleanField(default=False) # type: bool - default_language = models.CharField(default='en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: str + default_twenty_four_hour_time: bool = models.BooleanField(default=False) + default_language: str = models.CharField(default='en', max_length=MAX_LANGUAGE_ID_LENGTH) DEFAULT_NOTIFICATION_STREAM_NAME = 'general' INITIAL_PRIVATE_STREAM_NAME = 'core team' STREAM_EVENTS_NOTIFICATION_TOPIC = _('stream events') - notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE) # type: Optional[Stream] - signup_notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True, on_delete=CASCADE) # type: Optional[Stream] + notifications_stream: Optional["Stream"] = models.ForeignKey( + "Stream", related_name="+", null=True, blank=True, on_delete=CASCADE + ) + signup_notifications_stream: Optional["Stream"] = models.ForeignKey( + "Stream", related_name="+", null=True, blank=True, on_delete=CASCADE + ) # For old messages being automatically deleted - message_retention_days = models.IntegerField(null=True) # type: Optional[int] + message_retention_days: Optional[int] = models.IntegerField(null=True) # When non-null, all but the latest this many messages in the organization # are inaccessible to users (but not deleted). - message_visibility_limit = models.IntegerField(null=True) # type: Optional[int] + message_visibility_limit: Optional[int] = models.IntegerField(null=True) # Messages older than this message ID in the organization are inaccessible. 
- first_visible_message_id = models.IntegerField(default=0) # type: int + first_visible_message_id: int = models.IntegerField(default=0) # Valid org_types are {CORPORATE, COMMUNITY} CORPORATE = 1 COMMUNITY = 2 - org_type = models.PositiveSmallIntegerField(default=CORPORATE) # type: int + org_type: int = models.PositiveSmallIntegerField(default=CORPORATE) UPGRADE_TEXT_STANDARD = _("Available on Zulip Standard. Upgrade to access.") # plan_type controls various features around resource/feature @@ -275,14 +286,14 @@ class Realm(models.Model): LIMITED = 2 STANDARD = 3 STANDARD_FREE = 4 - plan_type = models.PositiveSmallIntegerField(default=SELF_HOSTED) # type: int + plan_type: int = models.PositiveSmallIntegerField(default=SELF_HOSTED) # This value is also being used in static/js/settings_bots.bot_creation_policy_values. # On updating it here, update it there as well. BOT_CREATION_EVERYONE = 1 BOT_CREATION_LIMIT_GENERIC_BOTS = 2 BOT_CREATION_ADMINS_ONLY = 3 - bot_creation_policy = models.PositiveSmallIntegerField(default=BOT_CREATION_EVERYONE) # type: int + bot_creation_policy: int = models.PositiveSmallIntegerField(default=BOT_CREATION_EVERYONE) BOT_CREATION_POLICY_TYPES = [ BOT_CREATION_EVERYONE, BOT_CREATION_LIMIT_GENERIC_BOTS, @@ -292,7 +303,7 @@ class Realm(models.Model): # See upload_quota_bytes; don't interpret upload_quota_gb directly. UPLOAD_QUOTA_LIMITED = 5 UPLOAD_QUOTA_STANDARD = 50 - upload_quota_gb = models.IntegerField(null=True) # type: Optional[int] + upload_quota_gb: Optional[int] = models.IntegerField(null=True) VIDEO_CHAT_PROVIDERS = { 'disabled': { @@ -318,10 +329,10 @@ class Realm(models.Model): zoom_api_key = models.TextField(default="") zoom_api_secret = models.TextField(default="") - default_code_block_language = models.TextField(null=True, default=None) # type: Optional[str] + default_code_block_language: Optional[str] = models.TextField(null=True, default=None) # Define the types of the various automatically managed properties - property_types = dict( + property_types: Dict[str, Union[type, Tuple[type, ...]]] = dict( add_emoji_by_admins_only=bool, allow_edit_history=bool, allow_message_deleting=bool, @@ -357,7 +368,7 @@ class Realm(models.Model): private_message_policy=int, user_group_edit_policy=int, default_code_block_language=(str, type(None)), - ) # type: Dict[str, Union[type, Tuple[type, ...]]] + ) DIGEST_WEEKDAY_VALUES = [0, 1, 2, 3, 4, 5, 6] @@ -368,9 +379,10 @@ class Realm(models.Model): (ICON_FROM_GRAVATAR, 'Hosted by Gravatar'), (ICON_UPLOADED, 'Uploaded by administrator'), ) - icon_source = models.CharField(default=ICON_FROM_GRAVATAR, choices=ICON_SOURCES, - max_length=1) # type: str - icon_version = models.PositiveSmallIntegerField(default=1) # type: int + icon_source: str = models.CharField( + default=ICON_FROM_GRAVATAR, choices=ICON_SOURCES, max_length=1 + ) + icon_version: int = models.PositiveSmallIntegerField(default=1) # Logo is the horizontal logo we show in top-left of webapp navbar UI. 
LOGO_DEFAULT = 'D' @@ -379,13 +391,15 @@ class Realm(models.Model): (LOGO_DEFAULT, 'Default to Zulip'), (LOGO_UPLOADED, 'Uploaded by administrator'), ) - logo_source = models.CharField(default=LOGO_DEFAULT, choices=LOGO_SOURCES, - max_length=1) # type: str - logo_version = models.PositiveSmallIntegerField(default=1) # type: int + logo_source: str = models.CharField( + default=LOGO_DEFAULT, choices=LOGO_SOURCES, max_length=1 + ) + logo_version: int = models.PositiveSmallIntegerField(default=1) - night_logo_source = models.CharField(default=LOGO_DEFAULT, choices=LOGO_SOURCES, - max_length=1) # type: str - night_logo_version = models.PositiveSmallIntegerField(default=1) # type: int + night_logo_source: str = models.CharField( + default=LOGO_DEFAULT, choices=LOGO_SOURCES, max_length=1 + ) + night_logo_version: int = models.PositiveSmallIntegerField(default=1) def authentication_methods_dict(self) -> Dict[str, bool]: """Returns the a mapping from authentication flags to their status, @@ -396,7 +410,7 @@ class Realm(models.Model): # dependency. from zproject.backends import AUTH_BACKEND_NAME_MAP - ret = {} # type: Dict[str, bool] + ret: Dict[str, bool] = {} supported_backends = [backend.__class__ for backend in supported_auth_backends()] for k, v in self.authentication_methods.iteritems(): backend = AUTH_BACKEND_NAME_MAP[k] @@ -536,9 +550,9 @@ def avatar_changes_disabled(realm: Realm) -> bool: class RealmDomain(models.Model): """For an organization with emails_restricted_to_domains enabled, the list of allowed domains""" - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) # should always be stored lowercase - domain = models.CharField(max_length=80, db_index=True) # type: str + domain: str = models.CharField(max_length=80, db_index=True) allow_subdomains = models.BooleanField(default=False) class Meta: @@ -570,19 +584,21 @@ def get_realm_domains(realm: Realm) -> List[Dict[str, str]]: return list(realm.realmdomain_set.values('domain', 'allow_subdomains')) class RealmEmoji(models.Model): - author = models.ForeignKey('UserProfile', blank=True, null=True, on_delete=CASCADE) # type: Optional[UserProfile] - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - name = models.TextField(validators=[ + author: Optional["UserProfile"] = models.ForeignKey( + "UserProfile", blank=True, null=True, on_delete=CASCADE + ) + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + name: str = models.TextField(validators=[ MinLengthValidator(1), # The second part of the regex (negative lookbehind) disallows names # ending with one of the punctuation characters. RegexValidator(regex=r'^[0-9a-z.\-_]+(? 
str: return '%s:all_realm_filters:%s' % (cache.KEY_PREFIX, realm_id,) # We have a per-process cache to avoid doing 1000 remote cache queries during page load -per_request_realm_filters_cache = {} # type: Dict[int, List[Tuple[str, str, int]]] +per_request_realm_filters_cache: Dict[int, List[Tuple[str, str, int]]] = {} def realm_in_local_realm_filters_cache(realm_id: int) -> bool: return realm_id in per_request_realm_filters_cache @@ -695,7 +711,7 @@ def realm_filters_for_realm_remote_cache(realm_id: int) -> List[Tuple[str, str, return filters def all_realm_filters() -> Dict[int, List[Tuple[str, str, int]]]: - filters = defaultdict(list) # type: DefaultDict[int, List[Tuple[str, str, int]]] + filters: DefaultDict[int, List[Tuple[str, str, int]]] = defaultdict(list) for realm_filter in RealmFilter.objects.all(): filters[realm_filter.realm_id].append((realm_filter.pattern, realm_filter.url_format_string, @@ -728,8 +744,8 @@ def flush_per_request_caches() -> None: # (used by the Message table) to the type-specific unique id (the # stream id, user_profile id, or huddle id). class Recipient(models.Model): - type_id = models.IntegerField(db_index=True) # type: int - type = models.PositiveSmallIntegerField(db_index=True) # type: int + type_id: int = models.IntegerField(db_index=True) + type: int = models.PositiveSmallIntegerField(db_index=True) # Valid types are {personal, stream, huddle} PERSONAL = 1 STREAM = 2 @@ -800,10 +816,10 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): # email address, not their ID; it should be used in all API use cases. # # Both fields are unique within a realm (in a case-insensitive fashion). - delivery_email = models.EmailField(blank=False, db_index=True) # type: str - email = models.EmailField(blank=False, db_index=True) # type: str + delivery_email: str = models.EmailField(blank=False, db_index=True) + email: str = models.EmailField(blank=False, db_index=True) - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) # Foreign key to the Recipient object for PERSONAL type messages to this user. recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL) @@ -815,33 +831,33 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): # graduation year, etc. The short_name attribute is currently not # used anywhere, but the intent is that it would be used as the # shorter familiar name for addressing the user in the UI. - full_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: str - short_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: str + full_name: str = models.CharField(max_length=MAX_NAME_LENGTH) + short_name: str = models.CharField(max_length=MAX_NAME_LENGTH) - date_joined = models.DateTimeField(default=timezone_now) # type: datetime.datetime - tos_version = models.CharField(null=True, max_length=10) # type: Optional[str] - api_key = models.CharField(max_length=API_KEY_LENGTH) # type: str + date_joined: datetime.datetime = models.DateTimeField(default=timezone_now) + tos_version: Optional[str] = models.CharField(null=True, max_length=10) + api_key: str = models.CharField(max_length=API_KEY_LENGTH) # pointer points to Message.id, NOT UserMessage.id. 
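`Recipient` is the indirection layer referenced throughout these models: `type` says what kind of destination a message has, and `type_id` is only meaningful relative to it (a user id for PERSONAL, a stream id for STREAM, a huddle id for HUDDLE). A small illustrative dispatch over that pair; the function is hypothetical:

PERSONAL, STREAM, HUDDLE = 1, 2, 3

def describe_recipient(recipient_type: int, type_id: int) -> str:
    # type_id points into a different table depending on recipient_type.
    if recipient_type == PERSONAL:
        return f"private message to user {type_id}"
    if recipient_type == STREAM:
        return f"message to stream {type_id}"
    if recipient_type == HUDDLE:
        return f"group private message (huddle {type_id})"
    raise ValueError(f"unknown recipient type {recipient_type}")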
- pointer = models.IntegerField() # type: int + pointer: int = models.IntegerField() - last_pointer_updater = models.CharField(max_length=64) # type: str + last_pointer_updater: str = models.CharField(max_length=64) # Whether the user has access to server-level administrator pages, like /activity - is_staff = models.BooleanField(default=False) # type: bool + is_staff: bool = models.BooleanField(default=False) # For a normal user, this is True unless the user or an admin has # deactivated their account. The name comes from Django; this field # isn't related to presence or to whether the user has recently used Zulip. # # See also `long_term_idle`. - is_active = models.BooleanField(default=True, db_index=True) # type: bool + is_active: bool = models.BooleanField(default=True, db_index=True) - is_billing_admin = models.BooleanField(default=False, db_index=True) # type: bool + is_billing_admin: bool = models.BooleanField(default=False, db_index=True) - is_bot = models.BooleanField(default=False, db_index=True) # type: bool - bot_type = models.PositiveSmallIntegerField(null=True, db_index=True) # type: Optional[int] - bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL) # type: Optional[UserProfile] + is_bot: bool = models.BooleanField(default=False, db_index=True) + bot_type: Optional[int] = models.PositiveSmallIntegerField(null=True, db_index=True) + bot_owner: Optional["UserProfile"] = models.ForeignKey('self', null=True, on_delete=models.SET_NULL) # Each role has a superset of the permissions of the next higher # numbered role. When adding new roles, leave enough space for @@ -853,89 +869,93 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): # ROLE_MODERATOR = 300 ROLE_MEMBER = 400 ROLE_GUEST = 600 - role = models.PositiveSmallIntegerField(default=ROLE_MEMBER, db_index=True) # type: int + role: int = models.PositiveSmallIntegerField(default=ROLE_MEMBER, db_index=True) # Whether the user has been "soft-deactivated" due to weeks of inactivity. # For these users we avoid doing UserMessage table work, as an optimization # for large Zulip organizations with lots of single-visit users. - long_term_idle = models.BooleanField(default=False, db_index=True) # type: bool + long_term_idle: bool = models.BooleanField(default=False, db_index=True) # When we last added basic UserMessage rows for a long_term_idle user. - last_active_message_id = models.IntegerField(null=True) # type: Optional[int] + last_active_message_id: Optional[int] = models.IntegerField(null=True) # Mirror dummies are fake (!is_active) users used to provide # message senders in our cross-protocol Zephyr<->Zulip content # mirroring integration, so that we can display mirrored content # like native Zulip messages (with a name + avatar, etc.). - is_mirror_dummy = models.BooleanField(default=False) # type: bool + is_mirror_dummy: bool = models.BooleanField(default=False) # API super users are allowed to forge messages as sent by another # user and to send to private streams; also used for Zephyr/Jabber mirroring. - is_api_super_user = models.BooleanField(default=False, db_index=True) # type: bool + is_api_super_user: bool = models.BooleanField(default=False, db_index=True) ### Notifications settings. ### # Stream notifications. 
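The role constants above are ordered so that a numerically lower role always has a superset of the permissions of a higher one, which lets permission checks be a single comparison instead of per-role special cases. A sketch under that assumption; the helper name is invented, and the administrator value of 200 is assumed rather than shown in this hunk:

ROLE_REALM_ADMINISTRATOR = 200  # assumed value, for illustration only
ROLE_MEMBER = 400
ROLE_GUEST = 600

def has_at_least_role(user_role: int, required_role: int) -> bool:
    # Lower numbers are more privileged, so "at least a member" means <= 400.
    return user_role <= required_role

assert has_at_least_role(ROLE_REALM_ADMINISTRATOR, ROLE_MEMBER)
assert not has_at_least_role(ROLE_GUEST, ROLE_MEMBER)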
- enable_stream_desktop_notifications = models.BooleanField(default=False) # type: bool - enable_stream_email_notifications = models.BooleanField(default=False) # type: bool - enable_stream_push_notifications = models.BooleanField(default=False) # type: bool - enable_stream_audible_notifications = models.BooleanField(default=False) # type: bool - notification_sound = models.CharField(max_length=20, default='zulip') # type: str - wildcard_mentions_notify = models.BooleanField(default=True) # type: bool + enable_stream_desktop_notifications: bool = models.BooleanField(default=False) + enable_stream_email_notifications: bool = models.BooleanField(default=False) + enable_stream_push_notifications: bool = models.BooleanField(default=False) + enable_stream_audible_notifications: bool = models.BooleanField(default=False) + notification_sound: str = models.CharField(max_length=20, default='zulip') + wildcard_mentions_notify: bool = models.BooleanField(default=True) # PM + @-mention notifications. - enable_desktop_notifications = models.BooleanField(default=True) # type: bool - pm_content_in_desktop_notifications = models.BooleanField(default=True) # type: bool - enable_sounds = models.BooleanField(default=True) # type: bool - enable_offline_email_notifications = models.BooleanField(default=True) # type: bool - message_content_in_email_notifications = models.BooleanField(default=True) # type: bool - enable_offline_push_notifications = models.BooleanField(default=True) # type: bool - enable_online_push_notifications = models.BooleanField(default=True) # type: bool + enable_desktop_notifications: bool = models.BooleanField(default=True) + pm_content_in_desktop_notifications: bool = models.BooleanField(default=True) + enable_sounds: bool = models.BooleanField(default=True) + enable_offline_email_notifications: bool = models.BooleanField(default=True) + message_content_in_email_notifications: bool = models.BooleanField(default=True) + enable_offline_push_notifications: bool = models.BooleanField(default=True) + enable_online_push_notifications: bool = models.BooleanField(default=True) DESKTOP_ICON_COUNT_DISPLAY_MESSAGES = 1 DESKTOP_ICON_COUNT_DISPLAY_NOTIFIABLE = 2 DESKTOP_ICON_COUNT_DISPLAY_NONE = 3 - desktop_icon_count_display = models.PositiveSmallIntegerField( - default=DESKTOP_ICON_COUNT_DISPLAY_MESSAGES) # type: int + desktop_icon_count_display: int = models.PositiveSmallIntegerField( + default=DESKTOP_ICON_COUNT_DISPLAY_MESSAGES) - enable_digest_emails = models.BooleanField(default=True) # type: bool - enable_login_emails = models.BooleanField(default=True) # type: bool - realm_name_in_notifications = models.BooleanField(default=False) # type: bool + enable_digest_emails: bool = models.BooleanField(default=True) + enable_login_emails: bool = models.BooleanField(default=True) + realm_name_in_notifications: bool = models.BooleanField(default=False) # Words that trigger a mention for this user, formatted as a json-serialized list of strings - alert_words = models.TextField(default='[]') # type: str + alert_words: str = models.TextField(default='[]') # Used for rate-limiting certain automated messages generated by bots - last_reminder = models.DateTimeField(default=None, null=True) # type: Optional[datetime.datetime] + last_reminder: Optional[datetime.datetime] = models.DateTimeField(default=None, null=True) # Minutes to wait before warning a bot owner that their bot sent a message # to a nonexistent stream BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1 # API rate limits, formatted as a comma-separated list 
of range:max pairs - rate_limits = models.CharField(default="", max_length=100) # type: str + rate_limits: str = models.CharField(default="", max_length=100) # Hours to wait before sending another email to a user EMAIL_REMINDER_WAITPERIOD = 24 # Default streams for some deprecated/legacy classes of bot users. - default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE) # type: Optional[Stream] - default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+', on_delete=CASCADE) # type: Optional[Stream] - default_all_public_streams = models.BooleanField(default=False) # type: bool + default_sending_stream: Optional["Stream"] = models.ForeignKey( + "zerver.Stream", null=True, related_name="+", on_delete=CASCADE + ) + default_events_register_stream: Optional["Stream"] = models.ForeignKey( + "zerver.Stream", null=True, related_name="+", on_delete=CASCADE + ) + default_all_public_streams: bool = models.BooleanField(default=False) # UI vars - enter_sends = models.NullBooleanField(default=False) # type: Optional[bool] - left_side_userlist = models.BooleanField(default=False) # type: bool + enter_sends: Optional[bool] = models.NullBooleanField(default=False) + left_side_userlist: bool = models.BooleanField(default=False) # display settings - default_language = models.CharField(default='en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: str - dense_mode = models.BooleanField(default=True) # type: bool - fluid_layout_width = models.BooleanField(default=False) # type: bool - high_contrast_mode = models.BooleanField(default=False) # type: bool - night_mode = models.BooleanField(default=False) # type: bool - translate_emoticons = models.BooleanField(default=False) # type: bool - twenty_four_hour_time = models.BooleanField(default=False) # type: bool - starred_message_counts = models.BooleanField(default=False) # type: bool + default_language: str = models.CharField(default='en', max_length=MAX_LANGUAGE_ID_LENGTH) + dense_mode: bool = models.BooleanField(default=True) + fluid_layout_width: bool = models.BooleanField(default=False) + high_contrast_mode: bool = models.BooleanField(default=False) + night_mode: bool = models.BooleanField(default=False) + translate_emoticons: bool = models.BooleanField(default=False) + twenty_four_hour_time: bool = models.BooleanField(default=False) + starred_message_counts: bool = models.BooleanField(default=False) # UI setting controlling Zulip's behavior of demoting in the sort # order and graying out streams with no recent traffic. The @@ -959,7 +979,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): # In Django, the convention is to use an empty string instead of NULL/None # for text-based fields. For more information, see # https://docs.djangoproject.com/en/1.10/ref/models/fields/#django.db.models.Field.null. 
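`rate_limits` above stores per-user API overrides as a flat string of comma-separated `range:max` pairs, presumably a time window paired with the request cap allowed inside it. A hedged parsing sketch; the helper and the example string are illustrative only:

from typing import List, Tuple

def parse_rate_limits(rate_limits: str) -> List[Tuple[int, int]]:
    # The empty string (the model default) means no overrides.
    if not rate_limits:
        return []
    pairs: List[Tuple[int, int]] = []
    for entry in rate_limits.split(","):
        window, max_calls = entry.split(":")
        pairs.append((int(window), int(max_calls)))
    return pairs

assert parse_rate_limits("60:120,3600:2000") == [(60, 120), (3600, 2000)]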
- timezone = models.CharField(max_length=40, default='') # type: str + timezone: str = models.CharField(max_length=40, default='') # Emojisets GOOGLE_EMOJISET = 'google' @@ -970,7 +990,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): (GOOGLE_BLOB_EMOJISET, "Google classic"), (TWITTER_EMOJISET, "Twitter"), (TEXT_EMOJISET, "Plain text")) - emojiset = models.CharField(default=GOOGLE_BLOB_EMOJISET, choices=EMOJISET_CHOICES, max_length=20) # type: str + emojiset: str = models.CharField(default=GOOGLE_BLOB_EMOJISET, choices=EMOJISET_CHOICES, max_length=20) AVATAR_FROM_GRAVATAR = 'G' AVATAR_FROM_USER = 'U' @@ -978,9 +998,9 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'), (AVATAR_FROM_USER, 'Uploaded by user'), ) - avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1) # type: str - avatar_version = models.PositiveSmallIntegerField(default=1) # type: int - avatar_hash = models.CharField(null=True, max_length=64) # type: Optional[str] + avatar_source: str = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1) + avatar_version: int = models.PositiveSmallIntegerField(default=1) + avatar_hash: Optional[str] = models.CharField(null=True, max_length=64) TUTORIAL_WAITING = 'W' TUTORIAL_STARTED = 'S' @@ -988,15 +1008,15 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"), (TUTORIAL_STARTED, "Started"), (TUTORIAL_FINISHED, "Finished")) - tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1) # type: str + tutorial_status: str = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1) # Contains serialized JSON of the form: # [("step 1", true), ("step 2", false)] # where the second element of each tuple is if the step has been # completed. - onboarding_steps = models.TextField(default='[]') # type: str + onboarding_steps: str = models.TextField(default='[]') - objects = UserManager() # type: UserManager + objects: UserManager = UserManager() # Define the types of the various automatically managed properties property_types = dict( @@ -1041,7 +1061,7 @@ class UserProfile(AbstractBaseUser, PermissionsMixin): def profile_data(self) -> ProfileData: values = CustomProfileFieldValue.objects.filter(user_profile=self) user_data = {v.field_id: {"value": v.value, "rendered_value": v.rendered_value} for v in values} - data = [] # type: ProfileData + data: ProfileData = [] for field in custom_profile_fields_for_realm(self.realm_id): field_values = user_data.get(field.id, None) if field_values: @@ -1198,7 +1218,7 @@ class UserGroup(models.Model): name = models.CharField(max_length=100) members = models.ManyToManyField(UserProfile, through='UserGroupMembership') realm = models.ForeignKey(Realm, on_delete=CASCADE) - description = models.TextField(default='') # type: str + description: str = models.TextField(default='') class Meta: unique_together = (('realm', 'name'),) @@ -1246,15 +1266,15 @@ class PreregistrationUser(models.Model): # from the authentication step and pass it to the registration # form. 
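TextFields such as `alert_words` and `onboarding_steps` hold serialized JSON rather than relational data, so reads go through `json.loads`. A minimal round-trip sketch for the `[("step 1", true), ("step 2", false)]` format documented above; the helper name is invented:

import json
from typing import List, Tuple

def load_onboarding_steps(raw: str) -> List[Tuple[str, bool]]:
    # JSON has no tuple type, so each ["step 1", true] pair deserializes
    # as a list and is converted back to a (name, completed) tuple here.
    return [(name, completed) for name, completed in json.loads(raw)]

raw = json.dumps([("step 1", True), ("step 2", False)])
assert load_onboarding_steps(raw) == [("step 1", True), ("step 2", False)]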
- email = models.EmailField() # type: str + email: str = models.EmailField() # If the pre-registration process provides a suggested full name for this user, # store it here to use it to prepopulate the Full Name field in the registration form: - full_name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH, null=True) # type: Optional[str] + full_name: Optional[str] = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH, null=True) full_name_validated = models.BooleanField(default=False) - referred_by = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE) # type: Optional[UserProfile] - streams = models.ManyToManyField('Stream') # type: Manager - invited_at = models.DateTimeField(auto_now=True) # type: datetime.datetime + referred_by: Optional[UserProfile] = models.ForeignKey(UserProfile, null=True, on_delete=CASCADE) + streams: Manager = models.ManyToManyField('Stream') + invited_at: datetime.datetime = models.DateTimeField(auto_now=True) realm_creation = models.BooleanField(default=False) # Indicates whether the user needs a password. Users who were # created via SSO style auth (e.g. GitHub/Google) generally do not. @@ -1262,11 +1282,11 @@ class PreregistrationUser(models.Model): # status: whether an object has been confirmed. # if confirmed, set to confirmation.settings.STATUS_ACTIVE - status = models.IntegerField(default=0) # type: int + status: int = models.IntegerField(default=0) # The realm should only ever be None for PreregistrationUser # objects created as part of realm creation. - realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm] + realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # Changes to INVITED_AS should also be reflected in # settings_invites.invited_as_values in @@ -1276,25 +1296,25 @@ class PreregistrationUser(models.Model): REALM_ADMIN = 2, GUEST_USER = 3, ) - invited_as = models.PositiveSmallIntegerField(default=INVITE_AS['MEMBER']) # type: int + invited_as: int = models.PositiveSmallIntegerField(default=INVITE_AS['MEMBER']) class MultiuseInvite(models.Model): referred_by = models.ForeignKey(UserProfile, on_delete=CASCADE) # Optional[UserProfile] - streams = models.ManyToManyField('Stream') # type: Manager - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - invited_as = models.PositiveSmallIntegerField(default=PreregistrationUser.INVITE_AS['MEMBER']) # type: int + streams: Manager = models.ManyToManyField('Stream') + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + invited_as: int = models.PositiveSmallIntegerField(default=PreregistrationUser.INVITE_AS['MEMBER']) class EmailChangeStatus(models.Model): - new_email = models.EmailField() # type: str - old_email = models.EmailField() # type: str - updated_at = models.DateTimeField(auto_now=True) # type: datetime.datetime - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile + new_email: str = models.EmailField() + old_email: str = models.EmailField() + updated_at: datetime.datetime = models.DateTimeField(auto_now=True) + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) # status: whether an object has been confirmed. 
# if confirmed, set to confirmation.settings.STATUS_ACTIVE - status = models.IntegerField(default=0) # type: int + status: int = models.IntegerField(default=0) - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) class AbstractPushDeviceToken(models.Model): APNS = 1 @@ -1305,27 +1325,27 @@ class AbstractPushDeviceToken(models.Model): (GCM, 'gcm'), ) - kind = models.PositiveSmallIntegerField(choices=KINDS) # type: int + kind: int = models.PositiveSmallIntegerField(choices=KINDS) # The token is a unique device-specific token that is # sent to us from each device: # - APNS token if kind == APNS # - GCM registration id if kind == GCM - token = models.CharField(max_length=4096, db_index=True) # type: str + token: str = models.CharField(max_length=4096, db_index=True) # TODO: last_updated should be renamed date_created, since it is # no longer maintained as a last_updated value. - last_updated = models.DateTimeField(auto_now=True) # type: datetime.datetime + last_updated: datetime.datetime = models.DateTimeField(auto_now=True) # [optional] Contains the app id of the device if it is an iOS device - ios_app_id = models.TextField(null=True) # type: Optional[str] + ios_app_id: Optional[str] = models.TextField(null=True) class Meta: abstract = True class PushDeviceToken(AbstractPushDeviceToken): # The user who's device this is - user = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE) # type: UserProfile + user: UserProfile = models.ForeignKey(UserProfile, db_index=True, on_delete=CASCADE) class Meta: unique_together = ("user", "kind", "token") @@ -1337,21 +1357,21 @@ class Stream(models.Model): MAX_NAME_LENGTH = 60 MAX_DESCRIPTION_LENGTH = 1024 - name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: str - realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE) # type: Realm - date_created = models.DateTimeField(default=timezone_now) # type: datetime.datetime - deactivated = models.BooleanField(default=False) # type: bool - description = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default='') # type: str - rendered_description = models.TextField(default='') # type: str + name: str = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) + realm: Realm = models.ForeignKey(Realm, db_index=True, on_delete=CASCADE) + date_created: datetime.datetime = models.DateTimeField(default=timezone_now) + deactivated: bool = models.BooleanField(default=False) + description: str = models.CharField(max_length=MAX_DESCRIPTION_LENGTH, default='') + rendered_description: str = models.TextField(default='') # Foreign key to the Recipient object for STREAM type messages to this stream. recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL) - invite_only = models.NullBooleanField(default=False) # type: Optional[bool] - history_public_to_subscribers = models.BooleanField(default=False) # type: bool + invite_only: Optional[bool] = models.NullBooleanField(default=False) + history_public_to_subscribers: bool = models.BooleanField(default=False) # Whether this stream's content should be published by the web-public archive features - is_web_public = models.BooleanField(default=False) # type: bool + is_web_public: bool = models.BooleanField(default=False) STREAM_POST_POLICY_EVERYONE = 1 STREAM_POST_POLICY_ADMINS = 2 @@ -1359,7 +1379,7 @@ class Stream(models.Model): # TODO: Implement policy to restrict posting to a user group or admins. 
# Who in the organization has permission to send messages to this stream. - stream_post_policy = models.PositiveSmallIntegerField(default=STREAM_POST_POLICY_EVERYONE) # type: int + stream_post_policy: int = models.PositiveSmallIntegerField(default=STREAM_POST_POLICY_EVERYONE) STREAM_POST_POLICY_TYPES = [ STREAM_POST_POLICY_EVERYONE, STREAM_POST_POLICY_ADMINS, @@ -1374,21 +1394,22 @@ class Stream(models.Model): # is more public in the sense that you don't need a Zulip invite to join. # This field is populated directly from UserProfile.is_zephyr_mirror_realm, # and the reason for denormalizing field is performance. - is_in_zephyr_realm = models.BooleanField(default=False) # type: bool + is_in_zephyr_realm: bool = models.BooleanField(default=False) # Used by the e-mail forwarder. The e-mail RFC specifies a maximum # e-mail length of 254, and our max stream length is 30, so we # have plenty of room for the token. - email_token = models.CharField( - max_length=32, default=generate_email_token_for_stream, unique=True) # type: str + email_token: str = models.CharField( + max_length=32, default=generate_email_token_for_stream, unique=True + ) # For old messages being automatically deleted - message_retention_days = models.IntegerField(null=True, default=None) # type: Optional[int] + message_retention_days: Optional[int] = models.IntegerField(null=True, default=None) # The very first message ID in the stream. Used to help clients # determine whether they might need to display "more topics" for a # stream based on what messages they have cached. - first_message_id = models.IntegerField(null=True, db_index=True) # type: Optional[int] + first_message_id: Optional[int] = models.IntegerField(null=True, db_index=True) def __str__(self) -> str: return "" % (self.name,) @@ -1467,12 +1488,12 @@ class MutedTopic(models.Model): self.date_muted)) class Client(models.Model): - name = models.CharField(max_length=30, db_index=True, unique=True) # type: str + name: str = models.CharField(max_length=30, db_index=True, unique=True) def __str__(self) -> str: return "" % (self.name,) -get_client_cache = {} # type: Dict[str, Client] +get_client_cache: Dict[str, Client] = {} def get_client(name: str) -> Client: # Accessing KEY_PREFIX through the module is necessary # because we need the updated value of the variable. @@ -1577,7 +1598,7 @@ def bulk_get_huddle_user_ids(recipients: List[Recipient]) -> Dict[int, List[int] recipient__in=recipients ).order_by('user_profile_id') - result_dict = {} # type: Dict[int, List[int]] + result_dict: Dict[int, List[int]] = {} for recipient in recipients: result_dict[recipient.id] = [subscription.user_profile_id for subscription in subscriptions @@ -1586,8 +1607,8 @@ def bulk_get_huddle_user_ids(recipients: List[Recipient]) -> Dict[int, List[int] return result_dict class AbstractMessage(models.Model): - sender = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient + sender: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + recipient: Recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # The message's topic. # # Early versions of Zulip called this concept a "subject", as in an email @@ -1596,24 +1617,24 @@ class AbstractMessage(models.Model): # new code should generally also say "topic". # # See also the `topic_name` method on `Message`. 
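`get_client_cache` above is a plain module-level dict used as a per-process memo, so repeated `get_client(name)` calls skip the remote cache and database after the first lookup. A generic sketch of the same pattern; `fetch_client` stands in for the real, more expensive lookup:

from typing import Dict

_client_cache: Dict[str, str] = {}

def fetch_client(name: str) -> str:
    # Stand-in for the expensive path (remote cache, then database).
    return f"Client<{name}>"

def get_client(name: str) -> str:
    if name not in _client_cache:
        _client_cache[name] = fetch_client(name)
    return _client_cache[name]

assert get_client("website") is get_client("website")  # second call hits the memo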
- subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True) # type: str + subject: str = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH, db_index=True) - content = models.TextField() # type: str - rendered_content = models.TextField(null=True) # type: Optional[str] - rendered_content_version = models.IntegerField(null=True) # type: Optional[int] + content: str = models.TextField() + rendered_content: Optional[str] = models.TextField(null=True) + rendered_content_version: Optional[int] = models.IntegerField(null=True) - date_sent = models.DateTimeField('date sent', db_index=True) # type: datetime.datetime - sending_client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client + date_sent: datetime.datetime = models.DateTimeField('date sent', db_index=True) + sending_client: Client = models.ForeignKey(Client, on_delete=CASCADE) - last_edit_time = models.DateTimeField(null=True) # type: Optional[datetime.datetime] + last_edit_time: Optional[datetime.datetime] = models.DateTimeField(null=True) # A JSON-encoded list of objects describing any past edits to this # message, oldest first. - edit_history = models.TextField(null=True) # type: Optional[str] + edit_history: Optional[str] = models.TextField(null=True) - has_attachment = models.BooleanField(default=False, db_index=True) # type: bool - has_image = models.BooleanField(default=False, db_index=True) # type: bool - has_link = models.BooleanField(default=False, db_index=True) # type: bool + has_attachment: bool = models.BooleanField(default=False, db_index=True) + has_image: bool = models.BooleanField(default=False, db_index=True) + has_link: bool = models.BooleanField(default=False, db_index=True) class Meta: abstract = True @@ -1624,18 +1645,18 @@ class AbstractMessage(models.Model): self.subject, self.sender) class ArchiveTransaction(models.Model): - timestamp = models.DateTimeField(default=timezone_now, db_index=True) # type: datetime.datetime + timestamp: datetime.datetime = models.DateTimeField(default=timezone_now, db_index=True) # Marks if the data archived in this transaction has been restored: - restored = models.BooleanField(default=False, db_index=True) # type: bool + restored: bool = models.BooleanField(default=False, db_index=True) - type = models.PositiveSmallIntegerField(db_index=True) # type: int + type: int = models.PositiveSmallIntegerField(db_index=True) # Valid types: RETENTION_POLICY_BASED = 1 # Archiving was executed due to automated retention policies MANUAL = 2 # Archiving was run manually, via move_messages_to_archive function # ForeignKey to the realm with which objects archived in this transaction are associated. # If type is set to MANUAL, this should be null. - realm = models.ForeignKey(Realm, null=True, on_delete=CASCADE) # type: Optional[Realm] + realm: Optional[Realm] = models.ForeignKey(Realm, null=True, on_delete=CASCADE) def __str__(self) -> str: return "ArchiveTransaction id: {id}, type: {type}, realm: {realm}, timestamp: {timestamp}".format( @@ -1650,7 +1671,7 @@ class ArchivedMessage(AbstractMessage): are permanently deleted. This is an important part of a robust 'message retention' feature. """ - archive_transaction = models.ForeignKey(ArchiveTransaction, on_delete=CASCADE) # type: ArchiveTransaction + archive_transaction: ArchiveTransaction = models.ForeignKey(ArchiveTransaction, on_delete=CASCADE) class Message(AbstractMessage): @@ -1746,7 +1767,7 @@ class AbstractSubMessage(models.Model): # games, surveys, mini threads, etc. 
These are designed to be pretty # generic in purpose. - sender = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile + sender: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) msg_type = models.TextField() content = models.TextField() @@ -1754,7 +1775,7 @@ class AbstractSubMessage(models.Model): abstract = True class SubMessage(AbstractSubMessage): - message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message + message: Message = models.ForeignKey(Message, on_delete=CASCADE) @staticmethod def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]: @@ -1764,7 +1785,7 @@ class SubMessage(AbstractSubMessage): return list(query) class ArchivedSubMessage(AbstractSubMessage): - message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # type: ArchivedMessage + message: ArchivedMessage = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) post_save.connect(flush_submessage, sender=SubMessage) @@ -1775,12 +1796,12 @@ class AbstractReaction(models.Model): on how this subsystem works, see: https://zulip.readthedocs.io/en/latest/subsystems/emoji.html """ - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) # The user-facing name for an emoji reaction. With emoji aliases, # there may be multiple accepted names for a given emoji; this # field encodes which one the user selected. - emoji_name = models.TextField() # type: str + emoji_name: str = models.TextField() UNICODE_EMOJI = 'unicode_emoji' REALM_EMOJI = 'realm_emoji' @@ -1788,7 +1809,7 @@ class AbstractReaction(models.Model): REACTION_TYPES = ((UNICODE_EMOJI, _("Unicode emoji")), (REALM_EMOJI, _("Custom emoji")), (ZULIP_EXTRA_EMOJI, _("Zulip extra emoji"))) - reaction_type = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30) # type: str + reaction_type: str = models.CharField(default=UNICODE_EMOJI, choices=REACTION_TYPES, max_length=30) # A string that uniquely identifies a particular emoji. The format varies # by type: @@ -1803,14 +1824,14 @@ class AbstractReaction(models.Model): # (in ASCII decimal) of the RealmEmoji object. # # * For "Zulip extra emoji" (like :zulip:), the filename of the emoji. - emoji_code = models.TextField() # type: str + emoji_code: str = models.TextField() class Meta: abstract = True unique_together = ("user_profile", "message", "emoji_name") class Reaction(AbstractReaction): - message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message + message: Message = models.ForeignKey(Message, on_delete=CASCADE) @staticmethod def get_raw_db_rows(needed_ids: List[int]) -> List[Dict[str, Any]]: @@ -1822,7 +1843,7 @@ class Reaction(AbstractReaction): return "%s / %s / %s" % (self.user_profile.email, self.message.id, self.emoji_name) class ArchivedReaction(AbstractReaction): - message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # type: ArchivedMessage + message: ArchivedMessage = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # Whenever a message is sent, for each user subscribed to the # corresponding Recipient object, we add a row to the UserMessage @@ -1837,9 +1858,9 @@ class ArchivedReaction(AbstractReaction): # UserMessage is the largest table in a Zulip installation, even # though each row is only 4 integers. 
class AbstractUserMessage(models.Model): - id = models.BigAutoField(primary_key=True) # type: int + id: int = models.BigAutoField(primary_key=True) - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) # The order here is important! It's the order of fields in the bitfield. ALL_FLAGS = [ 'read', @@ -1884,7 +1905,7 @@ class AbstractUserMessage(models.Model): "summarize_in_home", "summarize_in_stream", } - flags = BitField(flags=ALL_FLAGS, default=0) # type: BitHandler + flags: BitHandler = BitField(flags=ALL_FLAGS, default=0) class Meta: abstract = True @@ -1938,7 +1959,7 @@ class AbstractUserMessage(models.Model): class UserMessage(AbstractUserMessage): - message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message + message: Message = models.ForeignKey(Message, on_delete=CASCADE) def get_usermessage_by_message_id(user_profile: UserProfile, message_id: int) -> Optional[UserMessage]: try: @@ -1952,28 +1973,29 @@ class ArchivedUserMessage(AbstractUserMessage): before they are permanently deleted. This is an important part of a robust 'message retention' feature. """ - message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) # type: Message + message: Message = models.ForeignKey(ArchivedMessage, on_delete=CASCADE) class AbstractAttachment(models.Model): - file_name = models.TextField(db_index=True) # type: str + file_name: str = models.TextField(db_index=True) # path_id is a storage location agnostic representation of the path of the file. # If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py # then its path_id will be a/b/abc/temp_file.py. - path_id = models.TextField(db_index=True, unique=True) # type: str - owner = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - realm = models.ForeignKey(Realm, blank=True, null=True, on_delete=CASCADE) # type: Optional[Realm] + path_id: str = models.TextField(db_index=True, unique=True) + owner: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + realm: Optional[Realm] = models.ForeignKey(Realm, blank=True, null=True, on_delete=CASCADE) - create_time = models.DateTimeField(default=timezone_now, - db_index=True) # type: datetime.datetime - size = models.IntegerField(null=True) # type: Optional[int] + create_time: datetime.datetime = models.DateTimeField( + default=timezone_now, db_index=True + ) + size: Optional[int] = models.IntegerField(null=True) # Whether this attachment has been posted to a public stream, and # thus should be available to all non-guest users in the # organization (even if they weren't a recipient of a message # linking to it). This lets us avoid looking up the corresponding # messages/streams to check permissions before serving these files. - is_realm_public = models.BooleanField(default=False) # type: bool + is_realm_public: bool = models.BooleanField(default=False) class Meta: abstract = True @@ -1987,10 +2009,10 @@ class ArchivedAttachment(AbstractAttachment): before they are permanently deleted. This is an important part of a robust 'message retention' feature. 
""" - messages = models.ManyToManyField(ArchivedMessage) # type: Manager + messages: Manager = models.ManyToManyField(ArchivedMessage) class Attachment(AbstractAttachment): - messages = models.ManyToManyField(Message) # type: Manager + messages: Manager = models.ManyToManyField(Message) def is_claimed(self) -> bool: return self.messages.count() > 0 @@ -2057,31 +2079,31 @@ def get_old_unclaimed_attachments(weeks_ago: int) -> Sequence[Attachment]: return old_attachments class Subscription(models.Model): - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + recipient: Recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # Whether the user has since unsubscribed. We mark Subscription # objects as inactive, rather than deleting them, when a user # unsubscribes, so we can preserve user customizations like # notification settings, stream color, etc., if the user later # resubscribes. - active = models.BooleanField(default=True) # type: bool + active: bool = models.BooleanField(default=True) # Whether this user had muted this stream. - is_muted = models.NullBooleanField(default=False) # type: Optional[bool] + is_muted: Optional[bool] = models.NullBooleanField(default=False) DEFAULT_STREAM_COLOR = "#c2c2c2" - color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) # type: str - pin_to_top = models.BooleanField(default=False) # type: bool + color: str = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) + pin_to_top: bool = models.BooleanField(default=False) # These fields are stream-level overrides for the user's default # configuration for notification, configured in UserProfile. The # default, None, means we just inherit the user-level default. - desktop_notifications = models.NullBooleanField(default=None) # type: Optional[bool] - audible_notifications = models.NullBooleanField(default=None) # type: Optional[bool] - push_notifications = models.NullBooleanField(default=None) # type: Optional[bool] - email_notifications = models.NullBooleanField(default=None) # type: Optional[bool] - wildcard_mentions_notify = models.NullBooleanField(default=None) # type: Optional[bool] + desktop_notifications: Optional[bool] = models.NullBooleanField(default=None) + audible_notifications: Optional[bool] = models.NullBooleanField(default=None) + push_notifications: Optional[bool] = models.NullBooleanField(default=None) + email_notifications: Optional[bool] = models.NullBooleanField(default=None) + wildcard_mentions_notify: Optional[bool] = models.NullBooleanField(default=None) class Meta: unique_together = ("user_profile", "recipient") @@ -2272,7 +2294,7 @@ def is_cross_realm_bot_email(email: str) -> bool: class Huddle(models.Model): # TODO: We should consider whether using # CommaSeparatedIntegerField would be better. - huddle_hash = models.CharField(max_length=40, db_index=True, unique=True) # type: str + huddle_hash: str = models.CharField(max_length=40, db_index=True, unique=True) # Foreign key to the Recipient object for this Huddle. 
recipient = models.ForeignKey(Recipient, null=True, on_delete=models.SET_NULL) @@ -2304,12 +2326,12 @@ def get_huddle_backend(huddle_hash: str, id_list: List[int]) -> Huddle: return huddle class UserActivity(models.Model): - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client - query = models.CharField(max_length=50, db_index=True) # type: str + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + client: Client = models.ForeignKey(Client, on_delete=CASCADE) + query: str = models.CharField(max_length=50, db_index=True) - count = models.IntegerField() # type: int - last_visit = models.DateTimeField('last visit') # type: datetime.datetime + count: int = models.IntegerField() + last_visit: datetime.datetime = models.DateTimeField('last visit') class Meta: unique_together = ("user_profile", "client", "query") @@ -2317,9 +2339,9 @@ class UserActivity(models.Model): class UserActivityInterval(models.Model): MIN_INTERVAL_LENGTH = datetime.timedelta(minutes=15) - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - start = models.DateTimeField('start time', db_index=True) # type: datetime.datetime - end = models.DateTimeField('end time', db_index=True) # type: datetime.datetime + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + start: datetime.datetime = models.DateTimeField('start time', db_index=True) + end: datetime.datetime = models.DateTimeField('end time', db_index=True) class UserPresence(models.Model): @@ -2334,12 +2356,12 @@ class UserPresence(models.Model): ("realm", "timestamp") ] - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + client: Client = models.ForeignKey(Client, on_delete=CASCADE) # The time we heard this update from the client. - timestamp = models.DateTimeField('presence changed') # type: datetime.datetime + timestamp: datetime.datetime = models.DateTimeField('presence changed') # The user was actively using this Zulip client as of `timestamp` (i.e., # they had interacted with the client recently). When the timestamp is @@ -2357,7 +2379,7 @@ class UserPresence(models.Model): # # There is no "inactive" status, because that is encoded by the # timestamp being old. 
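As the comment above explains, there is no stored "inactive" presence; staleness of the newest ACTIVE/IDLE timestamp is what makes a user count as offline. A hedged sketch of that derivation; the constant values and the 140-second threshold are assumptions for illustration, not values taken from this patch:

import datetime

ACTIVE, IDLE = 1, 2  # illustrative values; the real constants are in elided context
OFFLINE_THRESHOLD = datetime.timedelta(seconds=140)  # illustrative only

def effective_status(status: int, timestamp: datetime.datetime,
                     now: datetime.datetime) -> str:
    # A stale row means offline, regardless of the status it last reported.
    if now - timestamp > OFFLINE_THRESHOLD:
        return "offline"
    return "active" if status == ACTIVE else "idle"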
- status = models.PositiveSmallIntegerField(default=ACTIVE) # type: int + status: int = models.PositiveSmallIntegerField(default=ACTIVE) @staticmethod def status_to_string(status: int) -> str: @@ -2391,7 +2413,7 @@ class UserPresence(models.Model): @staticmethod def status_from_string(status: str) -> Optional[int]: if status == 'active': - status_val = UserPresence.ACTIVE # type: Optional[int] # See https://github.com/python/mypy/issues/2611 + status_val: Optional[int] = UserPresence.ACTIVE # See https://github.com/python/mypy/issues/2611 elif status == 'idle': status_val = UserPresence.IDLE else: @@ -2400,30 +2422,30 @@ class UserPresence(models.Model): return status_val class UserStatus(models.Model): - user_profile = models.OneToOneField(UserProfile, on_delete=CASCADE) # type: UserProfile + user_profile: UserProfile = models.OneToOneField(UserProfile, on_delete=CASCADE) - timestamp = models.DateTimeField() # type: datetime.datetime - client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client + timestamp: datetime.datetime = models.DateTimeField() + client: Client = models.ForeignKey(Client, on_delete=CASCADE) NORMAL = 0 AWAY = 1 - status = models.PositiveSmallIntegerField(default=NORMAL) # type: int - status_text = models.CharField(max_length=255, default='') # type: str + status: int = models.PositiveSmallIntegerField(default=NORMAL) + status_text: str = models.CharField(max_length=255, default='') class DefaultStream(models.Model): - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - stream = models.ForeignKey(Stream, on_delete=CASCADE) # type: Stream + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + stream: Stream = models.ForeignKey(Stream, on_delete=CASCADE) class Meta: unique_together = ("realm", "stream") class DefaultStreamGroup(models.Model): MAX_NAME_LENGTH = 60 - name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: str - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - streams = models.ManyToManyField('Stream') # type: Manager - description = models.CharField(max_length=1024, default='') # type: str + name: str = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + streams: Manager = models.ManyToManyField('Stream') + description: str = models.CharField(max_length=1024, default='') class Meta: unique_together = ("realm", "name") @@ -2438,10 +2460,10 @@ def get_default_stream_groups(realm: Realm) -> List[DefaultStreamGroup]: return DefaultStreamGroup.objects.filter(realm=realm) class AbstractScheduledJob(models.Model): - scheduled_timestamp = models.DateTimeField(db_index=True) # type: datetime.datetime + scheduled_timestamp: datetime.datetime = models.DateTimeField(db_index=True) # JSON representation of arguments to consumer - data = models.TextField() # type: str - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm + data: str = models.TextField() + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) class Meta: abstract = True @@ -2452,15 +2474,15 @@ class ScheduledEmail(AbstractScheduledJob): # ScheduledEmails for use in clear_scheduled_emails; the # recipients used for actually sending messages are stored in the # data field of AbstractScheduledJob. - users = models.ManyToManyField(UserProfile) # type: Manager + users: Manager = models.ManyToManyField(UserProfile) # Just the address part of a full "name
<email>
" email address - address = models.EmailField(null=True, db_index=True) # type: Optional[str] + address: Optional[str] = models.EmailField(null=True, db_index=True) # Valid types are below WELCOME = 1 DIGEST = 2 INVITATION_REMINDER = 3 - type = models.PositiveSmallIntegerField() # type: int + type: int = models.PositiveSmallIntegerField() def __str__(self) -> str: return "" % (self.type, @@ -2471,14 +2493,14 @@ class MissedMessageEmailAddress(models.Model): EXPIRY_SECONDS = 60 * 60 * 24 * 5 ALLOWED_USES = 1 - message = models.ForeignKey(Message, on_delete=CASCADE) # type: Message - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - email_token = models.CharField(max_length=34, unique=True, db_index=True) # type: str + message: Message = models.ForeignKey(Message, on_delete=CASCADE) + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + email_token: str = models.CharField(max_length=34, unique=True, db_index=True) # Timestamp of when the missed message address generated. # The address is valid until timestamp + EXPIRY_SECONDS. - timestamp = models.DateTimeField(db_index=True, default=timezone_now) # type: datetime.datetime - times_used = models.PositiveIntegerField(default=0, db_index=True) # type: int + timestamp: datetime.datetime = models.DateTimeField(db_index=True, default=timezone_now) + times_used: int = models.PositiveIntegerField(default=0, db_index=True) def __str__(self) -> str: return settings.EMAIL_GATEWAY_PATTERN % (self.email_token,) @@ -2493,15 +2515,15 @@ class MissedMessageEmailAddress(models.Model): self.save(update_fields=["times_used"]) class ScheduledMessage(models.Model): - sender = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - recipient = models.ForeignKey(Recipient, on_delete=CASCADE) # type: Recipient - subject = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH) # type: str - content = models.TextField() # type: str - sending_client = models.ForeignKey(Client, on_delete=CASCADE) # type: Client - stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream] - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - scheduled_timestamp = models.DateTimeField(db_index=True) # type: datetime.datetime - delivered = models.BooleanField(default=False) # type: bool + sender: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + recipient: Recipient = models.ForeignKey(Recipient, on_delete=CASCADE) + subject: str = models.CharField(max_length=MAX_TOPIC_NAME_LENGTH) + content: str = models.TextField() + sending_client: Client = models.ForeignKey(Client, on_delete=CASCADE) + stream: Optional[Stream] = models.ForeignKey(Stream, null=True, on_delete=CASCADE) + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + scheduled_timestamp: datetime.datetime = models.DateTimeField(db_index=True) + delivered: bool = models.BooleanField(default=False) SEND_LATER = 1 REMIND = 2 @@ -2511,8 +2533,9 @@ class ScheduledMessage(models.Model): (REMIND, 'remind'), ) - delivery_type = models.PositiveSmallIntegerField(choices=DELIVERY_TYPES, - default=SEND_LATER) # type: int + delivery_type: int = models.PositiveSmallIntegerField( + choices=DELIVERY_TYPES, default=SEND_LATER + ) def topic_name(self) -> str: return self.subject @@ -2535,10 +2558,10 @@ EMAIL_TYPES = { class AbstractRealmAuditLog(models.Model): """Defines fields common to RealmAuditLog and RemoteRealmAuditLog.""" - event_time = models.DateTimeField(db_index=True) # type: datetime.datetime 
+ event_time: datetime.datetime = models.DateTimeField(db_index=True) # If True, event_time is an overestimate of the true time. Can be used # by migrations when introducing a new event_type. - backfilled = models.BooleanField(default=False) # type: bool + backfilled: bool = models.BooleanField(default=False) # Keys within extra_data, when extra_data is a json dict. Keys are strings because # json keys must always be strings. @@ -2548,7 +2571,7 @@ class AbstractRealmAuditLog(models.Model): ROLE_COUNT_HUMANS = '11' ROLE_COUNT_BOTS = '12' - extra_data = models.TextField(null=True) # type: Optional[str] + extra_data: Optional[str] = models.TextField(null=True) # Event types USER_CREATED = 101 @@ -2586,7 +2609,7 @@ class AbstractRealmAuditLog(models.Model): CUSTOMER_CREATED = 501 CUSTOMER_PLAN_CREATED = 502 - event_type = models.PositiveSmallIntegerField() # type: int + event_type: int = models.PositiveSmallIntegerField() # event_types synced from on-prem installations to zulipchat.com when # billing for mobile push notifications is enabled. Every billing @@ -2618,11 +2641,17 @@ class RealmAuditLog(AbstractRealmAuditLog): acting_user is that administrator and both modified_user and modified_stream will be None. """ - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - acting_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE) # type: Optional[UserProfile] - modified_user = models.ForeignKey(UserProfile, null=True, related_name='+', on_delete=CASCADE) # type: Optional[UserProfile] - modified_stream = models.ForeignKey(Stream, null=True, on_delete=CASCADE) # type: Optional[Stream] - event_last_message_id = models.IntegerField(null=True) # type: Optional[int] + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + acting_user: Optional[UserProfile] = models.ForeignKey( + UserProfile, null=True, related_name="+", on_delete=CASCADE + ) + modified_user: Optional[UserProfile] = models.ForeignKey( + UserProfile, null=True, related_name="+", on_delete=CASCADE + ) + modified_stream: Optional[Stream] = models.ForeignKey( + Stream, null=True, on_delete=CASCADE + ) + event_last_message_id: Optional[int] = models.IntegerField(null=True) def __str__(self) -> str: if self.modified_user is not None: @@ -2635,9 +2664,9 @@ class RealmAuditLog(AbstractRealmAuditLog): self.realm, self.event_type, self.event_time, self.id) class UserHotspot(models.Model): - user = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - hotspot = models.CharField(max_length=30) # type: str - timestamp = models.DateTimeField(default=timezone_now) # type: datetime.datetime + user: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + hotspot: str = models.CharField(max_length=30) + timestamp: datetime.datetime = models.DateTimeField(default=timezone_now) class Meta: unique_together = ("user", "hotspot") @@ -2676,10 +2705,10 @@ class CustomProfileField(models.Model): HINT_MAX_LENGTH = 80 NAME_MAX_LENGTH = 40 - realm = models.ForeignKey(Realm, on_delete=CASCADE) # type: Realm - name = models.CharField(max_length=NAME_MAX_LENGTH) # type: str - hint = models.CharField(max_length=HINT_MAX_LENGTH, default='', null=True) # type: Optional[str] - order = models.IntegerField(default=0) # type: int + realm: Realm = models.ForeignKey(Realm, on_delete=CASCADE) + name: str = models.CharField(max_length=NAME_MAX_LENGTH) + hint: Optional[str] = models.CharField(max_length=HINT_MAX_LENGTH, default='', null=True) + order: int = models.IntegerField(default=0) SHORT_TEXT 
= 1 LONG_TEXT = 2 @@ -2692,40 +2721,41 @@ class CustomProfileField(models.Model): # These are the fields whose validators require more than var_name # and value argument. i.e. CHOICE require field_data, USER require # realm as argument. - CHOICE_FIELD_TYPE_DATA = [ + CHOICE_FIELD_TYPE_DATA: List[ExtendedFieldElement] = [ (CHOICE, str(_('List of options')), validate_choice_field, str, "CHOICE"), - ] # type: List[ExtendedFieldElement] - USER_FIELD_TYPE_DATA = [ + ] + USER_FIELD_TYPE_DATA: List[UserFieldElement] = [ (USER, str(_('Person picker')), check_valid_user_ids, eval, "USER"), - ] # type: List[UserFieldElement] + ] - CHOICE_FIELD_VALIDATORS = { + CHOICE_FIELD_VALIDATORS: Dict[int, ExtendedValidator] = { item[0]: item[2] for item in CHOICE_FIELD_TYPE_DATA - } # type: Dict[int, ExtendedValidator] - USER_FIELD_VALIDATORS = { + } + USER_FIELD_VALIDATORS: Dict[int, RealmUserValidator] = { item[0]: item[2] for item in USER_FIELD_TYPE_DATA - } # type: Dict[int, RealmUserValidator] + } - FIELD_TYPE_DATA = [ + FIELD_TYPE_DATA: List[FieldElement] = [ # Type, Display Name, Validator, Converter, Keyword (SHORT_TEXT, str(_('Short text')), check_short_string, str, "SHORT_TEXT"), (LONG_TEXT, str(_('Long text')), check_long_string, str, "LONG_TEXT"), (DATE, str(_('Date picker')), check_date, str, "DATE"), (URL, str(_('Link')), check_url, str, "URL"), (EXTERNAL_ACCOUNT, str(_('External account')), check_short_string, str, "EXTERNAL_ACCOUNT"), - ] # type: List[FieldElement] + ] ALL_FIELD_TYPES = [*FIELD_TYPE_DATA, *CHOICE_FIELD_TYPE_DATA, *USER_FIELD_TYPE_DATA] - FIELD_VALIDATORS = {item[0]: item[2] for item in FIELD_TYPE_DATA} # type: Dict[int, Validator] - FIELD_CONVERTERS = {item[0]: item[3] for item in ALL_FIELD_TYPES} # type: Dict[int, Callable[[Any], Any]] - FIELD_TYPE_CHOICES = [(item[0], item[1]) for item in ALL_FIELD_TYPES] # type: List[Tuple[int, str]] - FIELD_TYPE_CHOICES_DICT = { + FIELD_VALIDATORS: Dict[int, Validator] = {item[0]: item[2] for item in FIELD_TYPE_DATA} + FIELD_CONVERTERS: Dict[int, Callable[[Any], Any]] = {item[0]: item[3] for item in ALL_FIELD_TYPES} + FIELD_TYPE_CHOICES: List[Tuple[int, str]] = [(item[0], item[1]) for item in ALL_FIELD_TYPES] + FIELD_TYPE_CHOICES_DICT: Dict[str, Dict[str, Union[str, int]]] = { item[4]: {"id": item[0], "name": item[1]} for item in ALL_FIELD_TYPES - } # type: Dict[str, Dict[str, Union[str, int]]] + } - field_type = models.PositiveSmallIntegerField(choices=FIELD_TYPE_CHOICES, - default=SHORT_TEXT) # type: int + field_type: int = models.PositiveSmallIntegerField( + choices=FIELD_TYPE_CHOICES, default=SHORT_TEXT + ) # A JSON blob of any additional data needed to define the field beyond # type/name/hint. @@ -2736,7 +2766,7 @@ class CustomProfileField(models.Model): # # Note: There is no performance overhead of using TextField in PostgreSQL. 
# See https://www.postgresql.org/docs/9.0/static/datatype-character.html - field_data = models.TextField(default='', null=True) # type: Optional[str] + field_data: Optional[str] = models.TextField(default='', null=True) class Meta: unique_together = ('realm', 'name') @@ -2763,10 +2793,10 @@ def custom_profile_fields_for_realm(realm_id: int) -> List[CustomProfileField]: return CustomProfileField.objects.filter(realm=realm_id).order_by('order') class CustomProfileFieldValue(models.Model): - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - field = models.ForeignKey(CustomProfileField, on_delete=CASCADE) # type: CustomProfileField - value = models.TextField() # type: str - rendered_value = models.TextField(null=True, default=None) # type: Optional[str] + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + field: CustomProfileField = models.ForeignKey(CustomProfileField, on_delete=CASCADE) + value: str = models.TextField() + rendered_value: Optional[str] = models.TextField(null=True, default=None) class Meta: unique_together = ('user_profile', 'field') @@ -2795,15 +2825,15 @@ SLACK_INTERFACE = 'SlackOutgoingWebhookService' # embedded bots with the same name will run the same code # - base_url and token are currently unused class Service(models.Model): - name = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH) # type: str + name: str = models.CharField(max_length=UserProfile.MAX_NAME_LENGTH) # Bot user corresponding to the Service. The bot_type of this user # deterines the type of service. If non-bot services are added later, # user_profile can also represent the owner of the Service. - user_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - base_url = models.TextField() # type: str - token = models.TextField() # type: str + user_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + base_url: str = models.TextField() + token: str = models.TextField() # Interface / API version of the service. - interface = models.PositiveSmallIntegerField(default=1) # type: int + interface: int = models.PositiveSmallIntegerField(default=1) # Valid interfaces are {generic, zulip_bot_service, slack} GENERIC = 1 @@ -2814,10 +2844,10 @@ class Service(models.Model): SLACK, ] # N.B. If we used Django's choice=... 
we would get this for free (kinda) - _interfaces = { + _interfaces: Dict[int, str] = { GENERIC: GENERIC_INTERFACE, SLACK: SLACK_INTERFACE, - } # type: Dict[int, str] + } def interface_name(self) -> str: # Raises KeyError if invalid @@ -2832,17 +2862,17 @@ def get_service_profile(user_profile_id: str, service_name: str) -> Service: class BotStorageData(models.Model): - bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - key = models.TextField(db_index=True) # type: str - value = models.TextField() # type: str + bot_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + key: str = models.TextField(db_index=True) + value: str = models.TextField() class Meta: unique_together = ("bot_profile", "key") class BotConfigData(models.Model): - bot_profile = models.ForeignKey(UserProfile, on_delete=CASCADE) # type: UserProfile - key = models.TextField(db_index=True) # type: str - value = models.TextField() # type: str + bot_profile: UserProfile = models.ForeignKey(UserProfile, on_delete=CASCADE) + key: str = models.TextField(db_index=True) + value: str = models.TextField() class Meta: unique_together = ("bot_profile", "key") diff --git a/zerver/openapi/curl_param_value_generators.py b/zerver/openapi/curl_param_value_generators.py index 5906d1cab1..f84b733cd8 100644 --- a/zerver/openapi/curl_param_value_generators.py +++ b/zerver/openapi/curl_param_value_generators.py @@ -9,9 +9,9 @@ from zerver.lib.test_classes import ZulipTestCase from zerver.lib.events import do_events_register from zerver.lib.actions import update_user_presence, do_add_realm_filter, do_add_reaction -GENERATOR_FUNCTIONS = dict() # type: Dict[str, Callable[..., Dict[Any, Any]]] -REGISTERED_GENERATOR_FUNCTIONS = set() # type: Set[str] -CALLED_GENERATOR_FUNCTIONS = set() # type: Set[str] +GENERATOR_FUNCTIONS: Dict[str, Callable[..., Dict[Any, Any]]] = dict() +REGISTERED_GENERATOR_FUNCTIONS: Set[str] = set() +CALLED_GENERATOR_FUNCTIONS: Set[str] = set() helpers = ZulipTestCase() @@ -42,7 +42,7 @@ def patch_openapi_example_values(entry: str, params: List[Dict[str, Any]], if entry not in GENERATOR_FUNCTIONS: return params, request_body func = GENERATOR_FUNCTIONS[entry] - realm_example_values = func() # type: Dict[str, Any] + realm_example_values: Dict[str, Any] = func() for param in params: param_name = param["name"] diff --git a/zerver/openapi/openapi.py b/zerver/openapi/openapi.py index ede918274e..17ff89af0a 100644 --- a/zerver/openapi/openapi.py +++ b/zerver/openapi/openapi.py @@ -21,8 +21,8 @@ EXCLUDE_PROPERTIES = { class OpenAPISpec(): def __init__(self, path: str) -> None: self.path = path - self.last_update = None # type: Optional[float] - self.data = None # type: Optional[Dict[str, Any]] + self.last_update: Optional[float] = None + self.data: Optional[Dict[str, Any]] = None def reload(self) -> None: # Because importing yamole (and in turn, yaml) takes diff --git a/zerver/openapi/python_examples.py b/zerver/openapi/python_examples.py index c10c29a392..94618b8290 100644 --- a/zerver/openapi/python_examples.py +++ b/zerver/openapi/python_examples.py @@ -14,9 +14,9 @@ from zulip import Client ZULIP_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) -TEST_FUNCTIONS = dict() # type: Dict[str, Callable[..., None]] -REGISTERED_TEST_FUNCTIONS = set() # type: Set[str] -CALLED_TEST_FUNCTIONS = set() # type: Set[str] +TEST_FUNCTIONS: Dict[str, Callable[..., None]] = dict() +REGISTERED_TEST_FUNCTIONS: Set[str] = set() +CALLED_TEST_FUNCTIONS: Set[str] = set() 
def openapi_test_function(endpoint: str) -> Callable[[Callable[..., Any]], Callable[..., Any]]: """This decorator is used to register an openapi test function with @@ -565,13 +565,13 @@ def get_messages(client: Client) -> None: # {code_example|start} # Get the 100 last messages sent by "iago@zulip.com" to the stream "Verona" - request = { + request: Dict[str, Any] = { 'anchor': 'newest', 'num_before': 100, 'num_after': 0, 'narrow': [{'operator': 'sender', 'operand': 'iago@zulip.com'}, {'operator': 'stream', 'operand': 'Verona'}], - } # type: Dict[str, Any] + } result = client.get_messages(request) # {code_example|end} @@ -594,7 +594,7 @@ def get_raw_message(client: Client, message_id: int) -> None: @openapi_test_function("/messages:post") def send_message(client: Client) -> int: - request = {} # type: Dict[str, Any] + request: Dict[str, Any] = {} # {code_example|start} # Send a stream message @@ -789,12 +789,12 @@ def get_realm_emoji(client: Client) -> None: def update_message_flags(client: Client) -> None: # Send a few test messages - request = { + request: Dict[str, Any] = { "type": "stream", "to": "Denmark", "topic": "Castle", "content": "I come not, friends, to steal away your hearts." - } # type: Dict[str, Any] + } message_ids = [] for i in range(0, 3): message_ids.append(client.send_message(request)['id']) diff --git a/zerver/templatetags/app_filters.py b/zerver/templatetags/app_filters.py index 95aade9b2a..4a11dc0cf4 100644 --- a/zerver/templatetags/app_filters.py +++ b/zerver/templatetags/app_filters.py @@ -57,8 +57,8 @@ def display_list(values: List[str], display_limit: int) -> str: return display_string -md_extensions = None # type: Optional[List[Any]] -md_macro_extension = None # type: Optional[Any] +md_extensions: Optional[List[Any]] = None +md_macro_extension: Optional[Any] = None # Prevent the automatic substitution of macros in these docs. If # they contain a macro, it is always used literally for documenting # the macro system. diff --git a/zerver/tests/test_auth_backends.py b/zerver/tests/test_auth_backends.py index 60ea098709..2503829788 100644 --- a/zerver/tests/test_auth_backends.py +++ b/zerver/tests/test_auth_backends.py @@ -193,7 +193,7 @@ class AuthBackendTest(ZulipTestCase): return_value=False), \ mock.patch('zproject.backends.password_auth_enabled', return_value=True): - return_data = {} # type: Dict[str, bool] + return_data: Dict[str, bool] = {} user = EmailAuthBackend().authenticate(request=mock.MagicMock(), username=user_profile.delivery_email, realm=get_realm("zulip"), @@ -397,7 +397,7 @@ class AuthBackendTest(ZulipTestCase): google_email_data = dict(email=user.delivery_email, name=user.full_name, email_verified=True) - backends_to_test = { + backends_to_test: Dict[str, Any] = { 'google': { 'urls': [ # The limited process that we test here doesn't require mocking any urls. 
@@ -415,7 +415,7 @@ class AuthBackendTest(ZulipTestCase): ], 'backend': GitHubAuthBackend, } - } # type: Dict[str, Any] + } def patched_authenticate(**kwargs: Any) -> Any: # This is how we pass the subdomain to the authentication @@ -442,7 +442,7 @@ class AuthBackendTest(ZulipTestCase): for backend_name in backends_to_test: with responses.RequestsMock(assert_all_requests_are_fired=True) as requests_mock: - urls = backends_to_test[backend_name]['urls'] # type: List[Dict[str, Any]] + urls: List[Dict[str, Any]] = backends_to_test[backend_name]['urls'] for details in urls: requests_mock.add( details['method'], @@ -1421,7 +1421,7 @@ class SAMLAuthBackendTest(SocialAuthBase): last_name=last_name ) # SAMLResponse needs to be base64-encoded. - saml_response = base64.b64encode(unencoded_saml_response.encode()).decode() # type: str + saml_response: str = base64.b64encode(unencoded_saml_response.encode()).decode() return saml_response diff --git a/zerver/tests/test_bots.py b/zerver/tests/test_bots.py index cfcadc7050..2c43d993cb 100644 --- a/zerver/tests/test_bots.py +++ b/zerver/tests/test_bots.py @@ -152,7 +152,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): hamlet = self.example_user('hamlet') self.login('hamlet') self.assert_num_bots_equal(0) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.create_bot() self.assert_num_bots_equal(1) @@ -287,7 +287,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): self.login_user(user) self.assert_num_bots_equal(0) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.create_bot() self.assert_num_bots_equal(1) @@ -347,7 +347,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): request_data = { 'principals': '["' + iago.email + '"]' } - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.common_subscribe_to_streams(hamlet, ['Rome'], request_data) self.assert_json_success(result) @@ -364,7 +364,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): bot_request_data = { 'principals': '["hambot-bot@zulip.testserver"]' } - events_bot = [] # type: List[Mapping[str, Any]] + events_bot: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events_bot): result = self.common_subscribe_to_streams(hamlet, ['Rome'], bot_request_data) self.assert_json_success(result) @@ -385,7 +385,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): do_change_stream_invite_only(stream, True) self.assert_num_bots_equal(0) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.create_bot(default_sending_stream='Denmark') self.assert_num_bots_equal(1) @@ -460,7 +460,7 @@ class BotTest(ZulipTestCase, UploadSerializeMixin): do_change_stream_invite_only(stream, True) self.assert_num_bots_equal(0) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.create_bot(default_events_register_stream='Denmark') self.assert_num_bots_equal(1) diff --git a/zerver/tests/test_bugdown.py b/zerver/tests/test_bugdown.py index f71c3bb84e..a9ff1d13f3 100644 --- a/zerver/tests/test_bugdown.py +++ b/zerver/tests/test_bugdown.py @@ -1048,7 +1048,7 @@ class BugdownTest(ZulipTestCase): directly for testing is kind of awkward ''' class Instance: - realm_id = None # 
type: Optional[int] + realm_id: Optional[int] = None instance = Instance() instance.realm_id = realm.id flush_realm_filter(sender=None, instance=instance) @@ -1135,13 +1135,13 @@ class BugdownTest(ZulipTestCase): self.assertEqual(msg.user_ids_with_alert_words, set()) def test_alert_words_returns_user_ids_with_alert_words(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['how'], 'cordelia': ['this possible'], 'iago': ['hello'], 'prospero': ['hello'], 'othello': ['how are you'], 'aaron': ['hey'] - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1160,22 +1160,22 @@ class BugdownTest(ZulipTestCase): content = "hello how is this possible how are you doing today" render(msg, content) - expected_user_ids = { + expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id - } # type: Set[int] + } # All users except aaron have their alert word appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_1(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['provisioning', 'Prod deployment'], 'cordelia': ['test', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'] - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1198,25 +1198,25 @@ class BugdownTest(ZulipTestCase): and this is a new line last""" render(msg, content) - expected_user_ids = { + expected_user_ids: Set[int] = { user_profiles['hamlet'].id, user_profiles['cordelia'].id, user_profiles['iago'].id, user_profiles['prospero'].id, user_profiles['othello'].id - } # type: Set[int] + } # All users have their alert word appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_user_ids_with_alert_words_in_french(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['réglementaire', 'une politique', 'une merveille'], 'cordelia': ['énormément', 'Prod'], 'iago': ['prod'], 'prospero': ['deployment'], 'othello': ['last'] - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1238,17 +1238,17 @@ class BugdownTest(ZulipTestCase): et j'espère qu'il n'y n' réglementaire a pas de mots d'alerte dans ce texte français """ render(msg, content) - expected_user_ids = {user_profiles['hamlet'].id, user_profiles['cordelia'].id} # type: Set[int] + expected_user_ids: Set[int] = {user_profiles['hamlet'].id, 
user_profiles['cordelia'].id} # Only hamlet and cordelia have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_returns_empty_user_ids_with_alert_words(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'prospero': [], 'othello': [], 'aaron': [] - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1268,7 +1268,7 @@ class BugdownTest(ZulipTestCase): in sending of the message """ render(msg, content) - expected_user_ids = set() # type: Set[int] + expected_user_ids: Set[int] = set() # None of the users have their alert-words appear in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) @@ -1277,14 +1277,14 @@ class BugdownTest(ZulipTestCase): return alert_words def test_alert_words_with_empty_alert_words(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': [], 'cordelia': [], 'iago': [], 'othello': [] - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1302,19 +1302,19 @@ class BugdownTest(ZulipTestCase): content = """This is to test a empty alert words i.e. no user has any alert-words set""" render(msg, content) - expected_user_ids = set() # type: Set[int] + expected_user_ids: Set[int] = set() self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) def test_alert_words_retuns_user_ids_with_alert_words_with_huge_alert_words(self) -> None: - alert_words_for_users = { + alert_words_for_users: Dict[str, List[str]] = { 'hamlet': ['issue124'], 'cordelia': self.get_mock_alert_words(500, 10), 'iago': self.get_mock_alert_words(500, 10), 'othello': self.get_mock_alert_words(500, 10) - } # type: Dict[str, List[str]] - user_profiles = {} # type: Dict[str, UserProfile] - user_ids = set() # type: Set[int] + } + user_profiles: Dict[str, UserProfile] = {} + user_ids: Set[int] = set() for (username, alert_words) in alert_words_for_users.items(): user_profile = self.example_user(username) user_profiles.update({username: user_profile}) @@ -1338,7 +1338,7 @@ class BugdownTest(ZulipTestCase): between 1 and 100 for you. 
The process is fairly simple """ render(msg, content) - expected_user_ids = {user_profiles['hamlet'].id} # type: Set[int] + expected_user_ids: Set[int] = {user_profiles['hamlet'].id} # Only hamlet has alert-word 'issue124' present in the message content self.assertEqual(msg.user_ids_with_alert_words, expected_user_ids) diff --git a/zerver/tests/test_cache.py b/zerver/tests/test_cache.py index 4d5da93113..c016588bb5 100644 --- a/zerver/tests/test_cache.py +++ b/zerver/tests/test_cache.py @@ -266,11 +266,11 @@ class GenericBulkCachedFetchTest(ZulipTestCase): # query_function shouldn't be called, because the only requested object # is already cached: - result = generic_bulk_cached_fetch( + result: Dict[str, UserProfile] = generic_bulk_cached_fetch( cache_key_function=user_profile_by_email_cache_key, query_function=query_function, object_ids=[self.example_email("hamlet")] - ) # type: Dict[str, UserProfile] + ) self.assertEqual(result, {hamlet.delivery_email: hamlet}) flush_cache(Mock()) @@ -294,9 +294,9 @@ class GenericBulkCachedFetchTest(ZulipTestCase): # query_function and cache_key_function shouldn't be called, because # objects_ids is empty, so there's nothing to do. - result = generic_bulk_cached_fetch( + result: Dict[str, UserProfile] = generic_bulk_cached_fetch( cache_key_function=cache_key_function, query_function=query_function, object_ids=[] - ) # type: Dict[str, UserProfile] + ) self.assertEqual(result, {}) diff --git a/zerver/tests/test_custom_profile_data.py b/zerver/tests/test_custom_profile_data.py index e535a5079e..80facd068e 100644 --- a/zerver/tests/test_custom_profile_data.py +++ b/zerver/tests/test_custom_profile_data.py @@ -28,7 +28,7 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase): def test_create(self) -> None: self.login('iago') realm = get_realm('zulip') - data = {"name": "Phone", "field_type": "text id"} # type: Dict[str, Any] + data: Dict[str, Any] = {"name": "Phone", "field_type": "text id"} result = self.client_post("/json/realm/profile_fields", info=data) self.assert_json_error(result, 'Argument "field_type" is not valid JSON.') @@ -68,7 +68,7 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase): def test_create_choice_field(self) -> None: self.login('iago') - data = {} # type: Dict[str, Union[str, int]] + data: Dict[str, Union[str, int]] = {} data["name"] = "Favorite programming language" data["field_type"] = CustomProfileField.CHOICE @@ -126,12 +126,12 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase): def test_create_default_external_account_field(self) -> None: self.login('iago') realm = get_realm("zulip") - field_type = CustomProfileField.EXTERNAL_ACCOUNT # type: int - field_data = ujson.dumps({ + field_type: int = CustomProfileField.EXTERNAL_ACCOUNT + field_data: str = ujson.dumps({ 'subtype': 'twitter' - }) # type: str - invalid_field_name = "Not required field name" # type: str - invalid_field_hint = "Not required field hint" # type: str + }) + invalid_field_name: str = "Not required field name" + invalid_field_hint: str = "Not required field hint" result = self.client_post("/json/realm/profile_fields", info=dict( @@ -173,7 +173,7 @@ class CreateCustomProfileFieldTest(CustomProfileFieldTestCase): def test_create_external_account_field(self) -> None: self.login('iago') realm = get_realm('zulip') - data = {} # type: Dict[str, Union[str, int, Dict[str, str]]] + data: Dict[str, Union[str, int, Dict[str, str]]] = {} data["name"] = "Twitter" data["field_type"] = CustomProfileField.EXTERNAL_ACCOUNT @@ -309,8 +309,9 
@@ class DeleteCustomProfileFieldTest(CustomProfileFieldTestCase): 'Field id %d not found.' % (invalid_field_id,)) field = CustomProfileField.objects.get(name="Mentor", realm=realm) - data = [{'id': field.id, - 'value': [self.example_user("aaron").id]}] # type: List[Dict[str, Union[int, str, List[int]]]] + data: List[Dict[str, Union[int, str, List[int]]]] = [ + {'id': field.id, 'value': [self.example_user("aaron").id]}, + ] do_update_user_custom_profile_data_if_changed(iago, data) iago_value = CustomProfileFieldValue.objects.get(user_profile=iago, field=field) @@ -332,7 +333,9 @@ class DeleteCustomProfileFieldTest(CustomProfileFieldTestCase): user_profile = self.example_user('iago') realm = user_profile.realm field = CustomProfileField.objects.get(name="Phone number", realm=realm) - data = [{'id': field.id, 'value': '123456'}] # type: List[Dict[str, Union[int, str, List[int]]]] + data: List[Dict[str, Union[int, str, List[int]]]] = [ + {'id': field.id, 'value': '123456'}, + ] do_update_user_custom_profile_data_if_changed(user_profile, data) self.assertTrue(self.custom_field_exists_in_realm(field.id)) @@ -521,7 +524,7 @@ class UpdateCustomProfileFieldTest(CustomProfileFieldTestCase): iago = self.example_user('iago') expected_value = {f['id']: f['value'] for f in data} - expected_rendered_value = {} # type: Dict[Union[int, float, str, None], Union[str, None]] + expected_rendered_value: Dict[Union[int, float, str, None], Union[str, None]] = {} for f in data: if f['field'].is_renderable(): expected_rendered_value[f['id']] = bugdown_convert(f['value']) @@ -604,8 +607,9 @@ class UpdateCustomProfileFieldTest(CustomProfileFieldTestCase): # Set field value: field = CustomProfileField.objects.get(name="Mentor", realm=realm) - data = [{'id': field.id, - 'value': [self.example_user("aaron").id]}] # type: List[Dict[str, Union[int, str, List[int]]]] + data: List[Dict[str, Union[int, str, List[int]]]] = [ + {'id': field.id, 'value': [self.example_user("aaron").id]} + ] do_update_user_custom_profile_data_if_changed(iago, data) with mock.patch("zerver.lib.actions.notify_user_update_custom_profile_data") as mock_notify: diff --git a/zerver/tests/test_decorators.py b/zerver/tests/test_decorators.py index 1e0cced6ce..be09ad797f 100644 --- a/zerver/tests/test_decorators.py +++ b/zerver/tests/test_decorators.py @@ -89,8 +89,8 @@ class DecoratorTestCase(TestCase): return x + x class Request: - GET = {} # type: Dict[str, str] - POST = {} # type: Dict[str, str] + GET: Dict[str, str] = {} + POST: Dict[str, str] = {} request = Request() @@ -127,8 +127,8 @@ class DecoratorTestCase(TestCase): return sum(numbers) class Request: - GET = {} # type: Dict[str, str] - POST = {} # type: Dict[str, str] + GET: Dict[str, str] = {} + POST: Dict[str, str] = {} request = Request() @@ -170,8 +170,8 @@ class DecoratorTestCase(TestCase): return sum(numbers) class Request: - GET = {} # type: Dict[str, str] - POST = {} # type: Dict[str, str] + GET: Dict[str, str] = {} + POST: Dict[str, str] = {} request = Request() @@ -200,8 +200,8 @@ class DecoratorTestCase(TestCase): return value[1:-1] class Request: - GET = {} # type: Dict[str, str] - POST = {} # type: Dict[str, str] + GET: Dict[str, str] = {} + POST: Dict[str, str] = {} request = Request() @@ -695,14 +695,14 @@ class RateLimitTestCase(TestCase): class ValidatorTestCase(TestCase): def test_check_string(self) -> None: - x = "hello" # type: Any + x: Any = "hello" self.assertEqual(check_string('x', x), None) x = 4 self.assertEqual(check_string('x', x), 'x is not a string') def 
test_check_string_fixed_length(self) -> None: - x = "hello" # type: Any + x: Any = "hello" self.assertEqual(check_string_fixed_length(5)('x', x), None) x = 4 @@ -715,7 +715,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_string_fixed_length(5)('x', x), 'x has incorrect length 2; should be 5') def test_check_capped_string(self) -> None: - x = "hello" # type: Any + x: Any = "hello" self.assertEqual(check_capped_string(5)('x', x), None) x = 4 @@ -739,7 +739,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_int_in([1])("Test", "t"), "Test is not an integer") def test_check_short_string(self) -> None: - x = "hello" # type: Any + x: Any = "hello" self.assertEqual(check_short_string('x', x), None) x = 'x' * 201 @@ -749,14 +749,14 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_short_string('x', x), 'x is not a string') def test_check_bool(self) -> None: - x = True # type: Any + x: Any = True self.assertEqual(check_bool('x', x), None) x = 4 self.assertEqual(check_bool('x', x), 'x is not a boolean') def test_check_int(self) -> None: - x = 5 # type: Any + x: Any = 5 self.assertEqual(check_int('x', x), None) x = [{}] @@ -778,7 +778,7 @@ class ValidatorTestCase(TestCase): to_not_negative_int_or_none('-5') def test_check_float(self) -> None: - x = 5.5 # type: Any + x: Any = 5.5 self.assertEqual(check_float('x', x), None) x = 5 @@ -804,7 +804,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(error, 'color is not a string') def test_check_list(self) -> None: - x = 999 # type: Any + x: Any = 999 error = check_list(check_string)('x', x) self.assertEqual(error, 'x is not a list') @@ -821,15 +821,15 @@ class ValidatorTestCase(TestCase): self.assertEqual(error, 'x should have exactly 2 items') def test_check_dict(self) -> None: - keys = [ + keys: List[Tuple[str, Validator]] = [ ('names', check_list(check_string)), ('city', check_string), - ] # type: List[Tuple[str, Validator]] + ] - x = { + x: Any = { 'names': ['alice', 'bob'], 'city': 'Boston', - } # type: Any + } error = check_dict(keys)('x', x) self.assertEqual(error, None) @@ -932,7 +932,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_person(nonperson), 'This is not a valid person') def test_check_variable_type(self) -> None: - x = 5 # type: Any + x: Any = 5 self.assertEqual(check_variable_type([check_string, check_int])('x', x), None) x = 'x' @@ -942,12 +942,12 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_variable_type([check_string, check_int])('x', x), 'x is not an allowed_type') def test_equals(self) -> None: - x = 5 # type: Any + x: Any = 5 self.assertEqual(equals(5)('x', x), None) self.assertEqual(equals(6)('x', x), 'x != 6 (5 is wrong)') def test_check_none_or(self) -> None: - x = 5 # type: Any + x: Any = 5 self.assertEqual(check_none_or(check_int)('x', x), None) x = None self.assertEqual(check_none_or(check_int)('x', x), None) @@ -955,7 +955,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_none_or(check_int)('x', x), 'x is not an integer') def test_check_url(self) -> None: - url = "http://127.0.0.1:5002/" # type: Any + url: Any = "http://127.0.0.1:5002/" self.assertEqual(check_url('url', url), None) url = "http://zulip-bots.example.com/" @@ -968,7 +968,7 @@ class ValidatorTestCase(TestCase): self.assertEqual(check_url('url', url), 'url is not a string') def test_check_string_or_int_list(self) -> None: - x = "string" # type: Any + x: Any = "string" self.assertEqual(check_string_or_int_list('x', x), None) x = [1, 2, 4] @@ -981,7 +981,7 @@ class 
ValidatorTestCase(TestCase): self.assertEqual(check_string_or_int_list('x', x), 'x[2] is not an integer') def test_check_string_or_int(self) -> None: - x = "string" # type: Any + x: Any = "string" self.assertEqual(check_string_or_int('x', x), None) x = 1 @@ -1353,10 +1353,10 @@ class TestInternalNotifyView(TestCase): def test_valid_internal_requests(self) -> None: secret = 'random' - req = self.Request( + req: HttpRequest = self.Request( POST=dict(secret=secret), META=dict(REMOTE_ADDR='127.0.0.1'), - ) # type: HttpRequest + ) with self.settings(SHARED_SECRET=secret): self.assertTrue(authenticate_notify(req)) @@ -1709,8 +1709,8 @@ class CacheTestCase(ZulipTestCase): def test_greetings(greeting: str) -> Tuple[List[str], List[str]]: - result_log = [] # type: List[str] - work_log = [] # type: List[str] + result_log: List[str] = [] + work_log: List[str] = [] @cachify def greet(first_name: str, last_name: str) -> str: @@ -1756,7 +1756,7 @@ class CacheTestCase(ZulipTestCase): class TestUserAgentParsing(ZulipTestCase): def test_user_agent_parsing(self) -> None: """Test for our user agent parsing logic, using a large data set.""" - user_agents_parsed = defaultdict(int) # type: Dict[str, int] + user_agents_parsed: Dict[str, int] = defaultdict(int) user_agents_path = os.path.join(settings.DEPLOY_ROOT, "zerver/tests/fixtures/user_agents_unique") for line in open(user_agents_path).readlines(): line = line.strip() diff --git a/zerver/tests/test_docs.py b/zerver/tests/test_docs.py index 476b5dbbae..ae15cbf882 100644 --- a/zerver/tests/test_docs.py +++ b/zerver/tests/test_docs.py @@ -275,7 +275,7 @@ class IntegrationTest(TestCase): self.assertTrue(os.path.isfile(settings.DEPLOY_ROOT + path), integration.name) def test_api_url_view_subdomains_base(self) -> None: - context = dict() # type: Dict[str, Any] + context: Dict[str, Any] = dict() add_api_uri_context(context, HostRequestMock()) self.assertEqual(context["api_url_scheme_relative"], "testserver/api") self.assertEqual(context["api_url"], "http://testserver/api") @@ -283,14 +283,14 @@ class IntegrationTest(TestCase): @override_settings(ROOT_DOMAIN_LANDING_PAGE=True) def test_api_url_view_subdomains_homepage_base(self) -> None: - context = dict() # type: Dict[str, Any] + context: Dict[str, Any] = dict() add_api_uri_context(context, HostRequestMock()) self.assertEqual(context["api_url_scheme_relative"], "yourZulipDomain.testserver/api") self.assertEqual(context["api_url"], "http://yourZulipDomain.testserver/api") self.assertFalse(context["html_settings_links"]) def test_api_url_view_subdomains_full(self) -> None: - context = dict() # type: Dict[str, Any] + context: Dict[str, Any] = dict() request = HostRequestMock(host="mysubdomain.testserver") add_api_uri_context(context, request) self.assertEqual(context["api_url_scheme_relative"], "mysubdomain.testserver/api") @@ -298,7 +298,7 @@ class IntegrationTest(TestCase): self.assertTrue(context["html_settings_links"]) def test_html_settings_links(self) -> None: - context = dict() # type: Dict[str, Any] + context: Dict[str, Any] = dict() with self.settings(ROOT_DOMAIN_LANDING_PAGE=True): add_api_uri_context(context, HostRequestMock()) self.assertEqual( diff --git a/zerver/tests/test_email_notifications.py b/zerver/tests/test_email_notifications.py index 0dfe44ec0f..abdd8a20bb 100644 --- a/zerver/tests/test_email_notifications.py +++ b/zerver/tests/test_email_notifications.py @@ -282,7 +282,7 @@ class TestMissedMessages(ZulipTestCase): "You are receiving this because you were mentioned in Zulip Dev." 
] email_subject = '#Denmark > test' - verify_body_does_not_include = [] # type: List[str] + verify_body_does_not_include: List[str] = [] else: # Test in case if message content in missed email message are disabled. verify_body_include = [ @@ -318,7 +318,7 @@ class TestMissedMessages(ZulipTestCase): "You are receiving this because you were mentioned in Zulip Dev." ] email_subject = '#Denmark > test' - verify_body_does_not_include = [] # type: List[str] + verify_body_does_not_include: List[str] = [] else: # Test in case if message content in missed email message are disabled. verify_body_include = [ @@ -378,7 +378,7 @@ class TestMissedMessages(ZulipTestCase): if show_message_content: verify_body_include = ['Extremely personal message!'] email_subject = 'PMs with Othello, the Moor of Venice' - verify_body_does_not_include = [] # type: List[str] + verify_body_does_not_include: List[str] = [] else: if message_content_disabled_by_realm: verify_body_include = [ @@ -435,7 +435,7 @@ class TestMissedMessages(ZulipTestCase): if show_message_content: verify_body_include = ['Othello, the Moor of Venice: Group personal message! -- Reply'] email_subject = 'Group PMs with Iago and Othello, the Moor of Venice' - verify_body_does_not_include = [] # type: List[str] + verify_body_does_not_include: List[str] = [] else: verify_body_include = [ "This email does not include message content because you have disabled message ", diff --git a/zerver/tests/test_events.py b/zerver/tests/test_events.py index 7ad072bd44..e43420f6b1 100644 --- a/zerver/tests/test_events.py +++ b/zerver/tests/test_events.py @@ -169,7 +169,7 @@ class LogEventsTest(ZulipTestCase): self.assertFalse(os.path.exists(dir_name)) with self.settings(EVENT_LOG_DIR=dir_name): - event = {} # type: Dict[str, int] + event: Dict[str, int] = {} log_event(event) self.assertTrue(os.path.exists(dir_name)) @@ -189,7 +189,7 @@ class EventsEndpointTest(ZulipTestCase): self.assert_json_error(result, "Could not allocate event queue") return_event_queue = '15:11' - return_user_events = [] # type: List[Dict[str, Any]] + return_user_events: List[Dict[str, Any]] = [] # Test that call is made to deal with a returning soft deactivated user. 
with mock.patch('zerver.lib.events.reactivate_user_if_soft_deactivated') as fa: @@ -737,8 +737,8 @@ class EventsRegisterTest(ZulipTestCase): propagate_mode = 'change_all' content = 'new content' rendered_content = render_markdown(message, content) - prior_mention_user_ids = set() # type: Set[int] - mentioned_user_ids = set() # type: Set[int] + prior_mention_user_ids: Set[int] = set() + mentioned_user_ids: Set[int] = set() mention_data = MentionData( realm_id=self.user_profile.realm_id, content=content, @@ -1604,8 +1604,8 @@ class EventsRegisterTest(ZulipTestCase): self.assert_on_error(error) def do_set_realm_property_test(self, name: str) -> None: - bool_tests = [True, False, True] # type: List[bool] - test_values = dict( + bool_tests: List[bool] = [True, False, True] + test_values: Dict[str, Any] = dict( default_language=['es', 'de', 'en'], description=['Realm description', 'New description'], digest_weekday=[0, 1, 2], @@ -1627,7 +1627,7 @@ class EventsRegisterTest(ZulipTestCase): zoom_api_key=["abc", "xyz"], zoom_user_id=["example@example.com", "example@example.org"], default_code_block_language=['python', 'javascript'], - ) # type: Dict[str, Any] + ) vals = test_values.get(name) property_type = Realm.property_types[name] @@ -1842,12 +1842,12 @@ class EventsRegisterTest(ZulipTestCase): def do_set_user_display_settings_test(self, setting_name: str) -> None: """Test updating each setting in UserProfile.property_types dict.""" - test_changes = dict( + test_changes: Dict[str, Any] = dict( emojiset = ['twitter'], default_language = ['es', 'de', 'en'], timezone = ['US/Mountain', 'US/Samoa', 'Pacific/Galapogos', ''], demote_inactive_streams = [2, 3, 1], - ) # type: Dict[str, Any] + ) property_type = UserProfile.property_types[setting_name] if property_type is bool: @@ -2588,7 +2588,7 @@ class EventsRegisterTest(ZulipTestCase): ]) # Subscribe to a totally new stream, so it's just Hamlet on it - action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream") # type: Callable[[], object] + action: Callable[[], object] = lambda: self.subscribe(self.example_user("hamlet"), "test_stream") events = self.do_test(action, event_types=["subscription", "realm_user"], include_subscribers=include_subscribers) error = add_schema_checker('events[0]', events[0]) @@ -2946,7 +2946,7 @@ class GetUnreadMsgsTest(ZulipTestCase): self.subscribe(hamlet, stream_name) self.subscribe(cordelia, stream_name) - all_message_ids = set() # type: Set[int] + all_message_ids: Set[int] = set() message_ids = dict() tups = [ @@ -3449,7 +3449,7 @@ class ClientDescriptorsTest(ZulipTestCase): self.apply_markdown = apply_markdown self.client_gravatar = client_gravatar self.client_type_name = 'whatever' - self.events = [] # type: List[Dict[str, Any]] + self.events: List[Dict[str, Any]] = [] def accepts_messages(self) -> bool: return True @@ -3532,7 +3532,7 @@ class ClientDescriptorsTest(ZulipTestCase): # Setting users to `[]` bypasses code we don't care about # for this test--we assume client_info is correct in our mocks, # and we are interested in how messages are put on event queue. 
- users = [] # type: List[Dict[str, Any]] + users: List[Dict[str, Any]] = [] with mock.patch('zerver.tornado.event_queue.get_client_info_for_message_event', return_value=client_info): diff --git a/zerver/tests/test_hipchat_importer.py b/zerver/tests/test_hipchat_importer.py index 99ed998de3..b8ef0b585a 100644 --- a/zerver/tests/test_hipchat_importer.py +++ b/zerver/tests/test_hipchat_importer.py @@ -29,11 +29,11 @@ class HipChatImporter(ZulipTestCase): ) user_handler.add_user(user=user_with_id) - normal_message = dict( + normal_message: Dict[str, Any] = dict( sender=dict( id=1, ) - ) # type: Dict[str, Any] + ) sender_id = get_hipchat_sender_id( realm_id=realm_id, diff --git a/zerver/tests/test_import_export.py b/zerver/tests/test_import_export.py index b1b44efba4..6766eae988 100644 --- a/zerver/tests/test_import_export.py +++ b/zerver/tests/test_import_export.py @@ -122,7 +122,7 @@ class QueryUtilTest(ZulipTestCase): queries = get_queries() - all_msg_ids = set() # type: Set[int] + all_msg_ids: Set[int] = set() chunker = query_chunker( queries=queries, id_collector=all_msg_ids, @@ -832,7 +832,7 @@ class ImportExportTest(ZulipTestCase): return {get_email(user_id) for user_id in user_id_list} def custom_profile_field_values_for(fields: List[CustomProfileField]) -> Set[FrozenSet[str]]: - user_emails = set() # type: Set[FrozenSet[str]] + user_emails: Set[FrozenSet[str]] = set() for field in fields: values = CustomProfileFieldValue.objects.filter(field=field) for value in values: diff --git a/zerver/tests/test_logging_handlers.py b/zerver/tests/test_logging_handlers.py index 4cb6c1d9a8..5fc3921358 100644 --- a/zerver/tests/test_logging_handlers.py +++ b/zerver/tests/test_logging_handlers.py @@ -16,8 +16,8 @@ from zerver.lib.types import ViewFuncT from zerver.lib.test_classes import ZulipTestCase from zerver.logging_handlers import AdminNotifyHandler -captured_request = None # type: Optional[HttpRequest] -captured_exc_info = None # type: Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] +captured_request: Optional[HttpRequest] = None +captured_exc_info: Tuple[Optional[Type[BaseException]], Optional[BaseException], Optional[TracebackType]] = None def capture_and_throw(domain: Optional[str]=None) -> Callable[[ViewFuncT], ViewFuncT]: def wrapper(view_func: ViewFuncT) -> ViewFuncT: @wraps(view_func) diff --git a/zerver/tests/test_mattermost_importer.py b/zerver/tests/test_mattermost_importer.py index 4630969dae..f30efd0354 100644 --- a/zerver/tests/test_mattermost_importer.py +++ b/zerver/tests/test_mattermost_importer.py @@ -438,7 +438,7 @@ class MatterMostImporter(ZulipTestCase): fixture_file_name = self.fixture_file_name("export.json", "mattermost_fixtures") mattermost_data = mattermost_data_file_to_dict(fixture_file_name) - total_reactions = [] # type: List[Dict[str, Any]] + total_reactions: List[Dict[str, Any]] = [] reactions = [ {"user": "harry", "create_at": 1553165521410, "emoji_name": "tick"}, diff --git a/zerver/tests/test_messages.py b/zerver/tests/test_messages.py index 9a7ddf724a..82466ab656 100644 --- a/zerver/tests/test_messages.py +++ b/zerver/tests/test_messages.py @@ -3531,7 +3531,7 @@ class EditMessageTest(ZulipTestCase): class MirroredMessageUsersTest(ZulipTestCase): def test_invalid_sender(self) -> None: user = self.example_user('hamlet') - recipients = [] # type: List[str] + recipients: List[str] = [] Request = namedtuple('Request', ['POST']) request = Request(POST=dict()) # no sender @@ -3545,7 +3545,7 @@ class 
MirroredMessageUsersTest(ZulipTestCase): user = self.example_user('hamlet') sender = user - recipients = [] # type: List[str] + recipients: List[str] = [] Request = namedtuple('Request', ['POST', 'client']) request = Request(POST = dict(sender=sender.email, type='private'), @@ -4229,7 +4229,7 @@ class MissedMessageTest(ZulipTestCase): othello = self.example_user('othello') recipient_ids = {hamlet.id, othello.id} message_type = 'stream' - user_flags = {} # type: Dict[int, List[str]] + user_flags: Dict[int, List[str]] = {} def assert_missing(user_ids: List[int]) -> None: presence_idle_user_ids = get_active_presence_idle_user_ids( @@ -4897,7 +4897,7 @@ class MessageHydrationTest(ZulipTestCase): def test_hydrate_pm_recipient_info(self) -> None: cordelia = self.example_user('cordelia') - display_recipient = [ + display_recipient: List[UserDisplayRecipient] = [ dict( email='aaron@example.com', full_name='Aaron Smith', @@ -4905,7 +4905,7 @@ class MessageHydrationTest(ZulipTestCase): id=999, is_mirror_dummy=False ), - ] # type: List[UserDisplayRecipient] + ] obj = dict( recipient_type=Recipient.PERSONAL, @@ -5035,11 +5035,13 @@ class TestMessageForIdsDisplayRecipientFetching(ZulipTestCase): else: for user_profile in expected_recipient_objects: - recipient_dict = {'email': user_profile.email, - 'full_name': user_profile.full_name, - 'short_name': user_profile.short_name, - 'id': user_profile.id, - 'is_mirror_dummy': user_profile.is_mirror_dummy} # type: UserDisplayRecipient + recipient_dict: UserDisplayRecipient = { + 'email': user_profile.email, + 'full_name': user_profile.full_name, + 'short_name': user_profile.short_name, + 'id': user_profile.id, + 'is_mirror_dummy': user_profile.is_mirror_dummy, + } self.assertTrue(recipient_dict in display_recipient) def test_display_recipient_personal(self) -> None: diff --git a/zerver/tests/test_muting.py b/zerver/tests/test_muting.py index b20867b028..47e13b9e01 100644 --- a/zerver/tests/test_muting.py +++ b/zerver/tests/test_muting.py @@ -135,7 +135,7 @@ class MutedTopicsTests(ZulipTestCase): url = '/api/v1/users/me/subscriptions/muted_topics' - data = {'stream': stream.name, 'topic': 'Verona3', 'op': 'add'} # type: Dict[str, Any] + data: Dict[str, Any] = {'stream': stream.name, 'topic': 'Verona3', 'op': 'add'} result = self.api_patch(user, url, data) self.assert_json_error(result, "Topic already muted") @@ -158,7 +158,7 @@ class MutedTopicsTests(ZulipTestCase): stream = get_stream('Verona', realm) url = '/api/v1/users/me/subscriptions/muted_topics' - data = {'stream': 'BOGUS', 'topic': 'Verona3', 'op': 'remove'} # type: Dict[str, Any] + data: Dict[str, Any] = {'stream': 'BOGUS', 'topic': 'Verona3', 'op': 'remove'} result = self.api_patch(user, url, data) self.assert_json_error(result, "Topic is not muted") diff --git a/zerver/tests/test_narrow.py b/zerver/tests/test_narrow.py index 668238a9d4..7770678ef1 100644 --- a/zerver/tests/test_narrow.py +++ b/zerver/tests/test_narrow.py @@ -121,7 +121,7 @@ class NarrowBuilderTest(ZulipTestCase): self._do_add_term_test(term, 'WHERE recipient_id IN (%(recipient_id_1)s, %(recipient_id_2)s, %(recipient_id_3)s, %(recipient_id_4)s, %(recipient_id_5)s)') # Add new streams - stream_dicts = [ + stream_dicts: List[Mapping[str, Any]] = [ { "name": "publicstream", "description": "Public stream with public history" @@ -137,7 +137,7 @@ class NarrowBuilderTest(ZulipTestCase): "invite_only": True, "history_public_to_subscribers": True } - ] # type: List[Mapping[str, Any]] + ] realm = get_realm('zulip') created, existing = 
create_streams_if_needed(realm, stream_dicts) self.assertEqual(len(created), 3) @@ -151,7 +151,7 @@ class NarrowBuilderTest(ZulipTestCase): self._do_add_term_test(term, 'WHERE recipient_id NOT IN (%(recipient_id_1)s, %(recipient_id_2)s, %(recipient_id_3)s, %(recipient_id_4)s, %(recipient_id_5)s)') # Add new streams - stream_dicts = [ + stream_dicts: List[Mapping[str, Any]] = [ { "name": "publicstream", "description": "Public stream with public history" @@ -167,7 +167,7 @@ class NarrowBuilderTest(ZulipTestCase): "invite_only": True, "history_public_to_subscribers": True } - ] # type: List[Mapping[str, Any]] + ] realm = get_realm('zulip') created, existing = create_streams_if_needed(realm, stream_dicts) self.assertEqual(len(created), 3) @@ -1041,7 +1041,7 @@ class GetOldMessagesTest(ZulipTestCase): def get_and_check_messages(self, modified_params: Dict[str, Union[str, int]], **kwargs: Any) -> Dict[str, Any]: - post_params = {"anchor": 1, "num_before": 1, "num_after": 1} # type: Dict[str, Union[str, int]] + post_params: Dict[str, Union[str, int]] = {"anchor": 1, "num_before": 1, "num_after": 1} post_params.update(modified_params) payload = self.client_get("/json/messages", dict(post_params), **kwargs) @@ -1090,7 +1090,7 @@ class GetOldMessagesTest(ZulipTestCase): hamlet_user = self.example_user('hamlet') othello_user = self.example_user('othello') - query_ids = {} # type: Dict[str, Union[int, str]] + query_ids: Dict[str, Union[int, str]] = {} scotland_stream = get_stream('Scotland', hamlet_user.realm) query_ids['scotland_recipient'] = scotland_stream.recipient_id @@ -1112,9 +1112,9 @@ class GetOldMessagesTest(ZulipTestCase): self.login('hamlet') def get_content_type(apply_markdown: bool) -> str: - req = dict( + req: Dict[str, Any] = dict( apply_markdown=ujson.dumps(apply_markdown), - ) # type: Dict[str, Any] + ) result = self.get_and_check_messages(req) message = result['messages'][0] return message['content_type'] @@ -1253,7 +1253,7 @@ class GetOldMessagesTest(ZulipTestCase): for personal in personals: emails = dr_emails(get_display_recipient(personal.recipient)) self.login_user(me) - narrow = [dict(operator='pm-with', operand=emails)] # type: List[Dict[str, Any]] + narrow: List[Dict[str, Any]] = [dict(operator='pm-with', operand=emails)] result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow))) for message in result["messages"]: @@ -1650,22 +1650,22 @@ class GetOldMessagesTest(ZulipTestCase): dict(operator='sender', operand=cordelia.email), dict(operator='search', operand='lunch'), ] - result = self.get_and_check_messages(dict( + result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(narrow), anchor=next_message_id, num_before=0, num_after=10, - )) # type: Dict[str, Any] + )) self.assertEqual(len(result['messages']), 2) messages = result['messages'] narrow = [dict(operator='search', operand='https://google.com')] - link_search_result = self.get_and_check_messages(dict( + link_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(narrow), anchor=next_message_id, num_before=0, num_after=10, - )) # type: Dict[str, Any] + )) self.assertEqual(len(link_search_result['messages']), 1) self.assertEqual(link_search_result['messages'][0]['match_content'], '
https://google.com
') @@ -1698,12 +1698,12 @@ class GetOldMessagesTest(ZulipTestCase): dict(operator='search', operand='discuss'), dict(operator='search', operand='after'), ] - multi_search_result = self.get_and_check_messages(dict( + multi_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(multi_search_narrow), anchor=next_message_id, num_after=10, num_before=0, - )) # type: Dict[str, Any] + )) self.assertEqual(len(multi_search_result['messages']), 1) self.assertEqual(multi_search_result['messages'][0]['match_content'], '
discuss lunch after lunch
') @@ -1795,12 +1795,12 @@ class GetOldMessagesTest(ZulipTestCase): dict(operator='search', operand='special'), dict(operator='stream', operand='newstream'), ] - stream_search_result = self.get_and_check_messages(dict( + stream_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(stream_search_narrow), anchor=0, num_after=10, num_before=10, - )) # type: Dict[str, Any] + )) self.assertEqual(len(stream_search_result['messages']), 1) self.assertEqual(stream_search_result['messages'][0]['match_content'], '
Public special content!
') @@ -1842,12 +1842,12 @@ class GetOldMessagesTest(ZulipTestCase): narrow = [ dict(operator='search', operand='日本'), ] - result = self.get_and_check_messages(dict( + result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(narrow), anchor=next_message_id, num_after=10, num_before=0, - )) # type: Dict[str, Any] + )) self.assertEqual(len(result['messages']), 4) messages = result['messages'] @@ -1878,12 +1878,12 @@ class GetOldMessagesTest(ZulipTestCase): dict(operator='search', operand='speak'), dict(operator='search', operand='wiki'), ] - multi_search_result = self.get_and_check_messages(dict( + multi_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(multi_search_narrow), anchor=next_message_id, num_after=10, num_before=0, - )) # type: Dict[str, Any] + )) self.assertEqual(len(multi_search_result['messages']), 1) self.assertEqual(multi_search_result['messages'][0]['match_content'], '
Can you speak https://en.wikipedia.org/wiki/Japanese?
') @@ -1904,12 +1904,12 @@ class GetOldMessagesTest(ZulipTestCase): '
朝はごはんを食べました
') narrow = [dict(operator='search', operand='https://google.com')] - link_search_result = self.get_and_check_messages(dict( + link_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(narrow), anchor=next_message_id, num_after=10, num_before=0, - )) # type: Dict[str, Any] + )) self.assertEqual(len(link_search_result['messages']), 1) self.assertEqual(link_search_result['messages'][0]['match_content'], '
https://google.com
') @@ -1918,12 +1918,12 @@ class GetOldMessagesTest(ZulipTestCase): special_search_narrow = [ dict(operator='search', operand='butter'), ] - special_search_result = self.get_and_check_messages(dict( + special_search_result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(special_search_narrow), anchor=next_message_id, num_after=10, num_before=0, - )) # type: Dict[str, Any] + )) self.assertEqual(len(special_search_result['messages']), 1) self.assertEqual(special_search_result['messages'][0][MATCH_TOPIC], 'bread & butter') @@ -1988,9 +1988,11 @@ class GetOldMessagesTest(ZulipTestCase): anchor = self.send_stream_message(cordelia, "Verona") narrow = [dict(operator='sender', operand=cordelia.email)] - result = self.get_and_check_messages(dict(narrow=ujson.dumps(narrow), - anchor=anchor, num_before=0, - num_after=0)) # type: Dict[str, Any] + result: Dict[str, Any] = self.get_and_check_messages(dict( + narrow=ujson.dumps(narrow), + anchor=anchor, num_before=0, + num_after=0, + )) self.assertEqual(len(result['messages']), 1) narrow = [dict(operator='is', operand='mentioned')] @@ -2251,7 +2253,7 @@ class GetOldMessagesTest(ZulipTestCase): """ self.login('hamlet') - required_args = (("num_before", 1), ("num_after", 1)) # type: Tuple[Tuple[str, int], ...] + required_args: Tuple[Tuple[str, int], ...] = (("num_before", 1), ("num_after", 1)) for i in range(len(required_args)): post_params = dict(required_args[:i] + required_args[i + 1:]) @@ -2301,10 +2303,12 @@ class GetOldMessagesTest(ZulipTestCase): """ self.login('hamlet') - other_params = [("anchor", 0), ("num_before", 0), ("num_after", 0)] # type: List[Tuple[str, Union[int, str, bool]]] + other_params: List[Tuple[str, Union[int, str, bool]]] = [("anchor", 0), ("num_before", 0), ("num_after", 0)] - bad_types = (False, 0, '', '{malformed json,', - '{foo: 3}', '[1,2]', '[["x","y","z"]]') # type: Tuple[Union[int, str, bool], ...] + bad_types: Tuple[Union[int, str, bool], ...] = ( + False, 0, '', '{malformed json,', + '{foo: 3}', '[1,2]', '[["x","y","z"]]', + ) for type in bad_types: post_params = dict(other_params + [("narrow", type)]) result = self.client_get("/json/messages", post_params) @@ -2366,7 +2370,7 @@ class GetOldMessagesTest(ZulipTestCase): def exercise_bad_narrow_operand(self, operator: str, operands: Sequence[Any], error_msg: str) -> None: - other_params = [("anchor", 0), ("num_before", 0), ("num_after", 0)] # type: List[Tuple[str, Any]] + other_params: List[Tuple[str, Any]] = [("anchor", 0), ("num_before", 0), ("num_after", 0)] for operand in operands: post_params = dict(other_params + [ ("narrow", ujson.dumps([[operator, operand]]))]) @@ -2379,7 +2383,7 @@ class GetOldMessagesTest(ZulipTestCase): returned. """ self.login('hamlet') - bad_stream_content = (0, [], ["x", "y"]) # type: Tuple[int, List[None], List[str]] + bad_stream_content: Tuple[int, List[None], List[str]] = (0, [], ["x", "y"]) self.exercise_bad_narrow_operand("stream", bad_stream_content, "Bad value for 'narrow'") @@ -2389,7 +2393,7 @@ class GetOldMessagesTest(ZulipTestCase): error is returned. 
""" self.login('hamlet') - bad_stream_content = (0, [], ["x", "y"]) # type: Tuple[int, List[None], List[str]] + bad_stream_content: Tuple[int, List[None], List[str]] = (0, [], ["x", "y"]) self.exercise_bad_narrow_operand("pm-with", bad_stream_content, "Bad value for 'narrow'") @@ -2940,11 +2944,11 @@ WHERE user_profile_id = {hamlet_id} AND (content ILIKE '%jumping%' OR subject IL dict(operator='sender', operand=cordelia.email), dict(operator='search', operand=othello.email), ] - result = self.get_and_check_messages(dict( + result: Dict[str, Any] = self.get_and_check_messages(dict( narrow=ujson.dumps(narrow), anchor=next_message_id, num_after=10, - )) # type: Dict[str, Any] + )) self.assertEqual(len(result['messages']), 0) narrow = [ diff --git a/zerver/tests/test_openapi.py b/zerver/tests/test_openapi.py index 33e42c4e8e..cb9e0c6fad 100644 --- a/zerver/tests/test_openapi.py +++ b/zerver/tests/test_openapi.py @@ -69,11 +69,11 @@ class OpenAPIToolsTest(ZulipTestCase): with self.assertRaises(SchemaError, msg=('Extraneous key "foo" in ' 'the response\'scontent')): - bad_content = { + bad_content: Dict[str, object] = { 'msg': '', 'result': 'success', 'foo': 'bar' - } # type: Dict[str, object] + } validate_against_openapi_schema(bad_content, TEST_ENDPOINT, TEST_METHOD, @@ -160,7 +160,7 @@ class OpenAPIToolsTest(ZulipTestCase): class OpenAPIArgumentsTest(ZulipTestCase): # This will be filled during test_openapi_arguments: - checked_endpoints = set() # type: Set[str] + checked_endpoints: Set[str] = set() pending_endpoints = { #### TODO: These endpoints are a priority to document: '/messages/matches_narrow', @@ -271,8 +271,8 @@ class OpenAPIArgumentsTest(ZulipTestCase): # Endpoints where the documentation is currently failing our # consistency tests. We aim to keep this list empty. - buggy_documentation_endpoints = set([ - ]) # type: Set[str] + buggy_documentation_endpoints: Set[str] = set([ + ]) def convert_regex_to_url_pattern(self, regex_pattern: str) -> str: """ Convert regular expressions style URL patterns to their @@ -426,7 +426,7 @@ do not match the types declared in the implementation of {}.\n""".format(functio OpenAPI data defines a different type than that actually accepted by the function. Otherwise, we print out the exact differences for convenient debugging and raise an AssertionError. 
""" - openapi_params = set() # type: Set[Tuple[str, Union[type, Tuple[type, object]]]] + openapi_params: Set[Tuple[str, Union[type, Tuple[type, object]]]] = set() for element in openapi_parameters: if function.__name__ == 'send_notification_backend': if element['name'] == 'to': @@ -439,7 +439,7 @@ do not match the types declared in the implementation of {}.\n""".format(functio ''' continue - name = element["name"] # type: str + name: str = element["name"] schema = element["schema"] if 'oneOf' in schema: # Hack: Just use the type of the first value @@ -463,7 +463,7 @@ do not match the types declared in the implementation of {}.\n""".format(functio else: openapi_params.add((name, _type)) - function_params = set() # type: Set[Tuple[str, Union[type, Tuple[type, object]]]] + function_params: Set[Tuple[str, Union[type, Tuple[type, object]]]] = set() # Iterate through the decorators to find the original # function, wrapped by has_request_variables, so we can parse @@ -542,7 +542,7 @@ do not match the types declared in the implementation of {}.\n""".format(functio for method, value in methods_endpoints.items(): if isinstance(value, str): function_name = value - tags = set() # type: Set[str] + tags: Set[str] = set() else: function_name, tags = value @@ -710,14 +710,14 @@ class TestCurlExampleGeneration(ZulipTestCase): } } - spec_mock_with_invalid_method = { + spec_mock_with_invalid_method: Dict[str, object] = { "security": [{"basicAuth": []}], "paths": { "/endpoint": { "brew": {} # the data is irrelevant as is should be rejected. } } - } # type: Dict[str, object] + } spec_mock_using_object = { "security": [{"basicAuth": []}], diff --git a/zerver/tests/test_outgoing_webhook_interfaces.py b/zerver/tests/test_outgoing_webhook_interfaces.py index 3359f07ac8..6e3c238b93 100644 --- a/zerver/tests/test_outgoing_webhook_interfaces.py +++ b/zerver/tests/test_outgoing_webhook_interfaces.py @@ -125,7 +125,7 @@ class TestGenericOutgoingWebhookService(ZulipTestCase): self.assertEqual(wide_message_dict['sender_realm_id'], othello.realm_id) def test_process_success(self) -> None: - response = dict(response_not_required=True) # type: Dict[str, Any] + response: Dict[str, Any] = dict(response_not_required=True) success_response = self.handler.process_success(response) self.assertEqual(success_response, None) @@ -217,7 +217,7 @@ class TestSlackOutgoingWebhookService(ZulipTestCase): self.assertTrue(mock_fail_with_message.called) def test_process_success(self) -> None: - response = dict(response_not_required=True) # type: Dict[str, Any] + response: Dict[str, Any] = dict(response_not_required=True) success_response = self.handler.process_success(response) self.assertEqual(success_response, None) diff --git a/zerver/tests/test_queue_worker.py b/zerver/tests/test_queue_worker.py index 007170d09f..1d7475ff97 100644 --- a/zerver/tests/test_queue_worker.py +++ b/zerver/tests/test_queue_worker.py @@ -49,8 +49,8 @@ loopworker_sleep_mock = patch( class WorkerTest(ZulipTestCase): class FakeClient: def __init__(self) -> None: - self.consumers = {} # type: Dict[str, Callable[[Dict[str, Any]], None]] - self.queue = [] # type: List[Tuple[str, Any]] + self.consumers: Dict[str, Callable[[Dict[str, Any]], None]] = {} + self.queue: List[Tuple[str, Any]] = [] def register_json_consumer(self, queue_name: str, diff --git a/zerver/tests/test_rate_limiter.py b/zerver/tests/test_rate_limiter.py index 1617c923fd..731c241d26 100644 --- a/zerver/tests/test_rate_limiter.py +++ b/zerver/tests/test_rate_limiter.py @@ -36,7 +36,7 @@ class 
RateLimiterBackendBase(ZulipTestCase): __unittest_skip__ = True def setUp(self) -> None: - self.requests_record = {} # type: Dict[str, List[float]] + self.requests_record: Dict[str, List[float]] = {} def create_object(self, name: str, rules: List[Tuple[int, int]]) -> RateLimitedTestObject: obj = RateLimitedTestObject(name, rules, self.backend) diff --git a/zerver/tests/test_reactions.py b/zerver/tests/test_reactions.py index 6655c73aaf..bca62f7572 100644 --- a/zerver/tests/test_reactions.py +++ b/zerver/tests/test_reactions.py @@ -360,7 +360,7 @@ class ReactionEventTest(ZulipTestCase): 'emoji_name': 'smile' } - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.api_post(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), reaction_info) @@ -404,7 +404,7 @@ class ReactionEventTest(ZulipTestCase): reaction_info) self.assert_json_success(add) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.api_delete(reaction_sender, '/api/v1/messages/%s/reactions' % (pm_id,), reaction_info) @@ -820,7 +820,7 @@ class ReactionAPIEventTest(EmojiReactionBase): 'emoji_code': '1f354', 'reaction_type': 'unicode_emoji', } - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.post_reaction(reaction_info, message_id=pm_id, @@ -860,7 +860,7 @@ class ReactionAPIEventTest(EmojiReactionBase): add = self.post_reaction(reaction_info, message_id=pm_id, sender=reaction_sender.short_name) self.assert_json_success(add) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.delete_reaction(reaction_info, message_id=pm_id, diff --git a/zerver/tests/test_realm.py b/zerver/tests/test_realm.py index c904316315..2e4bd81982 100644 --- a/zerver/tests/test_realm.py +++ b/zerver/tests/test_realm.py @@ -57,7 +57,7 @@ class RealmTest(ZulipTestCase): def test_update_realm_name_events(self) -> None: realm = get_realm('zulip') new_name = 'Puliz' - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): do_set_realm_property(realm, 'name', new_name) event = events[0]['event'] @@ -71,7 +71,7 @@ class RealmTest(ZulipTestCase): def test_update_realm_description_events(self) -> None: realm = get_realm('zulip') new_description = 'zulip dev group' - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): do_set_realm_property(realm, 'description', new_description) event = events[0]['event'] @@ -86,7 +86,7 @@ class RealmTest(ZulipTestCase): self.login('iago') new_description = 'zulip dev group' data = dict(description=ujson.dumps(new_description)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/realm', data) self.assert_json_success(result) @@ -713,8 +713,8 @@ class RealmAPITest(ZulipTestCase): assertion error. 
""" - bool_tests = [False, True] # type: List[bool] - test_values = dict( + bool_tests: List[bool] = [False, True] + test_values: Dict[str, Any] = dict( default_language=['de', 'en'], default_code_block_language=['javascript', ''], description=['Realm description', 'New description'], @@ -749,7 +749,7 @@ class RealmAPITest(ZulipTestCase): zoom_api_secret=["abc", "xyz"], zoom_api_key=["abc", "xyz"], zoom_user_id=["example@example.com", "example@example.org"] - ) # type: Dict[str, Any] + ) vals = test_values.get(name) if Realm.property_types[name] is bool: diff --git a/zerver/tests/test_report.py b/zerver/tests/test_report.py index 1c8dd91354..8fea142670 100644 --- a/zerver/tests/test_report.py +++ b/zerver/tests/test_report.py @@ -18,7 +18,7 @@ class StatsMock: def __init__(self, settings: Callable[..., Any]) -> None: self.settings = settings self.real_impl = statsd - self.func_calls = [] # type: List[Tuple[str, Iterable[Any]]] + self.func_calls: List[Tuple[str, Iterable[Any]]] = [] def __getattr__(self, name: str) -> Callable[..., Any]: def f(*args: Any) -> None: diff --git a/zerver/tests/test_retention.py b/zerver/tests/test_retention.py index 6ee141b1f4..5627183e85 100644 --- a/zerver/tests/test_retention.py +++ b/zerver/tests/test_retention.py @@ -554,7 +554,7 @@ class MoveMessageToArchiveGeneral(MoveMessageToArchiveBase): self.send_personal_message(self.sender, self.recipient, body2) ] - attachment_id_to_message_ids = {} # type: Dict[int, List[int]] + attachment_id_to_message_ids: Dict[int, List[int]] = {} attachment_ids = list( Attachment.objects.filter(messages__id__in=msg_ids).values_list("id", flat=True) ) diff --git a/zerver/tests/test_settings.py b/zerver/tests/test_settings.py index 69b85d0f45..647358fa09 100644 --- a/zerver/tests/test_settings.py +++ b/zerver/tests/test_settings.py @@ -319,12 +319,12 @@ class ChangeSettingsTest(ZulipTestCase): def do_test_change_user_display_setting(self, setting_name: str) -> None: - test_changes = dict( + test_changes: Dict[str, Any] = dict( default_language = 'de', emojiset = 'google', timezone = 'US/Mountain', demote_inactive_streams = 2, - ) # type: Dict[str, Any] + ) self.login('hamlet') test_value = test_changes.get(setting_name) @@ -333,7 +333,7 @@ class ChangeSettingsTest(ZulipTestCase): raise AssertionError('No test created for %s' % (setting_name,)) if isinstance(test_value, int): - invalid_value = 100 # type: Any + invalid_value: Any = 100 else: invalid_value = 'invalid_' + setting_name data = {setting_name: ujson.dumps(test_value)} diff --git a/zerver/tests/test_slack_importer.py b/zerver/tests/test_slack_importer.py index 275da44f19..52d791b103 100644 --- a/zerver/tests/test_slack_importer.py +++ b/zerver/tests/test_slack_importer.py @@ -115,7 +115,7 @@ class SlackImporter(ZulipTestCase): realm_id = 2 realm_subdomain = "test-realm" time = float(timezone_now().timestamp()) - test_realm = build_zerver_realm(realm_id, realm_subdomain, time, 'Slack') # type: List[Dict[str, Any]] + test_realm: List[Dict[str, Any]] = build_zerver_realm(realm_id, realm_subdomain, time, 'Slack') test_zerver_realm_dict = test_realm[0] self.assertEqual(test_zerver_realm_dict['id'], realm_id) @@ -146,7 +146,7 @@ class SlackImporter(ZulipTestCase): def test_get_timezone(self) -> None: user_chicago_timezone = {"tz": "America/Chicago"} user_timezone_none = {"tz": None} - user_no_timezone = {} # type: Dict[str, Any] + user_no_timezone: Dict[str, Any] = {} self.assertEqual(get_user_timezone(user_chicago_timezone), "America/Chicago") 
self.assertEqual(get_user_timezone(user_timezone_none), "America/New_York") @@ -391,7 +391,7 @@ class SlackImporter(ZulipTestCase): slack_user_id_to_zulip_user_id = {"U061A1R2R": 1, "U061A3E0G": 8, "U061A5N1G": 7, "U064KUGRJ": 5} subscription_id_count = 0 recipient_id = 12 - zerver_subscription = [] # type: List[Dict[str, Any]] + zerver_subscription: List[Dict[str, Any]] = [] final_subscription_id = get_subscription(channel_members, zerver_subscription, recipient_id, slack_user_id_to_zulip_user_id, subscription_id_count) @@ -475,7 +475,7 @@ class SlackImporter(ZulipTestCase): mock_users_to_zerver_userprofile: mock.Mock) -> None: realm_id = 1 - user_list = [] # type: List[Dict[str, Any]] + user_list: List[Dict[str, Any]] = [] realm, slack_user_id_to_zulip_user_id, slack_recipient_name_to_zulip_recipient_id, \ added_channels, added_mpims, dm_members, \ avatar_list, em = slack_workspace_to_realm('testdomain', realm_id, user_list, 'test-realm', @@ -515,7 +515,7 @@ class SlackImporter(ZulipTestCase): self.assertEqual(user_without_file, 'U064KUGRJ') def test_build_zerver_message(self) -> None: - zerver_usermessage = [] # type: List[Dict[str, Any]] + zerver_usermessage: List[Dict[str, Any]] = [] # recipient_id -> set of user_ids subscriber_map = { @@ -565,44 +565,46 @@ class SlackImporter(ZulipTestCase): reactions = [{"name": "grinning", "users": ["U061A5N1G"], "count": 1}] - all_messages = [{"text": "<@U066MTL5U> has joined the channel", "subtype": "channel_join", - "user": "U066MTL5U", "ts": "1434139102.000002", "channel_name": "random"}, - {"text": "<@U061A5N1G>: hey!", "user": "U061A1R2R", - "ts": "1437868294.000006", "has_image": True, "channel_name": "random"}, - {"text": "random", "user": "U061A5N1G", "reactions": reactions, - "ts": "1439868294.000006", "channel_name": "random"}, - {"text": "without a user", "user": None, # this message will be ignored as it has no user - "ts": "1239868294.000006", "channel_name": "general"}, - {"text": "", "user": "U061A1R2R", - "ts": "1463868370.000008", "channel_name": "general"}, - {"text": "added bot", "user": "U061A5N1G", "subtype": "bot_add", - "ts": "1433868549.000010", "channel_name": "general"}, - # This message will be ignored since it has no user and file is None. 
- # See #9217 for the situation; likely file uploads on archived channels - {'upload': False, 'file': None, 'text': 'A file was shared', - 'channel_name': 'general', 'type': 'message', 'ts': '1433868549.000011', - 'subtype': 'file_share'}, - {"text": "random test", "user": "U061A1R2R", - "ts": "1433868669.000012", "channel_name": "general"}, - {"text": "Hello everyone", "user": "U061A1R2R", "type": "message", - "ts": "1433868669.000015", "mpim_name": "mpdm-user9--user2--user10-1"}, - {"text": "Who is watching the World Cup", "user": "U061A5N1G", "type": "message", - "ts": "1433868949.000015", "mpim_name": "mpdm-user6--user7--user4-1"}, - {'client_msg_id': '998d9229-35aa-424f-8d87-99e00df27dc9', 'type': 'message', - 'text': 'Who is coming for camping this weekend?', 'user': 'U061A1R2R', - 'ts': '1553607595.000700', 'pm_name': 'DHX1UP7EG'}, - {"client_msg_id": "998d9229-35aa-424f-8d87-99e00df27dc9", "type": "message", - "text": "<@U061A5N1G>: Are you in Kochi?", "user": "U066MTL5U", - "ts": "1553607595.000700", "pm_name": "DJ47BL849"}] # type: List[Dict[str, Any]] + all_messages: List[Dict[str, Any]] = [ + {"text": "<@U066MTL5U> has joined the channel", "subtype": "channel_join", + "user": "U066MTL5U", "ts": "1434139102.000002", "channel_name": "random"}, + {"text": "<@U061A5N1G>: hey!", "user": "U061A1R2R", + "ts": "1437868294.000006", "has_image": True, "channel_name": "random"}, + {"text": "random", "user": "U061A5N1G", "reactions": reactions, + "ts": "1439868294.000006", "channel_name": "random"}, + {"text": "without a user", "user": None, # this message will be ignored as it has no user + "ts": "1239868294.000006", "channel_name": "general"}, + {"text": "", "user": "U061A1R2R", + "ts": "1463868370.000008", "channel_name": "general"}, + {"text": "added bot", "user": "U061A5N1G", "subtype": "bot_add", + "ts": "1433868549.000010", "channel_name": "general"}, + # This message will be ignored since it has no user and file is None. 
+ # See #9217 for the situation; likely file uploads on archived channels + {'upload': False, 'file': None, 'text': 'A file was shared', + 'channel_name': 'general', 'type': 'message', 'ts': '1433868549.000011', + 'subtype': 'file_share'}, + {"text": "random test", "user": "U061A1R2R", + "ts": "1433868669.000012", "channel_name": "general"}, + {"text": "Hello everyone", "user": "U061A1R2R", "type": "message", + "ts": "1433868669.000015", "mpim_name": "mpdm-user9--user2--user10-1"}, + {"text": "Who is watching the World Cup", "user": "U061A5N1G", "type": "message", + "ts": "1433868949.000015", "mpim_name": "mpdm-user6--user7--user4-1"}, + {'client_msg_id': '998d9229-35aa-424f-8d87-99e00df27dc9', 'type': 'message', + 'text': 'Who is coming for camping this weekend?', 'user': 'U061A1R2R', + 'ts': '1553607595.000700', 'pm_name': 'DHX1UP7EG'}, + {"client_msg_id": "998d9229-35aa-424f-8d87-99e00df27dc9", "type": "message", + "text": "<@U061A5N1G>: Are you in Kochi?", "user": "U066MTL5U", + "ts": "1553607595.000700", "pm_name": "DJ47BL849"}, + ] slack_recipient_name_to_zulip_recipient_id = {'random': 2, 'general': 1, 'mpdm-user9--user2--user10-1': 5, 'mpdm-user6--user7--user4-1': 6, 'U066MTL5U': 7, 'U061A5N1G': 8, 'U061A1R2R': 8} dm_members = {'DJ47BL849': ('U066MTL5U', 'U061A5N1G'), 'DHX1UP7EG': ('U061A5N1G', 'U061A1R2R')} - zerver_usermessage = [] # type: List[Dict[str, Any]] - subscriber_map = dict() # type: Dict[int, Set[int]] - added_channels = {'random': ('c5', 1), 'general': ('c6', 2)} # type: Dict[str, Tuple[str, int]] + zerver_usermessage: List[Dict[str, Any]] = [] + subscriber_map: Dict[int, Set[int]] = dict() + added_channels: Dict[str, Tuple[str, int]] = {'random': ('c5', 1), 'general': ('c6', 2)} zerver_message, zerver_usermessage, attachment, uploads, reaction = \ channel_message_to_zerver_message( @@ -662,7 +664,7 @@ class SlackImporter(ZulipTestCase): output_dir = os.path.join(settings.TEST_WORKER_DIR, 'test-slack-import') os.makedirs(output_dir, exist_ok=True) - added_channels = {'random': ('c5', 1), 'general': ('c6', 2)} # type: Dict[str, Tuple[str, int]] + added_channels: Dict[str, Tuple[str, int]] = {'random': ('c5', 1), 'general': ('c6', 2)} time = float(timezone_now().timestamp()) zerver_message = [{'id': 1, 'ts': time}, {'id': 5, 'ts': time}] @@ -672,8 +674,8 @@ class SlackImporter(ZulipTestCase): import copy return iter(copy.deepcopy(zerver_message)) - realm = {'zerver_subscription': []} # type: Dict[str, Any] - user_list = [] # type: List[Dict[str, Any]] + realm: Dict[str, Any] = {'zerver_subscription': []} + user_list: List[Dict[str, Any]] = [] reactions = [{"name": "grinning", "users": ["U061A5N1G"], "count": 1}] attachments = uploads = [] # type: List[Dict[str, Any]] @@ -812,8 +814,8 @@ class SlackImporter(ZulipTestCase): 'alice': alice_id, } - zerver_attachment = [] # type: List[Dict[str, Any]] - uploads_list = [] # type: List[Dict[str, Any]] + zerver_attachment: List[Dict[str, Any]] = [] + uploads_list: List[Dict[str, Any]] = [] info = process_message_files( message=message, diff --git a/zerver/tests/test_slack_message_conversion.py b/zerver/tests/test_slack_message_conversion.py index 59ebc2a767..9c59808221 100644 --- a/zerver/tests/test_slack_message_conversion.py +++ b/zerver/tests/test_slack_message_conversion.py @@ -39,9 +39,9 @@ class SlackMessageConversion(ZulipTestCase): for name, test in format_tests.items(): # Check that there aren't any unexpected keys as those are often typos self.assertEqual(len(set(test.keys()) - valid_keys), 0) - slack_user_map = {} # 
type: Dict[str, int] - users = [{}] # type: List[Dict[str, Any]] - channel_map = {} # type: Dict[str, Tuple[str, int]] + slack_user_map: Dict[str, int] = {} + users: List[Dict[str, Any]] = [{}] + channel_map: Dict[str, Tuple[str, int]] = {} converted = convert_to_zulip_markdown(test['input'], users, channel_map, slack_user_map) converted_text = converted[0] with self.subTest(slack_message_conversion=name): @@ -91,7 +91,7 @@ class SlackMessageConversion(ZulipTestCase): self.assertEqual(mentioned_users, []) def test_has_link(self) -> None: - slack_user_map = {} # type: Dict[str, int] + slack_user_map: Dict[str, int] = {} message = '' text, mentioned_users, has_link = convert_to_zulip_markdown(message, [], {}, slack_user_map) diff --git a/zerver/tests/test_subs.py b/zerver/tests/test_subs.py index d8361ae380..38a5ca4a87 100644 --- a/zerver/tests/test_subs.py +++ b/zerver/tests/test_subs.py @@ -114,7 +114,7 @@ class TestCreateStreams(ZulipTestCase): realm = get_realm('zulip') # Test stream creation events. - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): ensure_stream(realm, "Public stream", invite_only=False) self.assert_length(events, 1) @@ -187,7 +187,7 @@ class TestCreateStreams(ZulipTestCase): def test_history_public_to_subscribers_on_stream_creation(self) -> None: realm = get_realm('zulip') - stream_dicts = [ + stream_dicts: List[Mapping[str, Any]] = [ { "name": "publicstream", "description": "Public stream with public history" @@ -209,7 +209,7 @@ class TestCreateStreams(ZulipTestCase): "invite_only": False, "history_public_to_subscribers": False }, - ] # type: List[Mapping[str, Any]] + ] created, existing = create_streams_if_needed(realm, stream_dicts) @@ -495,7 +495,7 @@ class StreamAdminTest(ZulipTestCase): self.subscribe(user_profile, 'private_stream') self.subscribe(self.example_user("cordelia"), 'private_stream') - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): stream_id = get_stream('private_stream', user_profile.realm).id result = self.client_patch('/json/streams/%d' % (stream_id,), @@ -556,7 +556,7 @@ class StreamAdminTest(ZulipTestCase): {'new_name': ujson.dumps('sTREAm_name1')}) self.assert_json_success(result) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): stream_id = get_stream('stream_name1', user_profile.realm).id result = self.client_patch('/json/streams/%d' % (stream_id,), @@ -711,7 +711,7 @@ class StreamAdminTest(ZulipTestCase): realm = user_profile.realm self.subscribe(user_profile, 'stream_name1') - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): stream_id = get_stream('stream_name1', realm).id result = self.client_patch('/json/streams/%d' % (stream_id,), @@ -851,7 +851,7 @@ class StreamAdminTest(ZulipTestCase): # deactivated, just to exercise our renaming logic: ensure_stream(realm, "!DEACTIVATED:" + active_name) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_delete('/json/streams/' + str(stream_id)) self.assert_json_success(result) @@ -1669,7 +1669,7 @@ class SubscriptionPropertiesTest(ZulipTestCase): user_profile=test_user) self.assertEqual(sub.is_muted, False) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] property_name = 
"is_muted" with tornado_redirected_to_list(events): result = self.api_post(test_user, "/api/v1/users/me/subscriptions/properties", @@ -2095,7 +2095,7 @@ class SubscriptionAPITest(ZulipTestCase): self.assertNotEqual(len(self.streams), 0) # necessary for full test coverage add_streams = ["Verona2", "Denmark5"] self.assertNotEqual(len(add_streams), 0) # necessary for full test coverage - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): self.helper_check_subs_before_and_after_add(self.streams + add_streams, {}, add_streams, self.streams, self.test_email, @@ -2113,7 +2113,7 @@ class SubscriptionAPITest(ZulipTestCase): self.assertNotEqual(len(self.streams), 0) add_streams = ["Verona2", "Denmark5"] self.assertNotEqual(len(add_streams), 0) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] other_params = { 'announce': 'true', } @@ -2419,7 +2419,7 @@ class SubscriptionAPITest(ZulipTestCase): user2 = self.example_user("iago") realm = get_realm("zulip") streams_to_sub = ['multi_user_stream'] - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] flush_per_request_caches() with tornado_redirected_to_list(events): with queries_captured() as queries: @@ -2519,7 +2519,7 @@ class SubscriptionAPITest(ZulipTestCase): # Now subscribe Cordelia to the stream, capturing events user_profile = self.example_user('cordelia') - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): bulk_add_subscriptions([stream], [user_profile]) @@ -2681,7 +2681,7 @@ class SubscriptionAPITest(ZulipTestCase): dict(principals=ujson.dumps(orig_emails_to_subscribe))) new_emails_to_subscribe = [iago.email, cordelia.email] - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): self.common_subscribe_to_streams( self.test_user, @@ -2729,7 +2729,7 @@ class SubscriptionAPITest(ZulipTestCase): self.subscribe(user2, 'private_stream') self.subscribe(user3, 'private_stream') - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): bulk_remove_subscriptions( [user1, user2], @@ -2792,7 +2792,7 @@ class SubscriptionAPITest(ZulipTestCase): stream.is_in_zephyr_realm = True stream.save() - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): with queries_captured() as queries: self.common_subscribe_to_streams( @@ -3883,7 +3883,7 @@ class GetSubscribersTest(ZulipTestCase): result_dict = result.json() self.assertIn('subscribers', result_dict) self.assertIsInstance(result_dict['subscribers'], list) - subscribers = [] # type: List[str] + subscribers: List[str] = [] for subscriber in result_dict['subscribers']: self.assertIsInstance(subscriber, str) subscribers.append(subscriber) diff --git a/zerver/tests/test_tornado.py b/zerver/tests/test_tornado.py index 77d0c315c5..2a758c7d76 100644 --- a/zerver/tests/test_tornado.py +++ b/zerver/tests/test_tornado.py @@ -23,7 +23,7 @@ class TornadoWebTestCase(AsyncHTTPTestCase, ZulipTestCase): super().setUp() signals.request_started.disconnect(close_old_connections) signals.request_finished.disconnect(close_old_connections) - self.session_cookie = None # type: Optional[Dict[str, str]] + self.session_cookie: Optional[Dict[str, str]] = None def tearDown(self) -> None: super().tearDown() diff --git 
a/zerver/tests/test_typing.py b/zerver/tests/test_typing.py index e5c1fcf2c1..f8f5e55c64 100644 --- a/zerver/tests/test_typing.py +++ b/zerver/tests/test_typing.py @@ -85,7 +85,7 @@ class TypingHappyPathTest(ZulipTestCase): op='start', ) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with queries_captured() as queries: with tornado_redirected_to_list(events): result = self.api_post(sender, '/api/v1/typing', params) @@ -116,7 +116,7 @@ class TypingHappyPathTest(ZulipTestCase): huddle_hash = get_huddle_hash(list(expected_recipient_ids)) self.assertFalse(Huddle.objects.filter(huddle_hash=huddle_hash).exists()) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] params = dict( to=ujson.dumps([user.id for user in recipient_users]), @@ -156,7 +156,7 @@ class TypingHappyPathTest(ZulipTestCase): email = user.email expected_recipient_emails = {email} expected_recipient_ids = {user.id} - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.api_post( user, @@ -197,7 +197,7 @@ class TypingHappyPathTest(ZulipTestCase): op='start' ) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.api_post(sender, '/api/v1/typing', params) @@ -226,7 +226,7 @@ class TypingHappyPathTest(ZulipTestCase): expected_recipient_emails = {email} expected_recipient_ids = {user.id} - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): params = dict( to=ujson.dumps([user.id]), @@ -260,7 +260,7 @@ class TypingHappyPathTest(ZulipTestCase): expected_recipient_emails = {user.email for user in expected_recipients} expected_recipient_ids = {user.id for user in expected_recipients} - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): params = dict( to=ujson.dumps([recipient.id]), @@ -301,7 +301,7 @@ class TypingLegacyMobileSupportTest(ZulipTestCase): op='start', ) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.api_post(sender, '/api/v1/typing', params) diff --git a/zerver/tests/test_unread.py b/zerver/tests/test_unread.py index ddcafcc3a7..6b9f9f6293 100644 --- a/zerver/tests/test_unread.py +++ b/zerver/tests/test_unread.py @@ -268,7 +268,7 @@ class UnreadCountTests(ZulipTestCase): message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello") unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello") - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_post("/json/mark_stream_as_read", { "stream_id": stream.id @@ -324,7 +324,7 @@ class UnreadCountTests(ZulipTestCase): message_id = self.send_stream_message(self.example_user("hamlet"), "test_stream", "hello", "test_topic") unrelated_message_id = self.send_stream_message(self.example_user("hamlet"), "Denmark", "hello", "Denmark2") - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_post("/json/mark_topic_as_read", { "stream_id": get_stream("test_stream", user_profile.realm).id, diff --git a/zerver/tests/test_upload.py b/zerver/tests/test_upload.py index 
bacd891f5c..04a69b9acf 100644 --- a/zerver/tests/test_upload.py +++ b/zerver/tests/test_upload.py @@ -844,7 +844,7 @@ class AvatarTest(UploadSerializeMixin, ZulipTestCase): def test_avatar_url(self) -> None: """Verifies URL schemes for avatars and realm icons.""" - backend = LocalUploadBackend() # type: ZulipUploadBackend + backend: ZulipUploadBackend = LocalUploadBackend() self.assertEqual(backend.get_avatar_url("hash", False), "/user_avatars/hash.png?x=x") self.assertEqual(backend.get_avatar_url("hash", True), diff --git a/zerver/tests/test_user_status.py b/zerver/tests/test_user_status.py index cf31aff63a..4112f4b683 100644 --- a/zerver/tests/test_user_status.py +++ b/zerver/tests/test_user_status.py @@ -160,7 +160,7 @@ class UserStatusTest(ZulipTestCase): self.login_user(hamlet) # Try to omit parameter--this should be an error. - payload = dict() # type: Dict[str, Any] + payload: Dict[str, Any] = dict() result = self.client_post('/json/users/me/status', payload) self.assert_json_error(result, "Client did not pass any new values.") diff --git a/zerver/tests/test_users.py b/zerver/tests/test_users.py index 558afc92f2..511128a9f8 100644 --- a/zerver/tests/test_users.py +++ b/zerver/tests/test_users.py @@ -150,7 +150,7 @@ class PermissionTest(ZulipTestCase): # Giveth req = dict(is_admin=ujson.dumps(True)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/users/{}'.format(othello.id), req) self.assert_json_success(result) @@ -351,7 +351,7 @@ class PermissionTest(ZulipTestCase): hamlet = self.example_user("hamlet") req = dict(is_guest=ujson.dumps(True)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/users/{}'.format(hamlet.id), req) self.assert_json_success(result) @@ -370,7 +370,7 @@ class PermissionTest(ZulipTestCase): polonius = self.example_user("polonius") self.assertTrue(polonius.is_guest) req = dict(is_guest=ujson.dumps(False)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/users/{}'.format(polonius.id), req) self.assert_json_success(result) @@ -398,7 +398,7 @@ class PermissionTest(ZulipTestCase): hamlet = self.example_user("hamlet") self.assertFalse(hamlet.is_guest) req = dict(is_admin=ujson.dumps(False), is_guest=ujson.dumps(True)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/users/{}'.format(hamlet.id), req) self.assert_json_success(result) @@ -431,7 +431,7 @@ class PermissionTest(ZulipTestCase): polonius = self.example_user("polonius") self.assertFalse(polonius.is_realm_admin) req = dict(is_admin=ujson.dumps(True), is_guest=ujson.dumps(False)) - events = [] # type: List[Mapping[str, Any]] + events: List[Mapping[str, Any]] = [] with tornado_redirected_to_list(events): result = self.client_patch('/json/users/{}'.format(polonius.id), req) self.assert_json_success(result) @@ -520,7 +520,7 @@ class PermissionTest(ZulipTestCase): empty_profile_data = [] for field_name in fields: field = CustomProfileField.objects.get(name=field_name, realm=realm) - value = '' # type: Union[str, None, List[Any]] + value: Union[str, None, List[Any]] = '' if field.field_type == CustomProfileField.USER: value = [] empty_profile_data.append({ @@ -745,7 +745,7 @@ 
class UserProfileTest(ZulipTestCase): bot = self.example_user("default_bot") # Invalid user ID - invalid_uid = 1000 # type: Any + invalid_uid: Any = 1000 self.assertEqual(check_valid_user_ids(realm.id, invalid_uid), "User IDs is not a list") self.assertEqual(check_valid_user_ids(realm.id, [invalid_uid]), diff --git a/zerver/tests/test_widgets.py b/zerver/tests/test_widgets.py index 4cb54a541e..9218277933 100644 --- a/zerver/tests/test_widgets.py +++ b/zerver/tests/test_widgets.py @@ -30,7 +30,7 @@ class WidgetContentTestCase(ZulipTestCase): assert_error(dict(widget_type='bogus', extra_data={}), 'unknown widget type: bogus') - extra_data = dict() # type: Dict[str, Any] + extra_data: Dict[str, Any] = dict() obj = dict(widget_type='zform', extra_data=extra_data) assert_error(obj, 'zform is missing type field') diff --git a/zerver/tornado/descriptors.py b/zerver/tornado/descriptors.py index 032cd19d25..135ead63bc 100644 --- a/zerver/tornado/descriptors.py +++ b/zerver/tornado/descriptors.py @@ -3,7 +3,7 @@ from typing import Dict, Optional, TYPE_CHECKING if TYPE_CHECKING: from zerver.tornado.event_queue import ClientDescriptor -descriptors_by_handler_id = {} # type: Dict[int, ClientDescriptor] +descriptors_by_handler_id: Dict[int, "ClientDescriptor"] = {} def get_descriptor_by_handler_id(handler_id: int) -> Optional['ClientDescriptor']: return descriptors_by_handler_id.get(handler_id) diff --git a/zerver/tornado/event_queue.py b/zerver/tornado/event_queue.py index d32aed04fb..680099a159 100644 --- a/zerver/tornado/event_queue.py +++ b/zerver/tornado/event_queue.py @@ -79,8 +79,8 @@ class ClientDescriptor: # Additionally, the to_dict and from_dict methods must be updated self.user_profile_id = user_profile_id self.realm_id = realm_id - self.current_handler_id = None # type: Optional[int] - self.current_client_name = None # type: Optional[str] + self.current_handler_id: Optional[int] = None + self.current_client_name: Optional[str] = None self.event_queue = event_queue self.event_types = event_types self.last_connection_time = time.time() @@ -89,7 +89,7 @@ class ClientDescriptor: self.slim_presence = slim_presence self.all_public_streams = all_public_streams self.client_type_name = client_type_name - self._timeout_handle = None # type: Any # TODO: should be return type of ioloop.call_later + self._timeout_handle: Any = None # TODO: should be return type of ioloop.call_later self.narrow = narrow self.narrow_filter = build_narrow_filter(narrow) @@ -241,11 +241,11 @@ class EventQueue: # When extending this list of properties, one must be sure to # update to_dict and from_dict. 
- self.queue = deque() # type: Deque[Dict[str, Any]] - self.next_event_id = 0 # type: int - self.newest_pruned_id = -1 # type: Optional[int] # will only be None for migration from old versions - self.id = id # type: str - self.virtual_events = {} # type: Dict[str, Dict[str, Any]] + self.queue: Deque[Dict[str, Any]] = deque() + self.next_event_id: int = 0 + self.newest_pruned_id: Optional[int] = -1 # will only be None for migration from old versions + self.id: str = id + self.virtual_events: Dict[str, Dict[str, Any]] = {} def to_dict(self) -> Dict[str, Any]: # If you add a new key to this dict, make sure you add appropriate @@ -316,8 +316,8 @@ class EventQueue: self.pop() def contents(self) -> List[Dict[str, Any]]: - contents = [] # type: List[Dict[str, Any]] - virtual_id_map = {} # type: Dict[str, Dict[str, Any]] + contents: List[Dict[str, Any]] = [] + virtual_id_map: Dict[str, Dict[str, Any]] = {} for event_type in self.virtual_events: virtual_id_map[self.virtual_events[event_type]["id"]] = self.virtual_events[event_type] virtual_ids = sorted(list(virtual_id_map.keys())) @@ -339,18 +339,18 @@ class EventQueue: return contents # maps queue ids to client descriptors -clients = {} # type: Dict[str, ClientDescriptor] +clients: Dict[str, ClientDescriptor] = {} # maps user id to list of client descriptors -user_clients = {} # type: Dict[int, List[ClientDescriptor]] +user_clients: Dict[int, List[ClientDescriptor]] = {} # maps realm id to list of client descriptors with all_public_streams=True -realm_clients_all_streams = {} # type: Dict[int, List[ClientDescriptor]] +realm_clients_all_streams: Dict[int, List[ClientDescriptor]] = {} # list of registered gc hooks. # each one will be called with a user profile id, queue, and bool # last_for_client that is true if this is the last queue pertaining # to this user_profile_id # that is about to be deleted -gc_hooks = [] # type: List[Callable[[int, ClientDescriptor, bool], None]] +gc_hooks: List[Callable[[int, ClientDescriptor, bool], None]] = [] next_queue_id = 0 @@ -415,9 +415,9 @@ def do_gc_event_queues(to_remove: AbstractSet[str], affected_users: AbstractSet[ def gc_event_queues(port: int) -> None: start = time.time() - to_remove = set() # type: Set[str] - affected_users = set() # type: Set[int] - affected_realms = set() # type: Set[int] + to_remove: Set[str] = set() + affected_users: Set[int] = set() + affected_realms: Set[int] = set() for (id, client) in clients.items(): if client.expired(start): to_remove.add(id) @@ -485,7 +485,7 @@ def load_event_queues(port: int) -> None: % (port, len(clients), time.time() - start)) def send_restart_events(immediate: bool=False) -> None: - event = dict(type='restart', server_generation=settings.SERVER_GENERATION) # type: Dict[str, Any] + event: Dict[str, Any] = dict(type='restart', server_generation=settings.SERVER_GENERATION) if immediate: event['immediate'] = True for client in clients.values(): @@ -514,13 +514,13 @@ def setup_event_queue(port: int) -> None: send_restart_events(immediate=settings.DEVELOPMENT) def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]: - queue_id = query["queue_id"] # type: str - dont_block = query["dont_block"] # type: bool - last_event_id = query["last_event_id"] # type: int - user_profile_id = query["user_profile_id"] # type: int - new_queue_data = query.get("new_queue_data") # type: Optional[MutableMapping[str, Any]] - client_type_name = query["client_type_name"] # type: str - handler_id = query["handler_id"] # type: int + queue_id: str = query["queue_id"] + dont_block: 
bool = query["dont_block"] + last_event_id: int = query["last_event_id"] + user_profile_id: int = query["user_profile_id"] + new_queue_data: Optional[MutableMapping[str, Any]] = query.get("new_queue_data") + client_type_name: str = query["client_type_name"] + handler_id: int = query["handler_id"] try: was_connected = False @@ -554,8 +554,10 @@ def fetch_events(query: Mapping[str, Any]) -> Dict[str, Any]: was_connected = client.finish_current_handler() if not client.event_queue.empty() or dont_block: - response = dict(events=client.event_queue.contents(), - handler_id=handler_id) # type: Dict[str, Any] + response: Dict[str, Any] = dict( + events=client.event_queue.contents(), + handler_id=handler_id, + ) if orig_queue_id is None: response['queue_id'] = queue_id if len(response["events"]) == 1: @@ -621,14 +623,14 @@ def request_event_queue(user_profile: UserProfile, user_client: Client, apply_ma def get_user_events(user_profile: UserProfile, queue_id: str, last_event_id: int) -> List[Dict[str, Any]]: if settings.TORNADO_SERVER: tornado_uri = get_tornado_uri(user_profile.realm) - post_data = { + post_data: Dict[str, Any] = { 'queue_id': queue_id, 'last_event_id': last_event_id, 'dont_block': 'true', 'user_profile_id': user_profile.id, 'secret': settings.SHARED_SECRET, 'client': 'internal' - } # type: Dict[str, Any] + } resp = requests_client.post(tornado_uri + '/api/v1/events/internal', data=post_data) resp.raise_for_status() @@ -724,7 +726,7 @@ def maybe_enqueue_notifications(user_profile_id: int, message_id: int, private_m """This function has a complete unit test suite in `test_enqueue_notifications` that should be expanded as we add more features here.""" - notified = dict() # type: Dict[str, bool] + notified: Dict[str, bool] = dict() if (idle or always_push_notify) and (private_message or mentioned or wildcard_mention_notify or stream_push_notify): @@ -782,9 +784,9 @@ def get_client_info_for_message_event(event_template: Mapping[str, Any], to all streams, plus users who may be mentioned, etc. 
''' - send_to_clients = {} # type: Dict[str, ClientInfo] + send_to_clients: Dict[str, ClientInfo] = {} - sender_queue_id = event_template.get('sender_queue_id', None) # type: Optional[str] + sender_queue_id: Optional[str] = event_template.get('sender_queue_id', None) def is_sender_client(client: ClientDescriptor) -> bool: return (sender_queue_id is not None) and client.event_queue.id == sender_queue_id @@ -801,8 +803,8 @@ def get_client_info_for_message_event(event_template: Mapping[str, Any], ) for user_data in users: - user_profile_id = user_data['id'] # type: int - flags = user_data.get('flags', []) # type: Iterable[str] + user_profile_id: int = user_data['id'] + flags: Iterable[str] = user_data.get('flags', []) for client in get_client_descriptors_for_user(user_profile_id): send_to_clients[client.event_queue.id] = dict( @@ -822,7 +824,7 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map send_to_clients = get_client_info_for_message_event(event_template, users) presence_idle_user_ids = set(event_template.get('presence_idle_user_ids', [])) - wide_dict = event_template['message_dict'] # type: Dict[str, Any] + wide_dict: Dict[str, Any] = event_template['message_dict'] # Temporary transitional code: Zulip servers that have message # events in their event queues and upgrade to the new version @@ -833,10 +835,10 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map if 'sender_delivery_email' not in wide_dict: # nocoverage wide_dict['sender_delivery_email'] = wide_dict['sender_email'] - sender_id = wide_dict['sender_id'] # type: int - message_id = wide_dict['id'] # type: int - message_type = wide_dict['type'] # type: str - sending_client = wide_dict['client'] # type: str + sender_id: int = wide_dict['sender_id'] + message_id: int = wide_dict['id'] + message_type: str = wide_dict['type'] + sending_client: str = wide_dict['client'] @cachify def get_client_payload(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]: @@ -847,11 +849,11 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map ) # Extra user-specific data to include - extra_user_data = {} # type: Dict[int, Any] + extra_user_data: Dict[int, Any] = {} for user_data in users: - user_profile_id = user_data['id'] # type: int - flags = user_data.get('flags', []) # type: Iterable[str] + user_profile_id: int = user_data['id'] + flags: Iterable[str] = user_data.get('flags', []) # If the recipient was offline and the message was a single or group PM to them # or they were @-notified potentially notify more immediately @@ -882,8 +884,8 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map for client_data in send_to_clients.values(): client = client_data['client'] flags = client_data['flags'] - is_sender = client_data.get('is_sender', False) # type: bool - extra_data = extra_user_data.get(client.user_profile_id, None) # type: Optional[Mapping[str, bool]] + is_sender: bool = client_data.get('is_sender', False) + extra_data: Optional[Mapping[str, bool]] = extra_user_data.get(client.user_profile_id, None) if not client.accepts_messages(): # The actual check is the accepts_event() check below; @@ -898,7 +900,7 @@ def process_message_event(event_template: Mapping[str, Any], users: Iterable[Map message_dict = message_dict.copy() message_dict["invite_only_stream"] = True - user_event = dict(type='message', message=message_dict, flags=flags) # type: Dict[str, Any] + user_event: Dict[str, Any] = dict(type='message', 
message=message_dict, flags=flags) if extra_data is not None: user_event.update(extra_data) @@ -1062,8 +1064,8 @@ def maybe_enqueue_notifications_for_message_update(user_profile_id: UserProfile, ) def process_notification(notice: Mapping[str, Any]) -> None: - event = notice['event'] # type: Mapping[str, Any] - users = notice['users'] # type: Union[List[int], List[Mapping[str, Any]]] + event: Mapping[str, Any] = notice['event'] + users: Union[List[int], List[Mapping[str, Any]]] = notice['users'] start_time = time.time() if event['type'] == "message": diff --git a/zerver/tornado/exceptions.py b/zerver/tornado/exceptions.py index 680343461c..652dc579b6 100644 --- a/zerver/tornado/exceptions.py +++ b/zerver/tornado/exceptions.py @@ -7,7 +7,7 @@ class BadEventQueueIdError(JsonableError): data_fields = ['queue_id'] def __init__(self, queue_id: str) -> None: - self.queue_id = queue_id # type: str + self.queue_id: str = queue_id @staticmethod def msg_format() -> str: diff --git a/zerver/tornado/handlers.py b/zerver/tornado/handlers.py index 3d3420610d..f71322f749 100644 --- a/zerver/tornado/handlers.py +++ b/zerver/tornado/handlers.py @@ -16,7 +16,7 @@ from zerver.middleware import async_request_timer_restart, async_request_timer_s from zerver.tornado.descriptors import get_descriptor_by_handler_id current_handler_id = 0 -handlers = {} # type: Dict[int, 'AsyncDjangoHandler'] +handlers: Dict[int, 'AsyncDjangoHandler'] = {} # Copied from django.core.handlers.base logger = logging.getLogger('django.request') @@ -122,7 +122,7 @@ class AsyncDjangoHandler(tornado.web.RequestHandler, base.BaseHandler): # Copy any cookies if not hasattr(self, "_new_cookies"): - self._new_cookies = [] # type: List[http.cookie.SimpleCookie[str]] + self._new_cookies: List[http.cookie.SimpleCookie[str]] = [] self._new_cookies.append(response.cookies) # Copy the response content diff --git a/zerver/tornado/ioloop_logging.py b/zerver/tornado/ioloop_logging.py index 2c4d8ab0bb..19e5f5b88a 100644 --- a/zerver/tornado/ioloop_logging.py +++ b/zerver/tornado/ioloop_logging.py @@ -13,7 +13,7 @@ orig_poll_impl = select.epoll # This is used for a somewhat hacky way of passing the port number # into this early-initialized module. -logging_data = {} # type: Dict[str, str] +logging_data: Dict[str, str] = {} class InstrumentedPollIOLoop(PollIOLoop): def initialize(self, **kwargs): # type: ignore[no-untyped-def] # TODO investigate likely buggy monkey patching here @@ -33,7 +33,7 @@ def instrument_tornado_ioloop() -> None: class InstrumentedPoll: def __init__(self) -> None: self._underlying = orig_poll_impl() - self._times = [] # type: List[Tuple[float, float]] + self._times: List[Tuple[float, float]] = [] self._last_print = 0.0 # Python won't let us subclass e.g. 
select.epoll, so instead diff --git a/zerver/tornado/views.py b/zerver/tornado/views.py index eaf0300d86..94d0aa7bdc 100644 --- a/zerver/tornado/views.py +++ b/zerver/tornado/views.py @@ -73,7 +73,7 @@ def get_events_backend(request: HttpRequest, user_profile: UserProfile, intentionally_undocumented=True) ) -> HttpResponse: # Extract the Tornado handler from the request - handler = request._tornado_handler # type: AsyncDjangoHandler + handler: AsyncDjangoHandler = request._tornado_handler if user_client is None: valid_user_client = request.client diff --git a/zerver/views/archive.py b/zerver/views/archive.py index 70473dba16..4c8515e539 100644 --- a/zerver/views/archive.py +++ b/zerver/views/archive.py @@ -52,7 +52,7 @@ def archive(request: HttpRequest, return get_response([], True, stream.name) rendered_message_list = [] - prev_sender = None # type: Optional[UserProfile] + prev_sender: Optional[UserProfile] = None for msg in all_messages: include_sender = False status_message = Message.is_status_message(msg.content, msg.rendered_content) diff --git a/zerver/views/auth.py b/zerver/views/auth.py index fef10d284e..d4236f1a34 100644 --- a/zerver/views/auth.py +++ b/zerver/views/auth.py @@ -200,11 +200,13 @@ def maybe_send_to_registration(request: HttpRequest, email: str, full_name: str= # just send the user back to the registration page. url = reverse('register') context = login_context(request) - extra_context = {'form': form, 'current_url': lambda: url, - 'from_multiuse_invite': from_multiuse_invite, - 'multiuse_object_key': multiuse_object_key, - 'mobile_flow_otp': mobile_flow_otp, - 'desktop_flow_otp': desktop_flow_otp} # type: Mapping[str, Any] + extra_context: Mapping[str, Any] = { + 'form': form, 'current_url': lambda: url, + 'from_multiuse_invite': from_multiuse_invite, + 'multiuse_object_key': multiuse_object_key, + 'mobile_flow_otp': mobile_flow_otp, + 'desktop_flow_otp': desktop_flow_otp, + } context.update(extra_context) return render(request, 'zerver/accounts_home.html', context=context) @@ -334,7 +336,7 @@ def remote_user_sso(request: HttpRequest, desktop_flow_otp: Optional[str]=REQ(default=None)) -> HttpResponse: subdomain = get_subdomain(request) try: - realm = get_realm(subdomain) # type: Optional[Realm] + realm: Optional[Realm] = get_realm(subdomain) except Realm.DoesNotExist: realm = None @@ -443,7 +445,7 @@ def oauth_redirect_to_root(request: HttpRequest, url: str, def start_social_login(request: HttpRequest, backend: str, extra_arg: Optional[str]=None ) -> HttpResponse: backend_url = reverse('social:begin', args=[backend]) - extra_url_params = {} # type: Dict[str, str] + extra_url_params: Dict[str, str] = {} if backend == "saml": result = SAMLAuthBackend.check_config() if result is not None: @@ -469,7 +471,7 @@ def start_social_login(request: HttpRequest, backend: str, extra_arg: Optional[s def start_social_signup(request: HttpRequest, backend: str, extra_arg: Optional[str]=None ) -> HttpResponse: backend_url = reverse('social:begin', args=[backend]) - extra_url_params = {} # type: Dict[str, str] + extra_url_params: Dict[str, str] = {} if backend == "saml": result = SAMLAuthBackend.check_config() if result is not None: @@ -666,7 +668,7 @@ def update_login_page_context(request: HttpRequest, context: Dict[str, Any]) -> context['deactivated_account_error'] = DEACTIVATED_ACCOUNT_ERROR class TwoFactorLoginView(BaseTwoFactorLoginView): - extra_context = None # type: ExtraContext + extra_context: ExtraContext = None form_list = ( ('auth', OurAuthenticationForm), ('token', 
AuthenticationTokenForm), @@ -839,7 +841,7 @@ def api_dev_fetch_api_key(request: HttpRequest, username: str=REQ()) -> HttpResp subdomain = get_subdomain(request) realm = get_realm(subdomain) - return_data = {} # type: Dict[str, bool] + return_data: Dict[str, bool] = {} user_profile = authenticate(dev_auth_username=username, realm=realm, return_data=return_data) @@ -870,7 +872,7 @@ def api_dev_list_users(request: HttpRequest) -> HttpResponse: @require_post @has_request_variables def api_fetch_api_key(request: HttpRequest, username: str=REQ(), password: str=REQ()) -> HttpResponse: - return_data = {} # type: Dict[str, bool] + return_data: Dict[str, bool] = {} subdomain = get_subdomain(request) realm = get_realm(subdomain) if not ldap_auth_enabled(realm=get_realm_from_request(request)): diff --git a/zerver/views/custom_profile_fields.py b/zerver/views/custom_profile_fields.py index c32d11fd6f..850a11165f 100644 --- a/zerver/views/custom_profile_fields.py +++ b/zerver/views/custom_profile_fields.py @@ -92,7 +92,7 @@ def create_realm_custom_profile_field(request: HttpRequest, validate_custom_profile_field(name, hint, field_type, field_data) try: if is_default_external_field(field_type, field_data): - field_subtype = '' # type: str + field_subtype: str = '' field_subtype = field_data['subtype'] # type: ignore[assignment] # key for "Union[Dict[str, str], str]" can be str field = try_add_realm_default_custom_profile_field( realm=user_profile.realm, diff --git a/zerver/views/documentation.py b/zerver/views/documentation.py index e97cc05582..55dd94277a 100644 --- a/zerver/views/documentation.py +++ b/zerver/views/documentation.py @@ -81,7 +81,7 @@ class MarkdownDirectoryView(ApiURLView): def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: article = kwargs["article"] - context = super().get_context_data() # type: Dict[str, Any] + context: Dict[str, Any] = super().get_context_data() (context["article"], http_status_ignored) = self.get_path(article) # For disabling the "Back to home" on the homepage @@ -119,7 +119,7 @@ class MarkdownDirectoryView(ApiURLView): context["sidebar_index"] = sidebar_index context["sidebar_class"] = sidebar_class # An "article" might require the api_uri_context to be rendered - api_uri_context = {} # type: Dict[str, Any] + api_uri_context: Dict[str, Any] = {} add_api_uri_context(api_uri_context, self.request) api_uri_context["run_content_validators"] = True context["api_uri_context"] = api_uri_context @@ -167,7 +167,7 @@ class IntegrationView(ApiURLView): template_name = 'zerver/integrations/index.html' def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: - context = super().get_context_data(**kwargs) # type: Dict[str, Any] + context: Dict[str, Any] = super().get_context_data(**kwargs) add_integrations_context(context) add_integrations_open_graph_context(context, self.request) return context @@ -182,7 +182,7 @@ def integration_doc(request: HttpRequest, integration_name: str=REQ()) -> HttpRe except KeyError: return HttpResponseNotFound() - context = {} # type: Dict[str, Any] + context: Dict[str, Any] = {} add_api_uri_context(context, request) context['integration_name'] = integration.name diff --git a/zerver/views/events_register.py b/zerver/views/events_register.py index 0191eb12aa..997618bc64 100644 --- a/zerver/views/events_register.py +++ b/zerver/views/events_register.py @@ -16,7 +16,7 @@ def _default_all_public_streams(user_profile: UserProfile, def _default_narrow(user_profile: UserProfile, narrow: Iterable[Sequence[str]]) -> Iterable[Sequence[str]]: - 
default_stream = user_profile.default_events_register_stream # type: Optional[Stream] + default_stream: Optional[Stream] = user_profile.default_events_register_stream if not narrow and default_stream is not None: narrow = [['stream', default_stream.name]] return narrow diff --git a/zerver/views/home.py b/zerver/views/home.py index cb7d55997e..be872e6c6b 100644 --- a/zerver/views/home.py +++ b/zerver/views/home.py @@ -74,7 +74,7 @@ def detect_narrowed_window(request: HttpRequest, if user_profile is None: # nocoverage return [], None, None - narrow = [] # type: List[List[str]] + narrow: List[List[str]] = [] narrow_stream = None narrow_topic = request.GET.get("topic") @@ -112,7 +112,7 @@ def sent_time_in_epoch_seconds(user_message: Optional[UserMessage]) -> Optional[ return calendar.timegm(user_message.message.date_sent.utctimetuple()) def get_bot_types(user_profile: Optional[UserProfile]) -> List[Dict[str, object]]: - bot_types = [] # type: List[Dict[str, object]] + bot_types: List[Dict[str, object]] = [] if user_profile is None: # nocoverage return bot_types @@ -199,7 +199,7 @@ def home_real(request: HttpRequest) -> HttpResponse: needs_tutorial = False if user_profile is None: # nocoverage - furthest_read_time = time.time() # type: Optional[float] + furthest_read_time: Optional[float] = time.time() elif user_profile.pointer == -1: if user_has_messages: # Put the new user's pointer at the bottom diff --git a/zerver/views/invite.py b/zerver/views/invite.py index 2927a2658b..b062ba1fa9 100644 --- a/zerver/views/invite.py +++ b/zerver/views/invite.py @@ -37,7 +37,7 @@ def invite_users_backend(request: HttpRequest, user_profile: UserProfile, invitee_emails = get_invitee_emails_set(invitee_emails_raw) - streams = [] # type: List[Stream] + streams: List[Stream] = [] for stream_id in stream_ids: try: (stream, recipient, sub) = access_stream_by_id(user_profile, stream_id) diff --git a/zerver/views/messages.py b/zerver/views/messages.py index 150fa8944e..a3d463cf43 100644 --- a/zerver/views/messages.py +++ b/zerver/views/messages.py @@ -75,7 +75,7 @@ class BadNarrowOperator(JsonableError): data_fields = ['desc'] def __init__(self, desc: str) -> None: - self.desc = desc # type: str + self.desc: str = desc @staticmethod def msg_format() -> str: @@ -590,7 +590,7 @@ def ok_to_include_history(narrow: OptionalNarrowListT, user_profile: UserProfile if narrow is not None: for term in narrow: if term['operator'] == "stream" and not term.get('negated', False): - operand = term['operand'] # type: Union[str, int] + operand: Union[str, int] = term['operand'] if isinstance(operand, str): include_history = can_access_stream_history_by_name(user_profile, operand) else: @@ -926,8 +926,8 @@ def get_messages_backend(request: HttpRequest, user_profile: UserProfile, # rendered message dict before returning it. We attempt to # bulk-fetch rendered message dicts from remote cache using the # 'messages' list. 
- message_ids = [] # type: List[int] - user_message_flags = {} # type: Dict[int, List[str]] + message_ids: List[int] = [] + user_message_flags: Dict[int, List[str]] = {} if include_history: message_ids = [row[0] for row in rows] @@ -946,7 +946,7 @@ def get_messages_backend(request: HttpRequest, user_profile: UserProfile, user_message_flags[message_id] = UserMessage.flags_list_for_flags(flags) message_ids.append(message_id) - search_fields = dict() # type: Dict[int, Dict[str, str]] + search_fields: Dict[int, Dict[str, str]] = dict() if is_search: for row in rows: message_id = row[0] @@ -1089,8 +1089,8 @@ def post_process_limited_query(rows: List[Any], if anchored_to_right: num_after = 0 before_rows = visible_rows[:] - anchor_rows = [] # type: List[Any] - after_rows = [] # type: List[Any] + anchor_rows: List[Any] = [] + after_rows: List[Any] = [] else: before_rows = [r for r in visible_rows if r[0] < anchor] anchor_rows = [r for r in visible_rows if r[0] == anchor] @@ -1339,7 +1339,7 @@ def send_message_backend(request: HttpRequest, user_profile: UserProfile, # If req_to is None, then we default to an # empty list of recipients. - message_to = [] # type: Union[Sequence[int], Sequence[str]] + message_to: Union[Sequence[int], Sequence[str]] = [] if req_to is not None: if message_type_name == 'stream': @@ -1565,10 +1565,10 @@ def update_message_backend(request: HttpRequest, user_profile: UserMessage, if topic_name == "": raise JsonableError(_("Topic can't be empty")) rendered_content = None - links_for_embed = set() # type: Set[str] - prior_mention_user_ids = set() # type: Set[int] - mention_user_ids = set() # type: Set[int] - mention_data = None # type: Optional[bugdown.MentionData] + links_for_embed: Set[str] = set() + prior_mention_user_ids: Set[int] = set() + mention_user_ids: Set[int] = set() + mention_data: Optional[bugdown.MentionData] = None if content is not None: content = content.strip() if content == "": diff --git a/zerver/views/presence.py b/zerver/views/presence.py index 3ee868f211..16b423fd3a 100644 --- a/zerver/views/presence.py +++ b/zerver/views/presence.py @@ -90,7 +90,7 @@ def update_active_status_backend(request: HttpRequest, user_profile: UserProfile status_val, new_user_input) if ping_only: - ret = {} # type: Dict[str, Any] + ret: Dict[str, Any] = {} else: ret = get_presence_response(user_profile, slim_presence) diff --git a/zerver/views/realm.py b/zerver/views/realm.py index c284609672..ed0d1165fe 100644 --- a/zerver/views/realm.py +++ b/zerver/views/realm.py @@ -132,7 +132,7 @@ def update_realm( # further by some more advanced usage of the # `REQ/has_request_variables` extraction. 
req_vars = {k: v for k, v in list(locals().items()) if k in realm.property_types} - data = {} # type: Dict[str, Any] + data: Dict[str, Any] = {} for k, v in list(req_vars.items()): if v is not None and getattr(realm, k) != v: diff --git a/zerver/views/registration.py b/zerver/views/registration.py index 4f6689945d..1343379603 100644 --- a/zerver/views/registration.py +++ b/zerver/views/registration.py @@ -252,14 +252,14 @@ def accounts_register(request: HttpRequest) -> HttpResponse: if not realm_creation: try: - existing_user_profile = get_user_by_delivery_email(email, realm) # type: Optional[UserProfile] + existing_user_profile: Optional[UserProfile] = get_user_by_delivery_email(email, realm) except UserProfile.DoesNotExist: existing_user_profile = None else: existing_user_profile = None - user_profile = None # type: Optional[UserProfile] - return_data = {} # type: Dict[str, bool] + user_profile: Optional[UserProfile] = None + return_data: Dict[str, bool] = {} if ldap_auth_enabled(realm): # If the user was authenticated using an external SSO # mechanism like Google or GitHub auth, then authentication @@ -542,7 +542,7 @@ def find_account(request: HttpRequest) -> HttpResponse: from zerver.context_processors import common_context url = reverse('zerver.views.registration.find_account') - emails = [] # type: List[str] + emails: List[str] = [] if request.method == 'POST': form = FindMyTeamForm(request.POST) if form.is_valid(): diff --git a/zerver/views/report.py b/zerver/views/report.py index e7e2f8b22f..956789ee0e 100644 --- a/zerver/views/report.py +++ b/zerver/views/report.py @@ -21,7 +21,7 @@ from zerver.models import UserProfile import subprocess import logging -js_source_map = None # type: Optional[SourceMap] +js_source_map: Optional[SourceMap] = None # Read the source map information for decoding JavaScript backtraces. def get_js_source_map() -> Optional[SourceMap]: @@ -105,8 +105,10 @@ def report_error(request: HttpRequest, user_profile: UserProfile, message: str=R stacktrace = js_source_map.annotate_stacktrace(stacktrace) try: - version = subprocess.check_output(["git", "log", "HEAD^..HEAD", "--oneline"], - universal_newlines=True) # type: Optional[str] + version: Optional[str] = subprocess.check_output( + ["git", "log", "HEAD^..HEAD", "--oneline"], + universal_newlines=True, + ) except Exception: version = None diff --git a/zerver/views/streams.py b/zerver/views/streams.py index 20c92f5d1a..e744da406d 100644 --- a/zerver/views/streams.py +++ b/zerver/views/streams.py @@ -41,7 +41,7 @@ class PrincipalError(JsonableError): http_status_code = 403 def __init__(self, principal: str) -> None: - self.principal = principal # type: str + self.principal: str = principal @staticmethod def msg_format() -> str: @@ -215,10 +215,10 @@ def update_subscriptions_backend( if not add and not delete: return json_error(_('Nothing to do. Specify at least one of "add" or "delete".')) - method_kwarg_pairs = [ + method_kwarg_pairs: List[FuncKwargPair] = [ (add_subscriptions_backend, dict(streams_raw=add)), (remove_subscriptions_backend, dict(streams_raw=delete)) - ] # type: List[FuncKwargPair] + ] return compose_views(request, user_profile, method_kwarg_pairs) def compose_views( @@ -236,7 +236,7 @@ def compose_views( TODO: Move this a utils-like module if we end up using it more widely. 
''' - json_dict = {} # type: Dict[str, Any] + json_dict: Dict[str, Any] = {} with transaction.atomic(): for method, kwargs in method_kwarg_pairs: response = method(request, user_profile, **kwargs) @@ -272,7 +272,7 @@ def remove_subscriptions_backend( else: people_to_unsub = {user_profile} - result = dict(removed=[], not_removed=[]) # type: Dict[str, List[str]] + result: Dict[str, List[str]] = dict(removed=[], not_removed=[]) (removed, not_subscribed) = bulk_remove_subscriptions(people_to_unsub, streams, request.client, acting_user=user_profile) @@ -329,7 +329,7 @@ def add_subscriptions_backend( # We don't allow newline characters in stream descriptions. stream_dict['description'] = stream_dict['description'].replace("\n", " ") - stream_dict_copy = {} # type: Dict[str, Any] + stream_dict_copy: Dict[str, Any] = {} for field in stream_dict: stream_dict_copy[field] = stream_dict[field] # Strip the stream name here. @@ -371,9 +371,9 @@ def add_subscriptions_backend( # We can assume unique emails here for now, but we should eventually # convert this function to be more id-centric. - email_to_user_profile = dict() # type: Dict[str, UserProfile] + email_to_user_profile: Dict[str, UserProfile] = dict() - result = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list)) # type: Dict[str, Any] + result: Dict[str, Any] = dict(subscribed=defaultdict(list), already_subscribed=defaultdict(list)) for (subscriber, stream) in subscribed: result["subscribed"][subscriber.email].append(stream.name) email_to_user_profile[subscriber.email] = subscriber diff --git a/zerver/views/user_groups.py b/zerver/views/user_groups.py index 312edef68d..6b61a2a495 100644 --- a/zerver/views/user_groups.py +++ b/zerver/views/user_groups.py @@ -70,12 +70,12 @@ def update_user_group_backend(request: HttpRequest, user_profile: UserProfile, if not add and not delete: return json_error(_('Nothing to do. 
Specify at least one of "add" or "delete".')) - method_kwarg_pairs = [ + method_kwarg_pairs: List[FuncKwargPair] = [ (add_members_to_group_backend, dict(user_group_id=user_group_id, members=add)), (remove_members_from_group_backend, dict(user_group_id=user_group_id, members=delete)) - ] # type: List[FuncKwargPair] + ] return compose_views(request, user_profile, method_kwarg_pairs) def add_members_to_group_backend(request: HttpRequest, user_profile: UserProfile, diff --git a/zerver/views/user_settings.py b/zerver/views/user_settings.py index da38f124f3..a24967ed44 100644 --- a/zerver/views/user_settings.py +++ b/zerver/views/user_settings.py @@ -72,7 +72,7 @@ def json_change_settings(request: HttpRequest, user_profile: UserProfile, return json_error(_("Please fill out all fields.")) if new_password != "": - return_data = {} # type: Dict[str, Any] + return_data: Dict[str, Any] = {} if email_belongs_to_ldap(user_profile.realm, user_profile.delivery_email): return json_error(_("Your Zulip password is managed in LDAP")) @@ -105,7 +105,7 @@ def json_change_settings(request: HttpRequest, user_profile: UserProfile, # by Django, request.session.save() - result = {} # type: Dict[str, Any] + result: Dict[str, Any] = {} new_email = email.strip() if user_profile.delivery_email != new_email and new_email != '': if user_profile.realm.email_changes_disabled and not user_profile.is_realm_admin: @@ -172,7 +172,7 @@ def update_display_settings_backend( raise JsonableError(_("Invalid default_language")) request_settings = {k: v for k, v in list(locals().items()) if k in user_profile.property_types} - result = {} # type: Dict[str, Any] + result: Dict[str, Any] = {} for k, v in list(request_settings.items()): if v is not None and getattr(user_profile, k) != v: do_set_user_display_setting(user_profile, k, v) diff --git a/zerver/views/users.py b/zerver/views/users.py index 9e4b80af33..9347c17482 100644 --- a/zerver/views/users.py +++ b/zerver/views/users.py @@ -192,7 +192,7 @@ def patch_bot_backend( if default_sending_stream is not None: if default_sending_stream == "": - stream = None # type: Optional[Stream] + stream: Optional[Stream] = None else: (stream, recipient, sub) = access_stream_by_name( user_profile, default_sending_stream) @@ -422,7 +422,7 @@ def get_members_backend(request: HttpRequest, user_profile: UserProfile, user_id include_custom_profile_fields=include_custom_profile_fields) if target_user is not None: - data = {"user": members[target_user.id]} # type: Dict[str, Any] + data: Dict[str, Any] = {"user": members[target_user.id]} else: data = {"members": [members[k] for k in members]} @@ -481,7 +481,7 @@ def get_profile_backend(request: HttpRequest, user_profile: UserProfile) -> Http if not user_profile.is_bot: custom_profile_field_values = user_profile.customprofilefieldvalue_set.all() - profile_data = dict() # type: Dict[int, Dict[str, Any]] + profile_data: Dict[int, Dict[str, Any]] = dict() for profile_field in custom_profile_field_values: if profile_field.field.is_renderable(): profile_data[profile_field.field_id] = { diff --git a/zerver/webhooks/alertmanager/view.py b/zerver/webhooks/alertmanager/view.py index bf596119fd..960c0ec390 100644 --- a/zerver/webhooks/alertmanager/view.py +++ b/zerver/webhooks/alertmanager/view.py @@ -15,7 +15,7 @@ def api_alertmanager_webhook(request: HttpRequest, user_profile: UserProfile, payload: Dict[str, Any] = REQ(argument_type='body')) -> HttpResponse: name_field = request.GET.get("name", "instance") desc_field = request.GET.get("desc", "alertname") - topics 
= {} # type: Dict[str, Dict[str, List[str]]] + topics: Dict[str, Dict[str, List[str]]] = {} for alert in payload["alerts"]: labels = alert.get("labels", {}) diff --git a/zerver/webhooks/beanstalk/view.py b/zerver/webhooks/beanstalk/view.py index d3e8803cbf..a2e2a84590 100644 --- a/zerver/webhooks/beanstalk/view.py +++ b/zerver/webhooks/beanstalk/view.py @@ -52,7 +52,7 @@ def beanstalk_decoder(view_func: ViewFuncT) -> ViewFuncT: email, api_key = base64.b64decode(encoded_value).decode('utf-8').split(":") email = email.replace('%40', '@') credentials = "%s:%s" % (email, api_key) - encoded_credentials = base64.b64encode(credentials.encode('utf-8')).decode('utf8') # type: str + encoded_credentials: str = base64.b64encode(credentials.encode('utf-8')).decode('utf8') request.META['HTTP_AUTHORIZATION'] = "Basic " + encoded_credentials return view_func(request, *args, **kwargs) diff --git a/zerver/webhooks/bitbucket2/view.py b/zerver/webhooks/bitbucket2/view.py index 8deb3cc711..d227ae2b65 100644 --- a/zerver/webhooks/bitbucket2/view.py +++ b/zerver/webhooks/bitbucket2/view.py @@ -334,7 +334,7 @@ def get_pull_request_comment_action_body( def get_push_tag_body(payload: Dict[str, Any], change: Dict[str, Any]) -> str: if change.get('new'): tag = change['new'] - action = 'pushed' # type: Optional[str] + action: Optional[str] = 'pushed' elif change.get('old'): tag = change['old'] action = 'removed' diff --git a/zerver/webhooks/bitbucket3/view.py b/zerver/webhooks/bitbucket3/view.py index cd34c4f868..2b13a492fc 100644 --- a/zerver/webhooks/bitbucket3/view.py +++ b/zerver/webhooks/bitbucket3/view.py @@ -341,7 +341,7 @@ EVENT_HANDLER_MAP = { def get_event_handler(eventkey: str) -> Callable[..., List[Dict[str, str]]]: # The main reason for this function existance is because of mypy - handler = EVENT_HANDLER_MAP.get(eventkey) # type: Any + handler: Any = EVENT_HANDLER_MAP.get(eventkey) if handler is None: raise UnexpectedWebhookEventType("BitBucket Server", eventkey) return handler diff --git a/zerver/webhooks/clubhouse/view.py b/zerver/webhooks/clubhouse/view.py index 2c09e8c13b..1d2138e70e 100644 --- a/zerver/webhooks/clubhouse/view.py +++ b/zerver/webhooks/clubhouse/view.py @@ -317,7 +317,7 @@ def get_story_update_estimate_body(payload: Dict[str, Any]) -> str: return STORY_ESTIMATE_TEMPLATE.format(**kwargs) def get_reference_by_id(payload: Dict[str, Any], ref_id: int) -> Dict[str, Any]: - ref = {} # type: Dict[str, Any] + ref: Dict[str, Any] = {} for reference in payload['references']: if reference['id'] == ref_id: ref = reference @@ -328,7 +328,7 @@ def get_story_create_github_entity_body(payload: Dict[str, Any], entity: str) -> str: action = get_action_with_primary_id(payload) - story = {} # type: Dict[str, Any] + story: Dict[str, Any] = {} for a in payload['actions']: if (a['entity_type'] == 'story' and a['changes'].get('workflow_state_id') is not None): @@ -531,7 +531,7 @@ def api_clubhouse_webhook( if event is None: return json_success() - body_func = EVENT_BODY_FUNCTION_MAPPER.get(event) # type: Any + body_func: Any = EVENT_BODY_FUNCTION_MAPPER.get(event) topic_func = get_topic_function_based_on_type(payload) if body_func is None or topic_func is None: raise UnexpectedWebhookEventType('Clubhouse', event) diff --git a/zerver/webhooks/groove/view.py b/zerver/webhooks/groove/view.py index 790420a930..b980d0faf2 100644 --- a/zerver/webhooks/groove/view.py +++ b/zerver/webhooks/groove/view.py @@ -74,7 +74,7 @@ def replied_body(payload: Dict[str, Any], actor: str, action: str) -> str: def 
get_event_handler(event: str) -> Callable[..., str]: # The main reason for this function existence is because of mypy - handler = EVENTS_FUNCTION_MAPPER.get(event) # type: Any + handler: Any = EVENTS_FUNCTION_MAPPER.get(event) if handler is None: raise UnexpectedWebhookEventType("Groove", event) return handler diff --git a/zerver/webhooks/harbor/view.py b/zerver/webhooks/harbor/view.py index e096b98a07..aabd7e67c2 100644 --- a/zerver/webhooks/harbor/view.py +++ b/zerver/webhooks/harbor/view.py @@ -114,8 +114,7 @@ def api_harbor_webhook(request: HttpRequest, user_profile: UserProfile, if content_func is None: raise UnexpectedWebhookEventType('Harbor', event) - content = content_func(payload, user_profile, - operator_username) # type: str + content: str = content_func(payload, user_profile, operator_username) check_send_webhook_message(request, user_profile, topic, content, diff --git a/zerver/webhooks/hellosign/view.py b/zerver/webhooks/hellosign/view.py index d455d01263..644aab4595 100644 --- a/zerver/webhooks/hellosign/view.py +++ b/zerver/webhooks/hellosign/view.py @@ -15,7 +15,7 @@ BODY = "The `{contract_title}` document {actions}." def get_message_body(payload: Dict[str, Dict[str, Any]]) -> str: contract_title = payload['signature_request']['title'] - recipients = {} # type: Dict[str, List[str]] + recipients: Dict[str, List[str]] = {} signatures = payload['signature_request']['signatures'] for signature in signatures: diff --git a/zerver/webhooks/intercom/view.py b/zerver/webhooks/intercom/view.py index 51df9b804f..3fd586bb1e 100644 --- a/zerver/webhooks/intercom/view.py +++ b/zerver/webhooks/intercom/view.py @@ -69,7 +69,7 @@ class MLStripper(HTMLParser): self.reset() self.strict = False self.convert_charrefs = True - self.fed = [] # type: List[str] + self.fed: List[str] = [] def handle_data(self, d: str) -> None: self.fed.append(d) @@ -277,7 +277,7 @@ EVENT_TO_FUNCTION_MAPPER = { } def get_event_handler(event_type: str) -> Callable[..., Tuple[str, str]]: - handler = EVENT_TO_FUNCTION_MAPPER.get(event_type) # type: Any + handler: Any = EVENT_TO_FUNCTION_MAPPER.get(event_type) if handler is None: raise UnexpectedWebhookEventType("Intercom", event_type) return handler diff --git a/zerver/webhooks/jira/view.py b/zerver/webhooks/jira/view.py index 192d1c3768..5812b80f97 100644 --- a/zerver/webhooks/jira/view.py +++ b/zerver/webhooks/jira/view.py @@ -330,7 +330,7 @@ def api_jira_webhook(request: HttpRequest, user_profile: UserProfile, raise UnexpectedWebhookEventType('Jira', event) subject = get_issue_subject(payload) - content = content_func(payload, user_profile) # type: str + content: str = content_func(payload, user_profile) check_send_webhook_message(request, user_profile, subject, content, diff --git a/zerver/webhooks/opbeat/view.py b/zerver/webhooks/opbeat/view.py index 9c922ec688..a4145b8555 100644 --- a/zerver/webhooks/opbeat/view.py +++ b/zerver/webhooks/opbeat/view.py @@ -9,7 +9,7 @@ from zerver.lib.response import json_success from zerver.lib.webhooks.common import check_send_webhook_message from zerver.models import UserProfile -subject_types = { +subject_types: Dict[str, List[List[str]]] = { 'app': [ # Object type name ['name'], # Title ['html_url'], # Automatically put into title @@ -37,7 +37,7 @@ subject_types = { ['in {}', 'extra/pathname'], ['!message'] ] -} # type: Dict[str, List[List[str]]] +} def get_value(_obj: Dict[str, Any], key: str) -> str: @@ -56,7 +56,7 @@ def format_object( ) -> str: if subject_type not in subject_types.keys(): return message - keys = 
subject_types[subject_type][1:] # type: List[List[str]] + keys: List[List[str]] = subject_types[subject_type][1:] title = subject_types[subject_type][0] if title[0] != '': title_str = '' @@ -65,7 +65,7 @@ def format_object( else: title_str = obj[title[0]] if obj['html_url'] is not None: - url = obj['html_url'] # type: str + url: str = obj['html_url'] if 'opbeat.com' not in url: url = 'https://opbeat.com/' + url.lstrip('/') message += '\n**[{}]({})**'.format(title_str, url) diff --git a/zerver/webhooks/pagerduty/view.py b/zerver/webhooks/pagerduty/view.py index a51b5c7d83..69576c38df 100644 --- a/zerver/webhooks/pagerduty/view.py +++ b/zerver/webhooks/pagerduty/view.py @@ -67,7 +67,7 @@ Incident [{incident_num}]({incident_url}) resolved: """.strip() def build_pagerduty_formatdict(message: Dict[str, Any]) -> Dict[str, Any]: - format_dict = {} # type: Dict[str, Any] + format_dict: Dict[str, Any] = {} format_dict['action'] = PAGER_DUTY_EVENT_NAMES[message['type']] format_dict['incident_id'] = message['data']['incident']['id'] diff --git a/zerver/webhooks/stripe/view.py b/zerver/webhooks/stripe/view.py index 9d9c5c9546..a83d1e1332 100644 --- a/zerver/webhooks/stripe/view.py +++ b/zerver/webhooks/stripe/view.py @@ -205,7 +205,7 @@ def amount_string(amount: int, currency: str) -> str: return decimal_amount + ' {}'.format(currency.upper()) def linkified_id(object_id: str, lower: bool=False) -> str: - names_and_urls = { + names_and_urls: Dict[str, Tuple[str, Optional[str]]] = { # Core resources 'ch': ('Charge', 'charges'), 'cus': ('Customer', 'customers'), @@ -241,7 +241,7 @@ def linkified_id(object_id: str, lower: bool=False) -> str: 'py': ('Payment', 'payments'), # Connect, Fraud, Orders, etc not implemented - } # type: Dict[str, Tuple[str, Optional[str]]] + } name, url_prefix = names_and_urls[object_id.split('_')[0]] if lower: # nocoverage name = name.lower() diff --git a/zerver/webhooks/taiga/view.py b/zerver/webhooks/taiga/view.py index 5e87db2030..ca7c299bab 100644 --- a/zerver/webhooks/taiga/view.py +++ b/zerver/webhooks/taiga/view.py @@ -196,12 +196,12 @@ def parse_create_or_delete(message: Mapping[str, Any]) -> Dict[str, Any]: def parse_change_event(change_type: str, message: Mapping[str, Any]) -> Optional[Dict[str, Any]]: """ Parses change event. 
""" - evt = {} # type: Dict[str, Any] - values = { + evt: Dict[str, Any] = {} + values: Dict[str, Any] = { 'user': get_owner_name(message), 'user_link': get_owner_link(message), 'subject': get_subject(message) - } # type: Dict[str, Any] + } if change_type in ["description_diff", "points"]: event_type = change_type diff --git a/zerver/worker/queue_processors.py b/zerver/worker/queue_processors.py index 95c1b52181..a8ad186f67 100644 --- a/zerver/worker/queue_processors.py +++ b/zerver/worker/queue_processors.py @@ -72,8 +72,8 @@ def assign_queue( return clazz return decorate -worker_classes = {} # type: Dict[str, Type[QueueProcessingWorker]] -queues = {} # type: Dict[str, Dict[str, Type[QueueProcessingWorker]]] +worker_classes: Dict[str, Type["QueueProcessingWorker"]] = {} +queues: Dict[str, Dict[str, Type["QueueProcessingWorker"]]] = {} def register_worker(queue_name: str, clazz: Type['QueueProcessingWorker'], queue_type: str) -> None: if queue_type not in queues: queues[queue_type] = {} @@ -114,11 +114,11 @@ def retry_send_email_failures( return wrapper class QueueProcessingWorker(ABC): - queue_name = None # type: str + queue_name: str = None CONSUME_ITERATIONS_BEFORE_UPDATE_STATS_NUM = 50 def __init__(self) -> None: - self.q = None # type: SimpleQueueClient + self.q: SimpleQueueClient = None if self.queue_name is None: raise WorkerDeclarationException("Queue worker declared without queue_name") @@ -127,7 +127,7 @@ class QueueProcessingWorker(ABC): def initialize_statistics(self) -> None: self.queue_last_emptied_timestamp = time.time() self.consumed_since_last_emptied = 0 - self.recent_consume_times = deque(maxlen=50) # type: MutableSequence[Tuple[int, float]] + self.recent_consume_times: MutableSequence[Tuple[int, float]] = deque(maxlen=50) self.consume_interation_counter = 0 self.update_statistics(0) @@ -168,7 +168,7 @@ class QueueProcessingWorker(ABC): try: time_start = time.time() consume_func(events) - consume_time_seconds = time.time() - time_start # type: Optional[float] + consume_time_seconds: Optional[float] = time.time() - time_start self.consumed_since_last_emptied += len(events) except Exception: self._handle_consume_exception(events) @@ -329,7 +329,7 @@ class UserActivityWorker(LoopQueueProcessingWorker): """ sleep_delay = 10 sleep_only_if_empty = True - client_id_map = {} # type: Dict[str, int] + client_id_map: Dict[str, int] = {} def start(self) -> None: # For our unit tests to make sense, we need to clear this on startup. @@ -337,7 +337,7 @@ class UserActivityWorker(LoopQueueProcessingWorker): super().start() def consume_batch(self, user_activity_events: List[Dict[str, Any]]) -> None: - uncommitted_events = {} # type: Dict[Tuple[int, int, str], Tuple[int, float]] + uncommitted_events: Dict[Tuple[int, int, str], Tuple[int, float]] = {} # First, we drain the queue of all user_activity events and # deduplicate them for insertion into the database. @@ -400,9 +400,9 @@ class MissedMessageWorker(QueueProcessingWorker): # mechanism for that case. 
TIMER_FREQUENCY = 5 BATCH_DURATION = 120 - timer_event = None # type: Optional[Timer] - events_by_recipient = defaultdict(list) # type: Dict[int, List[Dict[str, Any]]] - batch_start_by_recipient = {} # type: Dict[int, float] + timer_event: Optional[Timer] = None + events_by_recipient: Dict[int, List[Dict[str, Any]]] = defaultdict(list) + batch_start_by_recipient: Dict[int, float] = {} def consume(self, event: Dict[str, Any]) -> None: logging.debug("Received missedmessage_emails event: %s" % (event,)) diff --git a/zilencer/management/commands/add_mock_conversation.py b/zilencer/management/commands/add_mock_conversation.py index 6c12bed1f7..de97aef81a 100644 --- a/zilencer/management/commands/add_mock_conversation.py +++ b/zilencer/management/commands/add_mock_conversation.py @@ -49,7 +49,7 @@ From image editing program: bulk_add_subscriptions([stream], list(UserProfile.objects.filter(realm=realm))) - staged_messages = [ + staged_messages: List[Dict[str, Any]] = [ {'sender': starr, 'content': "Hey @**Bel Fisher**, check out Zulip's Markdown formatting! " "You can have:\n* bulleted lists\n * with sub-bullets too\n" @@ -75,7 +75,7 @@ From image editing program: 'content': 'https://twitter.com/gvanrossum/status/786661035637772288'}, {'sender': fisher, 'content': "Oops, the Twitter bot I set up shouldn't be posting here. Let me go fix that."}, - ] # type: List[Dict[str, Any]] + ] messages = [internal_prep_stream_message( realm, message['sender'], stream, diff --git a/zilencer/management/commands/populate_db.py b/zilencer/management/commands/populate_db.py index 55e2d4c38f..b3a8df9a36 100644 --- a/zilencer/management/commands/populate_db.py +++ b/zilencer/management/commands/populate_db.py @@ -64,7 +64,7 @@ def clear_database() -> None: behaviors=default_cache["OPTIONS"], ).flush_all() - model = None # type: Any # Hack because mypy doesn't know these are model classes + model: Any = None # Hack because mypy doesn't know these are model classes for model in [Message, Stream, UserProfile, Recipient, Realm, Subscription, Huddle, UserMessage, Client, DefaultStream]: @@ -310,17 +310,19 @@ class Command(BaseCommand): # Create public streams. stream_list = ["Verona", "Denmark", "Scotland", "Venice", "Rome"] - stream_dict = { + stream_dict: Dict[str, Dict[str, Any]] = { "Verona": {"description": "A city in Italy"}, "Denmark": {"description": "A Scandinavian country"}, "Scotland": {"description": "Located in the United Kingdom"}, "Venice": {"description": "A northeastern Italian city"}, "Rome": {"description": "Yet another Italian city", "is_web_public": True} - } # type: Dict[str, Dict[str, Any]] + } bulk_create_streams(zulip_realm, stream_dict) - recipient_streams = [Stream.objects.get(name=name, realm=zulip_realm).id - for name in stream_list] # type: List[int] + recipient_streams: List[int] = [ + Stream.objects.get(name=name, realm=zulip_realm).id + for name in stream_list + ] # Create subscriptions to streams. The following # algorithm will give each of the users a different but @@ -329,9 +331,9 @@ class Command(BaseCommand): # subscriptions to make sure test data is consistent # across platforms. 
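The MissedMessageWorker and UserActivityWorker hunks above annotate mutable class-level attributes in place, a direct translation of the old comments. As an aside rather than a change this diff makes, PEP 526 also provides typing.ClassVar for attributes that are meant to stay on the class, so a checker can reject accidental assignment through an instance; a sketch under that assumption, with an invented class name:

    from collections import defaultdict
    from typing import ClassVar, Dict, List

    class BatchingWorker:
        # Shared class-level state; ClassVar records that intent, and a checker
        # will flag `self.events_by_recipient = ...` on an instance.
        events_by_recipient: ClassVar[Dict[int, List[dict]]] = defaultdict(list)

        # Declared per-instance attribute, given its value in __init__.
        consumed_since_last_emptied: int

        def __init__(self) -> None:
            self.consumed_since_last_emptied = 0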
- subscriptions_list = [] # type: List[Tuple[UserProfile, Recipient]] - profiles = UserProfile.objects.select_related().filter( - is_bot=False).order_by("email") # type: Sequence[UserProfile] + subscriptions_list: List[Tuple[UserProfile, Recipient]] = [] + profiles: Sequence[UserProfile] = UserProfile.objects.select_related().filter( + is_bot=False).order_by("email") if options["test_suite"]: subscriptions_map = { @@ -366,9 +368,9 @@ class Command(BaseCommand): r = Recipient.objects.get(type=Recipient.STREAM, type_id=type_id) subscriptions_list.append((profile, r)) - subscriptions_to_add = [] # type: List[Subscription] + subscriptions_to_add: List[Subscription] = [] event_time = timezone_now() - all_subscription_logs = [] # type: (List[RealmAuditLog]) + all_subscription_logs: (List[RealmAuditLog]) = [] i = 0 for profile, recipient in subscriptions_list: @@ -402,10 +404,10 @@ class Command(BaseCommand): favorite_food = try_add_realm_custom_profile_field(zulip_realm, "Favorite food", CustomProfileField.SHORT_TEXT, hint="Or drink, if you'd prefer") - field_data = { + field_data: ProfileFieldData = { 'vim': {'text': 'Vim', 'order': '1'}, 'emacs': {'text': 'Emacs', 'order': '2'}, - } # type: ProfileFieldData + } favorite_editor = try_add_realm_custom_profile_field(zulip_realm, "Favorite editor", CustomProfileField.CHOICE, @@ -450,7 +452,7 @@ class Command(BaseCommand): Recipient.objects.filter(type=Recipient.STREAM)] # Extract a list of all users - user_profiles = list(UserProfile.objects.filter(is_bot=False)) # type: List[UserProfile] + user_profiles: List[UserProfile] = list(UserProfile.objects.filter(is_bot=False)) # Create a test realm emoji. IMAGE_FILE_PATH = static_path('images/test-images/checkbox.png') @@ -460,7 +462,7 @@ class Command(BaseCommand): if not options["test_suite"]: # Populate users with some bar data for user in user_profiles: - status = UserPresence.ACTIVE # type: int + status: int = UserPresence.ACTIVE date = timezone_now() client = get_client("website") if user.full_name[0] <= 'H': @@ -494,7 +496,7 @@ class Command(BaseCommand): cache_set(url, urls_with_preview_data[url], PREVIEW_CACHE_NAME) threads = options["threads"] - jobs = [] # type: List[Tuple[int, List[List[int]], Dict[str, Any], Callable[[str], int], int]] + jobs: List[Tuple[int, List[List[int]], Dict[str, Any], Callable[[str], int], int]] = [] for i in range(threads): count = options["num_messages"] // threads if i < options["num_messages"] % threads: @@ -526,7 +528,7 @@ class Command(BaseCommand): # suite fast, don't add these users and subscriptions # when running populate_db for the test suite - zulip_stream_dict = { + zulip_stream_dict: Dict[str, Dict[str, Any]] = { "devel": {"description": "For developing"}, "all": {"description": "For **everything**"}, "announce": {"description": "For announcements", @@ -537,7 +539,7 @@ class Command(BaseCommand): "test": {"description": "For testing `code`"}, "errors": {"description": "For errors"}, "sales": {"description": "For sales discussion"} - } # type: Dict[str, Dict[str, Any]] + } # Calculate the maximum number of digits in any extra stream's # number, since a stream with name "Extra Stream 3" could show @@ -597,7 +599,7 @@ class Command(BaseCommand): call_command('populate_analytics_db') self.stdout.write("Successfully populated test database.\n") -recipient_hash = {} # type: Dict[int, Recipient] +recipient_hash: Dict[int, Recipient] = {} def get_recipient_by_id(rid: int) -> Recipient: if rid in recipient_hash: return recipient_hash[rid] @@ -621,11 +623,12 @@ 
def generate_and_send_messages(data: Tuple[int, Sequence[Sequence[int]], Mapping random.shuffle(dialog) texts = itertools.cycle(dialog) - recipient_streams = [klass.id for klass in - Recipient.objects.filter(type=Recipient.STREAM)] # type: List[int] - recipient_huddles = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)] # type: List[int] + recipient_streams: List[int] = [ + klass.id for klass in Recipient.objects.filter(type=Recipient.STREAM) + ] + recipient_huddles: List[int] = [h.id for h in Recipient.objects.filter(type=Recipient.HUDDLE)] - huddle_members = {} # type: Dict[int, List[int]] + huddle_members: Dict[int, List[int]] = {} for h in recipient_huddles: huddle_members[h] = [s.user_profile.id for s in Subscription.objects.filter(recipient_id=h)] @@ -633,10 +636,10 @@ def generate_and_send_messages(data: Tuple[int, Sequence[Sequence[int]], Mapping message_batch_size = options['batch_size'] num_messages = 0 random_max = 1000000 - recipients = {} # type: Dict[int, Tuple[int, int, Dict[str, Any]]] + recipients: Dict[int, Tuple[int, int, Dict[str, Any]]] = {} messages = [] while num_messages < tot_messages: - saved_data = {} # type: Dict[str, Any] + saved_data: Dict[str, Any] = {} message = Message() message.sending_client = get_client('populate_db') diff --git a/zilencer/management/commands/profile_request.py b/zilencer/management/commands/profile_request.py index 2779803544..7ea60dd90e 100644 --- a/zilencer/management/commands/profile_request.py +++ b/zilencer/management/commands/profile_request.py @@ -29,7 +29,7 @@ class MockRequest(HttpRequest): "num_before": 1200, "num_after": 200 } - self.GET = {} # type: Dict[Any, Any] + self.GET: Dict[Any, Any] = {} self.session = MockSession() def get_full_path(self) -> str: diff --git a/zilencer/models.py b/zilencer/models.py index 2f997cadd7..b913b64759 100644 --- a/zilencer/models.py +++ b/zilencer/models.py @@ -13,12 +13,12 @@ class RemoteZulipServer(models.Model): API_KEY_LENGTH = 64 HOSTNAME_MAX_LENGTH = 128 - uuid = models.CharField(max_length=UUID_LENGTH, unique=True) # type: str - api_key = models.CharField(max_length=API_KEY_LENGTH) # type: str + uuid: str = models.CharField(max_length=UUID_LENGTH, unique=True) + api_key: str = models.CharField(max_length=API_KEY_LENGTH) - hostname = models.CharField(max_length=HOSTNAME_MAX_LENGTH) # type: str - contact_email = models.EmailField(blank=True, null=False) # type: str - last_updated = models.DateTimeField('last updated', auto_now=True) # type: datetime.datetime + hostname: str = models.CharField(max_length=HOSTNAME_MAX_LENGTH) + contact_email: str = models.EmailField(blank=True, null=False) + last_updated: datetime.datetime = models.DateTimeField('last updated', auto_now=True) def __str__(self) -> str: return "" % (self.hostname, self.uuid[0:12]) @@ -28,9 +28,9 @@ class RemoteZulipServer(models.Model): # Variant of PushDeviceToken for a remote server. 
class RemotePushDeviceToken(AbstractPushDeviceToken): - server = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # type: RemoteZulipServer + server: RemoteZulipServer = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # The user id on the remote server for this device device this is - user_id = models.BigIntegerField(db_index=True) # type: int + user_id: int = models.BigIntegerField(db_index=True) class Meta: unique_together = ("server", "user_id", "kind", "token") @@ -42,19 +42,19 @@ class RemoteRealmAuditLog(AbstractRealmAuditLog): """Synced audit data from a remote Zulip server, used primarily for billing. See RealmAuditLog and AbstractRealmAuditLog for details. """ - server = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # type: RemoteZulipServer - realm_id = models.IntegerField(db_index=True) # type: int + server: RemoteZulipServer = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) + realm_id: int = models.IntegerField(db_index=True) # The remote_id field lets us deduplicate data from the remote server - remote_id = models.IntegerField(db_index=True) # type: int + remote_id: int = models.IntegerField(db_index=True) def __str__(self) -> str: return "" % ( self.server, self.event_type, self.event_time, self.id) class RemoteInstallationCount(BaseCount): - server = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # type: RemoteZulipServer + server: RemoteZulipServer = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # The remote_id field lets us deduplicate data from the remote server - remote_id = models.IntegerField(db_index=True) # type: int + remote_id: int = models.IntegerField(db_index=True) class Meta: unique_together = ("server", "property", "subgroup", "end_time") @@ -67,10 +67,10 @@ class RemoteInstallationCount(BaseCount): # We can't subclass RealmCount because we only have a realm_id here, not a foreign key. 
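The zilencer model hunks follow the same convention as the rest of the diff: the annotation states the value type a field presents on saved model instances (str, int, datetime), while the right-hand side stays the models.*Field(...) call, exactly as the old type comments did. At runtime the class attribute is a Field object rather than a str or int, so how strictly a checker verifies the right-hand side against the annotation depends on whether Django-aware stubs are in use; treat the sketch below as the shape of the conversion under the assumption of a configured Django project, not a statement about Zulip's mypy setup. The model name and lengths are illustrative:

    import datetime
    from django.db import models

    class RemoteServerSketch(models.Model):
        # Annotate with the type instances expose; keep the Field call unchanged.
        uuid: str = models.CharField(max_length=36, unique=True)
        user_id: int = models.BigIntegerField(db_index=True)
        last_updated: datetime.datetime = models.DateTimeField(auto_now=True)

        class Meta:
            app_label = "example"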
class RemoteRealmCount(BaseCount): - server = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) # type: RemoteZulipServer - realm_id = models.IntegerField(db_index=True) # type: int + server: RemoteZulipServer = models.ForeignKey(RemoteZulipServer, on_delete=models.CASCADE) + realm_id: int = models.IntegerField(db_index=True) # The remote_id field lets us deduplicate data from the remote server - remote_id = models.IntegerField(db_index=True) # type: int + remote_id: int = models.IntegerField(db_index=True) class Meta: unique_together = ("server", "realm_id", "property", "subgroup", "end_time") diff --git a/zilencer/urls.py b/zilencer/urls.py index 19dd1816a3..77b0b281d9 100644 --- a/zilencer/urls.py +++ b/zilencer/urls.py @@ -5,7 +5,7 @@ from django.conf.urls import include, url import zilencer.views from zerver.lib.rest import rest_dispatch -i18n_urlpatterns = [] # type: Any +i18n_urlpatterns: Any = [] # Zilencer views following the REST API style v1_api_and_json_patterns = [ diff --git a/zproject/backends.py b/zproject/backends.py index 63f498b318..758970bc8f 100644 --- a/zproject/backends.py +++ b/zproject/backends.py @@ -553,7 +553,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend): def sync_custom_profile_fields_from_ldap(self, user_profile: UserProfile, ldap_user: _LDAPUser) -> None: - values_by_var_name = {} # type: Dict[str, Union[int, str, List[int]]] + values_by_var_name: Dict[str, Union[int, str, List[int]]] = {} for attr, ldap_attr in settings.AUTH_LDAP_USER_ATTR_MAP.items(): if not attr.startswith('custom_profile_field__'): continue @@ -567,7 +567,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend): continue values_by_var_name[var_name] = value - fields_by_var_name = {} # type: Dict[str, CustomProfileField] + fields_by_var_name: Dict[str, CustomProfileField] = {} custom_profile_fields = custom_profile_fields_for_realm(user_profile.realm.id) for field in custom_profile_fields: var_name = '_'.join(field.name.lower().split(' ')) @@ -578,7 +578,7 @@ class ZulipLDAPAuthBackendBase(ZulipAuthMixin, LDAPBackend): var_name = '_'.join(data['name'].lower().split(' ')) existing_values[var_name] = data['value'] - profile_data = [] # type: List[Dict[str, Union[int, str, List[int]]]] + profile_data: List[Dict[str, Union[int, str, List[int]]]] = [] for var_name, value in values_by_var_name.items(): try: field = fields_by_var_name[var_name] @@ -644,7 +644,7 @@ class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase): user account in the realm (assuming the realm is configured to allow that email address to sign up). 
""" - return_data = {} # type: Dict[str, Any] + return_data: Dict[str, Any] = {} username = self.user_email_from_ldapuser(username, ldap_user) @@ -694,7 +694,7 @@ class ZulipLDAPAuthBackend(ZulipLDAPAuthBackendBase): except JsonableError as e: raise ZulipLDAPException(e.msg) - opts = {} # type: Dict[str, Any] + opts: Dict[str, Any] = {} if self._prereg_user: invited_as = self._prereg_user.invited_as realm_creation = self._prereg_user.realm_creation @@ -724,7 +724,7 @@ class ZulipLDAPUser(_LDAPUser): """ def __init__(self, *args: Any, **kwargs: Any) -> None: - self.realm = kwargs['realm'] # type: Realm + self.realm: Realm = kwargs['realm'] del kwargs['realm'] super().__init__(*args, **kwargs) @@ -881,7 +881,7 @@ class ExternalAuthMethod(ABC): """ auth_backend_name = "undeclared" name = "undeclared" - display_icon = None # type: Optional[str] + display_icon: Optional[str] = None # Used to determine how to order buttons on login form, backend with # higher sort order are displayed first. @@ -899,7 +899,7 @@ class ExternalAuthMethod(ABC): that are all serviced by that backend - our SAML backend is an example of that. """ -EXTERNAL_AUTH_METHODS = [] # type: List[Type[ExternalAuthMethod]] +EXTERNAL_AUTH_METHODS: List[Type[ExternalAuthMethod]] = [] def external_auth_method(cls: Type[ExternalAuthMethod]) -> Type[ExternalAuthMethod]: assert issubclass(cls, ExternalAuthMethod) @@ -1083,7 +1083,7 @@ def social_auth_associate_user( social_auth_finish, as kwargs. """ partial_token = backend.strategy.request_data().get('partial_token') - return_data = {} # type: Dict[str, Any] + return_data: Dict[str, Any] = {} user_profile = social_associate_user_helper( backend, return_data, *args, **kwargs) @@ -1292,7 +1292,7 @@ class GitHubAuthBackend(SocialAuthMixin, GithubOAuth2): # case without any verified emails emails = [] - verified_emails = [] # type: List[str] + verified_emails: List[str] = [] for email_obj in self.filter_usable_emails(emails): # social_associate_user_helper assumes that the first email in # verified_emails is primary. @@ -1372,7 +1372,7 @@ class GoogleAuthBackend(SocialAuthMixin, GoogleOAuth2): display_icon = "/static/images/landing-page/logos/googl_e-icon.png" def get_verified_emails(self, *args: Any, **kwargs: Any) -> List[str]: - verified_emails = [] # type: List[str] + verified_emails: List[str] = [] details = kwargs["response"] email_verified = details.get("email_verified") if email_verified: @@ -1559,7 +1559,7 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth): @classmethod def dict_representation(cls, realm: Optional[Realm]=None) -> List[ExternalAuthMethodDictT]: - result = [] # type: List[ExternalAuthMethodDictT] + result: List[ExternalAuthMethodDictT] = [] for idp_name, idp_dict in settings.SOCIAL_AUTH_SAML_ENABLED_IDPS.items(): if realm and not cls.validate_idp_for_subdomain(idp_name, realm.subdomain): continue @@ -1567,13 +1567,13 @@ class SAMLAuthBackend(SocialAuthMixin, SAMLAuth): # If queried without a realm, only return IdPs that can be used on all realms. 
continue - saml_dict = dict( + saml_dict: ExternalAuthMethodDictT = dict( name='saml:{}'.format(idp_name), display_name=idp_dict.get('display_name', cls.auth_backend_name), display_icon=idp_dict.get('display_icon', cls.display_icon), login_url=reverse('login-social-extra-arg', args=('saml', idp_name)), signup_url=reverse('signup-social-extra-arg', args=('saml', idp_name)), - ) # type: ExternalAuthMethodDictT + ) result.append(saml_dict) return result @@ -1592,7 +1592,7 @@ def get_external_method_dicts(realm: Optional[Realm]=None) -> List[ExternalAuthM Returns a list of dictionaries that represent social backends, sorted in the order in which they should be displayed. """ - result = [] # type: List[ExternalAuthMethodDictT] + result: List[ExternalAuthMethodDictT] = [] for backend in EXTERNAL_AUTH_METHODS: # EXTERNAL_AUTH_METHODS is already sorted in the correct order, # so we don't need to worry about sorting here. @@ -1601,11 +1601,11 @@ def get_external_method_dicts(realm: Optional[Realm]=None) -> List[ExternalAuthM return result -AUTH_BACKEND_NAME_MAP = { +AUTH_BACKEND_NAME_MAP: Dict[str, Any] = { 'Dev': DevAuthBackend, 'Email': EmailAuthBackend, 'LDAP': ZulipLDAPAuthBackend, -} # type: Dict[str, Any] +} for external_method in EXTERNAL_AUTH_METHODS: AUTH_BACKEND_NAME_MAP[external_method.auth_backend_name] = external_method diff --git a/zproject/default_settings.py b/zproject/default_settings.py index d88cc0700d..c6eaaad19e 100644 --- a/zproject/default_settings.py +++ b/zproject/default_settings.py @@ -14,7 +14,7 @@ else: # install of the Zulip server. # Extra HTTP "Host" values to allow (standard ones added in settings.py) -ALLOWED_HOSTS = [] # type: List[str] +ALLOWED_HOSTS: List[str] = [] # Basic email settings NOREPLY_EMAIL_ADDRESS = "noreply@" + EXTERNAL_HOST.split(":")[0] @@ -24,17 +24,17 @@ PHYSICAL_ADDRESS = '' FAKE_EMAIL_DOMAIN = EXTERNAL_HOST.split(":")[0] # SMTP settings -EMAIL_HOST = None # type: Optional[str] +EMAIL_HOST: Optional[str] = None # Other settings, like EMAIL_HOST_USER, EMAIL_PORT, and EMAIL_USE_TLS, # we leave up to Django's defaults. # LDAP auth AUTH_LDAP_SERVER_URI = "" -LDAP_EMAIL_ATTR = None # type: Optional[str] -AUTH_LDAP_USERNAME_ATTR = None # type: Optional[str] -AUTH_LDAP_REVERSE_EMAIL_SEARCH = None # type: Optional[LDAPSearch] +LDAP_EMAIL_ATTR: Optional[str] = None +AUTH_LDAP_USERNAME_ATTR: Optional[str] = None +AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None # AUTH_LDAP_CONNECTION_OPTIONS: we set ldap.OPT_REFERRALS in settings.py if unset. -AUTH_LDAP_CONNECTION_OPTIONS = {} # type: Dict[int, object] +AUTH_LDAP_CONNECTION_OPTIONS: Dict[int, object] = {} # Disable django-auth-ldap caching, to prevent problems with OU changes. AUTH_LDAP_CACHE_TIMEOUT = 0 # Disable syncing user on each login; Using sync_ldap_user_data cron is recommended. @@ -42,51 +42,51 @@ AUTH_LDAP_ALWAYS_UPDATE_USER = False # Development-only settings for fake LDAP authentication; used to # support local development of LDAP auth without an LDAP server. # Detailed docs in zproject/dev_settings.py. -FAKE_LDAP_MODE = None # type: Optional[str] +FAKE_LDAP_MODE: Optional[str] = None FAKE_LDAP_NUM_USERS = 8 # Social auth; we support providing values for some of these # settings in zulip-secrets.conf instead of settings.py in development. 
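One converted line above quotes the class name, AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None, presumably because LDAPSearch is visible to the type checker but not imported unconditionally at runtime; a string annotation keeps the settings module importable either way. The usual shape of that arrangement is sketched below; the TYPE_CHECKING guard is the standard typing idiom, shown here as an assumption rather than a claim about zproject's actual imports:

    from typing import TYPE_CHECKING, Optional

    if TYPE_CHECKING:
        # Imported only while type checking; never executed at runtime.
        from django_auth_ldap.config import LDAPSearch

    # The quoted name is resolved by the checker, not by the interpreter.
    AUTH_LDAP_REVERSE_EMAIL_SEARCH: Optional["LDAPSearch"] = None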
SOCIAL_AUTH_GITHUB_KEY = get_secret('social_auth_github_key', development_only=True) -SOCIAL_AUTH_GITHUB_ORG_NAME = None # type: Optional[str] -SOCIAL_AUTH_GITHUB_TEAM_ID = None # type: Optional[str] +SOCIAL_AUTH_GITHUB_ORG_NAME: Optional[str] = None +SOCIAL_AUTH_GITHUB_TEAM_ID: Optional[str] = None SOCIAL_AUTH_GITLAB_KEY = get_secret('social_auth_gitlab_key', development_only=True) -SOCIAL_AUTH_SUBDOMAIN = None # type: Optional[str] +SOCIAL_AUTH_SUBDOMAIN: Optional[str] = None SOCIAL_AUTH_AZUREAD_OAUTH2_SECRET = get_secret('azure_oauth2_secret') SOCIAL_AUTH_GOOGLE_KEY = get_secret('social_auth_google_key', development_only=True) # SAML: -SOCIAL_AUTH_SAML_SP_ENTITY_ID = None # type: Optional[str] +SOCIAL_AUTH_SAML_SP_ENTITY_ID: Optional[str] = None SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = '' SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = '' -SOCIAL_AUTH_SAML_ORG_INFO = None # type: Optional[Dict[str, Dict[str, str]]] -SOCIAL_AUTH_SAML_TECHNICAL_CONTACT = None # type: Optional[Dict[str, str]] -SOCIAL_AUTH_SAML_SUPPORT_CONTACT = None # type: Optional[Dict[str, str]] -SOCIAL_AUTH_SAML_ENABLED_IDPS = {} # type: Dict[str, Dict[str, str]] -SOCIAL_AUTH_SAML_SECURITY_CONFIG = {} # type: Dict[str, Any] +SOCIAL_AUTH_SAML_ORG_INFO: Optional[Dict[str, Dict[str, str]]] = None +SOCIAL_AUTH_SAML_TECHNICAL_CONTACT: Optional[Dict[str, str]] = None +SOCIAL_AUTH_SAML_SUPPORT_CONTACT: Optional[Dict[str, str]] = None +SOCIAL_AUTH_SAML_ENABLED_IDPS: Dict[str, Dict[str, str]] = {} +SOCIAL_AUTH_SAML_SECURITY_CONFIG: Dict[str, Any] = {} # Set this to True to enforce that any configured IdP needs to specify # the limit_to_subdomains setting to be considered valid: SAML_REQUIRE_LIMIT_TO_SUBDOMAINS = False # Historical name for SOCIAL_AUTH_GITHUB_KEY; still allowed in production. -GOOGLE_OAUTH2_CLIENT_ID = None # type: Optional[str] +GOOGLE_OAUTH2_CLIENT_ID: Optional[str] = None # Other auth -SSO_APPEND_DOMAIN = None # type: Optional[str] +SSO_APPEND_DOMAIN: Optional[str] = None # Email gateway EMAIL_GATEWAY_PATTERN = '' -EMAIL_GATEWAY_LOGIN = None # type: Optional[str] -EMAIL_GATEWAY_IMAP_SERVER = None # type: Optional[str] -EMAIL_GATEWAY_IMAP_PORT = None # type: Optional[int] -EMAIL_GATEWAY_IMAP_FOLDER = None # type: Optional[str] +EMAIL_GATEWAY_LOGIN: Optional[str] = None +EMAIL_GATEWAY_IMAP_SERVER: Optional[str] = None +EMAIL_GATEWAY_IMAP_PORT: Optional[int] = None +EMAIL_GATEWAY_IMAP_FOLDER: Optional[str] = None # Not documented for in /etc/zulip/settings.py, since it's rarely needed. -EMAIL_GATEWAY_EXTRA_PATTERN_HACK = None # type: Optional[str] +EMAIL_GATEWAY_EXTRA_PATTERN_HACK: Optional[str] = None # Error reporting ERROR_REPORTING = True BROWSER_ERROR_REPORTING = False LOGGING_SHOW_MODULE = False LOGGING_SHOW_PID = False -SLOW_QUERY_LOGS_STREAM = None # type: Optional[str] +SLOW_QUERY_LOGS_STREAM: Optional[str] = None # File uploads and avatars DEFAULT_AVATAR_URI = '/static/images/default-avatar.png' @@ -94,7 +94,7 @@ DEFAULT_LOGO_URI = '/static/images/logo/zulip-org-logo.png' S3_AVATAR_BUCKET = '' S3_AUTH_UPLOADS_BUCKET = '' S3_REGION = '' -LOCAL_UPLOADS_DIR = None # type: Optional[str] +LOCAL_UPLOADS_DIR: Optional[str] = None MAX_FILE_UPLOAD_SIZE = 25 # Jitsi Meet video call integration; set to None to disable integration. 
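Most of the settings converted above default to None, and the point of writing the Optional type explicitly is that downstream code then has to handle the unset case before treating the value as a str or int. A toy illustration, using one setting name from the hunk and a hypothetical helper function:

    from typing import Optional

    EMAIL_GATEWAY_LOGIN: Optional[str] = None

    def gateway_login_domain() -> Optional[str]:
        # Without the None check, calling .split() here would be rejected by a
        # checker, since the declared type admits None.
        if EMAIL_GATEWAY_LOGIN is None:
            return None
        return EMAIL_GATEWAY_LOGIN.split("@")[-1]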
@@ -120,11 +120,11 @@ REMOTE_POSTGRES_SSLMODE = '' THUMBOR_URL = '' THUMBOR_SERVES_CAMO = False THUMBNAIL_IMAGES = False -SENDFILE_BACKEND = None # type: Optional[str] +SENDFILE_BACKEND: Optional[str] = None # ToS/Privacy templates -PRIVACY_POLICY = None # type: Optional[str] -TERMS_OF_SERVICE = None # type: Optional[str] +PRIVACY_POLICY: Optional[str] = None +TERMS_OF_SERVICE: Optional[str] = None # Security ENABLE_FILE_LINKS = False @@ -135,7 +135,7 @@ NAME_CHANGES_DISABLED = False AVATAR_CHANGES_DISABLED = False PASSWORD_MIN_LENGTH = 6 PASSWORD_MIN_GUESSES = 10000 -PUSH_NOTIFICATION_BOUNCER_URL = None # type: Optional[str] +PUSH_NOTIFICATION_BOUNCER_URL: Optional[str] = None PUSH_NOTIFICATION_REDACT_CONTENT = False SUBMIT_USAGE_STATISTICS = True RATE_LIMITING = True @@ -174,11 +174,11 @@ DEVELOPMENT_LOG_EMAILS = DEVELOPMENT # ERROR_BOT sends Django exceptions to an "errors" stream in the # system realm. -ERROR_BOT = None # type: Optional[str] +ERROR_BOT: Optional[str] = None # These are extra bot users for our end-to-end Nagios message # sending tests. -NAGIOS_STAGING_SEND_BOT = None # type: Optional[str] -NAGIOS_STAGING_RECEIVE_BOT = None # type: Optional[str] +NAGIOS_STAGING_SEND_BOT: Optional[str] = None +NAGIOS_STAGING_RECEIVE_BOT: Optional[str] = None # SYSTEM_BOT_REALM would be a constant always set to 'zulip', # except that it isn't that on zulipchat.com. We will likely do a # migration and eliminate this parameter in the future. @@ -212,7 +212,7 @@ SYSTEM_ONLY_REALMS = {"zulip"} # their usual subdomains. Keys are realm string_ids (aka subdomains), # and values are alternate hosts. # The values will also be added to ALLOWED_HOSTS. -REALM_HOSTS = {} # type: Dict[str, str] +REALM_HOSTS: Dict[str, str] = {} # Whether the server is using the Pgroonga full-text search # backend. Plan is to turn this on for everyone after further @@ -221,7 +221,7 @@ USING_PGROONGA = False # How Django should send emails. Set for most contexts in settings.py, but # available for sysadmin override in unusual cases. -EMAIL_BACKEND = None # type: Optional[str] +EMAIL_BACKEND: Optional[str] = None # Whether to give admins a warning in the web app that email isn't set up. # Set in settings.py when email isn't configured. @@ -244,7 +244,7 @@ POST_MIGRATION_CACHE_FLUSHING = False # Settings for APNS. Only needed on push.zulipchat.com or if # rebuilding the mobile app with a different push notifications # server. -APNS_CERT_FILE = None # type: Optional[str] +APNS_CERT_FILE: Optional[str] = None APNS_SANDBOX = True APNS_TOPIC = 'org.zulip.Zulip' ZULIP_IOS_APP_ID = 'org.zulip.Zulip' @@ -277,7 +277,7 @@ INVITES_NEW_REALM_LIMIT_DAYS = [(1, 100)] INVITES_NEW_REALM_DAYS = 7 # Controls for which links are published in portico footers/headers/etc. -REGISTER_LINK_DISABLED = None # type: Optional[bool] +REGISTER_LINK_DISABLED: Optional[bool] = None LOGIN_LINK_DISABLED = False FIND_TEAM_LINK_DISABLED = True @@ -296,7 +296,7 @@ ROOT_DOMAIN_LANDING_PAGE = False # If using the Zephyr mirroring supervisord configuration, the # hostname to connect to in order to transfer credentials from webathena. -PERSONAL_ZMIRROR_SERVER = None # type: Optional[str] +PERSONAL_ZMIRROR_SERVER: Optional[str] = None # When security-relevant links in emails expire. CONFIRMATION_LINK_DEFAULT_VALIDITY_DAYS = 1 @@ -306,15 +306,15 @@ REALM_CREATION_LINK_VALIDITY_DAYS = 7 # Version number for ToS. Change this if you want to force every # user to click through to re-accept terms of service before using # Zulip again on the web. 
-TOS_VERSION = None # type: Optional[str] +TOS_VERSION: Optional[str] = None # Template to use when bumping TOS_VERSION to explain situation. -FIRST_TIME_TOS_TEMPLATE = None # type: Optional[str] +FIRST_TIME_TOS_TEMPLATE: Optional[str] = None # Hostname used for Zulip's statsd logging integration. STATSD_HOST = '' # Configuration for JWT auth. -JWT_AUTH_KEYS = {} # type: Dict[str, str] +JWT_AUTH_KEYS: Dict[str, str] = {} # https://docs.djangoproject.com/en/2.2/ref/settings/#std:setting-SERVER_EMAIL # Django setting for what from address to use in error emails. @@ -323,7 +323,7 @@ SERVER_EMAIL = ZULIP_ADMINISTRATOR ADMINS = (("Zulip Administrator", ZULIP_ADMINISTRATOR),) # From address for welcome emails. -WELCOME_EMAIL_SENDER = None # type: Optional[Dict[str, str]] +WELCOME_EMAIL_SENDER: Optional[Dict[str, str]] = None # Whether we should use users' own email addresses as the from # address when sending missed-message emails. Off by default # because some transactional email providers reject sending such @@ -333,11 +333,11 @@ SEND_MISSED_MESSAGE_EMAILS_AS_USER = False SEND_DIGEST_EMAILS = True # Used to change the Zulip logo in portico pages. -CUSTOM_LOGO_URL = None # type: Optional[str] +CUSTOM_LOGO_URL: Optional[str] = None # Random salt used when deterministically generating passwords in # development. -INITIAL_PASSWORD_SALT = None # type: Optional[str] +INITIAL_PASSWORD_SALT: Optional[str] = None # Settings configuring the special instrumention of the send_event # code path used in generating API documentation for /events. diff --git a/zproject/settings.py b/zproject/settings.py index 63f0e09969..e73c62b656 100644 --- a/zproject/settings.py +++ b/zproject/settings.py @@ -216,7 +216,7 @@ CORPORATE_ENABLED = 'corporate' in INSTALLED_APPS # We set it to None when running backend tests or populate_db. # We override the port number when running frontend tests. TORNADO_PROCESSES = int(get_config('application_server', 'tornado_processes', '1')) -TORNADO_SERVER = 'http://127.0.0.1:9993' # type: Optional[str] +TORNADO_SERVER: Optional[str] = 'http://127.0.0.1:9993' RUNNING_INSIDE_TORNADO = False AUTORELOAD = DEBUG @@ -261,7 +261,7 @@ SILENCED_SYSTEM_CHECKS = [ # We implement these options with a default DATABASES configuration # supporting peer authentication, with logic to override it as # appropriate if DEVELOPMENT or REMOTE_POSTGRES_HOST is set. -DATABASES = {"default": { +DATABASES: Dict[str, Dict[str, Any]] = {"default": { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'zulip', 'USER': 'zulip', @@ -274,7 +274,7 @@ DATABASES = {"default": { 'OPTIONS': { 'connection_factory': TimeTrackingConnection }, -}} # type: Dict[str, Dict[str, Any]] +}} if DEVELOPMENT: LOCAL_DATABASE_PASSWORD = get_secret("local_database_password") @@ -467,7 +467,7 @@ INTERNAL_BOTS = [{'var_name': 'NOTIFICATION_BOT', 'name': 'Welcome Bot'}] # Bots that are created for each realm like the reminder-bot goes here. -REALM_INTERNAL_BOTS = [] # type: List[Dict[str, str]] +REALM_INTERNAL_BOTS: List[Dict[str, str]] = [] # These are realm-internal bots that may exist in some organizations, # so configure power the setting, but should not be auto-created at this time. DISABLED_REALM_INTERNAL_BOTS = [ @@ -563,15 +563,15 @@ WEBPACK_LOADER = { ######################################################################## # List of callables that know how to import templates from various sources. 
-LOADERS = [ +LOADERS: List[Union[str, Tuple[object, ...]]] = [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', -] # type: List[Union[str, Tuple[object, ...]]] +] if PRODUCTION: # Template caching is a significant performance win in production. LOADERS = [('django.template.loaders.cached.Loader', LOADERS)] -base_template_engine_settings = { +base_template_engine_settings: Dict[str, Any] = { 'BACKEND': 'django.template.backends.jinja2.Jinja2', 'OPTIONS': { 'environment': 'zproject.jinja2.environment', @@ -585,7 +585,7 @@ base_template_engine_settings = { 'django.template.context_processors.i18n', ], }, -} # type: Dict[str, Any] +} default_template_engine_settings = deepcopy(base_template_engine_settings) default_template_engine_settings.update({ @@ -676,7 +676,7 @@ RETENTION_LOG_PATH = zulip_path("/var/log/zulip/message_retention.log") # We plan to replace it with RealmAuditLog, stored in the database, # everywhere that code mentioning it appears. if EVENT_LOGS_ENABLED: - EVENT_LOG_DIR = zulip_path("/home/zulip/logs/event_log") # type: Optional[str] + EVENT_LOG_DIR: Optional[str] = zulip_path("/home/zulip/logs/event_log") else: EVENT_LOG_DIR = None @@ -696,7 +696,7 @@ DEFAULT_ZULIP_HANDLERS = ( ['console', 'file', 'errors_file'] ) -LOGGING = { +LOGGING: Dict[str, Any] = { 'version': 1, 'disable_existing_loggers': False, 'formatters': { @@ -906,7 +906,7 @@ LOGGING = { 'propagate': False, }, } -} # type: Dict[str, Any] +} if DEVELOPMENT: CONTRIBUTOR_DATA_FILE_PATH = os.path.join(DEPLOY_ROOT, 'var/github-contributors.json')
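Two conversions in this final zproject/settings.py hunk are worth spelling out. EVENT_LOG_DIR is annotated only on the first branch of the if/else; a single declaration is enough, and the other branch is then checked against Optional[str] instead of being inferred as plain None. LOADERS shows the reverse concern: the literal alone would be inferred as a list of str, so the annotation widens it up front to admit the cached-loader tuple that production substitutes later. A condensed sketch of both, with the real conditions reduced to a plain boolean:

    from typing import List, Optional, Tuple, Union

    PRODUCTION = False

    # Annotate the first assignment; the else branch reuses the declaration.
    if PRODUCTION:
        EVENT_LOG_DIR: Optional[str] = "/var/log/example/event_log"
    else:
        EVENT_LOG_DIR = None

    # Declared wider than the initial literal so the later reassignment fits.
    LOADERS: List[Union[str, Tuple[object, ...]]] = [
        "django.template.loaders.filesystem.Loader",
        "django.template.loaders.app_directories.Loader",
    ]
    if PRODUCTION:
        LOADERS = [("django.template.loaders.cached.Loader", LOADERS)]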