mypy: Upgrade mypy from 1.4.1 to 1.5.1.

_default_manager is the same as objects on most of our models. But
when a model class is stored in a variable, the type system doesn’t
know which model the variable is referring to, so it can’t know that
objects even exists (Django doesn’t add it if the user added a custom
manager of a different name). django-stubs used to incorrectly assume
it exists unconditionally, but it no longer does.

Signed-off-by: Anders Kaseorg <anders@zulip.com>
Anders Kaseorg authored 2023-09-05 11:25:23 -07:00, committed by Tim Abbott
parent c99c8f4640
commit 0ce6dcb905
15 changed files with 89 additions and 78 deletions

View File

@@ -317,7 +317,7 @@ def do_increment_logging_stat(
else: # CountStat.HOUR:
end_time = ceiling_to_hour(event_time)
-row, created = table.objects.get_or_create(
+row, created = table._default_manager.get_or_create(
property=stat.property,
subgroup=subgroup,
end_time=end_time,

View File

@@ -168,7 +168,7 @@ class Command(BaseCommand):
id_args = {"stream": stream, "realm": realm}
for subgroup, values in fixture_data.items():
-table.objects.bulk_create(
+table._default_manager.bulk_create(
table(
property=stat.property,
subgroup=subgroup,

View File

@@ -182,7 +182,9 @@ class AnalyticsTestCase(ZulipTestCase):
) -> None:
if property is None:
property = self.current_property
-queryset = table.objects.filter(property=property, end_time=end_time).filter(**kwargs)
+queryset = table._default_manager.filter(property=property, end_time=end_time).filter(
+**kwargs
+)
if table is not InstallationCount:
if realm is None:
realm = self.default_realm
@@ -234,8 +236,8 @@ class AnalyticsTestCase(ZulipTestCase):
kwargs["realm"] = kwargs["stream"].realm
else:
kwargs["realm"] = self.default_realm
-self.assertEqual(table.objects.filter(**kwargs).count(), 1)
-self.assert_length(arg_values, table.objects.count())
+self.assertEqual(table._default_manager.filter(**kwargs).count(), 1)
+self.assert_length(arg_values, table._default_manager.count())
class TestProcessCountStat(AnalyticsTestCase):
@@ -1508,11 +1510,11 @@ class TestDeleteStats(AnalyticsTestCase):
analytics = apps.get_app_config("analytics")
for table in list(analytics.models.values()):
-self.assertTrue(table.objects.exists())
+self.assertTrue(table._default_manager.exists())
do_drop_all_analytics_tables()
for table in list(analytics.models.values()):
-self.assertFalse(table.objects.exists())
+self.assertFalse(table._default_manager.exists())
def test_do_drop_single_stat(self) -> None:
user = self.create_user()
@@ -1532,12 +1534,12 @@ class TestDeleteStats(AnalyticsTestCase):
analytics = apps.get_app_config("analytics")
for table in list(analytics.models.values()):
-self.assertTrue(table.objects.exists())
+self.assertTrue(table._default_manager.exists())
do_drop_single_stat("to_delete")
for table in list(analytics.models.values()):
-self.assertFalse(table.objects.filter(property="to_delete").exists())
-self.assertTrue(table.objects.filter(property="to_save").exists())
+self.assertFalse(table._default_manager.filter(property="to_delete").exists())
+self.assertTrue(table._default_manager.filter(property="to_save").exists())
class TestActiveUsersAudit(AnalyticsTestCase):

View File

@@ -465,17 +465,17 @@ CountT = TypeVar("CountT", bound=BaseCount)
def table_filtered_to_id(table: Type[CountT], key_id: int) -> QuerySet[CountT]:
if table == RealmCount:
-return table.objects.filter(realm_id=key_id)
+return table._default_manager.filter(realm_id=key_id)
elif table == UserCount:
-return table.objects.filter(user_id=key_id)
+return table._default_manager.filter(user_id=key_id)
elif table == StreamCount:
-return table.objects.filter(stream_id=key_id)
+return table._default_manager.filter(stream_id=key_id)
elif table == InstallationCount:
-return table.objects.all()
+return table._default_manager.all()
elif settings.ZILENCER_ENABLED and table == RemoteInstallationCount:
-return table.objects.filter(server_id=key_id)
+return table._default_manager.filter(server_id=key_id)
elif settings.ZILENCER_ENABLED and table == RemoteRealmCount:
-return table.objects.filter(realm_id=key_id)
+return table._default_manager.filter(realm_id=key_id)
else:
raise AssertionError(f"Unknown table: {table}")

View File

@@ -649,9 +649,9 @@ django-scim2==0.19.1 \
--hash=sha256:8126111160e76a880f6699babc5259f0345c9316c8018ce5dcc3f7579ccb9e89 \
--hash=sha256:845eaa64e72e4ddfe2aa54d21865721f8c1d59ecd9c06e72341eeb03ed6ba94e
# via -r requirements/common.in
django-stubs==4.2.3 \
--hash=sha256:dadab39b46d9ae8f37a8e879c590f39a9e042b565c03fa0c5a8f754b441b1f23 \
--hash=sha256:e30e2e4927ba14bec587ed2c686404b6b8e473cabe9baca445e7d2e1e0d7b14f
django-stubs==4.2.4 \
--hash=sha256:7d4a132c381519815e865c27a89eca41bcbd06056832507224816a43d75c601c \
--hash=sha256:834b60fd81510cce6b56c1c6c28bec3c504a418bc90ff7d0063fabe8ab9a7868
# via -r requirements/mypy.in
django-stubs-ext==4.2.2 \
--hash=sha256:c69d1cc46f1c4c3b7894b685a5022c29b2a36c7cfb52e23762eaf357ebfc2c98 \
@@ -1491,33 +1491,34 @@ multidict==6.0.4 \
# via
# aiohttp
# yarl
mypy==1.4.1 \
--hash=sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042 \
--hash=sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd \
--hash=sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2 \
--hash=sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01 \
--hash=sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7 \
--hash=sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3 \
--hash=sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816 \
--hash=sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3 \
--hash=sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc \
--hash=sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4 \
--hash=sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b \
--hash=sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8 \
--hash=sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c \
--hash=sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462 \
--hash=sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7 \
--hash=sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc \
--hash=sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258 \
--hash=sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b \
--hash=sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9 \
--hash=sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6 \
--hash=sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f \
--hash=sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1 \
--hash=sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828 \
--hash=sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878 \
--hash=sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f \
--hash=sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b
mypy==1.5.1 \
--hash=sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315 \
--hash=sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0 \
--hash=sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373 \
--hash=sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a \
--hash=sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161 \
--hash=sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275 \
--hash=sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693 \
--hash=sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb \
--hash=sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65 \
--hash=sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4 \
--hash=sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb \
--hash=sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243 \
--hash=sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14 \
--hash=sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4 \
--hash=sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1 \
--hash=sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a \
--hash=sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160 \
--hash=sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25 \
--hash=sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12 \
--hash=sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d \
--hash=sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92 \
--hash=sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770 \
--hash=sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2 \
--hash=sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70 \
--hash=sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb \
--hash=sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5 \
--hash=sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f
# via
# -r requirements/mypy.in
# django-stubs

View File

@@ -1,7 +1,7 @@
# After editing this file, you MUST afterward run
# /tools/update-locked-requirements to update requirements/dev.txt.
# See requirements/README.md for more detail.
-mypy==1.4.* # https://github.com/typeddjango/django-stubs/issues/1648
+mypy
boto3-stubs[s3,ses,sns,sqs]
lxml-stubs

View File

@@ -48,4 +48,4 @@ API_FEATURE_LEVEL = 209
# historical commits sharing the same major version, in which case a
# minor version bump suffices.
-PROVISION_VERSION = (247, 9)
+PROVISION_VERSION = (248, 0)

View File

@@ -375,16 +375,16 @@ def do_delete_all_realm_attachments(realm: Realm, *, batch_size: int = 1000) ->
last_id = 0
while True:
to_delete = (
-obj_class.objects.filter(realm_id=realm.id, id__gt=last_id) # type: ignore[misc] # Does not recognize shared 'id' PK column
-.order_by("id")
-.values_list("id", "path_id")[:batch_size]
+obj_class._default_manager.filter(realm_id=realm.id, pk__gt=last_id)
+.order_by("pk")
+.values_list("pk", "path_id")[:batch_size]
)
if len(to_delete) > 0:
delete_message_attachments([row[1] for row in to_delete])
last_id = to_delete[len(to_delete) - 1][0]
if len(to_delete) < batch_size:
break
-obj_class.objects.filter(realm=realm).delete()
+obj_class._default_manager.filter(realm=realm).delete()
def do_scrub_realm(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:

View File

@@ -186,7 +186,7 @@ def bulk_set_users_or_streams_recipient_fields(
if result is not None:
result.recipient = recipient
objects_to_update.add(result)
-model.objects.bulk_update(objects_to_update, ["recipient"])
+model._default_manager.bulk_update(objects_to_update, ["recipient"])
# This is only used in populate_db, so doesn't really need tests

View File

@@ -298,7 +298,9 @@ def send_apple_push_notification(
)
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
-DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
+DeviceTokenClass._default_manager.filter(
+token=device.token, kind=DeviceTokenClass.APNS
+).delete()
else:
logger.warning(
"APNs: Failed to send for user %s to device %s: %s",
@@ -480,7 +482,7 @@ def send_android_push_notification(
if reg_id == new_reg_id:
# I'm not sure if this should happen. In any case, not really actionable.
logger.warning("GCM: Got canonical ref but it already matches our ID %s!", reg_id)
-elif not DeviceTokenClass.objects.filter(
+elif not DeviceTokenClass._default_manager.filter(
token=new_reg_id, kind=DeviceTokenClass.GCM
).count():
# This case shouldn't happen; any time we get a canonical ref it should have been
@@ -492,14 +494,16 @@ def send_android_push_notification(
new_reg_id,
reg_id,
)
-DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).update(
-token=new_reg_id
-)
+DeviceTokenClass._default_manager.filter(
+token=reg_id, kind=DeviceTokenClass.GCM
+).update(token=new_reg_id)
else:
# Since we know the new ID is registered in our system we can just drop the old one.
logger.info("GCM: Got canonical ref %s, dropping %s", new_reg_id, reg_id)
-DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
+DeviceTokenClass._default_manager.filter(
+token=reg_id, kind=DeviceTokenClass.GCM
+).delete()
if "errors" in res:
for error, reg_ids in res["errors"].items():
@@ -508,7 +512,7 @@ def send_android_push_notification(
logger.info("GCM: Removing %s", reg_id)
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
-DeviceTokenClass.objects.filter(
+DeviceTokenClass._default_manager.filter(
token=reg_id, kind=DeviceTokenClass.GCM
).delete()
else:

View File

@@ -1923,7 +1923,7 @@ def get_row_ids_in_all_tables() -> Iterator[Tuple[str, Set[int]]]:
table_name = model._meta.db_table
if table_name in ignored_tables:
continue
-ids = model.objects.all().values_list("id", flat=True)
+ids = model._default_manager.all().values_list("id", flat=True)
yield table_name, set(ids)

View File

@@ -19,18 +19,18 @@ def fix_attachment_caches(apps: StateApps, schema_editor: BaseDatabaseSchemaEdit
def update_batch(
attachment_model: Type[Model], message_model: Type[Model], lower_bound: int
) -> None:
-attachment_model.objects.filter(
+attachment_model._default_manager.filter(
id__gt=lower_bound, id__lte=lower_bound + BATCH_SIZE
).update(
is_web_public=Exists(
-message_model.objects.filter(
+message_model._default_manager.filter(
attachment=OuterRef("id"),
recipient__stream__invite_only=False,
recipient__stream__is_web_public=True,
),
),
is_realm_public=Exists(
-message_model.objects.filter(
+message_model._default_manager.filter(
attachment=OuterRef("id"),
recipient__stream__invite_only=False,
)

View File

@@ -47,7 +47,7 @@ def do_bulk_backfill_extra_data(
# We do not need to skip existing entries for other parts of backfilling
# because we have double-write implemented so that the backfilled value
# will still be consistent.
-audit_log_model.objects.filter(
+audit_log_model._default_manager.filter(
event_type=USER_FULL_NAME_CHANGED,
id__range=(id_lower_bound, id_upper_bound),
extra_data_json={},
@@ -71,7 +71,7 @@ def do_bulk_backfill_extra_data(
# being overwritten by the migration with a value inconsistent with its
# previous value.
inconsistent_extra_data_json.extend(
-audit_log_model.objects.filter(
+audit_log_model._default_manager.filter(
extra_data__isnull=False, id__range=(id_lower_bound, id_upper_bound)
)
.annotate(new_extra_data_json=Cast("extra_data", output_field=JSONField()))
@@ -82,7 +82,7 @@ def do_bulk_backfill_extra_data(
.values_list("id", "extra_data", "extra_data_json", "new_extra_data_json")
)
(
-audit_log_model.objects.filter(
+audit_log_model._default_manager.filter(
extra_data__isnull=False,
id__range=(id_lower_bound, id_upper_bound),
extra_data_json__inconsistent_old_extra_data__isnull=True,
@@ -92,7 +92,7 @@ def do_bulk_backfill_extra_data(
.update(extra_data_json=Cast("extra_data", output_field=JSONField()))
)
-python_valued_audit_log_entries = audit_log_model.objects.filter(
+python_valued_audit_log_entries = audit_log_model._default_manager.filter(
extra_data__startswith="{'",
id__range=(id_lower_bound, id_upper_bound),
extra_data_json__inconsistent_old_extra_data__isnull=True,
@@ -119,7 +119,9 @@ def do_bulk_backfill_extra_data(
if old_value not in ({}, new_value):
inconsistent_extra_data_json.append((audit_log_entry.id, audit_log_entry.extra_data, old_value, new_value)) # type: ignore[attr-defined] # Explained above.
audit_log_entry.extra_data_json = new_value # type: ignore[attr-defined] # Explained above.
-audit_log_model.objects.bulk_update(python_valued_audit_log_entries, fields=["extra_data_json"])
+audit_log_model._default_manager.bulk_update(
+python_valued_audit_log_entries, fields=["extra_data_json"]
+)
if inconsistent_extra_data_json:
audit_log_entries = []
@@ -129,7 +131,7 @@ def do_bulk_backfill_extra_data(
old_extra_data_json,
new_extra_data_json,
) in inconsistent_extra_data_json:
-audit_log_entry = audit_log_model.objects.get(id=audit_log_entry_id)
+audit_log_entry = audit_log_model._default_manager.get(id=audit_log_entry_id)
assert isinstance(old_extra_data_json, dict)
if "inconsistent_old_extra_data" in old_extra_data_json:
# Skip entries that have been backfilled and detected as
@@ -149,7 +151,7 @@ def do_bulk_backfill_extra_data(
new_value=orjson.dumps(new_extra_data_json).decode(),
)
)
-audit_log_model.objects.bulk_update(audit_log_entries, fields=["extra_data_json"])
+audit_log_model._default_manager.bulk_update(audit_log_entries, fields=["extra_data_json"])
def backfill_extra_data(apps: StateApps, schema_editor: BaseDatabaseSchemaEditor) -> None:

View File

@@ -42,7 +42,7 @@ def do_bulk_backfill_extra_data(
# being overwritten by the migration with a value inconsistent with its
# previous value.
inconsistent_extra_data_json.extend(
-audit_log_model.objects.filter(
+audit_log_model._default_manager.filter(
extra_data__isnull=False, id__range=(id_lower_bound, id_upper_bound)
)
.annotate(new_extra_data_json=Cast("extra_data", output_field=JSONField()))
@@ -52,7 +52,7 @@ def do_bulk_backfill_extra_data(
.values_list("id", "extra_data", "extra_data_json", "new_extra_data_json")
)
(
-audit_log_model.objects.filter(
+audit_log_model._default_manager.filter(
extra_data__isnull=False,
id__range=(id_lower_bound, id_upper_bound),
extra_data_json__inconsistent_old_extra_data__isnull=True,
@@ -61,7 +61,7 @@ def do_bulk_backfill_extra_data(
.update(extra_data_json=Cast("extra_data", output_field=JSONField()))
)
-python_valued_audit_log_entries = audit_log_model.objects.filter(
+python_valued_audit_log_entries = audit_log_model._default_manager.filter(
extra_data__startswith="{'",
id__range=(id_lower_bound, id_upper_bound),
extra_data_json__inconsistent_old_extra_data__isnull=True,
@@ -74,7 +74,9 @@ def do_bulk_backfill_extra_data(
if old_value not in ({}, new_value):
inconsistent_extra_data_json.append((audit_log_entry.id, audit_log_entry.extra_data, old_value, new_value)) # type: ignore[attr-defined] # Explained above.
audit_log_entry.extra_data_json = new_value # type: ignore[attr-defined] # Explained above.
-audit_log_model.objects.bulk_update(python_valued_audit_log_entries, fields=["extra_data_json"])
+audit_log_model._default_manager.bulk_update(
+python_valued_audit_log_entries, fields=["extra_data_json"]
+)
if inconsistent_extra_data_json:
audit_log_entries = []
@@ -84,7 +86,7 @@ def do_bulk_backfill_extra_data(
old_extra_data_json,
new_extra_data_json,
) in inconsistent_extra_data_json:
-audit_log_entry = audit_log_model.objects.get(id=audit_log_entry_id)
+audit_log_entry = audit_log_model._default_manager.get(id=audit_log_entry_id)
assert isinstance(old_extra_data_json, dict)
if "inconsistent_old_extra_data" in old_extra_data_json:
# Skip entries that have been backfilled and detected as
@@ -104,7 +106,7 @@ def do_bulk_backfill_extra_data(
new_value=orjson.dumps(new_extra_data_json).decode(),
)
)
-audit_log_model.objects.bulk_update(audit_log_entries, fields=["extra_data_json"])
+audit_log_model._default_manager.bulk_update(audit_log_entries, fields=["extra_data_json"])
def backfill_extra_data(model_name: str) -> Callable[[StateApps, BaseDatabaseSchemaEditor], None]:

View File

@@ -387,7 +387,7 @@ def batch_create_table_data(
BATCH_SIZE = 1000
while len(row_objects) > 0:
try:
-model.objects.bulk_create(row_objects[:BATCH_SIZE])
+model._default_manager.bulk_create(row_objects[:BATCH_SIZE])
except IntegrityError:
logging.warning(
"Invalid data saving %s for server %s/%s",