from unittest.mock import Mock, patch

from django.conf import settings

from zerver.apps import flush_cache
from zerver.lib.cache import (
    MEMCACHED_MAX_KEY_LENGTH,
    InvalidCacheKeyError,
    bulk_cached_fetch,
    cache_delete,
    cache_delete_many,
    cache_get,
    cache_get_many,
    cache_set,
    cache_set_many,
    cache_with_key,
    safe_cache_get_many,
    safe_cache_set_many,
    user_profile_by_id_cache_key,
    validate_cache_key,
)
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import UserProfile
from zerver.models.realms import get_realm
from zerver.models.users import get_system_bot, get_user, get_user_profile_by_id

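# Tests for Zulip's memcached-based caching layer: the post-migration cache
# flush, cache key validation, the cache_with_key decorator, the safe_*
# bulk helpers, and bulk_cached_fetch.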
class AppsTest(ZulipTestCase):
    def test_cache_gets_flushed(self) -> None:
        with self.assertLogs(level="INFO") as m:
            with patch("zerver.apps.cache.clear") as mock:
                # The argument to flush_cache doesn't matter
                flush_cache(Mock())
                mock.assert_called_once()
            self.assertEqual(m.output, ["INFO:root:Clearing memcached cache after migrations"])
            self.assert_length(m.output, 1)

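# memcached rejects keys that are longer than 250 bytes or that contain
# whitespace or control characters; validate_cache_key enforces this on the
# full key, i.e., after KEY_PREFIX has been prepended. A minimal sketch of
# the pattern under test (illustrative only, not used below):
#
#     key = f"user_profile:{user_id}"  # printable ASCII, no whitespace
#     validate_cache_key(key)          # raises InvalidCacheKeyError if invalid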
class CacheKeyValidationTest(ZulipTestCase):
    def test_validate_cache_key(self) -> None:
        validate_cache_key("nice_Ascii:string!~")
        with self.assertRaises(InvalidCacheKeyError):
            validate_cache_key("utf8_character:ą")
        with self.assertRaises(InvalidCacheKeyError):
            validate_cache_key("new_line_character:\n")
        with self.assertRaises(InvalidCacheKeyError):
            validate_cache_key("control_character:\r")
        with self.assertRaises(InvalidCacheKeyError):
            validate_cache_key("whitespace_character: ")
        with self.assertRaises(InvalidCacheKeyError):
            validate_cache_key("too_long:" + "X" * MEMCACHED_MAX_KEY_LENGTH)

        with self.assertRaises(InvalidCacheKeyError):
            # validate_cache_key does validation on a key with the
            # KEY_PREFIX appended to the start, so even though we're
            # passing something "short enough" here, it becomes too
            # long after appending KEY_PREFIX.
            validate_cache_key("X" * (MEMCACHED_MAX_KEY_LENGTH - 2))

    def test_cache_functions_raise_exception(self) -> None:
        invalid_key = "invalid_character:\n"
        good_key = "good_key"
        with self.assertRaises(InvalidCacheKeyError):
            cache_get(invalid_key)
        with self.assertRaises(InvalidCacheKeyError):
            cache_set(invalid_key, 0)
        with self.assertRaises(InvalidCacheKeyError):
            cache_delete(invalid_key)

        with self.assertRaises(InvalidCacheKeyError):
            cache_get_many([good_key, invalid_key])
        with self.assertRaises(InvalidCacheKeyError):
            cache_set_many({good_key: 0, invalid_key: 1})
        with self.assertRaises(InvalidCacheKeyError):
            cache_delete_many([good_key, invalid_key])

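# cache_with_key is expected to fail soft on a bad cache key: the tests
# below check that it logs a warning and skips the cache entirely (cache_set
# is never called), falling through to a normal database query instead of
# raising InvalidCacheKeyError at the caller.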
class CacheWithKeyDecoratorTest(ZulipTestCase):
    def test_cache_with_key_invalid_character(self) -> None:
        def invalid_characters_cache_key_function(user_id: int) -> str:
            return f"CacheWithKeyDecoratorTest:invalid_character:ą:{user_id}"

        @cache_with_key(invalid_characters_cache_key_function, timeout=1000)
        def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
            return UserProfile.objects.get(id=user_id)

        hamlet = self.example_user("hamlet")
        with patch("zerver.lib.cache.cache_set") as mock_set, self.assertLogs(level="WARNING") as m:
            with self.assert_database_query_count(1):
                result = get_user_function_with_bad_cache_keys(hamlet.id)

        self.assert_length(m.output, 1)
        self.assertEqual(result, hamlet)
        mock_set.assert_not_called()

    def test_cache_with_key_key_too_long(self) -> None:
        def too_long_cache_key_function(user_id: int) -> str:
            return "CacheWithKeyDecoratorTest:very_long_key:{}:{}".format("a" * 250, user_id)

        @cache_with_key(too_long_cache_key_function, timeout=1000)
        def get_user_function_with_bad_cache_keys(user_id: int) -> UserProfile:
            return UserProfile.objects.get(id=user_id)

        hamlet = self.example_user("hamlet")

        with patch("zerver.lib.cache.cache_set") as mock_set, self.assertLogs(level="WARNING") as m:
            with self.assert_database_query_count(1):
                result = get_user_function_with_bad_cache_keys(hamlet.id)

        self.assert_length(m.output, 1)
        self.assertEqual(result, hamlet)
        mock_set.assert_not_called()

    def test_cache_with_key_good_key(self) -> None:
        def good_cache_key_function(user_id: int) -> str:
            return f"CacheWithKeyDecoratorTest:good_cache_key:{user_id}"

        @cache_with_key(good_cache_key_function, timeout=1000)
        def get_user_function_with_good_cache_keys(user_id: int) -> UserProfile:
            return UserProfile.objects.get(id=user_id)

        hamlet = self.example_user("hamlet")

        with self.assert_database_query_count(1):
            result = get_user_function_with_good_cache_keys(hamlet.id)

        self.assertEqual(result, hamlet)

        # The previous function call should have cached the result correctly, so now
        # no database queries should happen:
        with self.assert_database_query_count(0, keep_cache_warm=True):
            result_two = get_user_function_with_good_cache_keys(hamlet.id)

        self.assertEqual(result_two, hamlet)

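    # None is a legitimate value to cache: the decorator should also store
    # negative lookups, so a repeated miss costs no extra database query.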
    def test_cache_with_key_none_values(self) -> None:
        def cache_key_function(user_id: int) -> str:
            return f"CacheWithKeyDecoratorTest:test_cache_with_key_none_values:{user_id}"

        @cache_with_key(cache_key_function, timeout=1000)
        def get_user_function_can_return_none(user_id: int) -> UserProfile | None:
            try:
                return UserProfile.objects.get(id=user_id)
            except UserProfile.DoesNotExist:
                return None

        last_user = UserProfile.objects.last()
        assert last_user is not None
        last_user_id = last_user.id
        with self.assert_database_query_count(1):
            result = get_user_function_can_return_none(last_user_id + 1)

        self.assertEqual(result, None)

        with self.assert_database_query_count(0, keep_cache_warm=True):
            result_two = get_user_function_can_return_none(last_user_id + 1)

        self.assertEqual(result_two, None)

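# Unlike the plain cache_* functions, which raise InvalidCacheKeyError, the
# safe_* variants are expected to log a warning, drop the invalid keys, and
# still process any valid keys passed alongside them.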
class SafeCacheFunctionsTest(ZulipTestCase):
    def test_safe_cache_functions_with_all_good_keys(self) -> None:
        items = {
            "SafeFunctionsTest:key1": 1,
            "SafeFunctionsTest:key2": 2,
            "SafeFunctionsTest:key3": 3,
        }
        safe_cache_set_many(items)

        result = safe_cache_get_many(list(items.keys()))
        for key, value in result.items():
            self.assertEqual(value, items[key])

    def test_safe_cache_functions_with_all_bad_keys(self) -> None:
        items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
        with self.assertLogs(level="WARNING") as m:
            safe_cache_set_many(items)
            self.assertIn(
                "WARNING:root:Invalid cache key used: ['SafeFunctionsTest:\\nbadkey1', 'SafeFunctionsTest:\\nbadkey2']",
                m.output[0],
            )
            self.assert_length(m.output, 1)

        with self.assertLogs(level="WARNING") as m:
            result = safe_cache_get_many(list(items.keys()))
            self.assertEqual(result, {})
            self.assertIn(
                "WARNING:root:Invalid cache key used: ['SafeFunctionsTest:\\nbadkey1', 'SafeFunctionsTest:\\nbadkey2']",
                m.output[0],
            )
            self.assert_length(m.output, 1)

    def test_safe_cache_functions_with_good_and_bad_keys(self) -> None:
        bad_items = {"SafeFunctionsTest:\nbadkey1": 1, "SafeFunctionsTest:\nbadkey2": 2}
        good_items = {"SafeFunctionsTest:goodkey1": 3, "SafeFunctionsTest:goodkey2": 4}
        items = {**good_items, **bad_items}

        with self.assertLogs(level="WARNING") as m:
            safe_cache_set_many(items)
            self.assertIn(
                "WARNING:root:Invalid cache key used: ['SafeFunctionsTest:\\nbadkey1', 'SafeFunctionsTest:\\nbadkey2']",
                m.output[0],
            )
            self.assert_length(m.output, 1)

        with self.assertLogs(level="WARNING") as m:
            result = safe_cache_get_many(list(items.keys()))
            self.assertEqual(result, good_items)
            self.assertIn(
                "WARNING:root:Invalid cache key used: ['SafeFunctionsTest:\\nbadkey1', 'SafeFunctionsTest:\\nbadkey2']",
                m.output[0],
            )
            self.assert_length(m.output, 1)

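# System bots end up cached under two different keys (via get_user and via
# get_system_bot), so saving the profile should invalidate both entries; a
# stale entry on either key would return the old can_forge_sender value.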
class BotCacheKeyTest(ZulipTestCase):
    def test_bot_profile_key_deleted_on_save(self) -> None:
        realm = get_realm(settings.SYSTEM_BOT_REALM)
        # Get the profile cached on both cache keys:
        user_profile = get_user(settings.EMAIL_GATEWAY_BOT, realm)
        bot_profile = get_system_bot(settings.EMAIL_GATEWAY_BOT, realm.id)
        self.assertEqual(user_profile, bot_profile)

        # Flip the setting and save:
        flipped_setting = not bot_profile.can_forge_sender
        bot_profile.can_forge_sender = flipped_setting
        bot_profile.save()

        # The .save() should have deleted cache keys, so if we fetch again,
        # the returned objects should have can_forge_sender set correctly.
        bot_profile2 = get_system_bot(settings.EMAIL_GATEWAY_BOT, realm.id)
        self.assertEqual(bot_profile2.can_forge_sender, flipped_setting)

        user_profile2 = get_user(settings.EMAIL_GATEWAY_BOT, realm)
        self.assertEqual(user_profile2.can_forge_sender, flipped_setting)

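# Minimal id_fetcher callbacks for the bulk_cached_fetch tests below;
# bulk_cached_fetch uses id_fetcher to key its result dict for objects that
# come back from query_function. They are marked nocoverage because these
# tests never reach the database-fetch path.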
def get_user_id(user: UserProfile) -> int:
    return user.id  # nocoverage


def get_user_email(user: UserProfile) -> str:
    return user.email  # nocoverage

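# bulk_cached_fetch should consult the cache first and call query_function
# only for ids that miss; the tests below pin down both edge cases: every
# requested object already cached, and an empty request.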
class GenericBulkCachedFetchTest(ZulipTestCase):
    def test_query_function_called_only_if_needed(self) -> None:
        hamlet = self.example_user("hamlet")
        # Get the user cached:
        get_user_profile_by_id(hamlet.id)

        class CustomError(Exception):
            pass

        def query_function(ids: list[int]) -> list[UserProfile]:
            raise CustomError("The query function was called")

        # query_function shouldn't be called, because the only requested object
        # is already cached:
        result: dict[int, UserProfile] = bulk_cached_fetch(
            cache_key_function=user_profile_by_id_cache_key,
            query_function=query_function,
            object_ids=[hamlet.id],
            id_fetcher=get_user_id,
        )
        self.assertEqual(result, {hamlet.id: hamlet})
        with self.assertLogs(level="INFO") as info_log:
            flush_cache(Mock())
        self.assertEqual(info_log.output, ["INFO:root:Clearing memcached cache after migrations"])

        # With the cache flushed, the query_function should get called:
        with self.assertRaises(CustomError):
            result = bulk_cached_fetch(
                cache_key_function=user_profile_by_id_cache_key,
                query_function=query_function,
                object_ids=[hamlet.id],
                id_fetcher=get_user_id,
            )

    def test_empty_object_ids_list(self) -> None:
        class CustomError(Exception):
            pass

        def cache_key_function(
            email: str,
        ) -> str:  # nocoverage -- this is just here to make sure it's not called
            raise CustomError("The cache key function was called")

        def query_function(
            emails: list[str],
        ) -> list[UserProfile]:  # nocoverage -- this is just here to make sure it's not called
            raise CustomError("The query function was called")

        # query_function and cache_key_function shouldn't be called, because
        # object_ids is empty, so there's nothing to do.
        result: dict[str, UserProfile] = bulk_cached_fetch(
            cache_key_function=cache_key_function,
            query_function=query_function,
            object_ids=[],
            id_fetcher=get_user_email,
        )
        self.assertEqual(result, {})