import datetime
from email.headerregistry import Address
from typing import Any, Dict, Iterable, List, Optional, TypeVar, Union
from unittest import mock

import orjson
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.models import Session
from django.core.exceptions import ValidationError
from django.test import override_settings
from django.utils.timezone import now as timezone_now

from confirmation.models import Confirmation
from zerver.actions.create_user import do_create_user, do_reactivate_user
from zerver.actions.invites import do_create_multiuse_invite_link, do_invite_users
from zerver.actions.message_send import RecipientInfoResult, get_recipient_info
from zerver.actions.muted_users import do_mute_user
from zerver.actions.realm_settings import do_set_realm_property
from zerver.actions.user_settings import bulk_regenerate_api_keys, do_change_user_setting
from zerver.actions.user_topics import do_set_user_topic_visibility_policy
from zerver.actions.users import (
    change_user_is_active,
    do_change_can_create_users,
    do_change_user_role,
    do_deactivate_user,
    do_delete_user,
    do_delete_user_preserving_messages,
)
from zerver.lib.avatar import avatar_url, get_avatar_field, get_gravatar_url
from zerver.lib.bulk_create import create_users
from zerver.lib.create_user import copy_default_settings
from zerver.lib.events import do_events_register
from zerver.lib.exceptions import JsonableError
from zerver.lib.send_email import (
    clear_scheduled_emails,
    deliver_scheduled_emails,
    send_future_email,
)
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.test_helpers import (
    cache_tries_captured,
    get_subscription,
    get_test_image_file,
    reset_email_visibility_to_everyone_in_zulip_realm,
    simulated_empty_cache,
)
from zerver.lib.upload import upload_avatar_image
from zerver.lib.user_groups import get_system_user_group_for_user
from zerver.lib.users import (
    Account,
    access_user_by_id,
    get_accounts_for_email,
    get_cross_realm_dicts,
    user_ids_to_users,
)
from zerver.lib.utils import assert_is_not_none
from zerver.models import (
    CustomProfileField,
    InvalidFakeEmailDomainError,
    Message,
    PreregistrationUser,
    RealmAuditLog,
    RealmDomain,
    RealmUserDefault,
    Recipient,
    ScheduledEmail,
    Stream,
    Subscription,
    UserGroup,
    UserGroupMembership,
    UserHotspot,
    UserProfile,
    UserTopic,
    check_valid_user_ids,
    filter_to_valid_prereg_users,
    get_client,
    get_fake_email_domain,
    get_realm,
    get_source_profile,
    get_stream,
    get_system_bot,
    get_user,
    get_user_by_delivery_email,
    get_user_by_id_in_realm_including_cross_realm,
)

K = TypeVar("K")
V = TypeVar("V")


def find_dict(lst: Iterable[Dict[K, V]], k: K, v: V) -> Dict[K, V]:
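    # Return the first dict in `lst` whose value for key `k` equals `v`.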
    for dct in lst:
        if dct[k] == v:
            return dct
    raise AssertionError(f"Cannot find element in list where key {k} == {v}")


class PermissionTest(ZulipTestCase):
    def test_role_setters(self) -> None:
        user_profile = self.example_user("hamlet")

        user_profile.is_realm_admin = True
        self.assertEqual(user_profile.is_realm_admin, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_guest = False
        self.assertEqual(user_profile.is_guest, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_realm_owner = False
        self.assertEqual(user_profile.is_realm_owner, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_moderator = False
        self.assertEqual(user_profile.is_moderator, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_ADMINISTRATOR)

        user_profile.is_realm_admin = False
        self.assertEqual(user_profile.is_realm_admin, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

        user_profile.is_guest = True
        self.assertEqual(user_profile.is_guest, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_GUEST)

        user_profile.is_realm_admin = False
        self.assertEqual(user_profile.is_guest, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_GUEST)

        user_profile.is_guest = False
        self.assertEqual(user_profile.is_guest, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

        user_profile.is_realm_owner = True
        self.assertEqual(user_profile.is_realm_owner, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_REALM_OWNER)

        user_profile.is_realm_owner = False
        self.assertEqual(user_profile.is_realm_owner, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

        user_profile.is_moderator = True
        self.assertEqual(user_profile.is_moderator, True)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MODERATOR)

        user_profile.is_moderator = False
        self.assertEqual(user_profile.is_moderator, False)
        self.assertEqual(user_profile.role, UserProfile.ROLE_MEMBER)

    def test_get_admin_users(self) -> None:
        user_profile = self.example_user("hamlet")
        do_change_user_role(user_profile, UserProfile.ROLE_MEMBER, acting_user=None)
        self.assertFalse(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertFalse(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertFalse(user_profile in admin_users)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        self.assertFalse(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertTrue(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertTrue(user_profile in admin_users)

        do_change_user_role(user_profile, UserProfile.ROLE_REALM_OWNER, acting_user=None)
        self.assertTrue(user_profile.is_realm_owner)
        admin_users = user_profile.realm.get_human_admin_users()
        self.assertTrue(user_profile in admin_users)
        admin_users = user_profile.realm.get_human_admin_users(include_realm_owners=False)
        self.assertFalse(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots()
        self.assertTrue(user_profile in admin_users)
        admin_users = user_profile.realm.get_admin_users_and_bots(include_realm_owners=False)
        self.assertFalse(user_profile in admin_users)

    def test_get_first_human_user(self) -> None:
        realm = get_realm("zulip")
        UserProfile.objects.filter(realm=realm).delete()
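
        # get_first_human_user() should skip the bots and return the earliest
        # human account created below, as asserted at the end of this test.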
        UserProfile.objects.create(
            realm=realm, email="bot1@zulip.com", delivery_email="bot1@zulip.com", is_bot=True
        )
        first_human_user = UserProfile.objects.create(
            realm=realm, email="user1@zulip.com", delivery_email="user1@zulip.com", is_bot=False
        )
        UserProfile.objects.create(
            realm=realm, email="user2@zulip.com", delivery_email="user2@zulip.com", is_bot=False
        )
        UserProfile.objects.create(
            realm=realm, email="bot2@zulip.com", delivery_email="bot2@zulip.com", is_bot=True
        )
        self.assertEqual(first_human_user, realm.get_first_human_user())

    def test_updating_non_existent_user(self) -> None:
        self.login("hamlet")
        admin = self.example_user("hamlet")
        do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)

        invalid_user_id = 1000
        result = self.client_patch(f"/json/users/{invalid_user_id}", {})
        self.assert_json_error(result, "No such user")

    def test_owner_api(self) -> None:
        self.login("iago")

        desdemona = self.example_user("desdemona")
        othello = self.example_user("othello")
        iago = self.example_user("iago")
        realm = iago.realm

        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER, acting_user=None)

        result = self.client_get("/json/users")
        members = self.assert_json_success(result)["members"]
        iago_dict = find_dict(members, "email", iago.email)
        self.assertTrue(iago_dict["is_owner"])
        othello_dict = find_dict(members, "email", othello.email)
        self.assertFalse(othello_dict["is_owner"])

        req = dict(role=UserProfile.ROLE_REALM_OWNER)
        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.client_patch(f"/json/users/{othello.id}", req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertTrue(othello in owner_users)
        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], othello.id)
        self.assertEqual(person["role"], UserProfile.ROLE_REALM_OWNER)

        req = dict(role=UserProfile.ROLE_MEMBER)
        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.client_patch(f"/json/users/{othello.id}", req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertFalse(othello in owner_users)
        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], othello.id)
        self.assertEqual(person["role"], UserProfile.ROLE_MEMBER)

        # Cannot take away from last owner
        self.login("desdemona")
        req = dict(role=UserProfile.ROLE_MEMBER)
        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.client_patch(f"/json/users/{iago.id}", req)
        self.assert_json_success(result)
        owner_users = realm.get_human_owner_users()
        self.assertFalse(iago in owner_users)
        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], iago.id)
        self.assertEqual(person["role"], UserProfile.ROLE_MEMBER)
        with self.capture_send_event_calls(expected_num_events=0):
            result = self.client_patch(f"/json/users/{desdemona.id}", req)
        self.assert_json_error(
            result, "The owner permission cannot be removed from the only organization owner."
        )

        do_change_user_role(iago, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        self.login("iago")
        with self.capture_send_event_calls(expected_num_events=0):
            result = self.client_patch(f"/json/users/{desdemona.id}", req)
        self.assert_json_error(result, "Must be an organization owner")

    def test_admin_api(self) -> None:
        self.login("desdemona")

        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        desdemona = self.example_user("desdemona")
        realm = hamlet.realm

        # Make sure we see is_admin flag in /json/users
        result = self.client_get("/json/users")
        members = self.assert_json_success(result)["members"]
        desdemona_dict = find_dict(members, "email", desdemona.email)
        self.assertTrue(desdemona_dict["is_admin"])
        othello_dict = find_dict(members, "email", othello.email)
        self.assertFalse(othello_dict["is_admin"])

        # Giveth
        req = dict(role=orjson.dumps(UserProfile.ROLE_REALM_ADMINISTRATOR).decode())

        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.client_patch(f"/json/users/{othello.id}", req)
        self.assert_json_success(result)
        admin_users = realm.get_human_admin_users()
        self.assertTrue(othello in admin_users)
        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], othello.id)
        self.assertEqual(person["role"], UserProfile.ROLE_REALM_ADMINISTRATOR)

        # Taketh away
        req = dict(role=orjson.dumps(UserProfile.ROLE_MEMBER).decode())
        with self.capture_send_event_calls(expected_num_events=4) as events:
            result = self.client_patch(f"/json/users/{othello.id}", req)
        self.assert_json_success(result)
        admin_users = realm.get_human_admin_users()
        self.assertFalse(othello in admin_users)
        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], othello.id)
        self.assertEqual(person["role"], UserProfile.ROLE_MEMBER)

        # Make sure only admins can patch other users' info.
        self.login("othello")
        result = self.client_patch(f"/json/users/{hamlet.id}", req)
        self.assert_json_error(result, "Insufficient permission")

    def test_admin_api_hide_emails(self) -> None:
        reset_email_visibility_to_everyone_in_zulip_realm()

        user = self.example_user("hamlet")
        admin = self.example_user("iago")
        self.login_user(user)

        # First, verify client_gravatar works normally
        result = self.client_get("/json/users", {"client_gravatar": "true"})
        members = self.assert_json_success(result)["members"]
        hamlet = find_dict(members, "user_id", user.id)
        self.assertEqual(hamlet["email"], user.email)
        self.assertIsNone(hamlet["avatar_url"])
        self.assertEqual(hamlet["delivery_email"], user.delivery_email)

        # Also verify the /events code path. This is a bit hacky, but
        # we need to verify client_gravatar is not being overridden.
        with mock.patch(
            "zerver.lib.events.request_event_queue", return_value=None
        ) as mock_request_event_queue:
            with self.assertRaises(JsonableError):
                do_events_register(user, user.realm, get_client("website"), client_gravatar=True)
            self.assertEqual(mock_request_event_queue.call_args_list[0][0][3], True)
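            # Positional argument index 3 of request_event_queue is expected to
            # be client_gravatar here, so this checks it was passed through
            # unchanged as True.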

        #############################################################
        # Now, switch email address visibility, check client_gravatar
        # is automatically disabled for the user.
        with self.captureOnCommitCallbacks(execute=True):
            do_change_user_setting(
                user,
                "email_address_visibility",
                UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
                acting_user=None,
            )
        result = self.client_get("/json/users", {"client_gravatar": "true"})
        members = self.assert_json_success(result)["members"]
        hamlet = find_dict(members, "user_id", user.id)
        self.assertEqual(hamlet["email"], f"user{user.id}@zulip.testserver")
        self.assertEqual(hamlet["avatar_url"], get_gravatar_url(user.delivery_email, 1))

        # client_gravatar is still turned off for admins. In theory,
        # it doesn't need to be, but client-side changes would be
        # required in apps like the mobile apps.
        # delivery_email is sent for admins.
        admin.refresh_from_db()
        user.refresh_from_db()
        self.login_user(admin)
        result = self.client_get("/json/users", {"client_gravatar": "true"})
        members = self.assert_json_success(result)["members"]
        hamlet = find_dict(members, "user_id", user.id)
        self.assertEqual(hamlet["email"], f"user{user.id}@zulip.testserver")
        self.assertEqual(hamlet["avatar_url"], get_gravatar_url(user.delivery_email, 1))
        self.assertEqual(hamlet["delivery_email"], self.example_email("hamlet"))

    def test_user_cannot_promote_to_admin(self) -> None:
        self.login("hamlet")
        req = dict(role=orjson.dumps(UserProfile.ROLE_REALM_ADMINISTRATOR).decode())
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Insufficient permission")

    def test_admin_user_can_change_full_name(self) -> None:
        new_name = "new name"
        self.login("iago")
        hamlet = self.example_user("hamlet")
        req = dict(full_name=new_name)
        result = self.client_patch(f"/json/users/{hamlet.id}", req)
        self.assert_json_success(result)
        hamlet = self.example_user("hamlet")
        self.assertEqual(hamlet.full_name, new_name)

    def test_non_admin_cannot_change_full_name(self) -> None:
        self.login("hamlet")
        req = dict(full_name="new name")
        result = self.client_patch("/json/users/{}".format(self.example_user("othello").id), req)
        self.assert_json_error(result, "Insufficient permission")

    def test_admin_cannot_set_long_full_name(self) -> None:
        new_name = "a" * (UserProfile.MAX_NAME_LENGTH + 1)
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Name too long!")

    def test_admin_cannot_set_short_full_name(self) -> None:
        new_name = "a"
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Name too short!")

    def test_not_allowed_format(self) -> None:
        # Name of format "Alice|999" breaks in Markdown
        new_name = "iago|72"
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Invalid format!")

    def test_allowed_format_complex(self) -> None:
        # Adding characters after r'|\d+' doesn't break Markdown
        new_name = "Hello- 12iago|72k"
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_success(result)

    def test_not_allowed_format_complex(self) -> None:
        new_name = "Hello- 12iago|72"
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Invalid format!")

    def test_admin_cannot_set_full_name_with_invalid_characters(self) -> None:
        new_name = "Opheli*"
        self.login("iago")
        req = dict(full_name=new_name)
        result = self.client_patch("/json/users/{}".format(self.example_user("hamlet").id), req)
        self.assert_json_error(result, "Invalid characters in name!")

    def test_access_user_by_id(self) -> None:
        iago = self.example_user("iago")

        # Must be a valid user ID in the realm
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, 1234, for_admin=False)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, self.mit_user("sipbtest").id, for_admin=False)

        # Can only access bot users if allow_bots is passed
        bot = self.example_user("default_bot")
        access_user_by_id(iago, bot.id, allow_bots=True, for_admin=True)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, bot.id, for_admin=True)

        # Can only access deactivated users if allow_deactivated is passed
        hamlet = self.example_user("hamlet")
        do_deactivate_user(hamlet, acting_user=None)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, hamlet.id, for_admin=False)
        with self.assertRaises(JsonableError):
            access_user_by_id(iago, hamlet.id, for_admin=True)
        access_user_by_id(iago, hamlet.id, allow_deactivated=True, for_admin=True)

        # Non-admin user can't admin another user
        with self.assertRaises(JsonableError):
            access_user_by_id(
                self.example_user("cordelia"), self.example_user("aaron").id, for_admin=True
            )
        # But does have read-only access to it.
        access_user_by_id(
            self.example_user("cordelia"), self.example_user("aaron").id, for_admin=False
        )

    def check_property_for_role(self, user_profile: UserProfile, role: int) -> bool:
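        # Returns True only when exactly the boolean convenience properties
        # implied by `role` are set on the user, with no other role
        # properties leaking through.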
        if role == UserProfile.ROLE_REALM_ADMINISTRATOR:
            return (
                user_profile.is_realm_admin
                and not user_profile.is_guest
                and not user_profile.is_realm_owner
                and not user_profile.is_moderator
            )
        elif role == UserProfile.ROLE_REALM_OWNER:
            return (
                user_profile.is_realm_owner
                and user_profile.is_realm_admin
                and not user_profile.is_moderator
                and not user_profile.is_guest
            )
        elif role == UserProfile.ROLE_MODERATOR:
            return (
                user_profile.is_moderator
                and not user_profile.is_realm_owner
                and not user_profile.is_realm_admin
                and not user_profile.is_guest
            )

        if role == UserProfile.ROLE_MEMBER:
            return (
                not user_profile.is_guest
                and not user_profile.is_moderator
                and not user_profile.is_realm_admin
                and not user_profile.is_realm_owner
            )

        assert role == UserProfile.ROLE_GUEST
        return (
            user_profile.is_guest
            and not user_profile.is_moderator
            and not user_profile.is_realm_admin
            and not user_profile.is_realm_owner
        )

    def check_user_role_change(
        self,
        user_email: str,
        new_role: int,
    ) -> None:
        self.login("desdemona")

        user_profile = self.example_user(user_email)
        old_role = user_profile.role
        old_system_group = get_system_user_group_for_user(user_profile)

        self.assertTrue(self.check_property_for_role(user_profile, old_role))
        self.assertTrue(
            UserGroupMembership.objects.filter(
                user_profile=user_profile, user_group=old_system_group
            ).exists()
        )

        req = dict(role=orjson.dumps(new_role).decode())
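        # A role change produces a realm_user update event plus user group
        # membership events for leaving the old system group and joining the
        # new one; when the member role is involved, membership in the "full
        # members" system group presumably changes too, hence the extra event.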
        num_events = 3
        if UserProfile.ROLE_MEMBER in [old_role, new_role]:
            num_events = 4
        with self.capture_send_event_calls(expected_num_events=num_events) as events:
            result = self.client_patch(f"/json/users/{user_profile.id}", req)
        self.assert_json_success(result)

        user_profile = self.example_user(user_email)
        self.assertTrue(self.check_property_for_role(user_profile, new_role))
        system_group = get_system_user_group_for_user(user_profile)
        self.assertTrue(
            UserGroupMembership.objects.filter(
                user_profile=user_profile, user_group=system_group
            ).exists()
        )

        person = events[0]["event"]["person"]
        self.assertEqual(person["user_id"], user_profile.id)
        self.assertTrue(person["role"], new_role)

    def test_change_regular_member_to_guest(self) -> None:
        self.check_user_role_change("hamlet", UserProfile.ROLE_GUEST)

    def test_change_guest_to_regular_member(self) -> None:
        self.check_user_role_change("polonius", UserProfile.ROLE_MEMBER)

    def test_change_admin_to_guest(self) -> None:
        self.check_user_role_change("iago", UserProfile.ROLE_GUEST)

    def test_change_guest_to_admin(self) -> None:
        self.check_user_role_change("polonius", UserProfile.ROLE_REALM_ADMINISTRATOR)

    def test_change_owner_to_guest(self) -> None:
        self.login("desdemona")
        iago = self.example_user("iago")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER, acting_user=None)
        self.check_user_role_change("iago", UserProfile.ROLE_GUEST)

    def test_change_guest_to_owner(self) -> None:
        self.check_user_role_change("polonius", UserProfile.ROLE_REALM_OWNER)

    def test_change_admin_to_owner(self) -> None:
        self.check_user_role_change("iago", UserProfile.ROLE_REALM_OWNER)

    def test_change_owner_to_admin(self) -> None:
        self.login("desdemona")
        iago = self.example_user("iago")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER, acting_user=None)
        self.check_user_role_change("iago", UserProfile.ROLE_REALM_ADMINISTRATOR)

    def test_change_owner_to_moderator(self) -> None:
        iago = self.example_user("iago")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER, acting_user=None)
        self.check_user_role_change("iago", UserProfile.ROLE_MODERATOR)

    def test_change_moderator_to_owner(self) -> None:
        self.check_user_role_change("shiva", UserProfile.ROLE_REALM_OWNER)

    def test_change_admin_to_moderator(self) -> None:
        self.check_user_role_change("iago", UserProfile.ROLE_MODERATOR)

    def test_change_moderator_to_admin(self) -> None:
        self.check_user_role_change("shiva", UserProfile.ROLE_REALM_ADMINISTRATOR)

    def test_change_guest_to_moderator(self) -> None:
        self.check_user_role_change("polonius", UserProfile.ROLE_MODERATOR)

    def test_change_moderator_to_guest(self) -> None:
        self.check_user_role_change("shiva", UserProfile.ROLE_GUEST)

    def test_admin_user_can_change_profile_data(self) -> None:
        realm = get_realm("zulip")
        self.login("iago")
        new_profile_data = []
        cordelia = self.example_user("cordelia")

        # Test for all types of data
        fields = {
            "Phone number": "short text data",
            "Biography": "long text data",
            "Favorite food": "short text data",
            "Favorite editor": "0",
            "Birthday": "1909-03-05",
            "Favorite website": "https://zulip.com",
            "Mentor": [cordelia.id],
            "GitHub username": "timabbott",
            "Pronouns": "she/her",
        }

        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            new_profile_data.append(
                {
                    "id": field.id,
                    "value": fields[field_name],
                }
            )

        result = self.client_patch(
            f"/json/users/{cordelia.id}", {"profile_data": orjson.dumps(new_profile_data).decode()}
        )
        self.assert_json_success(result)

        cordelia = self.example_user("cordelia")
        for field_dict in cordelia.profile_data():
            with self.subTest(field_name=field_dict["name"]):
                self.assertEqual(field_dict["value"], fields[field_dict["name"]])

        # Test admin user cannot set invalid profile data
        invalid_fields = [
            (
                "Favorite editor",
                "invalid choice",
                "'invalid choice' is not a valid choice for 'Favorite editor'.",
            ),
            ("Birthday", "1909-34-55", "Birthday is not a date"),
            ("Favorite website", "not url", "Favorite website is not a URL"),
            ("Mentor", "not list of user ids", "User IDs is not a list"),
        ]

        for field_name, field_value, error_msg in invalid_fields:
            new_profile_data = []
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            new_profile_data.append(
                {
                    "id": field.id,
                    "value": field_value,
                }
            )

            result = self.client_patch(
                f"/json/users/{cordelia.id}",
                {"profile_data": orjson.dumps(new_profile_data).decode()},
            )
            self.assert_json_error(result, error_msg)

        # non-existent field and no data
        invalid_profile_data = [
            {
                "id": 9001,
                "value": "",
            }
        ]
        result = self.client_patch(
            f"/json/users/{cordelia.id}",
            {"profile_data": orjson.dumps(invalid_profile_data).decode()},
        )
        self.assert_json_error(result, "Field id 9001 not found.")

        # non-existent field and data
        invalid_profile_data = [
            {
                "id": 9001,
                "value": "some data",
            }
        ]
        result = self.client_patch(
            f"/json/users/{cordelia.id}",
            {"profile_data": orjson.dumps(invalid_profile_data).decode()},
        )
        self.assert_json_error(result, "Field id 9001 not found.")

        # Test for clearing/resetting field values.
        empty_profile_data = []
        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            value: Union[str, None, List[Any]] = ""
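            # USER-type fields hold a list of user IDs, so they are cleared
            # with an empty list rather than an empty string.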
            if field.field_type == CustomProfileField.USER:
                value = []
            empty_profile_data.append(
                {
                    "id": field.id,
                    "value": value,
                }
            )
        result = self.client_patch(
            f"/json/users/{cordelia.id}",
            {"profile_data": orjson.dumps(empty_profile_data).decode()},
        )
        self.assert_json_success(result)
        for field_dict in cordelia.profile_data():
            with self.subTest(field_name=field_dict["name"]):
                self.assertEqual(field_dict["value"], None)

        # Test adding some of the field values after removing all.
        hamlet = self.example_user("hamlet")
        new_fields = {
            "Phone number": None,
            "Biography": "A test user",
            "Favorite food": None,
            "Favorite editor": None,
            "Birthday": None,
            "Favorite website": "https://zulip.github.io",
            "Mentor": [hamlet.id],
            "GitHub username": "timabbott",
            "Pronouns": None,
        }
        new_profile_data = []
        for field_name in fields:
            field = CustomProfileField.objects.get(name=field_name, realm=realm)
            value = None
            if new_fields[field_name]:
                value = new_fields[field_name]
            new_profile_data.append(
                {
                    "id": field.id,
                    "value": value,
                }
            )
        result = self.client_patch(
            f"/json/users/{cordelia.id}", {"profile_data": orjson.dumps(new_profile_data).decode()}
        )
        self.assert_json_success(result)
        for field_dict in cordelia.profile_data():
            with self.subTest(field_name=field_dict["name"]):
                self.assertEqual(field_dict["value"], new_fields[str(field_dict["name"])])

    def test_non_admin_user_cannot_change_profile_data(self) -> None:
        self.login("cordelia")
        hamlet = self.example_user("hamlet")
        realm = get_realm("zulip")

        new_profile_data = []
        field = CustomProfileField.objects.get(name="Biography", realm=realm)
        new_profile_data.append(
            {
                "id": field.id,
                "value": "New hamlet Biography",
            }
        )
        result = self.client_patch(
            f"/json/users/{hamlet.id}", {"profile_data": orjson.dumps(new_profile_data).decode()}
        )
        self.assert_json_error(result, "Insufficient permission")

        result = self.client_patch(
            "/json/users/{}".format(self.example_user("cordelia").id),
            {"profile_data": orjson.dumps(new_profile_data).decode()},
        )
        self.assert_json_error(result, "Insufficient permission")


class QueryCountTest(ZulipTestCase):
    def test_create_user_with_multiple_streams(self) -> None:
        # add_new_user_history needs messages to be current
        Message.objects.all().update(date_sent=timezone_now())
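
        # ContentType.objects.get_for_model() results are cached, so clearing
        # that cache here presumably keeps the query counts asserted below
        # deterministic regardless of which tests ran earlier.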
        ContentType.objects.clear_cache()

        # This just focuses on making sure we don't do too many
        # queries/cache tries or send too many events.
        realm = get_realm("zulip")

        self.make_stream("private_stream1", invite_only=True)
        self.make_stream("private_stream2", invite_only=True)

        stream_names = [
            "Denmark",
            "Scotland",
            "Verona",
            "private_stream1",
            "private_stream2",
        ]
        streams = [get_stream(stream_name, realm) for stream_name in stream_names]

        invite_expires_in_minutes = 4 * 24 * 60
        do_invite_users(
            user_profile=self.example_user("hamlet"),
            invitee_emails=["fred@zulip.com"],
            streams=streams,
            invite_expires_in_minutes=invite_expires_in_minutes,
        )

        prereg_user = PreregistrationUser.objects.get(email="fred@zulip.com")

        with self.assert_database_query_count(93):
            with cache_tries_captured() as cache_tries:
                with self.capture_send_event_calls(expected_num_events=11) as events:
                    fred = do_create_user(
                        email="fred@zulip.com",
                        password="password",
                        realm=realm,
                        full_name="Fred Flintstone",
                        prereg_user=prereg_user,
                        acting_user=None,
                    )

        self.assert_length(cache_tries, 24)
        peer_add_events = [event for event in events if event["event"].get("op") == "peer_add"]

        notifications = set()
        for event in peer_add_events:
            stream_ids = event["event"]["stream_ids"]
            stream_names = sorted(Stream.objects.get(id=stream_id).name for stream_id in stream_ids)
            self.assertTrue(event["event"]["user_ids"], {fred.id})
            notifications.add(",".join(stream_names))

        self.assertEqual(
            notifications, {"Denmark,Scotland,Verona", "private_stream1", "private_stream2"}
        )


class BulkCreateUserTest(ZulipTestCase):
    def test_create_users(self) -> None:
        realm = get_realm("zulip")
        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        realm_user_default.email_address_visibility = (
            RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_ADMINS
        )
        realm_user_default.save()

        name_list = [
            ("Fred Flintstone", "fred@zulip.com"),
            ("Lisa Simpson", "lisa@zulip.com"),
        ]

        create_users(realm, name_list)

        fred = get_user_by_delivery_email("fred@zulip.com", realm)
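        # With the realm default email_address_visibility restricted to admins,
        # the user's .email is the dummy per-user address; only .delivery_email
        # carries the real address.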
        self.assertEqual(
            fred.email,
            f"user{fred.id}@zulip.testserver",
        )

        lisa = get_user_by_delivery_email("lisa@zulip.com", realm)
        self.assertEqual(lisa.full_name, "Lisa Simpson")
        self.assertEqual(lisa.is_bot, False)
        self.assertEqual(lisa.bot_type, None)

        realm_user_default.email_address_visibility = (
            RealmUserDefault.EMAIL_ADDRESS_VISIBILITY_EVERYONE
        )
        realm_user_default.save()

        name_list = [
            ("Bono", "bono@zulip.com"),
            ("Cher", "cher@zulip.com"),
        ]

        now = timezone_now()
        expected_user_group_names = {
            UserGroup.MEMBERS_GROUP_NAME,
            UserGroup.FULL_MEMBERS_GROUP_NAME,
        }
        create_users(realm, name_list)
        bono = get_user_by_delivery_email("bono@zulip.com", realm)
        self.assertEqual(bono.email, "bono@zulip.com")
        self.assertEqual(bono.delivery_email, "bono@zulip.com")
        user_group_names = set(
            RealmAuditLog.objects.filter(
                realm=realm,
                modified_user=bono,
                event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
                event_time__gte=now,
            ).values_list("modified_user_group__name", flat=True)
        )
        self.assertSetEqual(
            user_group_names,
            expected_user_group_names,
        )

        cher = get_user_by_delivery_email("cher@zulip.com", realm)
        self.assertEqual(cher.full_name, "Cher")
        user_group_names = set(
            RealmAuditLog.objects.filter(
                realm=realm,
                modified_user=cher,
                event_type=RealmAuditLog.USER_GROUP_DIRECT_USER_MEMBERSHIP_ADDED,
                event_time__gte=now,
            ).values_list("modified_user_group__name", flat=True)
        )
        self.assertSetEqual(
            user_group_names,
            expected_user_group_names,
        )


class AdminCreateUserTest(ZulipTestCase):
    def test_create_user_backend(self) -> None:
        # This test should give us complete coverage on
        # create_user_backend. It mostly exercises error
        # conditions, and it also does a basic test of the success
        # path.

        admin = self.example_user("hamlet")
        realm = admin.realm
        self.login_user(admin)
        do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        valid_params = dict(
            email="romeo@zulip.net",
            password="xxxx",
            full_name="Romeo Montague",
        )
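
        # Creating users via this endpoint requires both the can_create_users
        # permission and the organization administrator role; both conditions
        # are exercised below.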
        self.assertEqual(admin.can_create_users, False)
        result = self.client_post("/json/users", valid_params)
        self.assert_json_error(result, "User not authorized to create users")

        do_change_can_create_users(admin, True)
        # can_create_users is insufficient without being a realm administrator:
        do_change_user_role(admin, UserProfile.ROLE_MEMBER, acting_user=None)
        result = self.client_post("/json/users", valid_params)
        self.assert_json_error(result, "Must be an organization administrator")

        do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)

        result = self.client_post("/json/users", {})
        self.assert_json_error(result, "Missing 'email' argument")

        result = self.client_post(
            "/json/users",
            dict(
                email="romeo@not-zulip.com",
            ),
        )
        self.assert_json_error(result, "Missing 'password' argument")

        result = self.client_post(
            "/json/users",
            dict(
                email="romeo@not-zulip.com",
                password="xxxx",
            ),
        )
        self.assert_json_error(result, "Missing 'full_name' argument")

        # Test short_name gets properly ignored
        result = self.client_post(
            "/json/users",
            dict(
                email="romeo@zulip.com",
                password="xxxx",
                full_name="Romeo Montague",
                short_name="DEPRECATED",
            ),
        )
        self.assert_json_success(result, ignored_parameters=["short_name"])

        result = self.client_post(
            "/json/users",
            dict(
                email="broken",
                password="xxxx",
                full_name="Romeo Montague",
            ),
        )
        self.assert_json_error(result, "Bad name or username")

        do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
        result = self.client_post(
            "/json/users",
            dict(
                email="romeo@not-zulip.com",
                password="xxxx",
                full_name="Romeo Montague",
            ),
        )
        self.assert_json_error(
            result, "Email 'romeo@not-zulip.com' not allowed in this organization"
        )

        RealmDomain.objects.create(realm=get_realm("zulip"), domain="zulip.net")
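
        # For a long time the zxcvbn strength check only ran in the browser;
        # the server-side check exercised below ensures a too-weak password is
        # rejected even if the frontend validation is bypassed.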
# Check can't use a bad password with zxcvbn enabled
|
|
|
|
with self.settings(PASSWORD_MIN_LENGTH=6, PASSWORD_MIN_GUESSES=1000):
|
|
|
|
result = self.client_post("/json/users", valid_params)
|
|
|
|
self.assert_json_error(result, "The password is too weak.")

        result = self.client_post("/json/users", valid_params)
        self.assert_json_success(result)

        # Romeo is a newly registered user
        new_user = get_user_by_delivery_email("romeo@zulip.net", get_realm("zulip"))
        result = orjson.loads(result.content)
        self.assertEqual(new_user.full_name, "Romeo Montague")
        self.assertEqual(new_user.id, result["user_id"])
        self.assertEqual(new_user.tos_version, UserProfile.TOS_VERSION_BEFORE_FIRST_LOGIN)

        # Make sure the recipient field is set correctly.
        self.assertEqual(
            new_user.recipient, Recipient.objects.get(type=Recipient.PERSONAL, type_id=new_user.id)
        )

        # we can't create the same user twice.
        result = self.client_post("/json/users", valid_params)
        self.assert_json_error(result, "Email 'romeo@zulip.net' already in use")

        # Don't allow user to sign up with disposable email.
        realm.emails_restricted_to_domains = False
        realm.disallow_disposable_email_addresses = True
        realm.save()

        valid_params["email"] = "abc@mailnator.com"
        result = self.client_post("/json/users", valid_params)
        self.assert_json_error(
            result, "Disposable email addresses are not allowed in this organization"
        )

        # Don't allow creating a user with + in their email address when realm
        # is restricted to a domain.
        realm.emails_restricted_to_domains = True
        realm.save()

        valid_params["email"] = "iago+label@zulip.com"
        result = self.client_post("/json/users", valid_params)
        self.assert_json_error(result, "Email addresses containing + are not allowed.")

        # Users can be created with + in their email address when realm
        # is not restricted to a domain.
        realm.emails_restricted_to_domains = False
        realm.save()

        valid_params["email"] = "iago+label@zulip.com"
        result = self.client_post("/json/users", valid_params)
        self.assert_json_success(result)


class UserProfileTest(ZulipTestCase):
    def test_valid_user_id(self) -> None:
        realm = get_realm("zulip")
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        bot = self.example_user("default_bot")

        # Invalid user ID
        invalid_uid: object = 1000
        with self.assertRaisesRegex(ValidationError, r"User IDs is not a list"):
            check_valid_user_ids(realm.id, invalid_uid)
        with self.assertRaisesRegex(ValidationError, rf"Invalid user ID: {invalid_uid}"):
            check_valid_user_ids(realm.id, [invalid_uid])

        invalid_uid = "abc"
        with self.assertRaisesRegex(ValidationError, r"User IDs\[0\] is not an integer"):
            check_valid_user_ids(realm.id, [invalid_uid])

        invalid_uid = str(othello.id)
        with self.assertRaisesRegex(ValidationError, r"User IDs\[0\] is not an integer"):
            check_valid_user_ids(realm.id, [invalid_uid])

        # User is in different realm
        with self.assertRaisesRegex(ValidationError, rf"Invalid user ID: {hamlet.id}"):
            check_valid_user_ids(get_realm("zephyr").id, [hamlet.id])

        # User is not active
        change_user_is_active(hamlet, False)
        with self.assertRaisesRegex(ValidationError, rf"User with ID {hamlet.id} is deactivated"):
            check_valid_user_ids(realm.id, [hamlet.id])
        check_valid_user_ids(realm.id, [hamlet.id], allow_deactivated=True)

        # User is a bot
        with self.assertRaisesRegex(ValidationError, rf"User with ID {bot.id} is a bot"):
            check_valid_user_ids(realm.id, [bot.id])

        # Successfully get a non-bot, active user belonging to your realm
        check_valid_user_ids(realm.id, [othello.id])

    def test_cache_invalidation(self) -> None:
        hamlet = self.example_user("hamlet")
        with mock.patch("zerver.lib.cache.delete_display_recipient_cache") as m:
            hamlet.full_name = "Hamlet Junior"
            hamlet.save(update_fields=["full_name"])

        self.assertTrue(m.called)
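        # full_name feeds into display-recipient data, so saving it should flush
        # that cache; long_term_idle (below) does not, so no flush is expected.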

        with mock.patch("zerver.lib.cache.delete_display_recipient_cache") as m:
            hamlet.long_term_idle = True
            hamlet.save(update_fields=["long_term_idle"])

        self.assertFalse(m.called)

    def test_user_ids_to_users(self) -> None:
        real_user_ids = [
            self.example_user("hamlet").id,
            self.example_user("cordelia").id,
        ]

        self.assertEqual(user_ids_to_users([], get_realm("zulip")), [])
        self.assertEqual(
            {
                user_profile.id
                for user_profile in user_ids_to_users(real_user_ids, get_realm("zulip"))
            },
            set(real_user_ids),
        )
        with self.assertRaises(JsonableError):
            user_ids_to_users([1234], get_realm("zephyr"))
        with self.assertRaises(JsonableError):
            user_ids_to_users(real_user_ids, get_realm("zephyr"))

    def test_get_accounts_for_email(self) -> None:
        reset_email_visibility_to_everyone_in_zulip_realm()

        def check_account_present_in_accounts(user: UserProfile, accounts: List[Account]) -> None:
            for account in accounts:
                realm = user.realm
                if (
                    account["avatar"] == avatar_url(user)
                    and account["full_name"] == user.full_name
                    and account["realm_name"] == realm.name
                    and account["realm_id"] == realm.id
                ):
                    return
            raise AssertionError("Account not found")
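
        # The helper above scans the returned accounts for an entry whose avatar,
        # full name, and realm match the given user, failing loudly otherwise.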

        lear_realm = get_realm("lear")
        cordelia_in_zulip = self.example_user("cordelia")
        cordelia_in_lear = get_user_by_delivery_email("cordelia@zulip.com", lear_realm)

        email = "cordelia@zulip.com"
        accounts = get_accounts_for_email(email)
        self.assert_length(accounts, 2)
        check_account_present_in_accounts(cordelia_in_zulip, accounts)
        check_account_present_in_accounts(cordelia_in_lear, accounts)

        email = "CORDelia@zulip.com"
        accounts = get_accounts_for_email(email)
        self.assert_length(accounts, 2)
        check_account_present_in_accounts(cordelia_in_zulip, accounts)
        check_account_present_in_accounts(cordelia_in_lear, accounts)

        email = "IAGO@ZULIP.COM"
        accounts = get_accounts_for_email(email)
        self.assert_length(accounts, 1)
        check_account_present_in_accounts(self.example_user("iago"), accounts)

        # We verify that get_accounts_for_email doesn't return deactivated users' accounts
        user = self.example_user("hamlet")
        do_deactivate_user(user, acting_user=None)
        email = self.example_email("hamlet")
        accounts = get_accounts_for_email(email)
        with self.assertRaises(AssertionError):
            check_account_present_in_accounts(user, accounts)

    def test_get_source_profile(self) -> None:
        reset_email_visibility_to_everyone_in_zulip_realm()
        zulip_realm_id = get_realm("zulip").id
        iago = get_source_profile("iago@zulip.com", zulip_realm_id)
        assert iago is not None
        self.assertEqual(iago.email, "iago@zulip.com")
        self.assertEqual(iago.realm, get_realm("zulip"))

        iago = get_source_profile("IAGO@ZULIP.com", zulip_realm_id)
        assert iago is not None
        self.assertEqual(iago.email, "iago@zulip.com")

        lear_realm_id = get_realm("lear").id
        cordelia = get_source_profile("cordelia@zulip.com", lear_realm_id)
        assert cordelia is not None
        self.assertEqual(cordelia.email, "cordelia@zulip.com")

        self.assertIsNone(get_source_profile("iagod@zulip.com", zulip_realm_id))
        self.assertIsNone(get_source_profile("iago@zulip.com", 0))
        self.assertIsNone(get_source_profile("iago@zulip.com", lear_realm_id))

    def test_copy_default_settings_from_another_user(self) -> None:
        iago = self.example_user("iago")
        cordelia = self.example_user("cordelia")
        hamlet = self.example_user("hamlet")
        hamlet.color_scheme = UserProfile.COLOR_SCHEME_LIGHT

        cordelia.default_language = "de"
        cordelia.default_view = "all_messages"
        cordelia.emojiset = "twitter"
        cordelia.timezone = "America/Phoenix"
        cordelia.color_scheme = UserProfile.COLOR_SCHEME_NIGHT
        cordelia.enable_offline_email_notifications = False
        cordelia.enable_stream_push_notifications = True
        cordelia.enter_sends = False
        cordelia.avatar_source = UserProfile.AVATAR_FROM_USER
        cordelia.save()

        # Upload cordelia's avatar
        with get_test_image_file("img.png") as image_file:
            upload_avatar_image(image_file, cordelia, cordelia)

        UserHotspot.objects.filter(user=cordelia).delete()
        UserHotspot.objects.filter(user=iago).delete()
        hotspots_completed = {"intro_streams", "intro_topics"}
        for hotspot in hotspots_completed:
            UserHotspot.objects.create(user=cordelia, hotspot=hotspot)

        # Check that we didn't send any realm_user update events to
        # users; this work is happening before the user account is
        # created, so any changes will be reflected in the "add" event
        # introducing the user to clients.
        with self.capture_send_event_calls(expected_num_events=0):
            copy_default_settings(cordelia, iago)
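
        # copy_default_settings copies the source user's personal settings (and,
        # for a user source, their avatar and completed hotspots) onto the target.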

        # We verify that cordelia and iago match, but hamlet has the defaults.
        self.assertEqual(iago.full_name, "Cordelia, Lear's daughter")
        self.assertEqual(cordelia.full_name, "Cordelia, Lear's daughter")
        self.assertEqual(hamlet.full_name, "King Hamlet")

        self.assertEqual(iago.default_language, "de")
        self.assertEqual(cordelia.default_language, "de")
        self.assertEqual(hamlet.default_language, "en")

        self.assertEqual(iago.emojiset, "twitter")
        self.assertEqual(cordelia.emojiset, "twitter")
        self.assertEqual(hamlet.emojiset, "google")

        self.assertEqual(iago.timezone, "America/Phoenix")
        self.assertEqual(cordelia.timezone, "America/Phoenix")
        self.assertEqual(hamlet.timezone, "")

        self.assertEqual(iago.color_scheme, UserProfile.COLOR_SCHEME_NIGHT)
        self.assertEqual(cordelia.color_scheme, UserProfile.COLOR_SCHEME_NIGHT)
        self.assertEqual(hamlet.color_scheme, UserProfile.COLOR_SCHEME_LIGHT)

        self.assertEqual(iago.enable_offline_email_notifications, False)
        self.assertEqual(cordelia.enable_offline_email_notifications, False)
        self.assertEqual(hamlet.enable_offline_email_notifications, True)

        self.assertEqual(iago.enable_stream_push_notifications, True)
        self.assertEqual(cordelia.enable_stream_push_notifications, True)
        self.assertEqual(hamlet.enable_stream_push_notifications, False)

        self.assertEqual(iago.enter_sends, False)
        self.assertEqual(cordelia.enter_sends, False)
        self.assertEqual(hamlet.enter_sends, True)

        hotspots = set(UserHotspot.objects.filter(user=iago).values_list("hotspot", flat=True))
        self.assertEqual(hotspots, hotspots_completed)

    def test_copy_default_settings_from_realm_user_default(self) -> None:
        cordelia = self.example_user("cordelia")
        realm = get_realm("zulip")
        realm_user_default = RealmUserDefault.objects.get(realm=realm)

        realm_user_default.default_view = "recent_topics"
        realm_user_default.emojiset = "twitter"
        realm_user_default.color_scheme = UserProfile.COLOR_SCHEME_LIGHT
        realm_user_default.enable_offline_email_notifications = False
        realm_user_default.enable_stream_push_notifications = True
        realm_user_default.enter_sends = True
        realm_user_default.save()

        # Check that we didn't send any realm_user update events to
        # users; this work is happening before the user account is
        # created, so any changes will be reflected in the "add" event
        # introducing the user to clients.
        with self.capture_send_event_calls(expected_num_events=0):
            copy_default_settings(realm_user_default, cordelia)

        self.assertEqual(cordelia.default_view, "recent_topics")
        self.assertEqual(cordelia.emojiset, "twitter")
        self.assertEqual(cordelia.color_scheme, UserProfile.COLOR_SCHEME_LIGHT)
        self.assertEqual(cordelia.enable_offline_email_notifications, False)
        self.assertEqual(cordelia.enable_stream_push_notifications, True)
        self.assertEqual(cordelia.enter_sends, True)

    def test_get_user_by_id_in_realm_including_cross_realm(self) -> None:
        realm = get_realm("zulip")
        internal_realm = get_realm(settings.SYSTEM_BOT_REALM)
        hamlet = self.example_user("hamlet")
        othello = self.example_user("othello")
        bot = get_system_bot(settings.WELCOME_BOT, internal_realm.id)

        # Pass in the ID of a cross-realm bot and a valid realm
        cross_realm_bot = get_user_by_id_in_realm_including_cross_realm(bot.id, realm)
        self.assertEqual(cross_realm_bot.email, bot.email)
        self.assertEqual(cross_realm_bot.id, bot.id)

        # Pass in the ID of a cross-realm bot but with an invalid realm;
        # note that the realm should be irrelevant here
        cross_realm_bot = get_user_by_id_in_realm_including_cross_realm(bot.id, None)
        self.assertEqual(cross_realm_bot.email, bot.email)
        self.assertEqual(cross_realm_bot.id, bot.id)

        # Pass in the ID of a non-cross-realm user with a realm
        user_profile = get_user_by_id_in_realm_including_cross_realm(othello.id, realm)
        self.assertEqual(user_profile.email, othello.email)
        self.assertEqual(user_profile.id, othello.id)

        # If the realm doesn't match, or if the ID is not that of a
        # cross-realm bot, UserProfile.DoesNotExist is raised
        with self.assertRaises(UserProfile.DoesNotExist):
            get_user_by_id_in_realm_including_cross_realm(hamlet.id, None)

    def test_cross_realm_dicts(self) -> None:
        def user_row(email: str) -> Dict[str, object]:
            user = UserProfile.objects.get(email=email)
            avatar_url = get_avatar_field(
                user_id=user.id,
                realm_id=user.realm_id,
                email=user.delivery_email,
                avatar_source=user.avatar_source,
                avatar_version=1,
                medium=False,
                client_gravatar=False,
            )
            return dict(
                # bot-specific fields
                avatar_url=avatar_url,
                date_joined=user.date_joined.isoformat(),
                delivery_email=email,
                email=email,
                full_name=user.full_name,
                user_id=user.id,
                # common fields
                avatar_version=1,
                bot_owner_id=None,
                bot_type=1,
                is_active=True,
                is_admin=False,
                is_billing_admin=False,
                is_bot=True,
                is_guest=False,
                is_owner=False,
                is_system_bot=True,
                role=400,
                timezone="",
            )

        expected_emails = [
            "emailgateway@zulip.com",
            "notification-bot@zulip.com",
            "welcome-bot@zulip.com",
        ]

        expected_dicts = [user_row(email) for email in expected_emails]

        with self.assert_database_query_count(1):
            actual_dicts = get_cross_realm_dicts()

        self.assertEqual(actual_dicts, expected_dicts)

        # Now it should be cached.
        with self.assert_database_query_count(0, keep_cache_warm=True):
            actual_dicts = get_cross_realm_dicts()

        self.assertEqual(actual_dicts, expected_dicts)
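
        # All cross-realm bot dicts live under a single shared cache key, which
        # is why the warm-cache call above needs zero database queries.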

        # Test cache invalidation
        welcome_bot = UserProfile.objects.get(email="welcome-bot@zulip.com")
        welcome_bot.full_name = "fred"
        welcome_bot.save()
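        # Saving a cross-realm bot invalidates that shared cache entry, so the
        # next call below has to rebuild it with a single query.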

        with self.assert_database_query_count(1, keep_cache_warm=True):
            actual_dicts = get_cross_realm_dicts()

        expected_dicts = [user_row(email) for email in expected_emails]
        self.assertEqual(actual_dicts, expected_dicts)

    def test_get_user_subscription_status(self) -> None:
        self.login("hamlet")
        iago = self.example_user("iago")
        stream = get_stream("Rome", iago.realm)

        # Invalid user ID.
        result = self.client_get(f"/json/users/25/subscriptions/{stream.id}")
        self.assert_json_error(result, "No such user")

        # Invalid stream ID.
        result = self.client_get(f"/json/users/{iago.id}/subscriptions/25")
        self.assert_json_error(result, "Invalid stream ID")

        result = orjson.loads(
            self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content
        )
        self.assertFalse(result["is_subscribed"])

        # Subscribe to the stream.
        self.subscribe(iago, stream.name)
        with self.assert_database_query_count(6):
            result = orjson.loads(
                self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content
            )

        self.assertTrue(result["is_subscribed"])

        # Logging in with a Guest user.
        polonius = self.example_user("polonius")
        self.login("polonius")
        self.assertTrue(polonius.is_guest)
        self.assertTrue(stream.is_web_public)
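        # A guest can query another user's subscription here because "Rome" is a
        # web-public stream (asserted above).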

        result = orjson.loads(
            self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content
        )
        self.assertTrue(result["is_subscribed"])

        self.login("iago")
        stream = self.make_stream("private_stream", invite_only=True)
        # Unsubscribed admin can check subscription status in a private stream.
        result = orjson.loads(
            self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content
        )
        self.assertFalse(result["is_subscribed"])

        # Unsubscribed non-admins cannot check subscription status in a private stream.
        self.login("shiva")
        result = self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}")
        self.assert_json_error(result, "Invalid stream ID")

        # Subscribed non-admins can check subscription status in a private stream.
        self.subscribe(self.example_user("shiva"), stream.name)
        result = orjson.loads(
            self.client_get(f"/json/users/{iago.id}/subscriptions/{stream.id}").content
        )
        self.assertFalse(result["is_subscribed"])


class ActivateTest(ZulipTestCase):
    def test_basics(self) -> None:
        user = self.example_user("hamlet")
        do_deactivate_user(user, acting_user=None)
        self.assertFalse(user.is_active)
        do_reactivate_user(user, acting_user=None)
        self.assertTrue(user.is_active)

    def test_subscriptions_is_user_active(self) -> None:
        user = self.example_user("hamlet")
        do_deactivate_user(user, acting_user=None)
        self.assertFalse(user.is_active)
        self.assertTrue(Subscription.objects.filter(user_profile=user).exists())
        self.assertFalse(
            Subscription.objects.filter(user_profile=user, is_user_active=True).exists()
        )

        do_reactivate_user(user, acting_user=None)
        self.assertTrue(user.is_active)
        self.assertTrue(Subscription.objects.filter(user_profile=user).exists())
        self.assertFalse(
            Subscription.objects.filter(user_profile=user, is_user_active=False).exists()
        )

    def test_api(self) -> None:
        admin = self.example_user("othello")
        do_change_user_role(admin, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None)
        self.login("othello")

        user = self.example_user("hamlet")
        self.assertTrue(user.is_active)

        result = self.client_delete(f"/json/users/{user.id}")
        self.assert_json_success(result)
        user = self.example_user("hamlet")
        self.assertFalse(user.is_active)

        result = self.client_post(f"/json/users/{user.id}/reactivate")
        self.assert_json_success(result)
        user = self.example_user("hamlet")
        self.assertTrue(user.is_active)

    def test_email_sent(self) -> None:
        self.login("iago")
        user = self.example_user("hamlet")

        # Verify no email sent by default.
        result = self.client_delete(f"/json/users/{user.id}", dict())
        self.assert_json_success(result)
        from django.core.mail import outbox

        self.assert_length(outbox, 0)
        user.refresh_from_db()
        self.assertFalse(user.is_active)

        # Reactivate user
        do_reactivate_user(user, acting_user=None)
        user.refresh_from_db()
        self.assertTrue(user.is_active)

        # Verify that an email is sent when a deactivation_notification_comment
        # is passed.
        result = self.client_delete(
            f"/json/users/{user.id}",
            dict(
                deactivation_notification_comment="Dear Hamlet,\nyou just got deactivated.",
            ),
        )
        self.assert_json_success(result)
        user.refresh_from_db()
        self.assertFalse(user.is_active)

        self.assert_length(outbox, 1)
        msg = outbox[0]
        self.assertEqual(msg.subject, "Notification of account deactivation on Zulip Dev")
        self.assert_length(msg.reply_to, 1)
        self.assertEqual(msg.reply_to[0], "noreply@testserver")
        self.assertIn("Dear Hamlet,", msg.body)

    def test_api_with_nonexistent_user(self) -> None:
        self.login("iago")

        # Organization administrator cannot deactivate organization owner.
        result = self.client_delete(f'/json/users/{self.example_user("desdemona").id}')
        self.assert_json_error(result, "Must be an organization owner")

        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")
        do_change_user_role(iago, UserProfile.ROLE_REALM_OWNER, acting_user=None)

        # Cannot deactivate a user with the bot api
        result = self.client_delete("/json/bots/{}".format(self.example_user("hamlet").id))
        self.assert_json_error(result, "No such bot")

        # Cannot deactivate a nonexistent user.
        invalid_user_id = 1000
        result = self.client_delete(f"/json/users/{invalid_user_id}")
        self.assert_json_error(result, "No such user")

        result = self.client_delete("/json/users/{}".format(self.example_user("webhook_bot").id))
        self.assert_json_error(result, "No such user")

        result = self.client_delete(f"/json/users/{desdemona.id}")
        self.assert_json_success(result)

        result = self.client_delete(f"/json/users/{iago.id}")
        self.assert_json_error(result, "Cannot deactivate the only organization owner")

        # Cannot reactivate a nonexistent user.
        invalid_user_id = 1000
        result = self.client_post(f"/json/users/{invalid_user_id}/reactivate")
        self.assert_json_error(result, "No such user")

    def test_api_with_insufficient_permissions(self) -> None:
        non_admin = self.example_user("othello")
        do_change_user_role(non_admin, UserProfile.ROLE_MEMBER, acting_user=None)
        self.login("othello")

        # Cannot deactivate a user with the users api
        result = self.client_delete("/json/users/{}".format(self.example_user("hamlet").id))
        self.assert_json_error(result, "Insufficient permission")

        # Cannot reactivate a user
        result = self.client_post(
            "/json/users/{}/reactivate".format(self.example_user("hamlet").id)
        )
        self.assert_json_error(result, "Insufficient permission")

    def test_revoke_invites(self) -> None:
        """
        Verify that any invitations generated by the user get revoked
        when the user is deactivated
        """
        iago = self.example_user("iago")
        desdemona = self.example_user("desdemona")

        invite_expires_in_minutes = 2 * 24 * 60
        do_invite_users(
            iago,
            ["new1@zulip.com", "new2@zulip.com"],
            [],
            invite_expires_in_minutes=invite_expires_in_minutes,
            invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"],
        )
        do_invite_users(
            desdemona,
            ["new3@zulip.com", "new4@zulip.com"],
            [],
            invite_expires_in_minutes=invite_expires_in_minutes,
            invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"],
        )

        do_invite_users(
            iago,
            ["new5@zulip.com"],
            [],
            invite_expires_in_minutes=None,
            invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"],
        )
        do_invite_users(
            desdemona,
            ["new6@zulip.com"],
            [],
            invite_expires_in_minutes=None,
            invite_as=PreregistrationUser.INVITE_AS["REALM_ADMIN"],
        )

        iago_multiuse_key = do_create_multiuse_invite_link(
            iago, PreregistrationUser.INVITE_AS["MEMBER"], invite_expires_in_minutes
        ).split("/")[-2]
        desdemona_multiuse_key = do_create_multiuse_invite_link(
            desdemona, PreregistrationUser.INVITE_AS["MEMBER"], invite_expires_in_minutes
        ).split("/")[-2]

        iago_never_expire_multiuse_key = do_create_multiuse_invite_link(
            iago, PreregistrationUser.INVITE_AS["MEMBER"], None
        ).split("/")[-2]
        desdemona_never_expire_multiuse_key = do_create_multiuse_invite_link(
            desdemona, PreregistrationUser.INVITE_AS["MEMBER"], None
        ).split("/")[-2]

        self.assertEqual(
            filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(referred_by=iago)
            ).count(),
            3,
        )
        self.assertEqual(
            filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(referred_by=desdemona)
            ).count(),
            3,
        )
        self.assertTrue(
            assert_is_not_none(
                Confirmation.objects.get(confirmation_key=iago_multiuse_key).expiry_date
            )
            > timezone_now()
        )
        self.assertTrue(
            assert_is_not_none(
                Confirmation.objects.get(confirmation_key=desdemona_multiuse_key).expiry_date
            )
            > timezone_now()
        )
        self.assertIsNone(
            Confirmation.objects.get(confirmation_key=iago_never_expire_multiuse_key).expiry_date
        )
        self.assertIsNone(
            Confirmation.objects.get(
                confirmation_key=desdemona_never_expire_multiuse_key
            ).expiry_date
        )

        do_deactivate_user(iago, acting_user=None)

        # Now we verify that invitations generated by iago were revoked, while desdemona's
        # remain valid.
        self.assertEqual(
            filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(referred_by=iago)
            ).count(),
            0,
        )
        self.assertEqual(
            filter_to_valid_prereg_users(
                PreregistrationUser.objects.filter(referred_by=desdemona)
            ).count(),
            3,
        )
        self.assertTrue(
            assert_is_not_none(
                Confirmation.objects.get(confirmation_key=iago_multiuse_key).expiry_date
            )
            <= timezone_now()
        )
        self.assertTrue(
            assert_is_not_none(
                Confirmation.objects.get(confirmation_key=desdemona_multiuse_key).expiry_date
            )
            > timezone_now()
        )
        self.assertTrue(
            assert_is_not_none(
                Confirmation.objects.get(
                    confirmation_key=iago_never_expire_multiuse_key
                ).expiry_date
            )
            <= timezone_now()
        )
        self.assertIsNone(
            Confirmation.objects.get(
                confirmation_key=desdemona_never_expire_multiuse_key
            ).expiry_date
        )

    def test_clear_sessions(self) -> None:
        user = self.example_user("hamlet")
        self.login_user(user)
        session_key = self.client.session.session_key
        self.assertTrue(session_key)

        result = self.client_get("/json/users")
        self.assert_json_success(result)
        self.assertEqual(Session.objects.filter(pk=session_key).count(), 1)
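
        # Session deletion is deferred to an on_commit hook (CVE-2022-24751), so
        # captureOnCommitCallbacks(execute=True) is needed for it to run inside
        # the test transaction.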
        with self.captureOnCommitCallbacks(execute=True):
            do_deactivate_user(user, acting_user=None)
        self.assertEqual(Session.objects.filter(pk=session_key).count(), 0)

        result = self.client_get("/json/users")
        self.assert_json_error(
            result, "Not logged in: API authentication or user session required", 401
        )

    def test_clear_scheduled_jobs(self) -> None:
        user = self.example_user("hamlet")
        send_future_email(
            "zerver/emails/followup_day2",
            user.realm,
            to_user_ids=[user.id],
            delay=datetime.timedelta(hours=1),
        )
        self.assertEqual(ScheduledEmail.objects.count(), 1)
        do_deactivate_user(user, acting_user=None)
        self.assertEqual(ScheduledEmail.objects.count(), 0)

    def test_send_future_email_with_multiple_recipients(self) -> None:
        hamlet = self.example_user("hamlet")
        iago = self.example_user("iago")
        send_future_email(
            "zerver/emails/followup_day2",
            iago.realm,
            to_user_ids=[hamlet.id, iago.id],
            delay=datetime.timedelta(hours=1),
        )
        self.assertEqual(
            ScheduledEmail.objects.filter(users__in=[hamlet, iago]).distinct().count(), 1
        )
        email = ScheduledEmail.objects.all().first()
        assert email is not None and email.users is not None
        self.assertEqual(email.users.count(), 2)

    def test_clear_schedule_emails(self) -> None:
        hamlet = self.example_user("hamlet")
        iago = self.example_user("iago")
        send_future_email(
            "zerver/emails/followup_day2",
            iago.realm,
            to_user_ids=[hamlet.id, iago.id],
            delay=datetime.timedelta(hours=1),
        )
        self.assertEqual(ScheduledEmail.objects.count(), 1)
        clear_scheduled_emails(hamlet.id)
        self.assertEqual(ScheduledEmail.objects.count(), 1)
        self.assertEqual(ScheduledEmail.objects.filter(users=hamlet).count(), 0)
        self.assertEqual(ScheduledEmail.objects.filter(users=iago).count(), 1)

    def test_deliver_scheduled_emails(self) -> None:
        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        send_future_email(
            "zerver/emails/followup_day2",
            iago.realm,
            to_user_ids=[hamlet.id, iago.id],
            delay=datetime.timedelta(hours=1),
        )
        self.assertEqual(ScheduledEmail.objects.count(), 1)
        email = ScheduledEmail.objects.all().first()
        deliver_scheduled_emails(assert_is_not_none(email))
        from django.core.mail import outbox

        self.assert_length(outbox, 1)
        for message in outbox:
            self.assertEqual(
                set(message.to),
                {
                    str(Address(display_name=hamlet.full_name, addr_spec=hamlet.delivery_email)),
                    str(Address(display_name=iago.full_name, addr_spec=iago.delivery_email)),
                },
            )
        self.assertEqual(ScheduledEmail.objects.count(), 0)

    def test_deliver_scheduled_emails_no_addressees(self) -> None:
        iago = self.example_user("iago")
        hamlet = self.example_user("hamlet")
        to_user_ids = [hamlet.id, iago.id]
        send_future_email(
            "zerver/emails/followup_day2",
            iago.realm,
            to_user_ids=to_user_ids,
            delay=datetime.timedelta(hours=1),
        )
        self.assertEqual(ScheduledEmail.objects.count(), 1)
        email = ScheduledEmail.objects.all().first()
        assert email is not None
        email.users.remove(*to_user_ids)
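
        # With every recipient removed, delivery below should send nothing, log a
        # warning, and delete the now-orphaned ScheduledEmail row.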

        email_id = email.id
        scheduled_at = email.scheduled_timestamp
        with self.assertLogs("zulip.send_email", level="INFO") as info_log:
            deliver_scheduled_emails(email)
        from django.core.mail import outbox

        self.assert_length(outbox, 0)
        self.assertEqual(ScheduledEmail.objects.count(), 0)
        self.assertEqual(
            info_log.output,
            [
                f"WARNING:zulip.send_email:ScheduledEmail {email_id} at {scheduled_at} "
                "had empty users and address attributes: "
                "{'template_prefix': 'zerver/emails/followup_day2', 'from_name': None, "
                "'from_address': None, 'language': None, 'context': {}}"
            ],
        )


class RecipientInfoTest(ZulipTestCase):
    def test_stream_recipient_info(self) -> None:
        hamlet = self.example_user("hamlet")
        cordelia = self.example_user("cordelia")
        othello = self.example_user("othello")

        # These tests were written with the old default for
        # enable_online_push_notifications; that default is better for
        # testing the full code path anyway.
        hamlet.enable_online_push_notifications = False
        cordelia.enable_online_push_notifications = False
        othello.enable_online_push_notifications = False
        hamlet.save()
        cordelia.save()
        othello.save()

        realm = hamlet.realm

        stream_name = "Test stream"
        topic_name = "test topic"

        for user in [hamlet, cordelia, othello]:
            self.subscribe(user, stream_name)

        stream = get_stream(stream_name, realm)
        recipient = stream.recipient
        assert recipient is not None

        stream_topic = StreamTopicTarget(
            stream_id=stream.id,
            topic_name=topic_name,
        )

        info = get_recipient_info(
            realm_id=realm.id,
            recipient=recipient,
            sender_id=hamlet.id,
            stream_topic=stream_topic,
            possible_topic_wildcard_mention=False,
            possible_stream_wildcard_mention=False,
        )

        all_user_ids = {hamlet.id, cordelia.id, othello.id}

        expected_info = RecipientInfoResult(
            active_user_ids=all_user_ids,
            online_push_user_ids=set(),
            pm_mention_email_disabled_user_ids=set(),
            pm_mention_push_disabled_user_ids=set(),
            stream_push_user_ids=set(),
            stream_email_user_ids=set(),
            topic_wildcard_mention_user_ids=set(),
            stream_wildcard_mention_user_ids=set(),
            followed_topic_push_user_ids=set(),
            followed_topic_email_user_ids=set(),
            topic_wildcard_mention_in_followed_topic_user_ids=set(),
            stream_wildcard_mention_in_followed_topic_user_ids=set(),
            muted_sender_user_ids=set(),
            um_eligible_user_ids=all_user_ids,
            long_term_idle_user_ids=set(),
            default_bot_user_ids=set(),
            service_bot_tuples=[],
            all_bot_user_ids=set(),
        )

        self.assertEqual(info, expected_info)
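
        # The rest of this test flips individual notification settings and checks
        # that only the corresponding buckets of RecipientInfoResult change.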

        hamlet.enable_offline_email_notifications = False
        hamlet.enable_offline_push_notifications = False
        hamlet.save()
        info = get_recipient_info(
            realm_id=realm.id,
            recipient=recipient,
            sender_id=hamlet.id,
            stream_topic=stream_topic,
            possible_stream_wildcard_mention=False,
        )
        self.assertEqual(info.pm_mention_email_disabled_user_ids, {hamlet.id})
        self.assertEqual(info.pm_mention_push_disabled_user_ids, {hamlet.id})
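        # Disabling the "offline" PM/mention settings doesn't drop hamlet from the
        # recipient info; he is just recorded in the *_disabled buckets, which the
        # message-send path consults when deciding whether to enqueue notifications.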
|
|
|
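# Re-enable Hamlet's direct message / mention notification settings for the checks below.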
hamlet.enable_offline_email_notifications = True
|
|
|
|
hamlet.enable_offline_push_notifications = True
|
|
|
|
hamlet.save()
|
|
|
|
|
2019-09-03 23:27:45 +02:00
|
|
|
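# Disable wildcard mention notifications for Cordelia, so she should be left out of the wildcard mention sets below.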
cordelia.wildcard_mentions_notify = False
|
|
|
|
cordelia.save()
|
2019-02-13 10:22:16 +01:00
|
|
|
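# With the global stream push notification setting enabled, Hamlet should appear in stream_push_user_ids.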
hamlet.enable_stream_push_notifications = True
|
|
|
|
hamlet.save()
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-02-13 10:22:16 +01:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=False,
|
2019-02-13 10:22:16 +01:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, {hamlet.id})
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, set())
|
2019-09-03 23:27:45 +02:00
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-09-03 23:27:45 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=True,
|
2019-09-03 23:27:45 +02:00
|
|
|
)
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, {hamlet.id, othello.id})
|
2019-02-13 10:22:16 +01:00
|
|
|
|
2023-05-31 16:56:18 +02:00
|
|
|
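# Enable wildcard mention notifications for Hamlet to exercise the topic wildcard mention cases below.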
do_change_user_setting(
|
|
|
|
hamlet,
|
|
|
|
"wildcard_mentions_notify",
|
|
|
|
True,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
possible_topic_wildcard_mention=True,
|
|
|
|
possible_stream_wildcard_mention=False,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, set())
|
|
|
|
self.assertEqual(info.topic_wildcard_mention_user_ids, set())
|
|
|
|
|
|
|
|
# A user who sent a message to the topic, or reacted to a message in the topic,
|
|
|
|
# is only considered a candidate to be notified for a topic wildcard mention.
|
|
|
|
self.send_stream_message(hamlet, stream_name, content="test message", topic_name=topic_name)
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
possible_topic_wildcard_mention=True,
|
|
|
|
possible_stream_wildcard_mention=False,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, set())
|
|
|
|
self.assertEqual(info.topic_wildcard_mention_user_ids, {hamlet.id})
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
possible_topic_wildcard_mention=False,
|
|
|
|
possible_stream_wildcard_mention=True,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, {hamlet.id, othello.id})
|
|
|
|
self.assertEqual(info.topic_wildcard_mention_user_ids, set())
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
possible_topic_wildcard_mention=True,
|
|
|
|
possible_stream_wildcard_mention=True,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, {hamlet.id, othello.id})
|
|
|
|
self.assertEqual(info.topic_wildcard_mention_user_ids, {hamlet.id})
|
|
|
|
|
2019-02-13 10:22:16 +01:00
|
|
|
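# A per-stream push_notifications=False override should remove Hamlet from stream_push_user_ids despite his global setting.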
sub = get_subscription(stream_name, hamlet)
|
|
|
|
sub.push_notifications = False
|
|
|
|
sub.save()
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-02-13 10:22:16 +01:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, set())
|
2019-02-13 10:22:16 +01:00
|
|
|
|
|
|
|
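# Conversely, a per-stream push_notifications=True override should add Hamlet back even with the global setting disabled.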
hamlet.enable_stream_push_notifications = False
|
|
|
|
hamlet.save()
|
|
|
|
sub = get_subscription(stream_name, hamlet)
|
|
|
|
sub.push_notifications = True
|
|
|
|
sub.save()
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-02-13 10:22:16 +01:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, {hamlet.id})
|
2019-02-13 10:22:16 +01:00
|
|
|
|
2023-02-03 22:33:37 +01:00
|
|
|
# Now have Hamlet mute the stream and unmute the topic,
|
|
|
|
# which shouldn't omit him from stream_push_user_ids.
|
|
|
|
sub.is_muted = True
|
|
|
|
sub.save()
|
|
|
|
|
|
|
|
do_set_user_topic_visibility_policy(
|
|
|
|
hamlet,
|
|
|
|
stream,
|
|
|
|
topic_name,
|
2023-03-12 16:19:42 +01:00
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.UNMUTED,
|
2023-02-03 22:33:37 +01:00
|
|
|
)
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.stream_push_user_ids, {hamlet.id})
|
|
|
|
|
|
|
|
# Now unmute the stream and remove topic visibility_policy.
|
|
|
|
sub.is_muted = False
|
|
|
|
sub.save()
|
|
|
|
do_set_user_topic_visibility_policy(
|
2023-03-12 16:19:42 +01:00
|
|
|
hamlet, stream, topic_name, visibility_policy=UserTopic.VisibilityPolicy.INHERIT
|
2023-02-03 22:33:37 +01:00
|
|
|
)
|
|
|
|
|
2021-06-06 05:56:52 +02:00
|
|
|
# Now have Hamlet mute the topic to omit him from stream_push_user_ids.
|
2023-02-03 12:57:43 +01:00
|
|
|
do_set_user_topic_visibility_policy(
|
2023-03-03 18:00:27 +01:00
|
|
|
hamlet,
|
|
|
|
stream,
|
|
|
|
topic_name,
|
2023-03-12 16:19:42 +01:00
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.MUTED,
|
2017-10-24 00:07:03 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2017-10-24 00:07:03 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=False,
|
2019-09-03 23:27:45 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, set())
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, set())
|
2019-09-03 23:27:45 +02:00
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-09-03 23:27:45 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=True,
|
2017-10-24 00:07:03 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, set())
|
2019-09-03 23:27:45 +02:00
|
|
|
# Since Hamlet has muted the topic and Cordelia has disabled
|
|
|
|
# wildcard notifications, it should just be Othello here.
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, {othello.id})
|
2017-10-24 00:07:03 +02:00
|
|
|
|
2021-06-06 05:56:52 +02:00
|
|
|
# If Hamlet mutes Cordelia, he should be in `muted_sender_user_ids` for a message
|
|
|
|
# sent by Cordelia.
|
|
|
|
do_mute_user(hamlet, cordelia)
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=cordelia.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=True,
|
2021-06-06 05:56:52 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertTrue(hamlet.id in info.muted_sender_user_ids)
|
2021-06-06 05:56:52 +02:00
|
|
|
|
2019-09-03 23:27:45 +02:00
|
|
|
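# Disable wildcard mention notifications for Othello at the stream level.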
sub = get_subscription(stream_name, othello)
|
|
|
|
sub.wildcard_mentions_notify = False
|
|
|
|
sub.save()
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-09-03 23:27:45 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=True,
|
2019-09-03 23:27:45 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, set())
|
2019-09-03 23:27:45 +02:00
|
|
|
# Verify that stream-level wildcard_mentions_notify=False works correctly.
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, set())
|
2019-09-03 23:27:45 +02:00
|
|
|
|
|
|
|
# Verify that wildcard_mentions_notify=True works as expected as well.
|
|
|
|
sub = get_subscription(stream_name, othello)
|
|
|
|
sub.wildcard_mentions_notify = True
|
|
|
|
sub.save()
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2019-09-03 23:27:45 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2023-06-03 16:51:38 +02:00
|
|
|
possible_stream_wildcard_mention=True,
|
2019-09-03 23:27:45 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
self.assertEqual(info.stream_push_user_ids, set())
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_user_ids, {othello.id})
|
2017-10-24 00:07:03 +02:00
|
|
|
|
2017-10-24 19:25:50 +02:00
|
|
|
# Add a service bot.
|
|
|
|
service_bot = do_create_user(
|
2021-02-12 08:20:45 +01:00
|
|
|
email="service-bot@zulip.com",
|
|
|
|
password="",
|
2017-10-24 19:25:50 +02:00
|
|
|
realm=realm,
|
2021-02-12 08:20:45 +01:00
|
|
|
full_name="",
|
2017-10-24 19:25:50 +02:00
|
|
|
bot_type=UserProfile.EMBEDDED_BOT,
|
2021-02-06 14:27:06 +01:00
|
|
|
acting_user=None,
|
2017-10-24 19:25:50 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2017-10-24 19:25:50 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
python: Use trailing commas consistently.
Automatically generated by the following script, based on the output
of lint with flake8-comma:
import re
import sys

last_filename = None
last_row = None
lines = []
for msg in sys.stdin:
    m = re.match(
        r"\x1b\[35mflake8 \|\x1b\[0m \x1b\[1;31m(.+):(\d+):(\d+): (\w+)", msg
    )
    if m:
        filename, row_str, col_str, err = m.groups()
        row, col = int(row_str), int(col_str)
        if filename == last_filename:
            assert last_row != row
        else:
            if last_filename is not None:
                with open(last_filename, "w") as f:
                    f.writelines(lines)
            with open(filename) as f:
                lines = f.readlines()
            last_filename = filename
        last_row = row
        line = lines[row - 1]
        if err in ["C812", "C815"]:
            lines[row - 1] = line[: col - 1] + "," + line[col - 1 :]
        elif err in ["C819"]:
            assert line[col - 2] == ","
            lines[row - 1] = line[: col - 2] + line[col - 1 :].lstrip(" ")
if last_filename is not None:
    with open(last_filename, "w") as f:
        f.writelines(lines)
Signed-off-by: Anders Kaseorg <anders@zulipchat.com>
2020-04-10 05:23:40 +02:00
|
|
|
possibly_mentioned_user_ids={service_bot.id},
|
2017-10-24 19:25:50 +02:00
|
|
|
)
|
2021-02-12 08:19:30 +01:00
|
|
|
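# The embedded bot should show up in service_bot_tuples with its bot type.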
self.assertEqual(
|
2023-02-10 21:42:51 +01:00
|
|
|
info.service_bot_tuples,
|
2021-02-12 08:19:30 +01:00
|
|
|
[
|
|
|
|
(service_bot.id, UserProfile.EMBEDDED_BOT),
|
|
|
|
],
|
|
|
|
)
|
2017-10-24 19:25:50 +02:00
|
|
|
|
2017-10-24 20:08:19 +02:00
|
|
|
# Add a normal bot.
|
|
|
|
normal_bot = do_create_user(
|
2021-02-12 08:20:45 +01:00
|
|
|
email="normal-bot@zulip.com",
|
|
|
|
password="",
|
2017-10-24 20:08:19 +02:00
|
|
|
realm=realm,
|
2021-02-12 08:20:45 +01:00
|
|
|
full_name="",
|
2017-10-24 20:08:19 +02:00
|
|
|
bot_type=UserProfile.DEFAULT_BOT,
|
2021-02-06 14:27:06 +01:00
|
|
|
acting_user=None,
|
2017-10-24 20:08:19 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2017-10-24 20:08:19 +02:00
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
2020-04-10 05:23:40 +02:00
|
|
|
possibly_mentioned_user_ids={service_bot.id, normal_bot.id},
|
2017-10-24 20:08:19 +02:00
|
|
|
)
|
2023-02-10 21:42:51 +01:00
|
|
|
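# The service bot is excluded from default_bot_user_ids but included in all_bot_user_ids.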
self.assertEqual(info.default_bot_user_ids, {normal_bot.id})
|
|
|
|
self.assertEqual(info.all_bot_user_ids, {normal_bot.id, service_bot.id})
|
2017-10-24 20:08:19 +02:00
|
|
|
|
2023-06-02 09:42:58 +02:00
|
|
|
# Now Hamlet follows the topic with the 'enable_followed_topic_email_notifications',
|
|
|
|
# 'enable_followed_topic_push_notifications' and 'enable_followed_topic_wildcard_mentions_notify'
|
|
|
|
# global settings enabled by default.
|
2023-05-17 16:01:16 +02:00
|
|
|
do_set_user_topic_visibility_policy(
|
|
|
|
hamlet,
|
|
|
|
stream,
|
|
|
|
topic_name,
|
|
|
|
visibility_policy=UserTopic.VisibilityPolicy.FOLLOWED,
|
|
|
|
)
|
|
|
|
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.followed_topic_email_user_ids, {hamlet.id})
|
2023-05-28 17:03:04 +02:00
|
|
|
self.assertEqual(info.followed_topic_push_user_ids, {hamlet.id})
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_in_followed_topic_user_ids, {hamlet.id})
|
2023-05-17 16:01:16 +02:00
|
|
|
|
|
|
|
# Omit Hamlet from followed_topic_email_user_ids
|
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
|
|
|
"enable_followed_topic_email_notifications",
|
|
|
|
False,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2023-05-28 17:03:04 +02:00
|
|
|
# Omit Hamlet from followed_topic_push_user_ids
|
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
|
|
|
"enable_followed_topic_push_notifications",
|
|
|
|
False,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2023-06-03 16:51:38 +02:00
|
|
|
# Omit Hamlet from stream_wildcard_mention_in_followed_topic_user_ids
|
2023-06-02 09:42:58 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
|
|
|
"enable_followed_topic_wildcard_mentions_notify",
|
|
|
|
False,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
2023-05-17 16:01:16 +02:00
|
|
|
|
|
|
|
info = get_recipient_info(
|
|
|
|
realm_id=realm.id,
|
|
|
|
recipient=recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
|
|
|
self.assertEqual(info.followed_topic_email_user_ids, set())
|
2023-05-28 17:03:04 +02:00
|
|
|
self.assertEqual(info.followed_topic_push_user_ids, set())
|
2023-06-03 16:51:38 +02:00
|
|
|
self.assertEqual(info.stream_wildcard_mention_in_followed_topic_user_ids, set())
|
2023-05-17 16:01:16 +02:00
|
|
|
|
2018-05-16 03:07:36 +02:00
|
|
|
def test_get_recipient_info_invalid_recipient_type(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2018-05-16 03:07:36 +02:00
|
|
|
realm = hamlet.realm
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
stream = get_stream("Rome", realm)
|
2018-05-16 03:07:36 +02:00
|
|
|
stream_topic = StreamTopicTarget(
|
|
|
|
stream_id=stream.id,
|
2021-02-12 08:20:45 +01:00
|
|
|
topic_name="test topic",
|
2018-05-16 03:07:36 +02:00
|
|
|
)
|
|
|
|
|
|
|
|
# Make sure get_recipient_info raises a ValueError on invalid recipient types.
|
2021-02-12 08:20:45 +01:00
|
|
|
with self.assertRaisesRegex(ValueError, "Bad recipient type"):
|
2018-05-16 05:07:33 +02:00
|
|
|
invalid_recipient = Recipient(type=999) # 999 is not a valid type
|
|
|
|
get_recipient_info(
|
2021-05-11 13:55:49 +02:00
|
|
|
realm_id=realm.id,
|
2018-05-16 03:07:36 +02:00
|
|
|
recipient=invalid_recipient,
|
|
|
|
sender_id=hamlet.id,
|
|
|
|
stream_topic=stream_topic,
|
|
|
|
)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2017-10-10 04:51:04 +02:00
|
|
|
class BulkUsersTest(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_client_gravatar_option(self) -> None:
|
2023-03-01 07:34:25 +01:00
|
|
|
reset_email_visibility_to_everyone_in_zulip_realm()
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("cordelia")
|
2017-10-10 04:51:04 +02:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2017-10-10 04:51:04 +02:00
|
|
|
|
2018-05-11 01:39:38 +02:00
|
|
|
def get_hamlet_avatar(client_gravatar: bool) -> Optional[str]:
|
2020-08-07 01:09:47 +02:00
|
|
|
data = dict(client_gravatar=orjson.dumps(client_gravatar).decode())
|
2021-02-12 08:20:45 +01:00
|
|
|
result = self.client_get("/json/users", data)
|
2022-06-07 01:37:01 +02:00
|
|
|
rows = self.assert_json_success(result)["members"]
|
2023-07-22 00:34:11 +02:00
|
|
|
[hamlet_data] = (row for row in rows if row["user_id"] == hamlet.id)
|
2021-02-12 08:20:45 +01:00
|
|
|
return hamlet_data["avatar_url"]
|
2017-10-10 04:51:04 +02:00
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
get_hamlet_avatar(client_gravatar=True),
|
2020-04-10 05:23:40 +02:00
|
|
|
None,
|
2017-10-10 04:51:04 +02:00
|
|
|
)
|
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2017-10-10 04:51:04 +02:00
|
|
|
The main purpose of this test is to make sure we
|
|
|
|
return None for avatar_url when client_gravatar is
|
|
|
|
set to True. We also do a sanity check for when it's
|
|
|
|
False, but we leave it to other tests to validate
|
|
|
|
the specific URL.
|
2021-02-12 08:20:45 +01:00
|
|
|
"""
|
2017-10-10 04:51:04 +02:00
|
|
|
self.assertIn(
|
2021-02-12 08:20:45 +01:00
|
|
|
"gravatar.com",
|
2021-08-18 17:54:22 +02:00
|
|
|
assert_is_not_none(get_hamlet_avatar(client_gravatar=False)),
|
2017-10-10 04:51:04 +02:00
|
|
|
)
|
|
|
|
|
2013-01-22 20:07:51 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
class GetProfileTest(ZulipTestCase):
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_cache_behavior(self) -> None:
|
2017-07-12 21:59:19 +02:00
|
|
|
"""Tests whether fetching a user object the normal way, with
|
|
|
|
`get_user`, makes 1 cache query and 1 database query.
|
|
|
|
"""
|
|
|
|
realm = get_realm("zulip")
|
2020-03-12 14:17:25 +01:00
|
|
|
email = self.example_user("hamlet").email
|
2022-10-15 22:47:40 +02:00
|
|
|
with self.assert_database_query_count(1):
|
2013-09-28 01:05:08 +02:00
|
|
|
with simulated_empty_cache() as cache_queries:
|
2017-07-12 21:59:19 +02:00
|
|
|
user_profile = get_user(email, realm)
|
2013-09-28 01:05:08 +02:00
|
|
|
|
2016-09-25 21:30:10 +02:00
|
|
|
self.assert_length(cache_queries, 1)
|
2017-07-12 21:59:19 +02:00
|
|
|
self.assertEqual(user_profile.email, email)
|
2013-09-28 01:05:08 +02:00
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_get_user_profile(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
iago = self.example_user("iago")
|
|
|
|
desdemona = self.example_user("desdemona")
|
2021-04-11 07:38:09 +02:00
|
|
|
shiva = self.example_user("shiva")
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("hamlet")
|
|
|
|
result = orjson.loads(self.client_get("/json/users/me").content)
|
|
|
|
self.assertEqual(result["email"], hamlet.email)
|
|
|
|
self.assertEqual(result["full_name"], "King Hamlet")
|
2016-12-13 19:17:49 +01:00
|
|
|
self.assertIn("user_id", result)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertFalse(result["is_bot"])
|
|
|
|
self.assertFalse(result["is_admin"])
|
|
|
|
self.assertFalse(result["is_owner"])
|
|
|
|
self.assertFalse(result["is_guest"])
|
2021-04-11 07:38:09 +02:00
|
|
|
self.assertEqual(result["role"], UserProfile.ROLE_MEMBER)
|
2021-12-11 08:17:57 +01:00
|
|
|
self.assertEqual(result["delivery_email"], hamlet.delivery_email)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("iago")
|
|
|
|
result = orjson.loads(self.client_get("/json/users/me").content)
|
|
|
|
self.assertEqual(result["email"], iago.email)
|
|
|
|
self.assertEqual(result["full_name"], "Iago")
|
|
|
|
self.assertFalse(result["is_bot"])
|
|
|
|
self.assertTrue(result["is_admin"])
|
|
|
|
self.assertFalse(result["is_owner"])
|
|
|
|
self.assertFalse(result["is_guest"])
|
2021-04-11 07:38:09 +02:00
|
|
|
self.assertEqual(result["role"], UserProfile.ROLE_REALM_ADMINISTRATOR)
|
2021-02-12 08:20:45 +01:00
|
|
|
self.login("desdemona")
|
|
|
|
result = orjson.loads(self.client_get("/json/users/me").content)
|
|
|
|
self.assertEqual(result["email"], desdemona.email)
|
|
|
|
self.assertFalse(result["is_bot"])
|
|
|
|
self.assertTrue(result["is_admin"])
|
|
|
|
self.assertTrue(result["is_owner"])
|
|
|
|
self.assertFalse(result["is_guest"])
|
2021-04-11 07:38:09 +02:00
|
|
|
self.assertEqual(result["role"], UserProfile.ROLE_REALM_OWNER)
|
|
|
|
self.login("shiva")
|
|
|
|
result = orjson.loads(self.client_get("/json/users/me").content)
|
|
|
|
self.assertEqual(result["email"], shiva.email)
|
|
|
|
self.assertFalse(result["is_bot"])
|
|
|
|
self.assertFalse(result["is_admin"])
|
|
|
|
self.assertFalse(result["is_owner"])
|
|
|
|
self.assertFalse(result["is_guest"])
|
|
|
|
self.assertEqual(result["role"], UserProfile.ROLE_MODERATOR)
|
2016-12-13 19:17:49 +01:00
|
|
|
|
2020-10-23 02:43:28 +02:00
|
|
|
# Tests the GET ../users/{id} API endpoint.
|
2021-02-12 08:20:45 +01:00
|
|
|
user = self.example_user("hamlet")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{user.id}").content)
|
|
|
|
self.assertEqual(result["user"]["email"], user.email)
|
|
|
|
self.assertEqual(result["user"]["full_name"], user.full_name)
|
|
|
|
self.assertIn("user_id", result["user"])
|
|
|
|
self.assertNotIn("profile_data", result["user"])
|
|
|
|
self.assertFalse(result["user"]["is_bot"])
|
|
|
|
self.assertFalse(result["user"]["is_admin"])
|
|
|
|
self.assertFalse(result["user"]["is_owner"])
|
2020-01-02 00:39:54 +01:00
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
result = orjson.loads(
|
|
|
|
self.client_get(
|
|
|
|
f"/json/users/{user.id}", {"include_custom_profile_fields": "true"}
|
|
|
|
).content
|
|
|
|
)
|
2020-01-02 00:39:54 +01:00
|
|
|
|
2021-02-12 08:20:45 +01:00
|
|
|
self.assertIn("profile_data", result["user"])
|
2020-09-13 00:11:30 +02:00
|
|
|
result = self.client_get("/json/users/30")
|
2020-01-02 00:39:54 +01:00
|
|
|
self.assert_json_error(result, "No such user")
|
|
|
|
|
2020-04-21 22:45:58 +02:00
|
|
|
bot = self.example_user("default_bot")
|
2021-02-12 08:20:45 +01:00
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{bot.id}").content)
|
|
|
|
self.assertEqual(result["user"]["email"], bot.email)
|
|
|
|
self.assertTrue(result["user"]["is_bot"])
|
2020-04-21 22:45:58 +02:00
|
|
|
|
2021-01-02 15:05:29 +01:00
|
|
|
def test_get_user_by_email(self) -> None:
|
|
|
|
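# Tests fetching a user by email via the GET /json/users/{email} endpoint.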
user = self.example_user("hamlet")
|
|
|
|
self.login("hamlet")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{user.email}").content)
|
|
|
|
|
|
|
|
self.assertEqual(result["user"]["email"], user.email)
|
|
|
|
|
|
|
|
self.assertEqual(result["user"]["full_name"], user.full_name)
|
|
|
|
self.assertIn("user_id", result["user"])
|
|
|
|
self.assertNotIn("profile_data", result["user"])
|
|
|
|
self.assertFalse(result["user"]["is_bot"])
|
|
|
|
self.assertFalse(result["user"]["is_admin"])
|
|
|
|
self.assertFalse(result["user"]["is_owner"])
|
|
|
|
|
|
|
|
result = orjson.loads(
|
|
|
|
self.client_get(
|
|
|
|
f"/json/users/{user.email}", {"include_custom_profile_fields": "true"}
|
|
|
|
).content
|
|
|
|
)
|
|
|
|
self.assertIn("profile_data", result["user"])
|
|
|
|
|
|
|
|
result = self.client_get("/json/users/invalid")
|
|
|
|
self.assert_json_error(result, "No such user")
|
|
|
|
|
|
|
|
bot = self.example_user("default_bot")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{bot.email}").content)
|
|
|
|
self.assertEqual(result["user"]["email"], bot.email)
|
|
|
|
self.assertTrue(result["user"]["is_bot"])
|
|
|
|
|
2017-11-05 10:51:25 +01:00
|
|
|
def test_get_all_profiles_avatar_urls(self) -> None:
|
2021-02-12 08:20:45 +01:00
|
|
|
hamlet = self.example_user("hamlet")
|
2021-10-26 09:15:16 +02:00
|
|
|
result = self.api_get(
|
|
|
|
hamlet, "/api/v1/users", {"client_gravatar": orjson.dumps(False).decode()}
|
|
|
|
)
|
2022-06-07 01:37:01 +02:00
|
|
|
response_dict = self.assert_json_success(result)
|
2014-07-18 06:16:14 +02:00
|
|
|
|
2022-06-07 01:37:01 +02:00
|
|
|
(my_user,) = (user for user in response_dict["members"] if user["email"] == hamlet.email)
|
2020-03-19 15:14:38 +01:00
|
|
|
|
|
|
|
self.assertEqual(
|
2021-02-12 08:20:45 +01:00
|
|
|
my_user["avatar_url"],
|
2020-03-19 15:14:38 +01:00
|
|
|
avatar_url(hamlet),
|
|
|
|
)
|
2019-08-30 00:21:36 +02:00
|
|
|
|
2021-05-21 20:18:47 +02:00
|
|
|
def test_user_email_according_to_email_address_visibility_setting(self) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
|
2021-10-26 09:15:16 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
2021-05-21 20:18:47 +02:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_NOBODY,
|
2021-05-21 20:18:47 +02:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check that even admin cannot access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_NOBODY.
|
|
|
|
self.login("iago")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), None)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
2021-10-26 09:15:16 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
2021-05-21 20:18:47 +02:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_ADMINS,
|
2021-05-21 20:18:47 +02:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check that admin can access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_ADMINS.
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
|
|
|
# Check that moderator cannot access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_ADMINS.
|
|
|
|
self.login("shiva")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), None)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
2021-10-26 09:15:16 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
2021-05-21 18:08:24 +02:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_MODERATORS,
|
2021-05-21 18:08:24 +02:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check that moderator can access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_MODERATORS.
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
|
|
|
# Check that normal user cannot access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_MODERATORS.
|
|
|
|
self.login("cordelia")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), None)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
2021-10-26 09:15:16 +02:00
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
|
|
|
"email_address_visibility",
|
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_MEMBERS,
|
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check that normal user can access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_MEMBERS.
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
|
|
|
# Check that guest cannot access email when setting is set to
|
|
|
|
# EMAIL_ADDRESS_VISIBILITY_MEMBERS.
|
|
|
|
self.login("polonius")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
|
|
|
self.assertEqual(result["user"].get("delivery_email"), None)
|
|
|
|
self.assertEqual(result["user"].get("email"), f"user{hamlet.id}@zulip.testserver")
|
|
|
|
|
|
|
|
do_change_user_setting(
|
|
|
|
hamlet,
|
2021-05-21 20:18:47 +02:00
|
|
|
"email_address_visibility",
|
2021-10-26 09:15:16 +02:00
|
|
|
UserProfile.EMAIL_ADDRESS_VISIBILITY_EVERYONE,
|
2021-05-21 20:18:47 +02:00
|
|
|
acting_user=None,
|
|
|
|
)
|
|
|
|
|
|
|
|
# Check that moderator, member and guest all can access email when setting
|
|
|
|
# is set to EMAIL_ADDRESS_VISIBILITY_EVERYONE.
|
|
|
|
self.login("shiva")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
2022-05-25 13:13:31 +02:00
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
2021-05-21 20:18:47 +02:00
|
|
|
self.assertEqual(result["user"].get("email"), hamlet.delivery_email)
|
|
|
|
|
|
|
|
self.login("cordelia")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
2022-05-25 13:13:31 +02:00
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
2021-05-21 20:18:47 +02:00
|
|
|
self.assertEqual(result["user"].get("email"), hamlet.delivery_email)
|
|
|
|
|
|
|
|
self.login("polonius")
|
|
|
|
result = orjson.loads(self.client_get(f"/json/users/{hamlet.id}").content)
|
2022-05-25 13:13:31 +02:00
|
|
|
self.assertEqual(result["user"].get("delivery_email"), hamlet.delivery_email)
|
2021-05-21 20:18:47 +02:00
|
|
|
self.assertEqual(result["user"].get("email"), hamlet.delivery_email)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
|
2020-05-02 18:42:30 +02:00
|
|
|
class DeleteUserTest(ZulipTestCase):
|
|
|
|
def test_do_delete_user(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
2021-02-12 08:20:45 +01:00
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
othello = self.example_user("othello")
|
|
|
|
hamlet = self.example_user("hamlet")
|
2020-05-02 18:42:30 +02:00
|
|
|
hamlet_personal_recipient = hamlet.recipient
|
|
|
|
hamlet_user_id = hamlet.id
|
2021-12-29 18:14:40 +01:00
|
|
|
hamlet_date_joined = hamlet.date_joined
|
2020-05-02 18:42:30 +02:00
|
|
|
|
|
|
|
self.send_personal_message(cordelia, hamlet)
|
|
|
|
self.send_personal_message(hamlet, cordelia)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
personal_message_ids_to_hamlet = Message.objects.filter(
|
|
|
|
recipient=hamlet_personal_recipient
|
2021-02-12 08:20:45 +01:00
|
|
|
).values_list("id", flat=True)
|
2021-07-13 19:42:37 +02:00
|
|
|
self.assertGreater(len(personal_message_ids_to_hamlet), 0)
|
2020-05-02 18:42:30 +02:00
|
|
|
self.assertTrue(Message.objects.filter(sender=hamlet).exists())
|
|
|
|
|
|
|
|
huddle_message_ids_from_cordelia = [
|
2021-02-12 08:19:30 +01:00
|
|
|
self.send_huddle_message(cordelia, [hamlet, othello]) for i in range(3)
|
2020-05-02 18:42:30 +02:00
|
|
|
]
|
|
|
|
huddle_message_ids_from_hamlet = [
|
2021-02-12 08:19:30 +01:00
|
|
|
self.send_huddle_message(hamlet, [cordelia, othello]) for i in range(3)
|
2020-05-02 18:42:30 +02:00
|
|
|
]
|
|
|
|
|
|
|
|
huddle_with_hamlet_recipient_ids = list(
|
2021-02-12 08:19:30 +01:00
|
|
|
Subscription.objects.filter(
|
|
|
|
user_profile=hamlet, recipient__type=Recipient.HUDDLE
|
2021-02-12 08:20:45 +01:00
|
|
|
).values_list("recipient_id", flat=True)
|
2020-05-02 18:42:30 +02:00
|
|
|
)
|
2021-07-13 19:42:37 +02:00
|
|
|
self.assertGreater(len(huddle_with_hamlet_recipient_ids), 0)
|
2020-05-02 18:42:30 +02:00
|
|
|
|
2022-04-16 00:21:07 +02:00
|
|
|
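# Delete Hamlet; his account should be replaced by an inactive mirror-dummy placeholder user.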
do_delete_user(hamlet, acting_user=None)
|
2020-05-02 18:42:30 +02:00
|
|
|
|
|
|
|
replacement_dummy_user = UserProfile.objects.get(id=hamlet_user_id, realm=realm)
|
|
|
|
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertEqual(
|
2021-10-05 22:03:49 +02:00
|
|
|
replacement_dummy_user.delivery_email, f"deleteduser{hamlet_user_id}@zulip.testserver"
|
2021-02-12 08:19:30 +01:00
|
|
|
)
|
2020-05-02 18:42:30 +02:00
|
|
|
self.assertEqual(replacement_dummy_user.is_mirror_dummy, True)
|
2021-10-05 21:56:37 +02:00
|
|
|
self.assertEqual(replacement_dummy_user.is_active, False)
|
2021-12-29 18:14:40 +01:00
|
|
|
self.assertEqual(replacement_dummy_user.date_joined, hamlet_date_joined)
|
2020-05-02 18:42:30 +02:00
|
|
|
|
|
|
|
self.assertEqual(Message.objects.filter(id__in=personal_message_ids_to_hamlet).count(), 0)
|
|
|
|
# Huddle messages from hamlet should have been deleted, but messages of other participants should
|
|
|
|
# be kept.
|
|
|
|
self.assertEqual(Message.objects.filter(id__in=huddle_message_ids_from_hamlet).count(), 0)
|
|
|
|
self.assertEqual(Message.objects.filter(id__in=huddle_message_ids_from_cordelia).count(), 3)
|
|
|
|
|
|
|
|
self.assertEqual(Message.objects.filter(sender_id=hamlet_user_id).count(), 0)
|
|
|
|
|
2022-05-29 22:51:23 +02:00
|
|
|
# Verify that the dummy user is subscribed to the deleted user's huddles, to keep huddle data
|
|
|
|
# in a correct state.
|
|
|
|
for recipient_id in huddle_with_hamlet_recipient_ids:
|
|
|
|
self.assertTrue(
|
|
|
|
Subscription.objects.filter(
|
|
|
|
user_profile=replacement_dummy_user, recipient_id=recipient_id
|
|
|
|
).exists()
|
|
|
|
)
|
|
|
|
|
|
|
|
def test_do_delete_user_preserving_messages(self) -> None:
|
|
|
|
"""
|
|
|
|
This test is extremely similar to the one for do_delete_user, with the only difference being
|
|
|
|
that Messages are supposed to be preserved. All other effects should be identical.
|
|
|
|
"""
|
|
|
|
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
othello = self.example_user("othello")
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
hamlet_personal_recipient = hamlet.recipient
|
|
|
|
hamlet_user_id = hamlet.id
|
|
|
|
hamlet_date_joined = hamlet.date_joined
|
|
|
|
|
|
|
|
self.send_personal_message(cordelia, hamlet)
|
|
|
|
self.send_personal_message(hamlet, cordelia)
|
|
|
|
|
|
|
|
personal_message_ids_to_hamlet = Message.objects.filter(
|
|
|
|
recipient=hamlet_personal_recipient
|
|
|
|
).values_list("id", flat=True)
|
|
|
|
self.assertGreater(len(personal_message_ids_to_hamlet), 0)
|
|
|
|
self.assertTrue(Message.objects.filter(sender=hamlet).exists())
|
|
|
|
|
|
|
|
huddle_message_ids_from_cordelia = [
|
|
|
|
self.send_huddle_message(cordelia, [hamlet, othello]) for i in range(3)
|
|
|
|
]
|
|
|
|
huddle_message_ids_from_hamlet = [
|
|
|
|
self.send_huddle_message(hamlet, [cordelia, othello]) for i in range(3)
|
|
|
|
]
|
|
|
|
|
|
|
|
huddle_with_hamlet_recipient_ids = list(
|
|
|
|
Subscription.objects.filter(
|
|
|
|
user_profile=hamlet, recipient__type=Recipient.HUDDLE
|
|
|
|
).values_list("recipient_id", flat=True)
|
|
|
|
)
|
|
|
|
self.assertGreater(len(huddle_with_hamlet_recipient_ids), 0)
|
|
|
|
|
|
|
|
original_messages_from_hamlet_count = Message.objects.filter(
|
|
|
|
sender_id=hamlet_user_id
|
|
|
|
).count()
|
|
|
|
self.assertGreater(original_messages_from_hamlet_count, 0)
|
|
|
|
|
|
|
|
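# Delete Hamlet while preserving his messages; only the account itself should be replaced.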
do_delete_user_preserving_messages(hamlet)
|
|
|
|
|
|
|
|
replacement_dummy_user = UserProfile.objects.get(id=hamlet_user_id, realm=realm)
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
replacement_dummy_user.delivery_email, f"deleteduser{hamlet_user_id}@zulip.testserver"
|
|
|
|
)
|
|
|
|
self.assertEqual(replacement_dummy_user.is_mirror_dummy, True)
|
|
|
|
self.assertEqual(replacement_dummy_user.is_active, False)
|
|
|
|
self.assertEqual(replacement_dummy_user.date_joined, hamlet_date_joined)
|
|
|
|
|
|
|
|
# All messages should have been preserved:
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.filter(id__in=personal_message_ids_to_hamlet).count(),
|
|
|
|
len(personal_message_ids_to_hamlet),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.filter(id__in=huddle_message_ids_from_hamlet).count(),
|
|
|
|
len(huddle_message_ids_from_hamlet),
|
|
|
|
)
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.filter(id__in=huddle_message_ids_from_cordelia).count(),
|
|
|
|
len(huddle_message_ids_from_cordelia),
|
|
|
|
)
|
|
|
|
|
|
|
|
self.assertEqual(
|
|
|
|
Message.objects.filter(sender_id=hamlet_user_id).count(),
|
|
|
|
original_messages_from_hamlet_count,
|
|
|
|
)
|
|
|
|
|
2020-05-02 18:42:30 +02:00
|
|
|
# Verify that the dummy user is subscribed to the deleted user's huddles, to keep huddle data
|
|
|
|
# in a correct state.
|
|
|
|
for recipient_id in huddle_with_hamlet_recipient_ids:
|
2021-02-12 08:19:30 +01:00
|
|
|
self.assertTrue(
|
|
|
|
Subscription.objects.filter(
|
|
|
|
user_profile=replacement_dummy_user, recipient_id=recipient_id
|
|
|
|
).exists()
|
|
|
|
)
|
|
|
|
|
2020-05-02 18:42:30 +02:00
|
|
|
|
2019-08-30 00:21:36 +02:00
|
|
|
class FakeEmailDomainTest(ZulipTestCase):
|
2021-01-18 14:34:54 +01:00
|
|
|
def test_get_fake_email_domain(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
self.assertEqual("zulip.testserver", get_fake_email_domain(realm))
|
|
|
|
|
|
|
|
with self.settings(EXTERNAL_HOST="example.com"):
|
|
|
|
self.assertEqual("zulip.example.com", get_fake_email_domain(realm))
|
|
|
|
|
|
|
|
@override_settings(FAKE_EMAIL_DOMAIN="fakedomain.com", REALM_HOSTS={"zulip": "127.0.0.1"})
|
|
|
|
def test_get_fake_email_domain_realm_host_is_ip_addr(self) -> None:
|
|
|
|
realm = get_realm("zulip")
|
|
|
|
self.assertEqual("fakedomain.com", get_fake_email_domain(realm))
|
|
|
|
|
|
|
|
@override_settings(FAKE_EMAIL_DOMAIN="invaliddomain", REALM_HOSTS={"zulip": "127.0.0.1"})
|
2019-08-30 00:21:36 +02:00
|
|
|
def test_invalid_fake_email_domain(self) -> None:
|
2021-01-18 14:34:54 +01:00
|
|
|
realm = get_realm("zulip")
|
2022-11-17 09:30:48 +01:00
|
|
|
with self.assertRaises(InvalidFakeEmailDomainError):
|
2021-01-18 14:34:54 +01:00
|
|
|
get_fake_email_domain(realm)
|
2019-08-30 00:21:36 +02:00
|
|
|
|
2021-01-18 14:34:54 +01:00
|
|
|
@override_settings(FAKE_EMAIL_DOMAIN="127.0.0.1", REALM_HOSTS={"zulip": "127.0.0.1"})
|
2019-08-30 00:21:36 +02:00
|
|
|
def test_invalid_fake_email_domain_ip(self) -> None:
|
2022-11-17 09:30:48 +01:00
|
|
|
with self.assertRaises(InvalidFakeEmailDomainError):
|
2021-01-18 14:34:54 +01:00
|
|
|
realm = get_realm("zulip")
|
|
|
|
get_fake_email_domain(realm)
|
2022-03-27 22:40:21 +02:00
|
|
|
|
|
|
|
|
|
|
|
class TestBulkRegenerateAPIKey(ZulipTestCase):
|
|
|
|
def test_bulk_regenerate_api_keys(self) -> None:
|
|
|
|
hamlet = self.example_user("hamlet")
|
|
|
|
cordelia = self.example_user("cordelia")
|
|
|
|
othello = self.example_user("othello")
|
|
|
|
|
|
|
|
hamlet_old_api_key = hamlet.api_key
|
|
|
|
cordelia_old_api_key = cordelia.api_key
|
|
|
|
othello_old_api_key = othello.api_key
|
|
|
|
|
|
|
|
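# Regenerate API keys for Hamlet and Cordelia only; Othello's key should stay the same.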
bulk_regenerate_api_keys([hamlet.id, cordelia.id])
|
|
|
|
|
|
|
|
hamlet.refresh_from_db()
|
|
|
|
cordelia.refresh_from_db()
|
|
|
|
othello.refresh_from_db()
|
|
|
|
|
|
|
|
self.assertNotEqual(hamlet_old_api_key, hamlet.api_key)
|
|
|
|
self.assertNotEqual(cordelia_old_api_key, cordelia.api_key)
|
|
|
|
|
|
|
|
self.assertEqual(othello_old_api_key, othello.api_key)
|